From d2c98f02f3d871f37697aea0406e0b98db17b7c2 Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Mon, 28 Apr 2025 10:03:19 -0700 Subject: [PATCH 001/201] add upgrade guide to 7.0.0 branch (#13790) --- .../guides/version_7_upgrade.html.markdown | 109 ++++++++++++++++++ 1 file changed, 109 insertions(+) create mode 100644 mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown new file mode 100644 index 000000000000..2b7ba1125ecb --- /dev/null +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -0,0 +1,109 @@ +--- +page_title: "Terraform provider for Google Cloud 7.0.0 Upgrade Guide" +description: |- + Terraform provider for Google Cloud 7.0.0 Upgrade Guide +--- + +# Terraform Google Provider 7.0.0 Upgrade Guide + +The `7.0.0` release of the Google provider for Terraform is a major version and +includes some changes that you will need to consider when upgrading. This guide +is intended to help with that process and focuses only on the changes necessary +to upgrade from the final `6.X` series release to `7.0.0`. + +Most of the changes outlined in this guide have been previously marked as +deprecated in the Terraform `plan`/`apply` output throughout previous provider +releases, up to and including the final `6.X` series release. These changes, +and their deprecation notices, can always be found in the CHANGELOGs of the +affected providers: [google](https://github.com/hashicorp/terraform-provider-google/blob/main/CHANGELOG.md) +[google-beta](https://github.com/hashicorp/terraform-provider-google-beta/blob/main/CHANGELOG.md) + +## I accidentally upgraded to 7.0.0, how do I downgrade to `6.X`? + +If you've inadvertently upgraded to `7.0.0`, first see the +[Provider Version Configuration Guide](#provider-version-configuration) to lock +your provider version; if you've constrained the provider to a lower version +such as shown in the previous version example in that guide, Terraform will pull +in a `6.X` series release on `terraform init`. + +If you've only run `terraform init` or `terraform plan`, your state will not +have been modified and downgrading your provider is sufficient. + +If you've run `terraform refresh` or `terraform apply`, Terraform may have made +state changes in the meantime. + +* If you're using a local state, or a remote state backend that does not support +versioning, `terraform refresh` with a downgraded provider is likely sufficient +to revert your state. The Google provider generally refreshes most state +information from the API, and the properties necessary to do so have been left +unchanged. + +* If you're using a remote state backend that supports versioning such as +[Google Cloud Storage](https://developer.hashicorp.com/terraform/language/settings/backends/gcs), +you can revert the Terraform state file to a previous version. If you do +so and Terraform had created resources as part of a `terraform apply` in the +meantime, you'll need to either delete them by hand or `terraform import` them +so Terraform knows to manage them.
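To illustrate that last step, here is a minimal sketch of re-importing a resource that was created during the accidental `terraform apply`. The resource address and bucket name are hypothetical placeholders; on Terraform versions older than 1.5, the CLI form `terraform import ADDRESS ID` accomplishes the same thing.

```hcl
# Terraform 1.5+ import block; replace the address and id with your own values.
# A matching resource "google_storage_bucket" "example" block must exist in configuration.
import {
  to = google_storage_bucket.example
  id = "example-bucket-created-during-apply"
}
```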
+ +## Provider Version Configuration + +-> Before upgrading to version 7.0.0, it is recommended to upgrade to the most +recent `6.X` series release of the provider, make the changes noted in this guide, +and ensure that your environment successfully runs +[`terraform plan`](https://developer.hashicorp.com/terraform/cli/commands/plan) +without unexpected changes or deprecation notices. + +It is recommended to use [version constraints](https://developer.hashicorp.com/terraform/language/providers/requirements#requiring-providers) +when configuring Terraform providers. If you are following that recommendation, +update the version constraints in your Terraform configuration and run +[`terraform init`](https://developer.hashicorp.com/terraform/cli/commands/init) to download +the new version. + +If you aren't using version constraints, you can use `terraform init -upgrade` +in order to upgrade your provider to the latest released version. + +For example, given this previous configuration: + +```hcl +terraform { + required_providers { + google = { + version = "~> 5.30.0" + } + } +} +``` + +An updated configuration: + +```hcl +terraform { + required_providers { + google = { + version = "~> 7.0.0" + } + } +} +``` + +## Provider + +### Provider-level change example header + +Description of the change and how users should adjust their configuration (if needed). + +## Datasources + +## Datasource: `google_product_datasource` + +### Datasource-level change example header + +Description of the change and how users should adjust their configuration (if needed). + +## Resources + +## Resource: `google_product_resource` + +### Resource-level change example header + +Description of the change and how users should adjust their configuration (if needed). From 8489ea354586b6dcd1f595065d615f92e5c4ee27 Mon Sep 17 00:00:00 2001 From: Ron Gal <125445217+ron-gal@users.noreply.github.com> Date: Wed, 25 Jun 2025 13:33:43 -0400 Subject: [PATCH 002/201] feat(bigtable): rename instance to instance_name for table_iam resource (#14350) --- .../services/bigtable/iam_bigtable_table.go | 2 +- .../resource_bigtable_table_iam_test.go | 34 +++++++++---------- .../d/bigtable_table_iam_policy.html.markdown | 4 +-- .../guides/version_7_upgrade.html.markdown | 18 ++++++++++ .../docs/r/bigtable_table_iam.html.markdown | 26 +++++++------- 5 files changed, 51 insertions(+), 33 deletions(-) diff --git a/mmv1/third_party/terraform/services/bigtable/iam_bigtable_table.go b/mmv1/third_party/terraform/services/bigtable/iam_bigtable_table.go index 774efb4f761c..03c647546453 100644 --- a/mmv1/third_party/terraform/services/bigtable/iam_bigtable_table.go +++ b/mmv1/third_party/terraform/services/bigtable/iam_bigtable_table.go @@ -14,7 +14,7 @@ import ( ) var IamBigtableTableSchema = map[string]*schema.Schema{ - "instance": { + "instance_name": { Type: schema.TypeString, Required: true, ForceNew: true, diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_iam_test.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_iam_test.go index e729f0a49a81..dd31e17eb757 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_iam_test.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table_iam_test.go @@ -140,10 +140,10 @@ resource "google_service_account" "test-account2" { } resource "google_bigtable_table_iam_binding" "binding" { - instance = google_bigtable_instance.instance.name - table = google_bigtable_table.table.name - role = "%s" - members = [ + 
instance_name = google_bigtable_instance.instance.name + table = google_bigtable_table.table.name + role = "%s" + members = [ "serviceAccount:${google_service_account.test-account1.email}", ] } @@ -163,10 +163,10 @@ resource "google_service_account" "test-account2" { } resource "google_bigtable_table_iam_binding" "binding" { - instance = google_bigtable_instance.instance.name - table = google_bigtable_table.table.name - role = "%s" - members = [ + instance_name = google_bigtable_instance.instance.name + table = google_bigtable_table.table.name + role = "%s" + members = [ "serviceAccount:${google_service_account.test-account1.email}", "serviceAccount:${google_service_account.test-account2.email}", ] @@ -182,10 +182,10 @@ resource "google_service_account" "test-account" { } resource "google_bigtable_table_iam_member" "member" { - instance = google_bigtable_instance.instance.name - table = google_bigtable_table.table.name - role = "%s" - member = "serviceAccount:${google_service_account.test-account.email}" + instance_name = google_bigtable_instance.instance.name + table = google_bigtable_table.table.name + role = "%s" + member = "serviceAccount:${google_service_account.test-account.email}" } `, instance, cluster, cluster, account, role) } @@ -205,14 +205,14 @@ data "google_iam_policy" "policy" { } resource "google_bigtable_table_iam_policy" "policy" { - instance = google_bigtable_instance.instance.name - table = google_bigtable_table.table.name - policy_data = data.google_iam_policy.policy.policy_data + instance_name = google_bigtable_instance.instance.name + table = google_bigtable_table.table.name + policy_data = data.google_iam_policy.policy.policy_data } data "google_bigtable_table_iam_policy" "policy" { - instance = google_bigtable_instance.instance.name - table = google_bigtable_table.table.name + instance_name = google_bigtable_instance.instance.name + table = google_bigtable_table.table.name } `, instance, cluster, cluster, account, role) diff --git a/mmv1/third_party/terraform/website/docs/d/bigtable_table_iam_policy.html.markdown b/mmv1/third_party/terraform/website/docs/d/bigtable_table_iam_policy.html.markdown index e44bd2b3e6f4..bfec2bb723b4 100644 --- a/mmv1/third_party/terraform/website/docs/d/bigtable_table_iam_policy.html.markdown +++ b/mmv1/third_party/terraform/website/docs/d/bigtable_table_iam_policy.html.markdown @@ -12,8 +12,8 @@ Retrieves the current IAM policy data for a Bigtable Table. ```hcl data "google_bigtable_table_iam_policy" "policy" { - instance = google_bigtable_instance.instance.name - table = google_bigtable_table.table.name + instance_name = google_bigtable_instance.instance.name + table = google_bigtable_table.table.name } ``` diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 2b7ba1125ecb..e469a53221da 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -107,3 +107,21 @@ Description of the change and how users should adjust their configuration (if ne ### Resource-level change example header Description of the change and how users should adjust their configuration (if needed). + +## Resource: `google_bigtable_table_iam_policy` + +### `instance` is now removed + +`instance` has been removed in favor of `instance_name`. 
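The rename is a one-line change, and the same adjustment applies to the `google_bigtable_table_iam_binding` and `google_bigtable_table_iam_member` resources covered below. A minimal sketch using the `_member` variant, with placeholder instance, table, and member values:

```hcl
resource "google_bigtable_table_iam_member" "editor" {
  # 6.x:  instance      = google_bigtable_instance.instance.name
  instance_name = google_bigtable_instance.instance.name
  table         = google_bigtable_table.table.name
  role          = "roles/bigtable.user"
  member        = "user:jane@example.com"
}
```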
+ +## Resource: `google_bigtable_table_iam_binding` + +### `instance` is now removed + +`instance` has been removed in favor of `instance_name`. + +## Resource: `google_bigtable_table_iam_member` + +### `instance` is now removed + +`instance` has been removed in favor of `instance_name`. diff --git a/mmv1/third_party/terraform/website/docs/r/bigtable_table_iam.html.markdown b/mmv1/third_party/terraform/website/docs/r/bigtable_table_iam.html.markdown index 7007e82bb986..b8dd500407fb 100644 --- a/mmv1/third_party/terraform/website/docs/r/bigtable_table_iam.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/bigtable_table_iam.html.markdown @@ -29,10 +29,10 @@ data "google_iam_policy" "admin" { } resource "google_bigtable_table_iam_policy" "editor" { - project = "your-project" - instance = "your-bigtable-instance" - table = "your-bigtable-table" - policy_data = data.google_iam_policy.admin.policy_data + project = "your-project" + instance_name = "your-bigtable-instance" + table = "your-bigtable-table" + policy_data = data.google_iam_policy.admin.policy_data } ``` @@ -40,10 +40,10 @@ resource "google_bigtable_table_iam_policy" "editor" { ```hcl resource "google_bigtable_table_iam_binding" "editor" { - table = "your-bigtable-table" - instance = "your-bigtable-instance" - role = "roles/bigtable.user" - members = [ + table = "your-bigtable-table" + instance_name = "your-bigtable-instance" + role = "roles/bigtable.user" + members = [ "user:jane@example.com", ] } @@ -53,10 +53,10 @@ resource "google_bigtable_table_iam_binding" "editor" { ```hcl resource "google_bigtable_table_iam_member" "editor" { - table = "your-bigtable-table" - instance = "your-bigtable-instance" - role = "roles/bigtable.user" - member = "user:jane@example.com" + table = "your-bigtable-table" + instance_name = "your-bigtable-instance" + role = "roles/bigtable.user" + member = "user:jane@example.com" } ``` @@ -64,7 +64,7 @@ resource "google_bigtable_table_iam_member" "editor" { The following arguments are supported: -* `instance` - (Required) The name or relative resource id of the instance that owns the table. +* `instance_name` - (Required) The name or relative resource id of the instance that owns the table. * `table` - (Required) The name or relative resource id of the table to manage IAM policies for. 
From c8f9ad28c4fded046ae8c02f831af0e974d3d40b Mon Sep 17 00:00:00 2001 From: gurusai-voleti Date: Wed, 2 Jul 2025 18:28:57 +0000 Subject: [PATCH 003/201] fix: (storagetransfer) path validation for GCS path source and sink (#14377) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: drfaust92 Signed-off-by: James Alseth Signed-off-by: Cezary Sobczak Signed-off-by: Misha Efimov Signed-off-by: David Xia Signed-off-by: pcrao Signed-off-by: Eric Bode Co-authored-by: Nick Elliot Co-authored-by: Cameron Thornton Co-authored-by: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Co-authored-by: tulika-aakriti Co-authored-by: Scott Suarez Co-authored-by: anthonyrtong Co-authored-by: Zhenhua Li Co-authored-by: NA2047 <12290725+NA2047@users.noreply.github.com> Co-authored-by: Chris Hawk Co-authored-by: Ilia Lazebnik Co-authored-by: Ramon Vermeulen Co-authored-by: Sam Levenick Co-authored-by: Shrishty Chandra <3104562+shrishty@users.noreply.github.com> Co-authored-by: Shrishty Chandra Co-authored-by: Sharan Teja M Co-authored-by: Stephen Lewis (Burrows) Co-authored-by: James Alseth Co-authored-by: Riley Karson Co-authored-by: stevenyang72 Co-authored-by: oferhandel-google Co-authored-by: Jatin Miglani Co-authored-by: translucens Co-authored-by: Sing Co-authored-by: paridhishah18 <166548459+paridhishah18@users.noreply.github.com> Co-authored-by: Ronson Xaviour <50081163+ronsonx@users.noreply.github.com> Co-authored-by: Ronson Xaviour Co-authored-by: Thomas Rodgers Co-authored-by: Cezary Sobczak <57288981+Cezarus27@users.noreply.github.com> Co-authored-by: Betto Cerrillos <32439055+Berro321@users.noreply.github.com> Co-authored-by: Iris Chen <10179943+iyabchen@users.noreply.github.com> Co-authored-by: Or Sela Co-authored-by: Samir Ribeiro <42391123+Samir-Cit@users.noreply.github.com> Co-authored-by: Mauricio Alvarez Leon <65101411+BBBmau@users.noreply.github.com> Co-authored-by: kigesui Co-authored-by: Meng Yang Co-authored-by: Ashwin G Co-authored-by: Allison Fisher Co-authored-by: mihhalj Co-authored-by: Guy Bidkar <5646214+gbidkar@users.noreply.github.com> Co-authored-by: Dawid212 Co-authored-by: Michael Lopez Co-authored-by: Stephen Lewis (Burrows) Co-authored-by: sahil-mahajan-google Co-authored-by: kautikdk <144651627+kautikdk@users.noreply.github.com> Co-authored-by: harshithpatte-g Co-authored-by: Rohan Chawla <73727454+rohanchawla23@users.noreply.github.com> Co-authored-by: ML Co-authored-by: Marek Lipert Co-authored-by: James Alseth Co-authored-by: Madhura Phadnis Co-authored-by: YashTayal04 <47032845+YashTayal04@users.noreply.github.com> Co-authored-by: Misha Efimov Co-authored-by: Aiden Grossman Co-authored-by: hao-nan-li <100219545+hao-nan-li@users.noreply.github.com> Co-authored-by: Wiktor Niesiobędzki Co-authored-by: MatthewVu-dev Co-authored-by: Madhu Suraj Co-authored-by: Matheus Guilherme Souza Aleixo <82680416+matheusaleixo-cit@users.noreply.github.com> Co-authored-by: Jun Luo Co-authored-by: Raj Anand <88097156+raazanand@users.noreply.github.com> Co-authored-by: Tommy Reddad Co-authored-by: palramanathan <117597159+palramanathan@users.noreply.github.com> Co-authored-by: Michał Wiatrowski Co-authored-by: rlapin-pl <114071972+rlapin-pl@users.noreply.github.com> Co-authored-by: rlapin-pl Co-authored-by: tonybayvas Co-authored-by: Ryan Oaks Co-authored-by: David Xia Co-authored-by: sachin purohit Co-authored-by: bcreddy-gcp <123543489+bcreddy-gcp@users.noreply.github.com> Co-authored-by: Jack Weinbender Co-authored-by: Balanagu 
Harsha Vardhan Co-authored-by: porky256 <61063240+porky256@users.noreply.github.com> Co-authored-by: Aman Mahendroo <30946991+amanMahendroo@users.noreply.github.com> Co-authored-by: Andrew Browne <81702808+abbrowne126@users.noreply.github.com> Co-authored-by: Ashwin G Co-authored-by: Xian-Ji Chen <68801742+XianJiChen@users.noreply.github.com> Co-authored-by: Nithin Daniel <55326622+nithindaniel@users.noreply.github.com> Co-authored-by: Nithin Daniel Co-authored-by: Michał Wiatrowski Co-authored-by: veraz0818 Co-authored-by: DavinaRen Co-authored-by: dishaagarwal03-google Co-authored-by: Margubur Rahman <150442997+googlyrahman@users.noreply.github.com> Co-authored-by: Nandini Agrawal Co-authored-by: Ziting Co-authored-by: Taneli Leppä Co-authored-by: martin-guillen Co-authored-by: FilipKubawskiOkta Co-authored-by: Calvin Liu Co-authored-by: Sepehr Javid <32390553+sepehrjavid@users.noreply.github.com> Co-authored-by: Niharika <35183015+niharika-98@users.noreply.github.com> Co-authored-by: Arnav Dham Co-authored-by: Daniel Rieske Co-authored-by: Luca Prete Co-authored-by: Luca Prete Co-authored-by: echiugoog Co-authored-by: Justin Scofield <47263509+scawful@users.noreply.github.com> Co-authored-by: liaoaohaha Co-authored-by: Wonje Kang <96211823+wonjekang@users.noreply.github.com> Co-authored-by: Pradeep Rao <84025829+pradeepcrao@users.noreply.github.com> Co-authored-by: Tlaquetzal Co-authored-by: StealthyCoder Co-authored-by: animeshnandanwar Co-authored-by: Nandini Agrawal Co-authored-by: Stephane Charite Co-authored-by: Steven Davidovitz <13248+steved@users.noreply.github.com> Co-authored-by: vmiglani <142545940+vmiglani@users.noreply.github.com> Co-authored-by: xuebaoZ Co-authored-by: zhihaos Co-authored-by: Hoang Pham Co-authored-by: vbhadoriaB <150216360+vbhadoriaB@users.noreply.github.com> Co-authored-by: Lakshman Swaminathan Co-authored-by: luckyswaminathan Co-authored-by: iamkonohamaru Co-authored-by: Brad Fisher Co-authored-by: panerorenn9541 <36008213+panerorenn9541@users.noreply.github.com> Co-authored-by: Keith Jordy <6444028+kjordy@users.noreply.github.com> Co-authored-by: google-labs-jules[bot] <161369871+google-labs-jules[bot]@users.noreply.github.com> Co-authored-by: Yanwei Guo Co-authored-by: coder-221 <185867912+coder-221@users.noreply.github.com> Co-authored-by: Naga Bodepudi Co-authored-by: Yuval Brik Co-authored-by: Ron Gal <125445217+ron-gal@users.noreply.github.com> --- .../resource_storage_transfer_job.go.tmpl | 16 ++- .../resource_storage_transfer_job_test.go | 104 ++++++++++++++++++ .../guides/version_7_upgrade.html.markdown | 10 ++ 3 files changed, 128 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go.tmpl b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go.tmpl index f283d995173d..3cd5304cd79f 100644 --- a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go.tmpl +++ b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go.tmpl @@ -4,6 +4,7 @@ import ( "fmt" "log" "reflect" + "regexp" "strings" "time" @@ -695,14 +696,25 @@ func gcsDataSchema() *schema.Resource { }, "path": { Optional: true, - Computed: true, Type: schema.TypeString, - Description: `Google Cloud Storage path in bucket to transfer`, + Description: `Google Cloud Storage path in bucket to transfer. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. 
As such, it should not begin with a '/'.`, + ValidateFunc: validateGCSDataPath, }, }, } } +func validateGCSDataPath(v interface{}, k string) (ws []string, errors []error) { + value := v.(string) + value = strings.TrimSpace(value) + // checks if path not started with "/" + regex, err := regexp.Compile("^/+") + if err == nil && len(value) > 0 && regex.Match([]byte(value)) { + errors = append(errors, fmt.Errorf("%q cannot start with /", k)) + } + return +} + func awsS3DataSchema() *schema.Resource { return &schema.Resource{ Schema: map[string]*schema.Schema{ diff --git a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_test.go b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_test.go index 1d50924e8060..09647d6ff895 100644 --- a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_test.go +++ b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_test.go @@ -573,6 +573,32 @@ func TestAccStorageTransferJob_hdfsSource(t *testing.T) { }) } +func TestAccStorageTransferJob_transferUpdateToEmptyString(t *testing.T) { + t.Parallel() + + testDataSourceBucketName := acctest.RandString(t, 10) + testDataSinkName := acctest.RandString(t, 10) + testTransferJobDescription := acctest.RandString(t, 10) + testTransferJobName := fmt.Sprintf("tf-test-transfer-job-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageTransferJobDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageTransferJob_transferJobGcsPath(envvar.GetTestProjectFromEnv(), testDataSourceBucketName, testDataSinkName, testTransferJobDescription, testTransferJobName, "bar/"), + }, + { + Config: testAccStorageTransferJob_transferJobGcsPath(envvar.GetTestProjectFromEnv(), testDataSourceBucketName, testDataSinkName, testTransferJobDescription, testTransferJobName, ""), + }, + { + Config: testAccStorageTransferJob_transferJobGcsPath(envvar.GetTestProjectFromEnv(), testDataSourceBucketName, testDataSinkName, testTransferJobDescription, testTransferJobName, "bar/"), + }, + }, + }) +} + func testAccStorageTransferJobDestroyProducer(t *testing.T) func(s *terraform.State) error { return func(s *terraform.State) error { config := acctest.GoogleProviderConfig(t) @@ -2399,3 +2425,81 @@ resource "google_storage_transfer_job" "transfer_job" { } `, project, dataSourceBucketName, project, dataSinkBucketName, project, transferJobDescription, project) } + +func testAccStorageTransferJob_transferJobGcsPath(project string, dataSourceBucketName string, dataSinkBucketName string, transferJobDescription string, testTransferJobName string, gcsPath string) string { + return fmt.Sprintf(` + data "google_storage_transfer_project_service_account" "default" { + project = "%s" + } + + resource "google_storage_bucket" "data_source" { + name = "%s" + project = "%s" + location = "US" + force_destroy = true + uniform_bucket_level_access = true + } + + resource "google_storage_bucket_iam_member" "data_source" { + bucket = google_storage_bucket.data_source.name + role = "roles/storage.admin" + member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" + } + + resource "google_storage_bucket" "data_sink" { + name = "%s" + project = "%s" + location = "US" + force_destroy = true + uniform_bucket_level_access = true + } + + resource 
"google_storage_bucket_iam_member" "data_sink" { + bucket = google_storage_bucket.data_sink.name + role = "roles/storage.admin" + member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" + } + + resource "google_storage_transfer_job" "transfer_job" { + name = "transferJobs/%s" + description = "%s" + project = "%s" + + transfer_spec { + gcs_data_source { + bucket_name = google_storage_bucket.data_source.name + path = "foo/" + } + gcs_data_sink { + bucket_name = google_storage_bucket.data_sink.name + path = "%s" + } + } + + schedule { + schedule_start_date { + year = 2018 + month = 10 + day = 1 + } + schedule_end_date { + year = 2019 + month = 10 + day = 1 + } + start_time_of_day { + hours = 0 + minutes = 30 + seconds = 0 + nanos = 0 + } + repeat_interval = "604800s" + } + + depends_on = [ + google_storage_bucket_iam_member.data_source, + google_storage_bucket_iam_member.data_sink, + ] + } + `, project, dataSourceBucketName, project, dataSinkBucketName, project, testTransferJobName, transferJobDescription, project, gcsPath) +} diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index e469a53221da..da0230b6ab05 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -125,3 +125,13 @@ Description of the change and how users should adjust their configuration (if ne ### `instance` is now removed `instance` has been removed in favor of `instance_name`. + +## Resource: `google_storage_transfer_job` + +### `transfer_spec.gcs_data_sink.path` Implemented validation to prevent strings from starting with a '/' character, while still permitting empty strings." + +### `transfer_spec.gcs_data_source.path` Implemented validation to prevent strings from starting with a '/' character, while still permitting empty strings." + +### `replication_spec.gcs_data_source.path` Implemented validation to prevent strings from starting with a '/' character, while still permitting empty strings." + +### `replication_spec.gcs_data_sink.path` Implemented validation to prevent strings from starting with a '/' character, while still permitting empty strings." 
From 7d64c0a170d2ce5c23c5aacd288358b08a51d845 Mon Sep 17 00:00:00 2001 From: gurusai-voleti Date: Tue, 8 Jul 2025 17:03:09 +0000 Subject: [PATCH 004/201] fix: (storage) data type change for retention_period (#14442) Co-authored-by: Riley Karson --- .../storage/resource_storage_bucket.go.tmpl | 26 +- .../resource_storage_bucket_600_migration.go | 530 ++++++++++++++++++ .../storage/resource_storage_bucket_test.go | 6 +- .../guides/version_7_upgrade.html.markdown | 10 +- .../docs/r/storage_bucket.html.markdown | 2 +- .../tgc/services/storage/storage_bucket.go | 5 +- 6 files changed, 564 insertions(+), 15 deletions(-) diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.tmpl b/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.tmpl index 761a06ffb0ad..be8c88c35be4 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.tmpl +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.tmpl @@ -6,7 +6,6 @@ import ( "errors" "fmt" "log" - "math" "regexp" "runtime" "strconv" @@ -47,7 +46,7 @@ func ResourceStorageBucket() *schema.Resource { Read: schema.DefaultTimeout(4 * time.Minute), }, - SchemaVersion: 3, + SchemaVersion: 4, StateUpgraders: []schema.StateUpgrader{ { Type: resourceStorageBucketV0().CoreConfigSchema().ImpliedType(), @@ -64,6 +63,11 @@ func ResourceStorageBucket() *schema.Resource { Upgrade: ResourceStorageBucketStateUpgradeV2, Version: 2, }, + { + Type: resourceStorageBucketV3().CoreConfigSchema().ImpliedType(), + Upgrade: ResourceStorageBucketStateUpgradeV3, + Version: 3, + }, }, Schema: map[string]*schema.Schema{ @@ -409,9 +413,8 @@ func ResourceStorageBucket() *schema.Resource { Description: `If set to true, the bucket will be locked and permanently restrict edits to the bucket's retention policy. Caution: Locking a bucket is an irreversible action.`, }, "retention_period": { - Type: schema.TypeInt, + Type: schema.TypeString, Required: true, - ValidateFunc: validation.IntBetween(1, math.MaxInt32), Description: `The period of time, in seconds, that objects in the bucket must be retained and cannot be deleted, overwritten, or archived. 
The value must be less than 3,155,760,000 seconds.`, }, }, @@ -864,7 +867,11 @@ func resourceStorageBucketCreate(d *schema.ResourceData, meta interface{}) error retentionPolicy := retention_policies[0].(map[string]interface{}) if v, ok := retentionPolicy["retention_period"]; ok { - sb.RetentionPolicy.RetentionPeriod = int64(v.(int)) + value, err := strconv.ParseInt(v.(string), 10, 64) + if err != nil { + return err + } + sb.RetentionPolicy.RetentionPeriod = value } } } @@ -1472,9 +1479,14 @@ func expandBucketRetentionPolicy(configured interface{}) *storage.BucketRetentio } retentionPolicy := retentionPolicies[0].(map[string]interface{}) + var retentionPeriod int64 + if v, ok := retentionPolicy["retention_period"]; ok { + retentionPeriod, _ = strconv.ParseInt(v.(string), 10, 64) + } + bucketRetentionPolicy := &storage.BucketRetentionPolicy{ IsLocked: retentionPolicy["is_locked"].(bool), - RetentionPeriod: int64(retentionPolicy["retention_period"].(int)), + RetentionPeriod: retentionPeriod, } return bucketRetentionPolicy @@ -1489,7 +1501,7 @@ func flattenBucketRetentionPolicy(bucketRetentionPolicy *storage.BucketRetention retentionPolicy := map[string]interface{}{ "is_locked": bucketRetentionPolicy.IsLocked, - "retention_period": bucketRetentionPolicy.RetentionPeriod, + "retention_period": fmt.Sprintf("%d", bucketRetentionPolicy.RetentionPeriod), } bucketRetentionPolicies = append(bucketRetentionPolicies, retentionPolicy) diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go index 39ff367d6f56..a34141e3b0f8 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go @@ -4,6 +4,7 @@ import ( "context" "log" "math" + "strconv" "strings" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" @@ -1054,3 +1055,532 @@ func ResourceStorageBucketStateUpgradeV2(_ context.Context, rawState map[string] log.Printf("[DEBUG] Attributes after migration: %#v", rawState) return rawState, nil } + +func resourceStorageBucketV3() *schema.Resource { + return &schema.Resource{ + StateUpgraders: []schema.StateUpgrader{ + { + Type: resourceStorageBucketV0().CoreConfigSchema().ImpliedType(), + Upgrade: ResourceStorageBucketStateUpgradeV0, + Version: 0, + }, + { + Type: resourceStorageBucketV1().CoreConfigSchema().ImpliedType(), + Upgrade: ResourceStorageBucketStateUpgradeV1, + Version: 1, + }, + { + Type: resourceStorageBucketV2().CoreConfigSchema().ImpliedType(), + Upgrade: ResourceStorageBucketStateUpgradeV1, + Version: 2, + }, + }, + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: `The name of the bucket.`, + ValidateFunc: verify.ValidateGCSName, + }, + + "encryption": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "default_kms_key_name": { + Type: schema.TypeString, + Required: true, + Description: `A Cloud KMS key that will be used to encrypt objects inserted into this bucket, if no encryption method is specified. You must pay attention to whether the crypto key is available in the location that this bucket is created in. 
See the docs for more details.`, + }, + }, + }, + Description: `The bucket's encryption configuration.`, + }, + + "requester_pays": { + Type: schema.TypeBool, + Optional: true, + Description: `Enables Requester Pays on a storage bucket.`, + }, + + "force_destroy": { + Type: schema.TypeBool, + Optional: true, + Default: false, + Description: `When deleting a bucket, this boolean option will delete all contained objects. If you try to delete a bucket that contains objects, Terraform will fail that run.`, + }, + + "labels": { + Type: schema.TypeMap, + ValidateFunc: labelKeyValidator, + Optional: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Description: `A set of key/value label pairs to assign to the bucket.`, + }, + + "terraform_labels": { + Type: schema.TypeMap, + Computed: true, + Description: `The combination of labels configured directly on the resource and default labels configured on the provider.`, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + + "effective_labels": { + Type: schema.TypeMap, + Computed: true, + Description: `All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Terraform, other clients and services.`, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + + "location": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + StateFunc: func(s interface{}) string { + return strings.ToUpper(s.(string)) + }, + Description: `The Google Cloud Storage location`, + }, + + "project": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + Description: `The ID of the project in which the resource belongs. If it is not provided, the provider project is used.`, + }, + + "project_number": { + Type: schema.TypeInt, + Computed: true, + Description: `The project number of the project in which the resource belongs.`, + }, + + "self_link": { + Type: schema.TypeString, + Computed: true, + Description: `The URI of the created resource.`, + }, + + "url": { + Type: schema.TypeString, + Computed: true, + Description: `The base URL of the bucket, in the format gs://.`, + }, + + "storage_class": { + Type: schema.TypeString, + Optional: true, + Default: "STANDARD", + Description: `The Storage Class of the new bucket. Supported values include: STANDARD, MULTI_REGIONAL, REGIONAL, NEARLINE, COLDLINE, ARCHIVE.`, + }, + + "lifecycle_rule": { + Type: schema.TypeList, + Optional: true, + MaxItems: 100, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "action": { + Type: schema.TypeSet, + Required: true, + MinItems: 1, + MaxItems: 1, + Set: resourceGCSBucketLifecycleRuleActionHash, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "type": { + Type: schema.TypeString, + Required: true, + Description: `The type of the action of this Lifecycle Rule. Supported values include: Delete, SetStorageClass and AbortIncompleteMultipartUpload.`, + }, + "storage_class": { + Type: schema.TypeString, + Optional: true, + Description: `The target Storage Class of objects affected by this Lifecycle Rule. Supported values include: MULTI_REGIONAL, REGIONAL, NEARLINE, COLDLINE, ARCHIVE.`, + }, + }, + }, + Description: `The Lifecycle Rule's action configuration. 
A single block of this type is supported.`, + }, + "condition": { + Type: schema.TypeSet, + Required: true, + MinItems: 1, + MaxItems: 1, + Set: resourceGCSBucketLifecycleRuleConditionHash, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "age": { + Type: schema.TypeInt, + Optional: true, + Description: `Minimum age of an object in days to satisfy this condition.`, + }, + "created_before": { + Type: schema.TypeString, + Optional: true, + Description: `Creation date of an object in RFC 3339 (e.g. 2017-06-13) to satisfy this condition.`, + }, + "custom_time_before": { + Type: schema.TypeString, + Optional: true, + Description: `Creation date of an object in RFC 3339 (e.g. 2017-06-13) to satisfy this condition.`, + }, + "days_since_custom_time": { + Type: schema.TypeInt, + Optional: true, + Description: `Number of days elapsed since the user-specified timestamp set on an object.`, + }, + "days_since_noncurrent_time": { + Type: schema.TypeInt, + Optional: true, + Description: `Number of days elapsed since the noncurrent timestamp of an object. This + condition is relevant only for versioned objects.`, + }, + "noncurrent_time_before": { + Type: schema.TypeString, + Optional: true, + Description: `Creation date of an object in RFC 3339 (e.g. 2017-06-13) to satisfy this condition.`, + }, + "no_age": { + Type: schema.TypeBool, + Deprecated: "`no_age` is deprecated and will be removed in a future major release. Use `send_age_if_zero` instead.", + Optional: true, + Description: `While set true, age value will be omitted.Required to set true when age is unset in the config file.`, + }, + "with_state": { + Type: schema.TypeString, + Computed: true, + Optional: true, + ValidateFunc: validation.StringInSlice([]string{"LIVE", "ARCHIVED", "ANY", ""}, false), + Description: `Match to live and/or archived objects. Unversioned buckets have only live objects. Supported values include: "LIVE", "ARCHIVED", "ANY".`, + }, + "matches_storage_class": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Description: `Storage Class of objects to satisfy this condition. Supported values include: MULTI_REGIONAL, REGIONAL, NEARLINE, COLDLINE, ARCHIVE, STANDARD, DURABLE_REDUCED_AVAILABILITY.`, + }, + "num_newer_versions": { + Type: schema.TypeInt, + Optional: true, + Description: `Relevant only for versioned objects. The number of newer versions of an object to satisfy this condition.`, + }, + "matches_prefix": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Description: `One or more matching name prefixes to satisfy this condition.`, + }, + "matches_suffix": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Schema{Type: schema.TypeString}, + Description: `One or more matching name suffixes to satisfy this condition.`, + }, + "send_age_if_zero": { + Type: schema.TypeBool, + Optional: true, + Default: true, + Description: `While set true, age value will be sent in the request even for zero value of the field. This field is only useful for setting 0 value to the age field. It can be used alone or together with age.`, + }, + "send_days_since_noncurrent_time_if_zero": { + Type: schema.TypeBool, + Optional: true, + Description: `While set true, days_since_noncurrent_time value will be sent in the request even for zero value of the field. This field is only useful for setting 0 value to the days_since_noncurrent_time field. 
It can be used alone or together with days_since_noncurrent_time.`, + }, + "send_days_since_custom_time_if_zero": { + Type: schema.TypeBool, + Optional: true, + Description: `While set true, days_since_custom_time value will be sent in the request even for zero value of the field. This field is only useful for setting 0 value to the days_since_custom_time field. It can be used alone or together with days_since_custom_time.`, + }, + "send_num_newer_versions_if_zero": { + Type: schema.TypeBool, + Optional: true, + Description: `While set true, num_newer_versions value will be sent in the request even for zero value of the field. This field is only useful for setting 0 value to the num_newer_versions field. It can be used alone or together with num_newer_versions.`, + }, + }, + }, + Description: `The Lifecycle Rule's condition configuration.`, + }, + }, + }, + Description: `The bucket's Lifecycle Rules configuration.`, + }, + + "enable_object_retention": { + Type: schema.TypeBool, + Optional: true, + ForceNew: true, + Description: `Enables each object in the bucket to have its own retention policy, which prevents deletion until stored for a specific length of time.`, + }, + + "versioning": { + Type: schema.TypeList, + Optional: true, + Computed: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "enabled": { + Type: schema.TypeBool, + Required: true, + Description: `While set to true, versioning is fully enabled for this bucket.`, + }, + }, + }, + Description: `The bucket's Versioning configuration.`, + }, + + "autoclass": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "enabled": { + Type: schema.TypeBool, + Required: true, + Description: `While set to true, autoclass automatically transitions objects in your bucket to appropriate storage classes based on each object's access pattern.`, + }, + "terminal_storage_class": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `The storage class that objects in the bucket eventually transition to if they are not read for a certain length of time. 
Supported values include: NEARLINE, ARCHIVE.`, + }, + }, + }, + Description: `The bucket's autoclass configuration.`, + DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { + _, n := d.GetChange(strings.TrimSuffix(k, ".#")) + if !strings.HasSuffix(k, ".#") { + return false + } + var l []interface{} + if new == "1" && old == "0" { + l = n.([]interface{}) + contents, ok := l[0].(map[string]interface{}) + if !ok { + return false + } + if contents["enabled"] == false { + return true + } + } + if new == "0" && old == "1" { + n := d.Get(strings.TrimSuffix(k, ".#")) + l = n.([]interface{}) + contents := l[0].(map[string]interface{}) + if contents["enabled"] == false { + return true + } + } + return false + }, + }, + "website": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "main_page_suffix": { + Type: schema.TypeString, + Optional: true, + AtLeastOneOf: []string{"website.0.not_found_page", "website.0.main_page_suffix"}, + Description: `Behaves as the bucket's directory index where missing objects are treated as potential directories.`, + DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { + return old != "" && new == "" + }, + }, + "not_found_page": { + Type: schema.TypeString, + Optional: true, + AtLeastOneOf: []string{"website.0.main_page_suffix", "website.0.not_found_page"}, + Description: `The custom object to return when a requested resource is not found.`, + DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { + return old != "" && new == "" + }, + }, + }, + }, + Description: `Configuration if the bucket acts as a website.`, + }, + + "retention_policy": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "is_locked": { + Type: schema.TypeBool, + Optional: true, + Default: false, + Description: `If set to true, the bucket will be locked and permanently restrict edits to the bucket's retention policy. Caution: Locking a bucket is an irreversible action.`, + }, + "retention_period": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validation.IntBetween(1, math.MaxInt32), + Description: `The period of time, in seconds, that objects in the bucket must be retained and cannot be deleted, overwritten, or archived. The value must be less than 3,155,760,000 seconds.`, + }, + }, + }, + Description: `Configuration of the bucket's data retention policy for how long objects in the bucket should be retained.`, + }, + + "cors": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "origin": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + Description: `The list of Origins eligible to receive CORS response headers. 
Note: "*" is permitted in the list of origins, and means "any Origin".`, + }, + "method": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + Description: `The list of HTTP methods on which to include CORS response headers, (GET, OPTIONS, POST, etc) Note: "*" is permitted in the list of methods, and means "any method".`, + }, + "response_header": { + Type: schema.TypeList, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + Description: `The list of HTTP headers other than the simple response headers to give permission for the user-agent to share across domains.`, + }, + "max_age_seconds": { + Type: schema.TypeInt, + Optional: true, + Description: `The value, in seconds, to return in the Access-Control-Max-Age header used in preflight responses.`, + }, + }, + }, + Description: `The bucket's Cross-Origin Resource Sharing (CORS) configuration.`, + }, + + "default_event_based_hold": { + Type: schema.TypeBool, + Optional: true, + Description: `Whether or not to automatically apply an eventBasedHold to new objects added to the bucket.`, + }, + + "logging": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "log_bucket": { + Type: schema.TypeString, + Required: true, + Description: `The bucket that will receive log objects.`, + }, + "log_object_prefix": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `The object prefix for log objects. If it's not provided, by default Google Cloud Storage sets this to this bucket's name.`, + }, + }, + }, + Description: `The bucket's Access & Storage Logs configuration.`, + }, + "uniform_bucket_level_access": { + Type: schema.TypeBool, + Optional: true, + Computed: true, + Description: `Enables uniform bucket-level access on a bucket.`, + }, + "custom_placement_config": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "data_locations": { + Type: schema.TypeSet, + Required: true, + ForceNew: true, + MaxItems: 2, + MinItems: 2, + Elem: &schema.Schema{ + Type: schema.TypeString, + StateFunc: func(s interface{}) string { + return strings.ToUpper(s.(string)) + }, + }, + Description: `The list of individual regions that comprise a dual-region bucket. See the docs for a list of acceptable regions. Note: If any of the data_locations changes, it will recreate the bucket.`, + }, + }, + }, + Description: `The bucket's custom location configuration, which specifies the individual regions that comprise a dual-region bucket. If the bucket is designated a single or multi-region, the parameters are empty.`, + }, + "rpo": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `Specifies the RPO setting of bucket. If set 'ASYNC_TURBO', The Turbo Replication will be enabled for the dual-region bucket. Value 'DEFAULT' will set RPO setting to default. Turbo Replication is only for buckets in dual-regions.See the docs for more details.`, + }, + "public_access_prevention": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `Prevents public access to a bucket.`, + }, + "soft_delete_policy": { + Type: schema.TypeList, + MaxItems: 1, + Optional: true, + Computed: true, + Description: `The bucket's soft delete policy, which defines the period of time that soft-deleted objects will be retained, and cannot be permanently deleted. 
If it is not provided, by default Google Cloud Storage sets this to default soft delete policy`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "retention_duration_seconds": { + Type: schema.TypeInt, + Default: 604800, + Optional: true, + Description: `The duration in seconds that soft-deleted objects in the bucket will be retained and cannot be permanently deleted. Default value is 604800.`, + }, + "effective_time": { + Type: schema.TypeString, + Computed: true, + Description: `Server-determined value that indicates the time from which the policy, or one with a greater retention, was effective. This value is in RFC 3339 format.`, + }, + }, + }, + }, + }, + UseJSONNumber: true, + } +} + +func ResourceStorageBucketStateUpgradeV3(_ context.Context, rawState map[string]interface{}, meta interface{}) (map[string]interface{}, error) { + log.Printf("[DEBUG] Attributes before migration: %#v", rawState) + if rawState["retention_policy"] != nil { + retentionPolicies := rawState["retention_policy"].([]interface{}) + if len(retentionPolicies) > 0 { + retentionPolicy := retentionPolicies[0].(map[string]interface{}) + if v, ok := retentionPolicy["retention_period"]; ok { + retentionPolicy["retention_period"] = strconv.Itoa(v.(int)) + } + } + } + log.Printf("[DEBUG] Attributes after migration: %#v", rawState) + return rawState, nil +} diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go index b7d31bac9b85..40aec7f963d9 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go @@ -2680,7 +2680,7 @@ resource "google_storage_bucket" "bucket" { force_destroy = true retention_policy { - retention_period = 10 + retention_period = "10" } } `, bucketName) @@ -2695,7 +2695,7 @@ resource "google_storage_bucket" "bucket" { retention_policy { is_locked = true - retention_period = 10 + retention_period = "10" } } `, bucketName) @@ -2788,7 +2788,7 @@ resource "google_storage_bucket" "bucket" { force_destroy = true retention_policy { - retention_period = 3600 + retention_period = "3600" } } `, bucketName) diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index da0230b6ab05..5df81a98b0df 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -102,11 +102,15 @@ Description of the change and how users should adjust their configuration (if ne ## Resources -## Resource: `google_product_resource` +## Resource: `google_storage_bucket` -### Resource-level change example header +### `retention_period` changed to `string` data type -Description of the change and how users should adjust their configuration (if needed). +`retention_period` was changed to the [`string` data type](https://developer.hashicorp.com/terraform/language/expressions/types#string) to handle higher values for the bucket's retention period. + +Terraform [Type Conversion](https://developer.hashicorp.com/terraform/language/expressions/types#type-conversion) will handle the change automatically for most configurations, and they will not need to be modified. + +To reflect the new type explicitly, surround the current integer value in quotes, i.e. 
`retention_period = 10` -> `retention_period = "10"`. ## Resource: `google_bigtable_table_iam_policy` diff --git a/mmv1/third_party/terraform/website/docs/r/storage_bucket.html.markdown b/mmv1/third_party/terraform/website/docs/r/storage_bucket.html.markdown index 9450a3ea945d..acf555657d06 100644 --- a/mmv1/third_party/terraform/website/docs/r/storage_bucket.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/storage_bucket.html.markdown @@ -255,7 +255,7 @@ The following arguments are supported: * `is_locked` - (Optional) If set to `true`, the bucket will be [locked](https://cloud.google.com/storage/docs/using-bucket-lock#lock-bucket) and permanently restrict edits to the bucket's retention policy. Caution: Locking a bucket is an irreversible action. -* `retention_period` - (Required) The period of time, in seconds, that objects in the bucket must be retained and cannot be deleted, overwritten, or archived. The value must be less than 2,147,483,647 seconds. +* `retention_period` - (Required) The period of time, in seconds, that objects in the bucket must be retained and cannot be deleted, overwritten, or archived. The value must be less than 3,155,760,000 seconds. The `logging` block supports: diff --git a/mmv1/third_party/tgc/services/storage/storage_bucket.go b/mmv1/third_party/tgc/services/storage/storage_bucket.go index 48ecbd64aa3a..b7f83e34d92f 100644 --- a/mmv1/third_party/tgc/services/storage/storage_bucket.go +++ b/mmv1/third_party/tgc/services/storage/storage_bucket.go @@ -10,6 +10,7 @@ package storage import ( "fmt" + "strconv" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" @@ -213,8 +214,10 @@ func expandBucketRetentionPolicy(configured interface{}) *storage.BucketRetentio } retentionPolicy := retentionPolicies[0].(map[string]interface{}) + value, _ := strconv.ParseInt(retentionPolicy["retention_period"].(string), 10, 64) + bucketRetentionPolicy := &storage.BucketRetentionPolicy{ - RetentionPeriod: int64(retentionPolicy["retention_period"].(int)), + RetentionPeriod: value, } return bucketRetentionPolicy From 78e4e30bc9d87b7e148992f2e6a49244fcf97961 Mon Sep 17 00:00:00 2001 From: haiyanmeng Date: Fri, 11 Jul 2025 17:11:40 -0400 Subject: [PATCH 005/201] Update beta api endpoint from v1beta1 to v1beta as v1beta1 will be deprecated soon (#14495) --- mmv1/products/gkehub/Membership.yaml | 6 ------ mmv1/products/gkehub/product.yaml | 2 +- .../website/docs/guides/version_7_upgrade.html.markdown | 6 ++++++ 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/mmv1/products/gkehub/Membership.yaml b/mmv1/products/gkehub/Membership.yaml index ef242bb3e385..f599106c5b9f 100644 --- a/mmv1/products/gkehub/Membership.yaml +++ b/mmv1/products/gkehub/Membership.yaml @@ -122,12 +122,6 @@ properties: description: | The unique identifier of the membership. output: true - - name: 'description' - type: String - description: | - The name of this entity type to be displayed on the console. This field is unavailable in v1 of the API. - min_version: 'beta' - deprecation_message: '`description` is deprecated and will be removed in a future major release.' 
- name: 'labels' type: KeyValueLabels description: | diff --git a/mmv1/products/gkehub/product.yaml b/mmv1/products/gkehub/product.yaml index eb701f7c8923..db104a135598 100644 --- a/mmv1/products/gkehub/product.yaml +++ b/mmv1/products/gkehub/product.yaml @@ -17,7 +17,7 @@ legacy_name: 'gke_hub' display_name: 'GKEHub' versions: - name: 'beta' - base_url: 'https://gkehub.googleapis.com/v1beta1/' + base_url: 'https://gkehub.googleapis.com/v1beta/' - name: 'ga' base_url: 'https://gkehub.googleapis.com/v1/' scopes: diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 5df81a98b0df..179f5e244b12 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -130,6 +130,12 @@ To reflect the new type explicitly, surround the current integer value in quotes `instance` has been removed in favor of `instance_name`. +## Resource: `google_gke_hub_membership` + +### `description` is now removed + +Remove `description` from your configuration after upgrade. + ## Resource: `google_storage_transfer_job` ### `transfer_spec.gcs_data_sink.path` Implemented validation to prevent strings from starting with a '/' character, while still permitting empty strings." From 00fd0eecf9288735432015320374e4d6bf4873dc Mon Sep 17 00:00:00 2001 From: Thomas Rodgers Date: Mon, 14 Jul 2025 12:58:19 -0700 Subject: [PATCH 006/201] Delete notebooks location (#14479) --- mmv1/products/notebooks/Location.yaml | 43 ------------------- .../guides/version_7_upgrade.html.markdown | 26 ++++++----- 2 files changed, 15 insertions(+), 54 deletions(-) delete mode 100644 mmv1/products/notebooks/Location.yaml diff --git a/mmv1/products/notebooks/Location.yaml b/mmv1/products/notebooks/Location.yaml deleted file mode 100644 index 1438575441d3..000000000000 --- a/mmv1/products/notebooks/Location.yaml +++ /dev/null @@ -1,43 +0,0 @@ -# Copyright 2024 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'Location' -kind: 'compute#zone' -description: 'Represents a Location resource.' -deprecation_message: >- - `google_notebooks_location` is deprecated and will be removed in a future major release. - This resource is not functional. -readonly: true -docs: -base_url: 'projects/{{project}}/locations' -has_self_link: true -timeouts: - insert_minutes: 20 - update_minutes: 20 - delete_minutes: 20 -async: - actions: ['create', 'delete', 'update'] - type: 'OpAsync' - operation: - base_url: '{{op_id}}' - result: - resource_inside_response: true -collection_url_key: 'items' -custom_code: -parameters: -properties: - - name: 'name' - type: String - description: 'Name of the Location resource.' 
- custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.tmpl' diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 179f5e244b12..7714d280b547 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -102,15 +102,17 @@ Description of the change and how users should adjust their configuration (if ne ## Resources -## Resource: `google_storage_bucket` +## Resource: `google_bigtable_table_iam_binding` -### `retention_period` changed to `string` data type +### `instance` is now removed -`retention_period` was changed to the [`string` data type](https://developer.hashicorp.com/terraform/language/expressions/types#string) to handle higher values for the bucket's retention period. +`instance` has been removed in favor of `instance_name`. -Terraform [Type Conversion](https://developer.hashicorp.com/terraform/language/expressions/types#type-conversion) will handle the change automatically for most configurations, and they will not need to be modified. +## Resource: `google_bigtable_table_iam_member` -To reflect the new type explicitly, surround the current integer value in quotes, i.e. `retention_period = 10` -> `retention_period = "10"`. +### `instance` is now removed + +`instance` has been removed in favor of `instance_name`. ## Resource: `google_bigtable_table_iam_policy` @@ -118,17 +120,19 @@ To reflect the new type explicitly, surround the current integer value in quotes `instance` has been removed in favor of `instance_name`. -## Resource: `google_bigtable_table_iam_binding` +## Resource: `google_notebooks_location` is now removed -### `instance` is now removed +This resource is not functional. -`instance` has been removed in favor of `instance_name`. +## Resource: `google_storage_bucket` -## Resource: `google_bigtable_table_iam_member` +### `retention_period` changed to `string` data type -### `instance` is now removed +`retention_period` was changed to the [`string` data type](https://developer.hashicorp.com/terraform/language/expressions/types#string) to handle higher values for the bucket's retention period. -`instance` has been removed in favor of `instance_name`. +Terraform [Type Conversion](https://developer.hashicorp.com/terraform/language/expressions/types#type-conversion) will handle the change automatically for most configurations, and they will not need to be modified. + +To reflect the new type explicitly, surround the current integer value in quotes, i.e. `retention_period = 10` -> `retention_period = "10"`. 
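For example, a bucket with a retention policy only needs the value quoted; the bucket name and period here are placeholders:

```hcl
resource "google_storage_bucket" "example" {
  name     = "example-bucket"
  location = "US"

  retention_policy {
    # 6.x:   retention_period = 86400
    retention_period = "86400" # 7.0.0: the value is now a string
  }
}
```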
## Resource: `google_gke_hub_membership` From c4267dda4157c885c98a1b1f7296716a79e7eb71 Mon Sep 17 00:00:00 2001 From: hao-nan-li <100219545+hao-nan-li@users.noreply.github.com> Date: Thu, 24 Jul 2025 09:23:58 -0700 Subject: [PATCH 007/201] Remove enable_flow_logs from google_compute_subnetwork (#14612) --- mmv1/products/compute/Subnetwork.yaml | 11 ----------- mmv1/templates/terraform/constants/subnetwork.tmpl | 12 ------------ .../docs/guides/version_7_upgrade.html.markdown | 6 ++++++ 3 files changed, 6 insertions(+), 23 deletions(-) diff --git a/mmv1/products/compute/Subnetwork.yaml b/mmv1/products/compute/Subnetwork.yaml index ec5b99889f65..1bc5180aed83 100644 --- a/mmv1/products/compute/Subnetwork.yaml +++ b/mmv1/products/compute/Subnetwork.yaml @@ -360,7 +360,6 @@ properties: fingerprint_name: 'fingerprint' custom_flatten: 'templates/terraform/custom_flatten/subnetwork_log_config.go.tmpl' custom_expand: 'templates/terraform/custom_expand/subnetwork_log_config.go.tmpl' - diff_suppress_func: 'subnetworkLogConfigDiffSuppress' properties: - name: 'aggregationInterval' type: Enum @@ -512,16 +511,6 @@ properties: update_verb: 'PATCH' fingerprint_name: 'fingerprint' is_missing_in_cai: true - - name: 'enableFlowLogs' - type: Boolean - description: | - Whether to enable flow logging for this subnetwork. If this field is not explicitly set, - it will not appear in get listings. If not set the default behavior is determined by the - org policy, if there is no org policy specified, then it will default to disabled. - This field isn't supported if the subnet purpose field is set to REGIONAL_MANAGED_PROXY. - default_from_api: true - include_empty_value_in_cai: true - deprecation_message: 'This field is being removed in favor of log_config. If log_config is present, flow logs are enabled.' - name: 'state' type: Enum description: | diff --git a/mmv1/templates/terraform/constants/subnetwork.tmpl b/mmv1/templates/terraform/constants/subnetwork.tmpl index 78697330c8e6..9969642edc58 100644 --- a/mmv1/templates/terraform/constants/subnetwork.tmpl +++ b/mmv1/templates/terraform/constants/subnetwork.tmpl @@ -48,15 +48,3 @@ func sendSecondaryIpRangeIfEmptyDiff(_ context.Context, diff *schema.ResourceDif return nil } - -// DiffSuppressFunc for `log_config`. -func subnetworkLogConfigDiffSuppress(k, old, new string, d *schema.ResourceData) bool { - // If enable_flow_logs is enabled and log_config is not set, ignore the diff - if enable_flow_logs := d.Get("enable_flow_logs"); enable_flow_logs.(bool) { - logConfig := d.GetRawConfig().GetAttr("log_config") - logConfigIsEmpty := logConfig.IsNull() || logConfig.LengthInt() == 0 - return logConfigIsEmpty - } - - return false -} diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 7714d280b547..f467ebc1ae73 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -120,6 +120,12 @@ Description of the change and how users should adjust their configuration (if ne `instance` has been removed in favor of `instance_name`. +## Resource: `google_compute_subnetwork` + +### `enable_flow_logs`is now removed + +`enable_flow_logs` has been removed in favor of `log_config`. + ## Resource: `google_notebooks_location` is now removed This resource is not functional. 
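As an illustrative sketch of the replacement (not part of the upstream change; the subnetwork name, CIDR range, and the referenced `google_compute_network.example` are assumed placeholders), flow logs are now enabled by declaring a `log_config` block instead of setting `enable_flow_logs`:

```hcl
resource "google_compute_subnetwork" "example" {
  name          = "example-subnet"
  region        = "us-central1"
  network       = google_compute_network.example.id
  ip_cidr_range = "10.2.0.0/16"

  # Replaces enable_flow_logs = true; defining log_config is what enables flow logs.
  log_config {
    aggregation_interval = "INTERVAL_5_SEC"
    flow_sampling        = 0.5
    metadata             = "INCLUDE_ALL_METADATA"
  }
}
```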
From 3fdf5ec2e86c7526b1fded756f98e71d5d4a4f43 Mon Sep 17 00:00:00 2001 From: hao-nan-li <100219545+hao-nan-li@users.noreply.github.com> Date: Mon, 28 Jul 2025 10:56:18 -0700 Subject: [PATCH 008/201] Mark `load_balancing_scheme` field required. (#14624) --- mmv1/products/networkservices/LbTrafficExtension.yaml | 1 + .../website/docs/guides/version_7_upgrade.html.markdown | 6 ++++++ 2 files changed, 7 insertions(+) diff --git a/mmv1/products/networkservices/LbTrafficExtension.yaml b/mmv1/products/networkservices/LbTrafficExtension.yaml index a71a99c345c2..d5ddcdd1d34e 100644 --- a/mmv1/products/networkservices/LbTrafficExtension.yaml +++ b/mmv1/products/networkservices/LbTrafficExtension.yaml @@ -201,6 +201,7 @@ properties: For more information, refer to [Choosing a load balancer](https://cloud.google.com/load-balancing/docs/backend-service) and [Supported application load balancers](https://cloud.google.com/service-extensions/docs/callouts-overview#supported-lbs). immutable: true + required: true enum_values: - 'INTERNAL_MANAGED' - 'EXTERNAL_MANAGED' diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index f467ebc1ae73..753d9042c173 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -146,6 +146,12 @@ To reflect the new type explicitly, surround the current integer value in quotes Remove `description` from your configuration after upgrade. +## Resource: `google_network_services_lb_traffic_extension` + +### `load_balancing_scheme` is now required + +`load_balancing_scheme` is now a required field. + ## Resource: `google_storage_transfer_job` ### `transfer_spec.gcs_data_sink.path` Implemented validation to prevent strings from starting with a '/' character, while still permitting empty strings." 
From fd6f9b8cdd0fd967a0e1968564c7c8e9394673c1 Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Mon, 28 Jul 2025 12:55:43 -0700 Subject: [PATCH 009/201] syncing removal of validation to v3 schema --- .../storage/resource_storage_bucket_600_migration.go | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go index fad2e44bd970..bf4a561b50ea 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go @@ -1073,11 +1073,10 @@ func resourceStorageBucketV3() *schema.Resource { }, Schema: map[string]*schema.Schema{ "name": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - Description: `The name of the bucket.`, - ValidateFunc: verify.ValidateGCSName, + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: `The name of the bucket.`, }, "encryption": { From 041869fb6650b5331b956726a30d527bccff630d Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Tue, 29 Jul 2025 23:10:43 +0200 Subject: [PATCH 010/201] cloudfunctions2: changed `service` argument in `service_config` of `google_cloudfunctions2_function` to attribute (#14648) Co-authored-by: Shuya Ma <87669292+shuyama1@users.noreply.github.com> --- mmv1/products/cloudfunctions2/Function.yaml | 2 +- .../website/docs/guides/version_7_upgrade.html.markdown | 6 ++++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/mmv1/products/cloudfunctions2/Function.yaml b/mmv1/products/cloudfunctions2/Function.yaml index 51c8b30033eb..e3534fabce79 100644 --- a/mmv1/products/cloudfunctions2/Function.yaml +++ b/mmv1/products/cloudfunctions2/Function.yaml @@ -503,7 +503,7 @@ properties: type: String description: | Name of the service associated with a Function. - default_from_api: true + output: true - name: 'timeoutSeconds' type: Integer description: | diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 753d9042c173..df921efce378 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -161,3 +161,9 @@ Remove `description` from your configuration after upgrade. ### `replication_spec.gcs_data_source.path` Implemented validation to prevent strings from starting with a '/' character, while still permitting empty strings." ### `replication_spec.gcs_data_sink.path` Implemented validation to prevent strings from starting with a '/' character, while still permitting empty strings." + +## Resource: `google_cloudfunctions2_function` + +### `service_config.service` is changed from `Argument` to `Attribute` + +Remove `service_config.service` from your configuration after upgrade. From 6ac3ef3351a4f4f2afd175a8c6bb39f8440ae19b Mon Sep 17 00:00:00 2001 From: Paridhi Shah <166548459+paridhishah18@users.noreply.github.com> Date: Thu, 31 Jul 2025 11:04:05 -0700 Subject: [PATCH 011/201] remove dependsOn field as it is not supported for workerpools and fix failing test. 
(#14295) --- mmv1/products/cloudrunv2/WorkerPool.yaml | 6 ------ .../website/docs/guides/version_7_upgrade.html.markdown | 6 ++++++ 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/mmv1/products/cloudrunv2/WorkerPool.yaml b/mmv1/products/cloudrunv2/WorkerPool.yaml index 37ecf07f8773..fcce2c9fdfb0 100644 --- a/mmv1/products/cloudrunv2/WorkerPool.yaml +++ b/mmv1/products/cloudrunv2/WorkerPool.yaml @@ -477,12 +477,6 @@ properties: type: String description: |- Container's working directory. If not specified, the container runtime's default will be used, which might be configured in the container image. - - name: 'dependsOn' - type: Array - description: |- - Containers which should be started before this container. If specified the container will wait to start until all containers with the listed names are healthy. - item_type: - type: String - name: 'volumes' type: Array description: |- diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index df921efce378..a487a1526410 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -167,3 +167,9 @@ Remove `description` from your configuration after upgrade. ### `service_config.service` is changed from `Argument` to `Attribute` Remove `service_config.service` from your configuration after upgrade. + +## Resource: `google_cloud_run_v2_worker_pool` + +### `template.containers.depends_on` is reomved as it is not supported. + +Remove `template.containers.depends_on` from your configuration after upgrade. From d6bf6c421910c016da2e190a8a1a3b4904fe3c7f Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Fri, 1 Aug 2025 21:49:57 +0200 Subject: [PATCH 012/201] vertexai: marked `enable_secure_private_service_connect` in `google_vertex_ai_endpoint` as beta isntead of GA (beta) (#14665) --- mmv1/products/vertexai/Endpoint.yaml | 3 ++- .../vertex_ai_endpoint_private_service_connect.tf.tmpl | 1 - .../website/docs/guides/version_7_upgrade.html.markdown | 6 +++++- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/mmv1/products/vertexai/Endpoint.yaml b/mmv1/products/vertexai/Endpoint.yaml index 30046d585e5d..7dd64bb13d28 100644 --- a/mmv1/products/vertexai/Endpoint.yaml +++ b/mmv1/products/vertexai/Endpoint.yaml @@ -21,7 +21,7 @@ description: references: guides: 'Official Documentation': 'https://cloud.google.com/vertex-ai/docs' - api: 'https://cloud.google.com/vertex-ai/docs/reference/rest/v1beta1/projects.locations.endpoints' + api: 'https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations.endpoints' docs: base_url: 'projects/{{project}}/locations/{{location}}/endpoints' self_link: 'projects/{{project}}/locations/{{location}}/endpoints/{{name}}' @@ -458,6 +458,7 @@ properties: description: 'A list of Projects from which the forwarding rule will target the service attachment.' - name: 'enableSecurePrivateServiceConnect' + min_version: 'beta' type: Boolean description: 'If set to true, enable secure private service connect with IAM authorization. Otherwise, private service connect will be done without authorization. Note latency will be slightly increased if authorization is enabled.' 
diff --git a/mmv1/templates/terraform/examples/vertex_ai_endpoint_private_service_connect.tf.tmpl b/mmv1/templates/terraform/examples/vertex_ai_endpoint_private_service_connect.tf.tmpl index f8dd51f3f1eb..eea9532d043b 100644 --- a/mmv1/templates/terraform/examples/vertex_ai_endpoint_private_service_connect.tf.tmpl +++ b/mmv1/templates/terraform/examples/vertex_ai_endpoint_private_service_connect.tf.tmpl @@ -12,7 +12,6 @@ resource "google_vertex_ai_endpoint" "{{$.PrimaryResourceId}}" { project_allowlist = [ "${data.google_project.project.project_id}" ] - enable_secure_private_service_connect = false } } diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index a487a1526410..50e9d1b4a456 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -170,6 +170,10 @@ Remove `service_config.service` from your configuration after upgrade. ## Resource: `google_cloud_run_v2_worker_pool` -### `template.containers.depends_on` is reomved as it is not supported. +### `template.containers.depends_on` is removed as it is not supported. Remove `template.containers.depends_on` from your configuration after upgrade. + +## Resource: `google_vertex_ai_endpoint` + +### `enable_secure_private_service_connect` is removed as it is not available in the GA version of the API, only in the beta version. From 4ba5c470bd75edd078d35803834216c547355f90 Mon Sep 17 00:00:00 2001 From: Cameron Thornton Date: Fri, 8 Aug 2025 13:49:21 -0500 Subject: [PATCH 013/201] make event_type required in `google_cloudfunctions2_function` resource (#14791) --- mmv1/products/cloudfunctions2/Function.yaml | 1 + .../website/docs/guides/version_7_upgrade.html.markdown | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/mmv1/products/cloudfunctions2/Function.yaml b/mmv1/products/cloudfunctions2/Function.yaml index e3534fabce79..b7381c6cfda7 100644 --- a/mmv1/products/cloudfunctions2/Function.yaml +++ b/mmv1/products/cloudfunctions2/Function.yaml @@ -678,6 +678,7 @@ properties: default_from_api: true - name: 'eventType' type: String + required: true description: 'Required. The type of event to observe.' - name: 'eventFilters' type: Array diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 50e9d1b4a456..7b33a23fe275 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -164,6 +164,10 @@ Remove `description` from your configuration after upgrade. ## Resource: `google_cloudfunctions2_function` +### `event_trigger.event_type` is now required + +The `event_type` field is now required when `event_trigger` is configured. + ### `service_config.service` is changed from `Argument` to `Attribute` Remove `service_config.service` from your configuration after upgrade. 
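A minimal sketch of an `event_trigger` that satisfies the new `event_type` requirement (not part of the upstream change; the function name, the `google_pubsub_topic.example` reference, and the omitted `build_config`/`service_config` blocks are assumptions):

```hcl
resource "google_cloudfunctions2_function" "example" {
  name     = "example-function"
  location = "us-central1"

  # build_config and service_config omitted for brevity.

  event_trigger {
    trigger_region = "us-central1"
    # Must now always be set whenever event_trigger is configured.
    event_type   = "google.cloud.pubsub.topic.v1.messagePublished"
    pubsub_topic = google_pubsub_topic.example.id
    retry_policy = "RETRY_POLICY_RETRY"
  }
}
```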
From f5d97538160c4d68711861296bb3a9ebd6f241ae Mon Sep 17 00:00:00 2001 From: Cameron Thornton Date: Mon, 11 Aug 2025 11:30:27 -0500 Subject: [PATCH 014/201] Changed default on disable_on_destroy to false (#14790) --- ...dbuild_bitbucket_server_config_peered_network.tf.tmpl | 1 - .../terraform/examples/eventarc_basic_tf.tf.tmpl | 2 -- .../terraform/examples/eventarc_workflows.tf.tmpl | 3 --- ...firebase_app_check_play_integrity_config_full.tf.tmpl | 1 - ...ebase_app_check_play_integrity_config_minimal.tf.tmpl | 3 --- ...e_app_check_recaptcha_enterprise_config_basic.tf.tmpl | 3 --- .../firebase_app_check_service_config_enforced.tf.tmpl | 1 - .../firebase_app_check_service_config_off.tf.tmpl | 1 - .../firebase_app_check_service_config_unenforced.tf.tmpl | 1 - .../examples/firebase_app_hosting_backend_full.tf.tmpl | 2 -- .../firebase_app_hosting_backend_minimal.tf.tmpl | 2 -- .../examples/firebase_app_hosting_build_full.tf.tmpl | 2 -- .../examples/firebase_app_hosting_build_minimal.tf.tmpl | 2 -- .../firebase_app_hosting_traffic_rollout_policy.tf.tmpl | 2 -- ...e_app_hosting_traffic_rollout_policy_disabled.tf.tmpl | 2 -- .../examples/firebase_app_hosting_traffic_target.tf.tmpl | 2 -- .../firebase_database_instance_default_database.tf.tmpl | 4 ---- .../examples/firebasedataconnect_service_basic.tf.tmpl | 1 - ...rebasedataconnect_service_with_force_deletion.tf.tmpl | 1 - .../terraform/examples/kms_autokey_config_all.tf.tmpl | 1 - .../terraform/examples/kms_key_handle_basic.tf.tmpl | 1 - .../terraform/examples/shared_future_reservation.tf.tmpl | 1 - .../terraform/examples/shared_reservation_basic.tf.tmpl | 1 - .../terraform/examples/shared_reservation_beta.tf.tmpl | 1 - ..._featureonlinestore_featureview_cross_project.tf.tmpl | 1 - .../terraform/provider/provider_billing_project_test.go | 3 --- ...e_bigquery_analytics_hub_listing_subscription_test.go | 1 - .../resource_compute_shared_reservation_update_test.go | 8 -------- .../container/resource_container_cluster_test.go.tmpl | 2 -- ...esource_document_ai_warehouse_document_schema_test.go | 1 - ...source_firebase_app_check_service_config_test.go.tmpl | 3 --- .../resource_firebase_data_connect_service_test.go | 1 - .../resource_gke_hub_feature_membership_test.go.tmpl | 4 ---- .../services/gkehub2/iam_gke_hub_feature_test.go | 5 ----- .../gkehub2/resource_gke_hub_feature_test.go.tmpl | 9 --------- .../services/gkehub2/resource_gke_hub_fleet_test.go.tmpl | 2 -- .../resource_gke_hub_scope_rbac_role_binding_test.go | 1 - .../resource_google_project_service.go.tmpl | 2 -- .../resource_google_project_service_test.go.tmpl | 6 ++++-- .../guides/external_credentials_stacks.html.markdown | 1 - .../website/docs/guides/version_7_upgrade.html.markdown | 8 ++++++++ .../website/docs/r/cloudbuild_worker_pool.html.markdown | 1 - .../website/docs/r/google_project_service.html.markdown | 7 ++----- 43 files changed, 14 insertions(+), 93 deletions(-) diff --git a/mmv1/templates/terraform/examples/cloudbuild_bitbucket_server_config_peered_network.tf.tmpl b/mmv1/templates/terraform/examples/cloudbuild_bitbucket_server_config_peered_network.tf.tmpl index 190cf5ebd566..15e1351804e7 100644 --- a/mmv1/templates/terraform/examples/cloudbuild_bitbucket_server_config_peered_network.tf.tmpl +++ b/mmv1/templates/terraform/examples/cloudbuild_bitbucket_server_config_peered_network.tf.tmpl @@ -2,7 +2,6 @@ data "google_project" "project" {} resource "google_project_service" "servicenetworking" { service = "servicenetworking.googleapis.com" - disable_on_destroy = false } resource 
"google_compute_network" "vpc_network" { diff --git a/mmv1/templates/terraform/examples/eventarc_basic_tf.tf.tmpl b/mmv1/templates/terraform/examples/eventarc_basic_tf.tf.tmpl index 2aa6cd00729a..d7db6749e22e 100644 --- a/mmv1/templates/terraform/examples/eventarc_basic_tf.tf.tmpl +++ b/mmv1/templates/terraform/examples/eventarc_basic_tf.tf.tmpl @@ -7,14 +7,12 @@ data "google_project" "project" { resource "google_project_service" "run" { provider = google-beta service = "run.googleapis.com" - disable_on_destroy = false } # Enable Eventarc API resource "google_project_service" "eventarc" { provider = google-beta service = "eventarc.googleapis.com" - disable_on_destroy = false } # Deploy Cloud Run service diff --git a/mmv1/templates/terraform/examples/eventarc_workflows.tf.tmpl b/mmv1/templates/terraform/examples/eventarc_workflows.tf.tmpl index ea7f916116fd..50edabb245f4 100644 --- a/mmv1/templates/terraform/examples/eventarc_workflows.tf.tmpl +++ b/mmv1/templates/terraform/examples/eventarc_workflows.tf.tmpl @@ -7,21 +7,18 @@ data "google_project" "project" { resource "google_project_service" "eventarc" { provider = google-beta service = "eventarc.googleapis.com" - disable_on_destroy = false } # Enable Pub/Sub API resource "google_project_service" "pubsub" { provider = google-beta service = "pubsub.googleapis.com" - disable_on_destroy = false } # Enable Workflows API resource "google_project_service" "workflows" { provider = google-beta service = "workflows.googleapis.com" - disable_on_destroy = false } diff --git a/mmv1/templates/terraform/examples/firebase_app_check_play_integrity_config_full.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_check_play_integrity_config_full.tf.tmpl index db5a93b8a4ec..eba66461acce 100644 --- a/mmv1/templates/terraform/examples/firebase_app_check_play_integrity_config_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebase_app_check_play_integrity_config_full.tf.tmpl @@ -6,7 +6,6 @@ resource "google_project_service" "play_integrity" { service = "playintegrity.googleapis.com" # Don't disable the service if the resource block is removed by accident. - disable_on_destroy = false } resource "google_firebase_android_app" "default" { diff --git a/mmv1/templates/terraform/examples/firebase_app_check_play_integrity_config_minimal.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_check_play_integrity_config_minimal.tf.tmpl index bbe75253a126..62c7717d0815 100644 --- a/mmv1/templates/terraform/examples/firebase_app_check_play_integrity_config_minimal.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebase_app_check_play_integrity_config_minimal.tf.tmpl @@ -4,9 +4,6 @@ resource "google_project_service" "play_integrity" { project = "{{index $.TestEnvVars "project_id"}}" service = "playintegrity.googleapis.com" - - # Don't disable the service if the resource block is removed by accident. 
- disable_on_destroy = false } resource "google_firebase_android_app" "default" { diff --git a/mmv1/templates/terraform/examples/firebase_app_check_recaptcha_enterprise_config_basic.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_check_recaptcha_enterprise_config_basic.tf.tmpl index ee2abef0f7eb..2fad49e48fc6 100644 --- a/mmv1/templates/terraform/examples/firebase_app_check_recaptcha_enterprise_config_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebase_app_check_recaptcha_enterprise_config_basic.tf.tmpl @@ -4,9 +4,6 @@ resource "google_project_service" "recaptcha_enterprise" { project = "{{index $.TestEnvVars "project_id"}}" service = "recaptchaenterprise.googleapis.com" - - # Don't disable the service if the resource block is removed by accident. - disable_on_destroy = false } resource "google_firebase_web_app" "default" { diff --git a/mmv1/templates/terraform/examples/firebase_app_check_service_config_enforced.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_check_service_config_enforced.tf.tmpl index e29fb3518c28..e886b85db32b 100644 --- a/mmv1/templates/terraform/examples/firebase_app_check_service_config_enforced.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebase_app_check_service_config_enforced.tf.tmpl @@ -1,7 +1,6 @@ resource "google_project_service" "appcheck" { project = "{{index $.TestEnvVars "project_id"}}" service = "firebaseappcheck.googleapis.com" - disable_on_destroy = false } resource "google_firebase_app_check_service_config" "default" { diff --git a/mmv1/templates/terraform/examples/firebase_app_check_service_config_off.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_check_service_config_off.tf.tmpl index ff000e5a3b42..be56b53643ce 100644 --- a/mmv1/templates/terraform/examples/firebase_app_check_service_config_off.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebase_app_check_service_config_off.tf.tmpl @@ -1,7 +1,6 @@ resource "google_project_service" "appcheck" { project = "{{index $.TestEnvVars "project_id"}}" service = "firebaseappcheck.googleapis.com" - disable_on_destroy = false } resource "google_firebase_app_check_service_config" "default" { diff --git a/mmv1/templates/terraform/examples/firebase_app_check_service_config_unenforced.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_check_service_config_unenforced.tf.tmpl index 11f85955c357..3ece67283265 100644 --- a/mmv1/templates/terraform/examples/firebase_app_check_service_config_unenforced.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebase_app_check_service_config_unenforced.tf.tmpl @@ -1,7 +1,6 @@ resource "google_project_service" "appcheck" { project = "{{index $.TestEnvVars "project_id"}}" service = "firebaseappcheck.googleapis.com" - disable_on_destroy = false } resource "google_firebase_app_check_service_config" "default" { diff --git a/mmv1/templates/terraform/examples/firebase_app_hosting_backend_full.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_hosting_backend_full.tf.tmpl index a6e4a28da553..c50660acf7fe 100644 --- a/mmv1/templates/terraform/examples/firebase_app_hosting_backend_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebase_app_hosting_backend_full.tf.tmpl @@ -60,7 +60,5 @@ resource "google_project_iam_member" "app_hosting_sa_runner" { resource "google_project_service" "fah" { project = "{{index $.TestEnvVars "project_id"}}" service = "firebaseapphosting.googleapis.com" - - disable_on_destroy = false } ### diff --git a/mmv1/templates/terraform/examples/firebase_app_hosting_backend_minimal.tf.tmpl 
b/mmv1/templates/terraform/examples/firebase_app_hosting_backend_minimal.tf.tmpl index 1b98329068f9..1c25f3967af4 100644 --- a/mmv1/templates/terraform/examples/firebase_app_hosting_backend_minimal.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebase_app_hosting_backend_minimal.tf.tmpl @@ -34,7 +34,5 @@ resource "google_project_iam_member" "app_hosting_sa_runner" { resource "google_project_service" "fah" { project = "{{index $.TestEnvVars "project_id"}}" service = "firebaseapphosting.googleapis.com" - - disable_on_destroy = false } ### diff --git a/mmv1/templates/terraform/examples/firebase_app_hosting_build_full.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_hosting_build_full.tf.tmpl index 801f28a88a0a..9a1524b62f24 100644 --- a/mmv1/templates/terraform/examples/firebase_app_hosting_build_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebase_app_hosting_build_full.tf.tmpl @@ -56,7 +56,5 @@ resource "google_project_iam_member" "app_hosting_sa_runner" { resource "google_project_service" "fah" { project = "{{index $.TestEnvVars "project_id"}}" service = "firebaseapphosting.googleapis.com" - - disable_on_destroy = false } ### diff --git a/mmv1/templates/terraform/examples/firebase_app_hosting_build_minimal.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_hosting_build_minimal.tf.tmpl index 23a7c8d83914..2871a6dfb35b 100644 --- a/mmv1/templates/terraform/examples/firebase_app_hosting_build_minimal.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebase_app_hosting_build_minimal.tf.tmpl @@ -47,7 +47,5 @@ resource "google_project_iam_member" "app_hosting_sa_runner" { resource "google_project_service" "fah" { project = "{{index $.TestEnvVars "project_id"}}" service = "firebaseapphosting.googleapis.com" - - disable_on_destroy = false } ### diff --git a/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_rollout_policy.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_rollout_policy.tf.tmpl index e8048229281c..6caa5b35e3e1 100644 --- a/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_rollout_policy.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_rollout_policy.tf.tmpl @@ -44,7 +44,5 @@ resource "google_project_iam_member" "app_hosting_sa_runner" { resource "google_project_service" "fah" { project = "{{index $.TestEnvVars "project_id"}}" service = "firebaseapphosting.googleapis.com" - - disable_on_destroy = false } ### diff --git a/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_rollout_policy_disabled.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_rollout_policy_disabled.tf.tmpl index 0c9c43112824..8194f2b02f29 100644 --- a/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_rollout_policy_disabled.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_rollout_policy_disabled.tf.tmpl @@ -45,7 +45,5 @@ resource "google_project_iam_member" "app_hosting_sa_runner" { resource "google_project_service" "fah" { project = "{{index $.TestEnvVars "project_id"}}" service = "firebaseapphosting.googleapis.com" - - disable_on_destroy = false } ### diff --git a/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_target.tf.tmpl b/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_target.tf.tmpl index 98fa778f879d..4606bfc1111d 100644 --- a/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_target.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebase_app_hosting_traffic_target.tf.tmpl @@ -60,7 
+60,5 @@ resource "google_project_iam_member" "app_hosting_sa_runner" { resource "google_project_service" "fah" { project = "{{index $.TestEnvVars "project_id"}}" service = "firebaseapphosting.googleapis.com" - - disable_on_destroy = false } ### diff --git a/mmv1/templates/terraform/examples/firebase_database_instance_default_database.tf.tmpl b/mmv1/templates/terraform/examples/firebase_database_instance_default_database.tf.tmpl index 2ded65e3bcd0..db506733e11d 100644 --- a/mmv1/templates/terraform/examples/firebase_database_instance_default_database.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebase_database_instance_default_database.tf.tmpl @@ -13,8 +13,6 @@ resource "google_project_service" "firebase" { provider = google-beta project = google_project.default.project_id service = "firebase.googleapis.com" - - disable_on_destroy = false } resource "google_firebase_project" "default" { @@ -28,8 +26,6 @@ resource "google_project_service" "firebase_database" { provider = google-beta project = google_firebase_project.default.project service = "firebasedatabase.googleapis.com" - - disable_on_destroy = false } resource "time_sleep" "wait_60_seconds" { diff --git a/mmv1/templates/terraform/examples/firebasedataconnect_service_basic.tf.tmpl b/mmv1/templates/terraform/examples/firebasedataconnect_service_basic.tf.tmpl index ca2a62c09e80..228b973ec3f5 100644 --- a/mmv1/templates/terraform/examples/firebasedataconnect_service_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebasedataconnect_service_basic.tf.tmpl @@ -2,7 +2,6 @@ resource "google_project_service" "fdc" { project = "{{index $.TestEnvVars "project_id"}}" service = "firebasedataconnect.googleapis.com" - disable_on_destroy = false } # Create a Firebase Data Connect service diff --git a/mmv1/templates/terraform/examples/firebasedataconnect_service_with_force_deletion.tf.tmpl b/mmv1/templates/terraform/examples/firebasedataconnect_service_with_force_deletion.tf.tmpl index 4d35b5c4278d..fd81b92dd5f7 100644 --- a/mmv1/templates/terraform/examples/firebasedataconnect_service_with_force_deletion.tf.tmpl +++ b/mmv1/templates/terraform/examples/firebasedataconnect_service_with_force_deletion.tf.tmpl @@ -2,7 +2,6 @@ resource "google_project_service" "fdc" { project = "{{index $.TestEnvVars "project_id"}}" service = "firebasedataconnect.googleapis.com" - disable_on_destroy = false } # Create a Firebase Data Connect service diff --git a/mmv1/templates/terraform/examples/kms_autokey_config_all.tf.tmpl b/mmv1/templates/terraform/examples/kms_autokey_config_all.tf.tmpl index aa3ad3661cc6..fb2d32eb7aa9 100644 --- a/mmv1/templates/terraform/examples/kms_autokey_config_all.tf.tmpl +++ b/mmv1/templates/terraform/examples/kms_autokey_config_all.tf.tmpl @@ -22,7 +22,6 @@ resource "google_project_service" "kms_api_service" { provider = google-beta service = "cloudkms.googleapis.com" project = google_project.key_project.project_id - disable_on_destroy = false disable_dependent_services = true depends_on = [google_project.key_project] } diff --git a/mmv1/templates/terraform/examples/kms_key_handle_basic.tf.tmpl b/mmv1/templates/terraform/examples/kms_key_handle_basic.tf.tmpl index b67930b88d11..20988805fc79 100644 --- a/mmv1/templates/terraform/examples/kms_key_handle_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/kms_key_handle_basic.tf.tmpl @@ -33,7 +33,6 @@ resource "google_project_service" "kms_api_service" { provider = google-beta service = "cloudkms.googleapis.com" project = google_project.key_project.project_id - disable_on_destroy = 
false disable_dependent_services = true depends_on = [google_project.key_project] } diff --git a/mmv1/templates/terraform/examples/shared_future_reservation.tf.tmpl b/mmv1/templates/terraform/examples/shared_future_reservation.tf.tmpl index c88b3d4bcf02..3dd33a56fc60 100644 --- a/mmv1/templates/terraform/examples/shared_future_reservation.tf.tmpl +++ b/mmv1/templates/terraform/examples/shared_future_reservation.tf.tmpl @@ -9,7 +9,6 @@ resource "google_project" "owner_project" { resource "google_project_service" "compute" { project = google_project.owner_project.project_id service = "compute.googleapis.com" - disable_on_destroy = false } resource "google_project" "guest_project" { diff --git a/mmv1/templates/terraform/examples/shared_reservation_basic.tf.tmpl b/mmv1/templates/terraform/examples/shared_reservation_basic.tf.tmpl index 202f64c469d6..7e2a710494dc 100644 --- a/mmv1/templates/terraform/examples/shared_reservation_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/shared_reservation_basic.tf.tmpl @@ -10,7 +10,6 @@ resource "google_project" "owner_project" { resource "google_project_service" "compute" { project = google_project.owner_project.project_id service = "compute.googleapis.com" - disable_on_destroy = false } resource "google_project" "guest_project" { diff --git a/mmv1/templates/terraform/examples/shared_reservation_beta.tf.tmpl b/mmv1/templates/terraform/examples/shared_reservation_beta.tf.tmpl index de72467ac91c..e41ebb71a48e 100644 --- a/mmv1/templates/terraform/examples/shared_reservation_beta.tf.tmpl +++ b/mmv1/templates/terraform/examples/shared_reservation_beta.tf.tmpl @@ -12,7 +12,6 @@ resource "google_project_service" "compute" { provider = google-beta project = google_project.owner_project.project_id service = "compute.googleapis.com" - disable_on_destroy = false } resource "google_project" "guest_project" { diff --git a/mmv1/templates/terraform/examples/vertex_ai_featureonlinestore_featureview_cross_project.tf.tmpl b/mmv1/templates/terraform/examples/vertex_ai_featureonlinestore_featureview_cross_project.tf.tmpl index e81d0081edc8..786e75321265 100644 --- a/mmv1/templates/terraform/examples/vertex_ai_featureonlinestore_featureview_cross_project.tf.tmpl +++ b/mmv1/templates/terraform/examples/vertex_ai_featureonlinestore_featureview_cross_project.tf.tmpl @@ -28,7 +28,6 @@ resource "google_project_service" "vertexai" { create = "30m" update = "40m" } - disable_on_destroy = false # Needed for CI tests for permissions to propagate, should not be needed for actual usage depends_on = [time_sleep.wait_60_seconds] } diff --git a/mmv1/third_party/terraform/provider/provider_billing_project_test.go b/mmv1/third_party/terraform/provider/provider_billing_project_test.go index 452ddbfa2b0b..e9f36c187931 100644 --- a/mmv1/third_party/terraform/provider/provider_billing_project_test.go +++ b/mmv1/third_party/terraform/provider/provider_billing_project_test.go @@ -265,8 +265,6 @@ resource "google_project" "project" { resource "google_project_service" "serviceusage" { project = google_project.project.project_id service = "serviceusage.googleapis.com" - - disable_on_destroy = false # Need it enabled in the project when the test disables services in post-test cleanup } `, context) } @@ -310,7 +308,6 @@ resource "google_project_service" "pubsub" { resource "google_project_service" "cloudresourcemanager" { project = google_project.project.project_id service = "cloudresourcemanager.googleapis.com" - disable_on_destroy = false # Need it enabled in the project when the test deletes 
the project resource in post-test cleanup } `, context) } diff --git a/mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_listing_subscription_test.go b/mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_listing_subscription_test.go index 155ecb59f738..e4e4ee291689 100644 --- a/mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_listing_subscription_test.go +++ b/mmv1/third_party/terraform/services/bigqueryanalyticshub/resource_bigquery_analytics_hub_listing_subscription_test.go @@ -74,7 +74,6 @@ resource "google_project" "project" { resource "google_project_service" "analyticshub" { project = google_project.project.project_id service = "analyticshub.googleapis.com" - disable_on_destroy = false # Need it enabled in the project when the test disables services in post-test cleanup } resource "google_bigquery_analytics_hub_data_exchange" "subscription" { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_shared_reservation_update_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_shared_reservation_update_test.go index 52b836733dd1..fb8c176a9614 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_shared_reservation_update_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_shared_reservation_update_test.go @@ -68,7 +68,6 @@ resource "google_project" "owner_project" { resource "google_project_service" "compute" { project = google_project.owner_project.project_id service = "compute.googleapis.com" - disable_on_destroy = false } resource "google_project" "guest_project" { @@ -108,19 +107,16 @@ resource "google_organization_policy" "shared_reservation_org_policy" { resource "google_project_service" "compute_second_project" { project = google_project.guest_project.project_id service = "compute.googleapis.com" - disable_on_destroy = false } resource "google_project_service" "compute_third_project" { project = google_project.guest_project_second.project_id service = "compute.googleapis.com" - disable_on_destroy = false } resource "google_project_service" "compute_fourth_project" { project = google_project.guest_project_third.project_id service = "compute.googleapis.com" - disable_on_destroy = false } resource "google_compute_reservation" "gce_reservation" { @@ -160,7 +156,6 @@ resource "google_project" "owner_project" { resource "google_project_service" "compute" { project = google_project.owner_project.project_id service = "compute.googleapis.com" - disable_on_destroy = false } resource "google_project" "guest_project" { @@ -200,19 +195,16 @@ resource "google_organization_policy" "shared_reservation_org_policy" { resource "google_project_service" "compute_second_project" { project = google_project.guest_project.project_id service = "compute.googleapis.com" - disable_on_destroy = false } resource "google_project_service" "compute_third_project" { project = google_project.guest_project_second.project_id service = "compute.googleapis.com" - disable_on_destroy = false } resource "google_project_service" "compute_fourth_project" { project = google_project.guest_project_third.project_id service = "compute.googleapis.com" - disable_on_destroy = false } resource "google_compute_reservation" "gce_reservation" { diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index 
d5b1b9772fa6..6461b0de65d6 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -8669,12 +8669,10 @@ func testAccContainerCluster_withNodeConfigReservationAffinitySpecific(reservati resource "google_project_service" "compute" { service = "compute.googleapis.com" - disable_on_destroy = false } resource "google_project_service" "container" { service = "container.googleapis.com" - disable_on_destroy = false depends_on = [google_project_service.compute] } diff --git a/mmv1/third_party/terraform/services/documentaiwarehouse/resource_document_ai_warehouse_document_schema_test.go b/mmv1/third_party/terraform/services/documentaiwarehouse/resource_document_ai_warehouse_document_schema_test.go index e2521f2b5b33..1a3a5568b955 100644 --- a/mmv1/third_party/terraform/services/documentaiwarehouse/resource_document_ai_warehouse_document_schema_test.go +++ b/mmv1/third_party/terraform/services/documentaiwarehouse/resource_document_ai_warehouse_document_schema_test.go @@ -132,7 +132,6 @@ resource "google_project" "project" { resource "google_project_service" "contentwarehouse" { project = google_project.project.project_id service = "contentwarehouse.googleapis.com" - disable_on_destroy = false } resource "time_sleep" "wait_120s" { diff --git a/mmv1/third_party/terraform/services/firebaseappcheck/resource_firebase_app_check_service_config_test.go.tmpl b/mmv1/third_party/terraform/services/firebaseappcheck/resource_firebase_app_check_service_config_test.go.tmpl index dfd7901eff82..9a518159c6f5 100644 --- a/mmv1/third_party/terraform/services/firebaseappcheck/resource_firebase_app_check_service_config_test.go.tmpl +++ b/mmv1/third_party/terraform/services/firebaseappcheck/resource_firebase_app_check_service_config_test.go.tmpl @@ -77,14 +77,12 @@ resource "google_project_service" "firebase" { provider = google-beta project = google_project.default.project_id service = "firebase.googleapis.com" - disable_on_destroy = false } resource "google_project_service" "database" { provider = google-beta project = google_project.default.project_id service = "firebasedatabase.googleapis.com" - disable_on_destroy = false depends_on = [ google_project_service.firebase, ] @@ -94,7 +92,6 @@ resource "google_project_service" "appcheck" { provider = google-beta project = google_project.default.project_id service = "firebaseappcheck.googleapis.com" - disable_on_destroy = false depends_on = [ google_project_service.database, ] diff --git a/mmv1/third_party/terraform/services/firebasedataconnect/resource_firebase_data_connect_service_test.go b/mmv1/third_party/terraform/services/firebasedataconnect/resource_firebase_data_connect_service_test.go index 26c5bad11741..8dea93b53594 100644 --- a/mmv1/third_party/terraform/services/firebasedataconnect/resource_firebase_data_connect_service_test.go +++ b/mmv1/third_party/terraform/services/firebasedataconnect/resource_firebase_data_connect_service_test.go @@ -61,7 +61,6 @@ func testAccFirebaseDataConnectService_update(context map[string]interface{}, di resource "google_project_service" "fdc" { project = "%{project_id}" service = "firebasedataconnect.googleapis.com" - disable_on_destroy = false } # Create an FDC service diff --git a/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_test.go.tmpl b/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_test.go.tmpl index 2cea9bd3eff9..8f2b784f4b21 
100644 --- a/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_test.go.tmpl +++ b/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_test.go.tmpl @@ -1308,7 +1308,6 @@ resource "google_project" "project" { resource "google_project_service" "anthos" { project = google_project.project.project_id service = "anthos.googleapis.com" - disable_on_destroy = false } resource "google_project_service" "mesh" { @@ -1339,19 +1338,16 @@ resource "google_project_service" "mcsd" { resource "google_project_service" "compute" { project = google_project.project.project_id service = "compute.googleapis.com" - disable_on_destroy = false } resource "google_project_service" "container" { project = google_project.project.project_id service = "container.googleapis.com" - disable_on_destroy = false } resource "google_project_service" "gkehub" { project = google_project.project.project_id service = "gkehub.googleapis.com" - disable_on_destroy = false } // It needs waiting until the API services are really activated. diff --git a/mmv1/third_party/terraform/services/gkehub2/iam_gke_hub_feature_test.go b/mmv1/third_party/terraform/services/gkehub2/iam_gke_hub_feature_test.go index 48eb09f11440..c9133ec25c35 100644 --- a/mmv1/third_party/terraform/services/gkehub2/iam_gke_hub_feature_test.go +++ b/mmv1/third_party/terraform/services/gkehub2/iam_gke_hub_feature_test.go @@ -137,7 +137,6 @@ resource "google_project_service" "mcsd" { resource "google_project_service" "gkehub" { project = google_project.project.project_id service = "gkehub.googleapis.com" - disable_on_destroy = false } resource "google_gke_hub_feature" "feature" { name = "multiclusterservicediscovery" @@ -174,7 +173,6 @@ resource "google_project_service" "mcsd" { resource "google_project_service" "gkehub" { project = google_project.project.project_id service = "gkehub.googleapis.com" - disable_on_destroy = false } resource "google_gke_hub_feature" "feature" { name = "multiclusterservicediscovery" @@ -224,7 +222,6 @@ resource "google_project_service" "mcsd" { resource "google_project_service" "gkehub" { project = google_project.project.project_id service = "gkehub.googleapis.com" - disable_on_destroy = false } resource "google_gke_hub_feature" "feature" { name = "multiclusterservicediscovery" @@ -262,7 +259,6 @@ resource "google_project_service" "mcsd" { resource "google_project_service" "gkehub" { project = google_project.project.project_id service = "gkehub.googleapis.com" - disable_on_destroy = false } resource "google_gke_hub_feature" "feature" { name = "multiclusterservicediscovery" @@ -299,7 +295,6 @@ resource "google_project_service" "mcsd" { resource "google_project_service" "gkehub" { project = google_project.project.project_id service = "gkehub.googleapis.com" - disable_on_destroy = false } resource "google_gke_hub_feature" "feature" { name = "multiclusterservicediscovery" diff --git a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl index 5bbf840d3edf..37e510333765 100644 --- a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl +++ b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl @@ -178,21 +178,18 @@ resource "google_project_service" "mcsd" { resource "google_project_service" "compute" { project = google_project.project.project_id service = "compute.googleapis.com" - disable_on_destroy = false provider = 
google-beta } resource "google_project_service" "container" { project = google_project.project.project_id service = "container.googleapis.com" - disable_on_destroy = false provider = google-beta } resource "google_project_service" "gkehub" { project = google_project.project.project_id service = "gkehub.googleapis.com" - disable_on_destroy = false provider = google-beta } `, context) @@ -1086,13 +1083,11 @@ resource "google_project_service" "mcsd" { resource "google_project_service" "compute" { project = google_project.project.project_id service = "compute.googleapis.com" - disable_on_destroy = false } resource "google_project_service" "container" { project = google_project.project.project_id service = "container.googleapis.com" - disable_on_destroy = false } resource "google_project_service" "anthos" { @@ -1103,7 +1098,6 @@ resource "google_project_service" "anthos" { resource "google_project_service" "gkehub" { project = google_project.project.project_id service = "gkehub.googleapis.com" - disable_on_destroy = false } resource "google_project" "project_2" { @@ -1117,19 +1111,16 @@ resource "google_project" "project_2" { resource "google_project_service" "compute_2" { project = google_project.project_2.project_id service = "compute.googleapis.com" - disable_on_destroy = false } resource "google_project_service" "container_2" { project = google_project.project_2.project_id service = "container.googleapis.com" - disable_on_destroy = false } resource "google_project_service" "gkehub_2" { project = google_project.project_2.project_id service = "gkehub.googleapis.com" - disable_on_destroy = false } `, context) } diff --git a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_fleet_test.go.tmpl b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_fleet_test.go.tmpl index 7fb8d5170a94..8127202e4e94 100644 --- a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_fleet_test.go.tmpl +++ b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_fleet_test.go.tmpl @@ -124,14 +124,12 @@ resource "google_project" "project" { resource "google_project_service" "gkehub" { project = google_project.project.project_id service = "gkehub.googleapis.com" - disable_on_destroy = false depends_on = [google_project_service.anthos] } resource "google_project_service" "anthos" { project = google_project.project.project_id service = "anthos.googleapis.com" - disable_on_destroy = false } resource "time_sleep" "wait_for_gkehub_enablement" { diff --git a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_scope_rbac_role_binding_test.go b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_scope_rbac_role_binding_test.go index cae802710262..ae358d84ebb7 100644 --- a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_scope_rbac_role_binding_test.go +++ b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_scope_rbac_role_binding_test.go @@ -207,7 +207,6 @@ resource "google_project_service" "anthos" { resource "google_project_service" "gkehub" { project = google_project.project.project_id service = "gkehub.googleapis.com" - disable_on_destroy = false } `, context) } diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service.go.tmpl b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service.go.tmpl index bf174baf2c47..b496cd7f1643 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service.go.tmpl +++ 
b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service.go.tmpl @@ -119,13 +119,11 @@ func ResourceGoogleProjectService() *schema.Resource { "disable_on_destroy": { Type: schema.TypeBool, Optional: true, - Default: true, }, {{- if ne $.TargetVersionName "ga" }} "check_if_service_has_usage_on_destroy": { Type: schema.TypeBool, Optional: true, - Default: false, }, {{- end }} }, diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service_test.go.tmpl b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service_test.go.tmpl index c5fdaded7489..8fcb603673f0 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service_test.go.tmpl +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_service_test.go.tmpl @@ -282,14 +282,16 @@ resource "google_project" "acceptance" { } resource "google_project_service" "test" { - project = google_project.acceptance.project_id - service = "%s" + project = google_project.acceptance.project_id + service = "%s" + disable_on_destroy = true } resource "google_project_service" "test2" { project = google_project.acceptance.project_id service = "%s" disable_dependent_services = %s + disable_on_destroy = true } `, pid, pid, org, billing, services[0], services[1], disableDependentServices) } diff --git a/mmv1/third_party/terraform/website/docs/guides/external_credentials_stacks.html.markdown b/mmv1/third_party/terraform/website/docs/guides/external_credentials_stacks.html.markdown index f214d75715de..4bd5c4c8a879 100644 --- a/mmv1/third_party/terraform/website/docs/guides/external_credentials_stacks.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/external_credentials_stacks.html.markdown @@ -69,7 +69,6 @@ resource "google_project_service" "services" { project = var.project_id service = each.key disable_dependent_services = false - disable_on_destroy = false } # Create Workload Identity Pool (reference google_project_service to ensure APIs are enabled) diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 7b33a23fe275..3a7d56e0ed4f 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -181,3 +181,11 @@ Remove `template.containers.depends_on` from your configuration after upgrade. ## Resource: `google_vertex_ai_endpoint` ### `enable_secure_private_service_connect` is removed as it is not available in the GA version of the API, only in the beta version. + +## Resource: `google_project_service` + +### `disable_on_destroy` now defaults to `false` + +The default value for `disable_on_destroy` has been changed to `false`. The previous default (`true`) created a risk of unintended service disruptions, as destroying a single `google_project_service` resource would disable the API for the entire project. + +Now, destroying the resource will only remove it from Terraform's state and leave the service enabled. To disable a service when the resource is destroyed, you must now make an explicit decision by setting `disable_on_destroy = true`. 
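A minimal sketch of opting back into the previous destroy-time behavior (not part of the upstream change; the project ID and service are placeholders):

```hcl
resource "google_project_service" "example" {
  project = "my-project-id"
  service = "compute.googleapis.com"

  # The default is now false; set this explicitly if the API should be disabled on destroy.
  disable_on_destroy = true
}
```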
\ No newline at end of file diff --git a/mmv1/third_party/terraform/website/docs/r/cloudbuild_worker_pool.html.markdown b/mmv1/third_party/terraform/website/docs/r/cloudbuild_worker_pool.html.markdown index 0ad57f5ed54b..bd9e48c1904b 100644 --- a/mmv1/third_party/terraform/website/docs/r/cloudbuild_worker_pool.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/cloudbuild_worker_pool.html.markdown @@ -27,7 +27,6 @@ resource "google_cloudbuild_worker_pool" "pool" { ```hcl resource "google_project_service" "servicenetworking" { service = "servicenetworking.googleapis.com" - disable_on_destroy = false } resource "google_compute_network" "network" { diff --git a/mmv1/third_party/terraform/website/docs/r/google_project_service.html.markdown b/mmv1/third_party/terraform/website/docs/r/google_project_service.html.markdown index 657f874cf3f2..f5a53f913637 100644 --- a/mmv1/third_party/terraform/website/docs/r/google_project_service.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/google_project_service.html.markdown @@ -33,8 +33,6 @@ resource "google_project_service" "project" { create = "30m" update = "40m" } - - disable_on_destroy = false } ``` @@ -49,9 +47,8 @@ is used. * `disable_on_destroy` - (Optional) If `true` or unset, disable the service when the Terraform resource is destroyed. If `false`, the service will be left enabled when -the Terraform resource is destroyed. Defaults to `true`. Most configurations should -set this to `false`; it should generally only be `true` or unset in configurations -that manage the `google_project` resource itself. +the Terraform resource is destroyed. Defaults to `false`. It should generally only +be `true` or unset in configurations that manage the `google_project` resource itself. * `disable_dependent_services` - (Optional) If `true`, services that are enabled and which depend on this service should also be disabled when this service is From c4de62f278258304a7caf9061e393acd262719e6 Mon Sep 17 00:00:00 2001 From: Eric Pang Date: Tue, 12 Aug 2025 12:46:41 -0400 Subject: [PATCH 015/201] Set SecureSourceManager Instance and Repository deletion_policy default to PREVENT (#14781) --- .../securesourcemanager/BranchRule.yaml | 4 ++++ .../securesourcemanager/Instance.yaml | 8 ++++++- .../securesourcemanager/Repository.yaml | 6 ++++- ..._source_manager_branch_rule_update_test.go | 24 ++++++++----------- ...e_source_manager_repository_update_test.go | 22 ++++++----------- 5 files changed, 33 insertions(+), 31 deletions(-) diff --git a/mmv1/products/securesourcemanager/BranchRule.yaml b/mmv1/products/securesourcemanager/BranchRule.yaml index 4eb27bf14d76..8f2f83fb9a2a 100644 --- a/mmv1/products/securesourcemanager/BranchRule.yaml +++ b/mmv1/products/securesourcemanager/BranchRule.yaml @@ -52,6 +52,8 @@ examples: 'deletion_policy': '"DELETE"' oics_vars_overrides: 'deletion_policy': '"DELETE"' + ignore_read_extra: + - 'deletion_policy' - name: 'secure_source_manager_branch_rule_with_fields' primary_resource_id: 'default' vars: @@ -63,6 +65,8 @@ examples: 'deletion_policy': '"DELETE"' oics_vars_overrides: 'deletion_policy': '"DELETE"' + ignore_read_extra: + - 'deletion_policy' parameters: - name: 'branch_rule_id' type: String diff --git a/mmv1/products/securesourcemanager/Instance.yaml b/mmv1/products/securesourcemanager/Instance.yaml index c154cda42263..b03875eee254 100644 --- a/mmv1/products/securesourcemanager/Instance.yaml +++ b/mmv1/products/securesourcemanager/Instance.yaml @@ -66,6 +66,7 @@ examples: 'deletion_policy': '"DELETE"' 
ignore_read_extra: - 'update_time' + - 'deletion_policy' - name: 'secure_source_manager_instance_cmek' primary_resource_id: 'default' primary_resource_name: 'fmt.Sprintf("tf-test-my-instance%s", context["random_suffix"])' @@ -80,6 +81,7 @@ examples: 'deletion_policy': '"DELETE"' ignore_read_extra: - 'update_time' + - 'deletion_policy' - name: 'secure_source_manager_instance_private' primary_resource_id: 'default' primary_resource_name: 'fmt.Sprintf("tf-test-my-instance%s", context["random_suffix"])' @@ -95,6 +97,7 @@ examples: external_providers: ["time"] ignore_read_extra: - 'update_time' + - 'deletion_policy' - name: 'secure_source_manager_instance_private_psc_backend' primary_resource_id: 'default' primary_resource_name: 'fmt.Sprintf("tf-test-my-instance%s", context["random_suffix"])' @@ -118,6 +121,7 @@ examples: external_providers: ["time"] ignore_read_extra: - 'update_time' + - 'deletion_policy' - name: 'secure_source_manager_instance_private_psc_endpoint' primary_resource_id: 'default' primary_resource_name: 'fmt.Sprintf("tf-test-my-instance%s", context["random_suffix"])' @@ -138,6 +142,7 @@ examples: external_providers: ["time"] ignore_read_extra: - 'update_time' + - 'deletion_policy' - name: 'secure_source_manager_instance_workforce_identity_federation' primary_resource_id: 'default' primary_resource_name: 'fmt.Sprintf("tf-test-my-instance%s", context["random_suffix"])' @@ -150,6 +155,7 @@ examples: 'deletion_policy': '"DELETE"' ignore_read_extra: - 'update_time' + - 'deletion_policy' parameters: - name: 'location' type: String @@ -177,7 +183,7 @@ virtual_fields: * DELETE * PREVENT * ABANDON - default_value: 'DELETE' + default_value: 'PREVENT' properties: - name: 'name' type: String diff --git a/mmv1/products/securesourcemanager/Repository.yaml b/mmv1/products/securesourcemanager/Repository.yaml index d9d15ff238b5..49b481949da9 100644 --- a/mmv1/products/securesourcemanager/Repository.yaml +++ b/mmv1/products/securesourcemanager/Repository.yaml @@ -60,6 +60,8 @@ examples: 'deletion_policy': '"DELETE"' oics_vars_overrides: 'deletion_policy': '"DELETE"' + ignore_read_extra: + - 'deletion_policy' - name: 'secure_source_manager_repository_initial_config' primary_resource_id: 'default' primary_resource_name: 'fmt.Sprintf("tf-test-my-repository%s", context["random_suffix"])' @@ -71,6 +73,8 @@ examples: 'deletion_policy': '"DELETE"' oics_vars_overrides: 'deletion_policy': '"DELETE"' + ignore_read_extra: + - 'deletion_policy' parameters: - name: 'location' type: String @@ -98,7 +102,7 @@ virtual_fields: * DELETE * PREVENT * ABANDON - default_value: 'DELETE' + default_value: 'PREVENT' properties: - name: 'name' type: String diff --git a/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_branch_rule_update_test.go b/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_branch_rule_update_test.go index 3e219e5659b7..ed1397c981a5 100644 --- a/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_branch_rule_update_test.go +++ b/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_branch_rule_update_test.go @@ -11,7 +11,7 @@ func TestAccSecureSourceManagerBranchRule_secureSourceManagerBranchRuleWithField t.Parallel() context := map[string]interface{}{ - "prevent_destroy": false, + "deletion_policy": "DELETE", "random_suffix": acctest.RandString(t, 10), } @@ -46,20 +46,18 @@ func testAccSecureSourceManagerBranchRule_secureSourceManagerBranchRuleWithField 
resource "google_secure_source_manager_instance" "instance" { location = "us-central1" instance_id = "tf-test-my-initial-instance%{random_suffix}" + # Prevent accidental deletions. - lifecycle { - prevent_destroy = "%{prevent_destroy}" - } + deletion_policy = "%{deletion_policy}" } resource "google_secure_source_manager_repository" "repository" { repository_id = "tf-test-my-initial-repository%{random_suffix}" instance = google_secure_source_manager_instance.instance.name location = google_secure_source_manager_instance.instance.location + # Prevent accidental deletions. - lifecycle { - prevent_destroy = "%{prevent_destroy}" - } + deletion_policy = "%{deletion_policy}" } resource "google_secure_source_manager_branch_rule" "default" { @@ -83,20 +81,18 @@ func testAccSecureSourceManagerBranchRule_secureSourceManagerBranchRuleWithField resource "google_secure_source_manager_instance" "instance" { location = "us-central1" instance_id = "tf-test-my-initial-instance%{random_suffix}" - # Prevent accidental deletions. - lifecycle { - prevent_destroy = "%{prevent_destroy}" - } + + # Prevent accidental deletions. + deletion_policy = "%{deletion_policy}" } resource "google_secure_source_manager_repository" "repository" { repository_id = "tf-test-my-initial-repository%{random_suffix}" instance = google_secure_source_manager_instance.instance.name location = google_secure_source_manager_instance.instance.location + # Prevent accidental deletions. - lifecycle { - prevent_destroy = "%{prevent_destroy}" - } + deletion_policy = "%{deletion_policy}" } resource "google_secure_source_manager_branch_rule" "default" { diff --git a/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_repository_update_test.go b/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_repository_update_test.go index 4a5264e2c8b7..a035b2094130 100644 --- a/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_repository_update_test.go +++ b/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_repository_update_test.go @@ -12,7 +12,7 @@ func TestAccSecureSourceManagerRepository_secureSourceManagerRepositoryBasicExam t.Parallel() context := map[string]interface{}{ - "prevent_destroy": false, + "deletion_policy": "DELETE", "random_suffix": acctest.RandString(t, 10), } @@ -27,7 +27,7 @@ func TestAccSecureSourceManagerRepository_secureSourceManagerRepositoryBasicExam ResourceName: "google_secure_source_manager_repository.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_config", "location", "repository_id"}, + ImportStateVerifyIgnore: []string{"initial_config", "location", "repository_id", "deletion_policy"}, }, { Config: testAccSecureSourceManagerRepository_secureSourceManagerRepositoryBasicExample_update(context), @@ -41,7 +41,7 @@ func TestAccSecureSourceManagerRepository_secureSourceManagerRepositoryBasicExam ResourceName: "google_secure_source_manager_repository.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_config", "location", "repository_id"}, + ImportStateVerifyIgnore: []string{"initial_config", "location", "repository_id", "deletion_policy"}, }, }, }) @@ -54,9 +54,7 @@ resource "google_secure_source_manager_instance" "instance" { instance_id = "tf-test-my-instance%{random_suffix}" # Prevent accidental deletions. 
- lifecycle { - prevent_destroy = "%{prevent_destroy}" - } + deletion_policy = "%{deletion_policy}" } resource "google_secure_source_manager_repository" "default" { @@ -65,9 +63,7 @@ resource "google_secure_source_manager_repository" "default" { instance = google_secure_source_manager_instance.instance.name # Prevent accidental deletions. - lifecycle { - prevent_destroy = "%{prevent_destroy}" - } + deletion_policy = "%{deletion_policy}" } `, context) } @@ -79,9 +75,7 @@ resource "google_secure_source_manager_instance" "instance" { instance_id = "tf-test-my-instance%{random_suffix}" # Prevent accidental deletions. - lifecycle { - prevent_destroy = "%{prevent_destroy}" - } + deletion_policy = "%{deletion_policy}" } resource "google_secure_source_manager_repository" "default" { @@ -92,9 +86,7 @@ resource "google_secure_source_manager_repository" "default" { description = "new description" # Prevent accidental deletions. - lifecycle { - prevent_destroy = "%{prevent_destroy}" - } + deletion_policy = "%{deletion_policy}" } `, context) } From 2c144bf7da6cfcc13d3456c5e9a9a14579877eab Mon Sep 17 00:00:00 2001 From: zoeyai-google Date: Tue, 12 Aug 2025 09:51:57 -0700 Subject: [PATCH 016/201] Deprecate Cloud TPU google_tpu_node resoruce (#14794) Co-authored-by: Stephen Lewis (Burrows) --- mmv1/products/tpu/Node.yaml | 189 ------------------ mmv1/products/tpu/product.yaml | 27 --- .../terraform/examples/tpu_node_basic.tf.tmpl | 15 -- .../terraform/examples/tpu_node_full.tf.tmpl | 50 ----- .../examples/tpu_node_full_test.tf.tmpl | 29 --- .../components/inputs/services_beta.kt | 5 - .../components/inputs/services_ga.kt | 5 - .../provider/provider_mmv1_resources.go.tmpl | 1 - .../data_source_tpu_tensorflow_versions.go | 93 --------- ...ata_source_tpu_tensorflow_versions_test.go | 68 ------- .../guides/version_7_upgrade.html.markdown | 6 +- 11 files changed, 5 insertions(+), 483 deletions(-) delete mode 100644 mmv1/products/tpu/Node.yaml delete mode 100644 mmv1/products/tpu/product.yaml delete mode 100644 mmv1/templates/terraform/examples/tpu_node_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/tpu_node_full.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/tpu_node_full_test.tf.tmpl delete mode 100644 mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions.go delete mode 100644 mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions_test.go diff --git a/mmv1/products/tpu/Node.yaml b/mmv1/products/tpu/Node.yaml deleted file mode 100644 index 6e48ab9ff558..000000000000 --- a/mmv1/products/tpu/Node.yaml +++ /dev/null @@ -1,189 +0,0 @@ -# Copyright 2024 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'Node' -description: | - A Cloud TPU instance. -references: - guides: - 'Official Documentation': 'https://cloud.google.com/tpu/docs/' - api: 'https://cloud.google.com/tpu/docs/reference/rest/v1/projects.locations.nodes' -deprecation_message: >- - `google_tpu_node` is deprecated and will be removed in a future major release. 
- Use `google_tpu_v2_vm` instead. For moving from TPU Node to TPU VM architecture, see - https://cloud.google.com/tpu/docs/system-architecture-tpu-vm#from-tpu-node-to-tpu-vm. -base_url: 'projects/{{project}}/locations/{{zone}}/nodes' -self_link: 'projects/{{project}}/locations/{{zone}}/nodes/{{name}}' -create_url: 'projects/{{project}}/locations/{{zone}}/nodes?nodeId={{name}}' -immutable: true -timeouts: - insert_minutes: 20 - update_minutes: 20 - delete_minutes: 20 -autogen_async: true -async: - actions: ['create', 'delete', 'update'] - type: 'OpAsync' - operation: - base_url: '{{op_id}}' - result: - resource_inside_response: true -custom_code: - constants: 'templates/terraform/constants/tpu_node.tmpl' -custom_diff: - - 'tpuNodeCustomizeDiff' -sweeper: - url_substitutions: - - zone: "us-central1-b" -examples: - - name: 'tpu_node_basic' - primary_resource_id: 'tpu' - vars: - node_name: 'test-tpu' - # resource is deprecated - exclude_test: true - - name: 'tpu_node_full' - primary_resource_id: 'tpu' - vars: - node_name: 'test-tpu' - global_address_name: 'my-global-address' - network_name: 'tpu-node-network' - exclude_test: true - - name: 'tpu_node_full_test' - primary_resource_id: 'tpu' - vars: - node_name: 'test-tpu' - network_name: 'tpu-node-network' - test_vars_overrides: - 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "vpc-network-1")' - # resource is deprecated - exclude_test: true - exclude_docs: true -parameters: - # TODO: resourceref? - - name: 'zone' - type: String - description: | - The GCP location for the TPU. If it is not provided, the provider zone is used. - url_param_only: true - immutable: true - default_from_api: true -properties: - - name: 'name' - type: String - description: | - The immutable name of the TPU. - required: true - immutable: true - custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.tmpl' - - name: 'description' - type: String - description: | - The user-supplied description of the TPU. Maximum of 512 characters. - immutable: true - - name: 'acceleratorType' - type: String - description: | - The type of hardware accelerators associated with this node. - required: true - immutable: true - - name: 'tensorflowVersion' - type: String - description: | - The version of Tensorflow running in the Node. - required: true - update_url: 'projects/{{project}}/locations/{{zone}}/nodes/{{name}}:reimage' - update_verb: 'POST' - - name: 'network' - type: String - description: | - The name of a network to peer the TPU node to. It must be a - preexisting Compute Engine network inside of the project on which - this API has been activated. If none is provided, "default" will be - used. - immutable: true - default_from_api: true - diff_suppress_func: 'tpgresource.CompareSelfLinkOrResourceName' - - name: 'cidrBlock' - type: String - description: | - The CIDR block that the TPU node will use when selecting an IP - address. This CIDR block must be a /29 block; the Compute Engine - networks API forbids a smaller block, and using a larger block would - be wasteful (a node can only consume one IP address). - - Errors will occur if the CIDR block has already been used for a - currently existing TPU node, the CIDR block conflicts with any - subnetworks in the user's provided network, or the provided network - is peered with another network that is using that CIDR block. 
- immutable: true - default_from_api: true - conflicts: - - use_service_networking - - name: 'serviceAccount' - type: String - description: | - The service account used to run the tensor flow services within the - node. To share resources, including Google Cloud Storage data, with - the Tensorflow job running in the Node, this account must have - permissions to that data. - output: true - - name: 'useServiceNetworking' - type: Boolean - description: | - Whether the VPC peering for the node is set up through Service Networking API. - The VPC Peering should be set up before provisioning the node. If this field is set, - cidr_block field should not be specified. If the network that you want to peer the - TPU Node to is a Shared VPC network, the node must be created with this this field enabled. - immutable: true - conflicts: - - cidr_block - default_value: false - - name: 'schedulingConfig' - type: NestedObject - description: | - Sets the scheduling options for this TPU instance. - immutable: true - diff_suppress_func: 'compareTpuNodeSchedulingConfig' - properties: - - name: 'preemptible' - type: Boolean - description: | - Defines whether the TPU instance is preemptible. - required: true - diff_suppress_func: 'compareTpuNodeSchedulingConfig' - - name: 'networkEndpoints' - type: Array - description: | - The network endpoints where TPU workers can be accessed and sent work. - It is recommended that Tensorflow clients of the node first reach out - to the first (index 0) entry. - output: true - item_type: - type: NestedObject - properties: - - name: 'ipAddress' - type: String - description: | - The IP address of this network endpoint. - output: true - - name: 'port' - type: Integer - description: | - The port of this network endpoint. - output: true - - name: 'labels' - type: KeyValueLabels - description: Resource labels to represent user provided metadata. - immutable: true diff --git a/mmv1/products/tpu/product.yaml b/mmv1/products/tpu/product.yaml deleted file mode 100644 index a9302af0cd89..000000000000 --- a/mmv1/products/tpu/product.yaml +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright 2024 Google Inc. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: 'TPU' -display_name: 'Cloud TPU' -versions: - - name: 'ga' - base_url: 'https://tpu.googleapis.com/v1/' -scopes: - - 'https://www.googleapis.com/auth/cloud-platform' -async: - type: "OpAsync" - operation: - base_url: '{{op_id}}' - result: - resource_inside_response: true diff --git a/mmv1/templates/terraform/examples/tpu_node_basic.tf.tmpl b/mmv1/templates/terraform/examples/tpu_node_basic.tf.tmpl deleted file mode 100644 index 9f516fd9ad65..000000000000 --- a/mmv1/templates/terraform/examples/tpu_node_basic.tf.tmpl +++ /dev/null @@ -1,15 +0,0 @@ -{{/* WARNING: cidr_block must not overlap with other existing TPU blocks - Make sure if you change this value that it does not overlap with the - autogenerated examples. 
*/ -}} - -data "google_tpu_tensorflow_versions" "available" { -} - -resource "google_tpu_node" "{{$.PrimaryResourceId}}" { - name = "{{index $.Vars "node_name"}}" - zone = "us-central1-b" - - accelerator_type = "v3-8" - tensorflow_version = data.google_tpu_tensorflow_versions.available.versions[0] - cidr_block = "10.2.0.0/29" -} diff --git a/mmv1/templates/terraform/examples/tpu_node_full.tf.tmpl b/mmv1/templates/terraform/examples/tpu_node_full.tf.tmpl deleted file mode 100644 index af0c39fd7ef5..000000000000 --- a/mmv1/templates/terraform/examples/tpu_node_full.tf.tmpl +++ /dev/null @@ -1,50 +0,0 @@ -data "google_tpu_tensorflow_versions" "available" { -} - -{{/* WARNING: cidr_block must not overlap with other existing TPU blocks - Make sure if you change this value that it does not overlap with the - autogenerated examples. */ -}} - -resource "google_tpu_node" "{{$.PrimaryResourceId}}" { - name = "{{index $.Vars "node_name"}}" - zone = "us-central1-b" - - accelerator_type = "v3-8" - - tensorflow_version = data.google_tpu_tensorflow_versions.available.versions[0] - - description = "Terraform Google Provider test TPU" - use_service_networking = true -{{/* We previously used a separate network resource here, but TPUs only allow using 50 - different network names, ever. This caused our tests to start failing, so just - use the default network in order to still demonstrate using as many fields as - possible on the resource. */ -}} - - network = google_service_networking_connection.private_service_connection.network - - labels = { - foo = "bar" - } - - scheduling_config { - preemptible = true - } -} - -resource "google_compute_network" "network" { - name = "{{index $.Vars "network_name"}}" -} - -resource "google_compute_global_address" "service_range" { - name = "{{index $.Vars "global_address_name"}}" - purpose = "VPC_PEERING" - address_type = "INTERNAL" - prefix_length = 16 - network = google_compute_network.network.id -} - -resource "google_service_networking_connection" "private_service_connection" { - network = google_compute_network.network.id - service = "servicenetworking.googleapis.com" - reserved_peering_ranges = [google_compute_global_address.service_range.name] -} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/tpu_node_full_test.tf.tmpl b/mmv1/templates/terraform/examples/tpu_node_full_test.tf.tmpl deleted file mode 100644 index 01ce73d6c2cf..000000000000 --- a/mmv1/templates/terraform/examples/tpu_node_full_test.tf.tmpl +++ /dev/null @@ -1,29 +0,0 @@ -{{/* WARNING: cidr_block must not overlap with other existing TPU blocks - Make sure if you change this value that it does not overlap with the - autogenerated examples. 
*/ -}} - -resource "google_tpu_node" "{{$.PrimaryResourceId}}" { - name = "{{index $.Vars "node_name"}}" - zone = "us-central1-b" - - accelerator_type = "v3-8" - - tensorflow_version = "2.10.0" - - description = "Terraform Google Provider test TPU" - use_service_networking = true - - network = data.google_compute_network.network.id - - labels = { - foo = "bar" - } - - scheduling_config { - preemptible = true - } -} - -data "google_compute_network" "network" { - name = "{{index $.Vars "network_name"}}" -} diff --git a/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt b/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt index e16d98f34deb..fac5c46661cf 100644 --- a/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt +++ b/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt @@ -811,11 +811,6 @@ var ServicesListBeta = mapOf( "displayName" to "Tags", "path" to "./google-beta/services/tags" ), - "tpu" to mapOf( - "name" to "tpu", - "displayName" to "Tpu", - "path" to "./google-beta/services/tpu" - ), "tpuv2" to mapOf( "name" to "tpuv2", "displayName" to "Tpuv2", diff --git a/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt b/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt index 2c32d877bc36..175f9e8a5d15 100644 --- a/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt +++ b/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt @@ -806,11 +806,6 @@ var ServicesListGa = mapOf( "displayName" to "Tags", "path" to "./google/services/tags" ), - "tpu" to mapOf( - "name" to "tpu", - "displayName" to "Tpu", - "path" to "./google/services/tpu" - ), "tpuv2" to mapOf( "name" to "tpuv2", "displayName" to "Tpuv2", diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index b22f4fac9bd3..5eb91b1cfd3a 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -264,7 +264,6 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_tags_tag_keys": tags.DataSourceGoogleTagsTagKeys(), "google_tags_tag_value": tags.DataSourceGoogleTagsTagValue(), "google_tags_tag_values": tags.DataSourceGoogleTagsTagValues(), - "google_tpu_tensorflow_versions": tpu.DataSourceTpuTensorflowVersions(), {{- if ne $.TargetVersionName "ga" }} "google_tpu_v2_runtime_versions": tpuv2.DataSourceTpuV2RuntimeVersions(), "google_tpu_v2_accelerator_types": tpuv2.DataSourceTpuV2AcceleratorTypes(), diff --git a/mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions.go b/mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions.go deleted file mode 100644 index f17ddc36d101..000000000000 --- a/mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions.go +++ /dev/null @@ -1,93 +0,0 @@ -package tpu - -import ( - "fmt" - "log" - "sort" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -func DataSourceTpuTensorflowVersions() *schema.Resource { - return &schema.Resource{ - Read: dataSourceTpuTensorFlowVersionsRead, - Schema: map[string]*schema.Schema{ - "project": { - Type: schema.TypeString, - Optional: true, - Computed: true, - }, - "zone": { - Type: 
schema.TypeString, - Optional: true, - Computed: true, - }, - "versions": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Schema{Type: schema.TypeString}, - }, - }, - } -} - -func dataSourceTpuTensorFlowVersionsRead(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return err - } - - project, err := tpgresource.GetProject(d, config) - if err != nil { - return err - } - - zone, err := tpgresource.GetZone(d, config) - if err != nil { - return err - } - - url, err := tpgresource.ReplaceVars(d, config, "{{TPUBasePath}}projects/{{project}}/locations/{{zone}}/tensorflowVersions") - if err != nil { - return err - } - - versionsRaw, err := tpgresource.PaginatedListRequest(project, url, userAgent, config, flattenTpuTensorflowVersions) - if err != nil { - return fmt.Errorf("Error listing TPU Tensorflow versions: %s", err) - } - - versions := make([]string, len(versionsRaw)) - for i, ver := range versionsRaw { - versions[i] = ver.(string) - } - sort.Strings(versions) - - log.Printf("[DEBUG] Received Google TPU Tensorflow Versions: %q", versions) - - if err := d.Set("versions", versions); err != nil { - return fmt.Errorf("Error setting versions: %s", err) - } - if err := d.Set("zone", zone); err != nil { - return fmt.Errorf("Error setting zone: %s", err) - } - if err := d.Set("project", project); err != nil { - return fmt.Errorf("Error setting project: %s", err) - } - d.SetId(fmt.Sprintf("projects/%s/zones/%s", project, zone)) - - return nil -} - -func flattenTpuTensorflowVersions(resp map[string]interface{}) []interface{} { - verObjList := resp["tensorflowVersions"].([]interface{}) - versions := make([]interface{}, len(verObjList)) - for i, v := range verObjList { - verObj := v.(map[string]interface{}) - versions[i] = verObj["version"] - } - return versions -} diff --git a/mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions_test.go b/mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions_test.go deleted file mode 100644 index 78661db237e8..000000000000 --- a/mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions_test.go +++ /dev/null @@ -1,68 +0,0 @@ -package tpu_test - -import ( - "errors" - "fmt" - "strconv" - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/terraform" - "github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestAccTPUTensorflowVersions_basic(t *testing.T) { - t.Parallel() - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccTpuTensorFlowVersionsConfig, - Check: resource.ComposeTestCheckFunc( - testAccCheckGoogleTpuTensorflowVersions("data.google_tpu_tensorflow_versions.available"), - ), - }, - }, - }) -} - -func testAccCheckGoogleTpuTensorflowVersions(n string) resource.TestCheckFunc { - return func(s *terraform.State) error { - rs, ok := s.RootModule().Resources[n] - if !ok { - return fmt.Errorf("Can't find TPU Tensorflow versions data source: %s", n) - } - - if rs.Primary.ID == "" { - return errors.New("data source id not set") - } - - count, ok := rs.Primary.Attributes["versions.#"] - if !ok { - return errors.New("can't find 'versions' attribute") - } - - cnt, err := strconv.Atoi(count) - if err != nil { - return 
errors.New("failed to read number of version") - } - if cnt < 2 { - return fmt.Errorf("expected at least 2 versions, received %d, this is most likely a bug", cnt) - } - - for i := 0; i < cnt; i++ { - idx := fmt.Sprintf("versions.%d", i) - _, ok := rs.Primary.Attributes[idx] - if !ok { - return fmt.Errorf("expected %q, version not found", idx) - } - } - return nil - } -} - -var testAccTpuTensorFlowVersionsConfig = ` -data "google_tpu_tensorflow_versions" "available" {} -` diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 3a7d56e0ed4f..b8bdc4306444 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -182,10 +182,14 @@ Remove `template.containers.depends_on` from your configuration after upgrade. ### `enable_secure_private_service_connect` is removed as it is not available in the GA version of the API, only in the beta version. +## Resource: `google_tpu_node` is now removed + +`google_tpu_node` is removed in favor of `google_tpu_v2_vm`. For moving from TPU Node to TPU VM architecture, see https://cloud.google.com/tpu/docs/system-architecture-tpu-vm#from-tpu-node-to-tpu-vm. + ## Resource: `google_project_service` ### `disable_on_destroy` now defaults to `false` The default value for `disable_on_destroy` has been changed to `false`. The previous default (`true`) created a risk of unintended service disruptions, as destroying a single `google_project_service` resource would disable the API for the entire project. -Now, destroying the resource will only remove it from Terraform's state and leave the service enabled. To disable a service when the resource is destroyed, you must now make an explicit decision by setting `disable_on_destroy = true`. \ No newline at end of file +Now, destroying the resource will only remove it from Terraform's state and leave the service enabled. To disable a service when the resource is destroyed, you must now make an explicit decision by setting `disable_on_destroy = true`. From 62ce3fa2d3641428b09c8ffc1e57673e251c0485 Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Tue, 12 Aug 2025 12:31:10 -0700 Subject: [PATCH 017/201] resolve sync conflicts --- mmv1/products/compute/Subnetwork.yaml | 10 -- ... 
=> resource_storage_transfer_job.go.tmpl} | 0 .../data_source_tpu_tensorflow_versions.go | 96 ------------------- 3 files changed, 106 deletions(-) rename mmv1/third_party/terraform/services/storagetransfer/{resource_storage_transfer_job.go => resource_storage_transfer_job.go.tmpl} (100%) delete mode 100644 mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions.go diff --git a/mmv1/products/compute/Subnetwork.yaml b/mmv1/products/compute/Subnetwork.yaml index bcbd5fb2fe0d..f3ebd6ada0c9 100644 --- a/mmv1/products/compute/Subnetwork.yaml +++ b/mmv1/products/compute/Subnetwork.yaml @@ -359,7 +359,6 @@ properties: fingerprint_name: 'fingerprint' custom_flatten: 'templates/terraform/custom_flatten/subnetwork_log_config.go.tmpl' custom_expand: 'templates/terraform/custom_expand/subnetwork_log_config.go.tmpl' - diff_suppress_func: 'subnetworkLogConfigDiffSuppress' properties: - name: 'aggregationInterval' type: Enum @@ -511,15 +510,6 @@ properties: update_verb: 'PATCH' fingerprint_name: 'fingerprint' is_missing_in_cai: true - - name: 'enableFlowLogs' - type: Boolean - description: | - Whether to enable flow logging for this subnetwork. If this field is not explicitly set, - it will not appear in get listings. If not set the default behavior is determined by the - org policy, if there is no org policy specified, then it will default to disabled. - This field isn't supported if the subnet purpose field is set to REGIONAL_MANAGED_PROXY. - default_from_api: true - deprecation_message: 'This field is being removed in favor of log_config. If log_config is present, flow logs are enabled.' - name: 'state' type: Enum description: | diff --git a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go.tmpl similarity index 100% rename from mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go rename to mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go.tmpl diff --git a/mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions.go b/mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions.go deleted file mode 100644 index 96e983f57ac1..000000000000 --- a/mmv1/third_party/terraform/services/tpu/data_source_tpu_tensorflow_versions.go +++ /dev/null @@ -1,96 +0,0 @@ -package tpu - -import ( - "fmt" - "log" - "sort" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -func DataSourceTpuTensorflowVersions() *schema.Resource { - return &schema.Resource{ - Read: dataSourceTpuTensorFlowVersionsRead, - Schema: map[string]*schema.Schema{ - "project": { - Type: schema.TypeString, - Optional: true, - Computed: true, - }, - "zone": { - Type: schema.TypeString, - Optional: true, - Computed: true, - }, - "versions": { - Type: schema.TypeList, - Computed: true, - Elem: &schema.Schema{Type: schema.TypeString}, - }, - }, - DeprecationMessage: "`google_tpu_node` is deprecated and will be removed in a future major release. " + - "Use `google_tpu_v2_vm` instead. 
" + - "For moving from TPU Node to TPU VM architecture, see https://cloud.google.com/tpu/docs/system-architecture-tpu-vm#from-tpu-node-to-tpu-vm.", - } -} - -func dataSourceTpuTensorFlowVersionsRead(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return err - } - - project, err := tpgresource.GetProject(d, config) - if err != nil { - return err - } - - zone, err := tpgresource.GetZone(d, config) - if err != nil { - return err - } - - url, err := tpgresource.ReplaceVars(d, config, "{{TPUBasePath}}projects/{{project}}/locations/{{zone}}/tensorflowVersions") - if err != nil { - return err - } - - versionsRaw, err := tpgresource.PaginatedListRequest(project, url, userAgent, config, flattenTpuTensorflowVersions) - if err != nil { - return fmt.Errorf("Error listing TPU Tensorflow versions: %s", err) - } - - versions := make([]string, len(versionsRaw)) - for i, ver := range versionsRaw { - versions[i] = ver.(string) - } - sort.Strings(versions) - - log.Printf("[DEBUG] Received Google TPU Tensorflow Versions: %q", versions) - - if err := d.Set("versions", versions); err != nil { - return fmt.Errorf("Error setting versions: %s", err) - } - if err := d.Set("zone", zone); err != nil { - return fmt.Errorf("Error setting zone: %s", err) - } - if err := d.Set("project", project); err != nil { - return fmt.Errorf("Error setting project: %s", err) - } - d.SetId(fmt.Sprintf("projects/%s/zones/%s", project, zone)) - - return nil -} - -func flattenTpuTensorflowVersions(resp map[string]interface{}) []interface{} { - verObjList := resp["tensorflowVersions"].([]interface{}) - versions := make([]interface{}, len(verObjList)) - for i, v := range verObjList { - verObj := v.(map[string]interface{}) - versions[i] = verObj["version"] - } - return versions -} From 62abbd8dea45ae24fdf42b55d7e3364d4a2b991c Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Tue, 12 Aug 2025 13:37:53 -0700 Subject: [PATCH 018/201] remove default vals and custom code for `publicRepository` in artifact registry repository (#14795) --- mmv1/products/artifactregistry/Repository.yaml | 13 ------------- ...artifact_registry_remote_repository.go.tmpl | 18 ------------------ .../guides/version_7_upgrade.html.markdown | 6 ++++++ 3 files changed, 6 insertions(+), 31 deletions(-) delete mode 100644 mmv1/templates/terraform/pre_create/artifact_registry_remote_repository.go.tmpl diff --git a/mmv1/products/artifactregistry/Repository.yaml b/mmv1/products/artifactregistry/Repository.yaml index 3b9ffebd2bff..1241e4cac2e8 100644 --- a/mmv1/products/artifactregistry/Repository.yaml +++ b/mmv1/products/artifactregistry/Repository.yaml @@ -53,7 +53,6 @@ iam_policy: custom_code: constants: 'templates/terraform/constants/artifact_registry_repository.go.tmpl' encoder: 'templates/terraform/encoders/location_from_region.go.tmpl' - pre_create: 'templates/terraform/pre_create/artifact_registry_remote_repository.go.tmpl' sweeper: url_substitutions: - region: "us-central1" @@ -517,9 +516,6 @@ properties: immutable: true conflicts: - remoteRepositoryConfig.0.docker_repository.0.custom_repository - custom_flatten: 'templates/terraform/custom_flatten/default_if_empty.tmpl' - # Eventually lets delete default_value and custom_flatten in a major release - default_value: "DOCKER_HUB" enum_values: - 'DOCKER_HUB' - name: 'customRepository' @@ -556,9 +552,6 @@ properties: immutable: true conflicts: - 
remoteRepositoryConfig.0.maven_repository.0.custom_repository - custom_flatten: 'templates/terraform/custom_flatten/default_if_empty.tmpl' - # Eventually lets delete default_value and custom_flatten in a major release - default_value: "MAVEN_CENTRAL" enum_values: - 'MAVEN_CENTRAL' - name: 'customRepository' @@ -595,9 +588,6 @@ properties: immutable: true conflicts: - remoteRepositoryConfig.0.npm_repository.0.custom_repository - custom_flatten: 'templates/terraform/custom_flatten/default_if_empty.tmpl' - # Eventually lets delete default_value and custom_flatten in a major release - default_value: "NPMJS" enum_values: - 'NPMJS' - name: 'customRepository' @@ -634,9 +624,6 @@ properties: immutable: true conflicts: - remoteRepositoryConfig.0.python_repository.0.custom_repository - custom_flatten: 'templates/terraform/custom_flatten/default_if_empty.tmpl' - # Eventually lets delete default_value and custom_flatten in a major release - default_value: "PYPI" enum_values: - 'PYPI' - name: 'customRepository' diff --git a/mmv1/templates/terraform/pre_create/artifact_registry_remote_repository.go.tmpl b/mmv1/templates/terraform/pre_create/artifact_registry_remote_repository.go.tmpl deleted file mode 100644 index bad5d0a9599b..000000000000 --- a/mmv1/templates/terraform/pre_create/artifact_registry_remote_repository.go.tmpl +++ /dev/null @@ -1,18 +0,0 @@ -// This file should be deleted in the next major terraform release, alongside -// the default values for 'publicRepository'. - -// deletePublicRepoIfCustom deletes the publicRepository key for a given -// pkg type from the remote repository config if customRepository is set. -deletePublicRepoIfCustom := func(pkgType string) { - if _, ok := d.GetOk(fmt.Sprintf("remote_repository_config.0.%s_repository.0.custom_repository", pkgType)); ok { - rrcfg := obj["remoteRepositoryConfig"].(map[string]interface{}) - repo := rrcfg[fmt.Sprintf("%sRepository", pkgType)].(map[string]interface{}) - delete(repo, "publicRepository") - } -} - -// Call above func for all pkg types that support custom remote repos. -deletePublicRepoIfCustom("docker") -deletePublicRepoIfCustom("maven") -deletePublicRepoIfCustom("npm") -deletePublicRepoIfCustom("python") \ No newline at end of file diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index b8bdc4306444..128b9253d649 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -102,6 +102,12 @@ Description of the change and how users should adjust their configuration (if ne ## Resources +## Resource: `google_artifact_registry_repository` + +### `public_repository` fields have had their default values removed. + +`public_repository` fields have had their default values removed. If your state has been reliant on them, they will need to be manually included into your configuration now. 
+ ## Resource: `google_bigtable_table_iam_binding` ### `instance` is now removed From 36fc5095ae85337fd60296e248e1ec624e41fc38 Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Tue, 12 Aug 2025 13:57:08 -0700 Subject: [PATCH 019/201] `google_vertex_ai_index` fields made required (#14786) --- mmv1/products/vertexai/Index.yaml | 4 +++- .../website/docs/guides/version_7_upgrade.html.markdown | 4 ++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/mmv1/products/vertexai/Index.yaml b/mmv1/products/vertexai/Index.yaml index 5ee3e4fa1170..09e66602761e 100644 --- a/mmv1/products/vertexai/Index.yaml +++ b/mmv1/products/vertexai/Index.yaml @@ -85,6 +85,7 @@ properties: - name: 'metadata' type: NestedObject description: An additional information about the Index + required: true properties: - name: 'contentsDeltaUri' type: String @@ -106,6 +107,7 @@ properties: - name: 'config' type: NestedObject description: The configuration of the Matching Engine Index. + required: true immutable: true properties: - name: 'dimensions' @@ -150,7 +152,7 @@ properties: type: NestedObject description: The configuration with regard to the algorithms used for efficient - search. + search. This field may be required based on your configuration. properties: - name: 'treeAhConfig' type: NestedObject diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 128b9253d649..de5158a684dc 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -188,6 +188,10 @@ Remove `template.containers.depends_on` from your configuration after upgrade. ### `enable_secure_private_service_connect` is removed as it is not available in the GA version of the API, only in the beta version. +## Resource: `google_vertex_ai_index` + +### `metadata`, and `metadata.config` are now required. Resource creation would fail without these attributes already, so no change is necessary to existing configurations. + ## Resource: `google_tpu_node` is now removed `google_tpu_node` is removed in favor of `google_tpu_v2_vm`. For moving from TPU Node to TPU VM architecture, see https://cloud.google.com/tpu/docs/system-architecture-tpu-vm#from-tpu-node-to-tpu-vm. From 856b9af11764d8b3617159dab3ce192142862de9 Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Thu, 14 Aug 2025 12:46:00 -0700 Subject: [PATCH 020/201] remove google_beyondcorp_application (#14798) --- mmv1/products/beyondcorp/Application.yaml | 152 ------------------ .../beyondcorp_application_basic.tf.tmpl | 13 -- .../beyondcorp_application_vpc.tf.tmpl | 23 --- .../resource_beyondcorp_application_test.go | 85 ---------- .../guides/version_7_upgrade.html.markdown | 6 + 5 files changed, 6 insertions(+), 273 deletions(-) delete mode 100644 mmv1/products/beyondcorp/Application.yaml delete mode 100644 mmv1/templates/terraform/examples/beyondcorp_application_basic.tf.tmpl delete mode 100644 mmv1/templates/terraform/examples/beyondcorp_application_vpc.tf.tmpl delete mode 100644 mmv1/third_party/terraform/services/beyondcorp/resource_beyondcorp_application_test.go diff --git a/mmv1/products/beyondcorp/Application.yaml b/mmv1/products/beyondcorp/Application.yaml deleted file mode 100644 index a4d4e862bbaf..000000000000 --- a/mmv1/products/beyondcorp/Application.yaml +++ /dev/null @@ -1,152 +0,0 @@ -# Copyright 2024 Google Inc. 
-# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - ---- -name: Application -deprecation_message: '`google_beyondcorp_application` is deprecated. Use `google_beyondcorp_security_gateway_application` instead.' -description: Specifies application endpoint(s) to protect behind a Security Gateway. -base_url: projects/{{project}}/locations/global/securityGateways/{{security_gateways_id}}/applications -update_mask: true -self_link: projects/{{project}}/locations/global/securityGateways/{{security_gateways_id}}/applications/{{application_id}} -create_url: projects/{{project}}/locations/global/securityGateways/{{security_gateways_id}}/applications?applicationId={{application_id}} -update_verb: PATCH -id_format: projects/{{project}}/locations/global/securityGateways/{{security_gateways_id}}/applications/{{application_id}} -import_format: - - projects/{{project}}/locations/global/securityGateways/{{security_gateways_id}}/applications/{{application_id}} -iam_policy: - method_name_separator: ':' - iam_conditions_request_type: 'QUERY_PARAM_NESTED' - allowed_iam_role: 'roles/beyondcorp.securityGatewayUser' - parent_resource_attribute: 'application_id' - import_format: - - 'projects/{{project}}/locations/global/securityGateways/{{security_gateways_id}}/applications/{{application_id}}' - - '{{application_id}}' -examples: - - name: beyondcorp_application_basic - primary_resource_id: example - primary_resource_name: 'fmt.Sprintf("default%s", context["random_suffix"]), fmt.Sprintf("google%s", context["random_suffix"])' - vars: - security_gateway_name: default - application_name: google - - name: beyondcorp_application_vpc - primary_resource_id: example - primary_resource_name: 'fmt.Sprintf("default%s", context["random_suffix"]), fmt.Sprintf("google%s", context["random_suffix"])' - vars: - security_gateway_name: default - application_name: my-vm-service -autogen_async: true -async: - operation: - timeouts: - insert_minutes: 20 - update_minutes: 20 - delete_minutes: 20 - base_url: '{{op_id}}' - actions: - - create - - delete - - update - type: OpAsync - result: - resource_inside_response: true - include_project: false -autogen_status: QXBwbGljYXRpb24= -parameters: - - name: securityGatewaysId - type: String - description: Part of `parent`. See documentation of `projectsId`. - immutable: true - url_param_only: true - required: true - - name: applicationId - type: String - description: |- - Optional. User-settable Application resource ID. - * Must start with a letter. - * Must contain between 4-63 characters from `/a-z-/`. - * Must end with a number or letter. - immutable: true - url_param_only: true - required: true -properties: - - name: createTime - type: String - description: Output only. Timestamp when the resource was created. - output: true - - name: displayName - type: String - description: |- - Optional. An arbitrary user-provided name for the Application resource. - Cannot exceed 64 characters. - - name: endpointMatchers - type: Array - description: |- - Required. 
Endpoint matchers associated with an application. - A combination of hostname and ports as endpoint matcher is used to match - the application. - Match conditions for OR logic. - An array of match conditions to allow for multiple matching criteria. - The rule is considered a match if one the conditions are met. - The conditions can be one of the following combination - (Hostname), (Hostname & Ports) - - EXAMPLES: - Hostname - ("*.abc.com"), ("xyz.abc.com") - Hostname and Ports - ("abc.com" and "22"), ("abc.com" and "22,33") etc - required: true - item_type: - type: NestedObject - properties: - - name: hostname - type: String - description: Required. Hostname of the application. - required: true - - name: ports - type: Array - description: Optional. Ports of the application. - item_type: - type: Integer - - name: upstreams - type: Array - description: Optional. List of which upstream resource(s) to forward traffic to. - item_type: - type: NestedObject - properties: - - name: egressPolicy - type: NestedObject - description: Optional. Routing policy information. - properties: - - name: regions - type: Array - description: Required. List of regions where the application sends traffic to. - required: true - item_type: - type: String - - name: network - type: NestedObject - description: Network to forward traffic to. - properties: - - name: name - type: string - description: |- - Required. Network name is of the format: - `projects/{project}/global/networks/{network}` - required: true - - name: name - type: String - description: Identifier. Name of the resource. - output: true - - name: updateTime - type: String - description: Output only. Timestamp when the resource was last modified. - output: true diff --git a/mmv1/templates/terraform/examples/beyondcorp_application_basic.tf.tmpl b/mmv1/templates/terraform/examples/beyondcorp_application_basic.tf.tmpl deleted file mode 100644 index 264aadfd924d..000000000000 --- a/mmv1/templates/terraform/examples/beyondcorp_application_basic.tf.tmpl +++ /dev/null @@ -1,13 +0,0 @@ -resource "google_beyondcorp_security_gateway" "default" { - security_gateway_id = "{{index $.Vars "security_gateway_name"}}" - display_name = "My Security Gateway resource" - hubs { region = "us-central1" } -} - -resource "google_beyondcorp_application" "{{$.PrimaryResourceId}}" { - security_gateways_id = google_beyondcorp_security_gateway.default.security_gateway_id - application_id = "{{index $.Vars "application_name"}}" - endpoint_matchers { - hostname = "google.com" - } -} diff --git a/mmv1/templates/terraform/examples/beyondcorp_application_vpc.tf.tmpl b/mmv1/templates/terraform/examples/beyondcorp_application_vpc.tf.tmpl deleted file mode 100644 index a54a36989bf5..000000000000 --- a/mmv1/templates/terraform/examples/beyondcorp_application_vpc.tf.tmpl +++ /dev/null @@ -1,23 +0,0 @@ -data "google_project" "project" {} - -resource "google_beyondcorp_security_gateway" "default" { - security_gateway_id = "{{index $.Vars "security_gateway_name"}}" - display_name = "My Security Gateway resource" - hubs { region = "us-central1" } -} - -resource "google_beyondcorp_application" "{{$.PrimaryResourceId}}" { - security_gateways_id = google_beyondcorp_security_gateway.default.security_gateway_id - application_id = "{{index $.Vars "application_name"}}" - endpoint_matchers { - hostname = "my-vm-service.com" - } - upstreams { - egress_policy { - regions = ["us-central1"] - } - network { - name = "projects/${data.google_project.project.project_id}/global/networks/default" - } - } -} \ No 
newline at end of file diff --git a/mmv1/third_party/terraform/services/beyondcorp/resource_beyondcorp_application_test.go b/mmv1/third_party/terraform/services/beyondcorp/resource_beyondcorp_application_test.go deleted file mode 100644 index 28086bc32df4..000000000000 --- a/mmv1/third_party/terraform/services/beyondcorp/resource_beyondcorp_application_test.go +++ /dev/null @@ -1,85 +0,0 @@ -package beyondcorp_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" - - "github.com/hashicorp/terraform-provider-google/google/acctest" -) - -func TestAccBeyondcorpApplication_beyondcorpSecurityGatewayApplicationBasicExample_update(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccBeyondcorpApplication_beyondcorpSecurityGatewayApplicationBasicExample_basic(context), - }, - { - ResourceName: "google_beyondcorp_application.example", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"application_id", "security_gateways_id"}, - }, - { - Config: testAccBeyondcorpApplication_beyondcorpSecurityGatewayApplicationBasicExample_update(context), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("google_beyondcorp_application.example", plancheck.ResourceActionUpdate), - }, - }, - }, - { - ResourceName: "google_beyondcorp_application.example", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"application_id", "security_gateways_id"}, - }, - }, - }) -} - -func testAccBeyondcorpApplication_beyondcorpSecurityGatewayApplicationBasicExample_basic(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_beyondcorp_security_gateway" "default" { - security_gateway_id = "default%{random_suffix}" - display_name = "My Security Gateway resource" - hubs { region = "us-central1" } -} - -resource "google_beyondcorp_application" "example" { - security_gateways_id = google_beyondcorp_security_gateway.default.security_gateway_id - application_id = "google%{random_suffix}" - endpoint_matchers { - hostname = "google.com" - } -} -`, context) -} - -func testAccBeyondcorpApplication_beyondcorpSecurityGatewayApplicationBasicExample_update(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_beyondcorp_security_gateway" "default" { - security_gateway_id = "default%{random_suffix}" - display_name = "My Security Gateway resource" - hubs { region = "us-central1" } -} - -resource "google_beyondcorp_application" "example" { - security_gateways_id = google_beyondcorp_security_gateway.default.security_gateway_id - display_name = "Updated Name" - application_id = "google%{random_suffix}" - endpoint_matchers { - hostname = "google.com" - } -} -`, context) -} diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index de5158a684dc..b9048301b65d 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -102,6 +102,12 @@ Description of the 
change and how users should adjust their configuration (if ne ## Resources + +## Resource: `google_beyondcorp_application` is now removed + +`google_beyondcorp_application`, the associated IAM resources `google_beyondcorp_application_iam_binding`, `google_beyondcorp_application_iam_member`, and `google_beyondcorp_application_iam_policy`, and the `google_beyondcorp_application_iam_policy` datasource have been removed. +Use `google_beyondcorp_security_gateway_application` instead. + ## Resource: `google_artifact_registry_repository` ### `public_repository` fields have had their default values removed. From fb33ae4a7953140e3ac6904f0df46b65e65582df Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Thu, 14 Aug 2025 12:53:31 -0700 Subject: [PATCH 021/201] Remove `project` from `google_service_account_key` (#14784) --- .../data_source_google_service_account_key.go | 5 ----- .../website/docs/guides/version_7_upgrade.html.markdown | 6 ++++++ 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/mmv1/third_party/terraform/services/resourcemanager/data_source_google_service_account_key.go b/mmv1/third_party/terraform/services/resourcemanager/data_source_google_service_account_key.go index 6c5002ab40c0..e83244935d5e 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/data_source_google_service_account_key.go +++ b/mmv1/third_party/terraform/services/resourcemanager/data_source_google_service_account_key.go @@ -29,11 +29,6 @@ func DataSourceGoogleServiceAccountKey() *schema.Resource { Optional: true, ValidateFunc: validation.StringInSlice([]string{"TYPE_NONE", "TYPE_X509_PEM_FILE", "TYPE_RAW_PUBLIC_KEY"}, false), }, - "project": { - Type: schema.TypeString, - Optional: true, - Deprecated: "`project` is deprecated and will be removed in a future major release. This field is non-functional and can be removed from your configuration safely.", - }, "key_algorithm": { Type: schema.TypeString, Computed: true, diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index b9048301b65d..9bb435d9169c 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -100,6 +100,12 @@ Description of the change and how users should adjust their configuration (if ne Description of the change and how users should adjust their configuration (if needed). +## Datasource: `google_service_account_key` + +### `project` is now removed + +`project` has been removed. It can be safely removed from your configuration. 
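+
+For example, a minimal sketch of the data source after the upgrade needs only the key name; the `google_service_account_key` resource referenced below is assumed to exist elsewhere in your configuration:
+
+```hcl
+data "google_service_account_key" "example" {
+  name = google_service_account_key.example.name
+}
+```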
+ ## Resources From edca413400b25d377cf1fb09d157ddf6ecbee651 Mon Sep 17 00:00:00 2001 From: Cameron Thornton Date: Thu, 14 Aug 2025 16:33:07 -0500 Subject: [PATCH 022/201] fix bigtable Table IAM instance_name panic (#14864) --- .../services/bigtable/iam_bigtable_table.go | 32 +++++++++---------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/mmv1/third_party/terraform/services/bigtable/iam_bigtable_table.go b/mmv1/third_party/terraform/services/bigtable/iam_bigtable_table.go index 03c647546453..8afaf2e8d5ed 100644 --- a/mmv1/third_party/terraform/services/bigtable/iam_bigtable_table.go +++ b/mmv1/third_party/terraform/services/bigtable/iam_bigtable_table.go @@ -33,11 +33,11 @@ var IamBigtableTableSchema = map[string]*schema.Schema{ } type BigtableTableIamUpdater struct { - project string - instance string - table string - d tpgresource.TerraformResourceData - Config *transport_tpg.Config + project string + instanceName string + table string + d tpgresource.TerraformResourceData + Config *transport_tpg.Config } func NewBigtableTableUpdater(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (tpgiamresource.ResourceIamUpdater, error) { @@ -51,18 +51,18 @@ func NewBigtableTableUpdater(d tpgresource.TerraformResourceData, config *transp } return &BigtableTableIamUpdater{ - project: project, - instance: d.Get("instance").(string), - table: d.Get("table").(string), - d: d, - Config: config, + project: project, + instanceName: d.Get("instance_name").(string), + table: d.Get("table").(string), + d: d, + Config: config, }, nil } func BigtableTableIdParseFunc(d *schema.ResourceData, config *transport_tpg.Config) error { values := make(map[string]string) - m, err := tpgresource.GetImportIdQualifiers([]string{"projects/(?P[^/]+)/instances/(?P[^/]+)/tables/(?P[^/]+)"}, d, config, d.Id()) + m, err := tpgresource.GetImportIdQualifiers([]string{"projects/(?P[^/]+)/instances/(?P[^/]+)/tables/(?P
[^/]+)"}, d, config, d.Id()) if err != nil { return err } @@ -77,7 +77,7 @@ func BigtableTableIdParseFunc(d *schema.ResourceData, config *transport_tpg.Conf return fmt.Errorf("Error setting project: %s", err) } - if err := d.Set("instance", values["instance"]); err != nil { + if err := d.Set("instance_name", values["instance_name"]); err != nil { return fmt.Errorf("Error setting instance: %s", err) } @@ -86,7 +86,7 @@ func BigtableTableIdParseFunc(d *schema.ResourceData, config *transport_tpg.Conf } // Explicitly set the id so imported resources have the same ID format as non-imported ones. - d.SetId(fmt.Sprintf("projects/%s/instances/%s/tables/%s", project, values["instance"], values["table"])) + d.SetId(fmt.Sprintf("projects/%s/instances/%s/tables/%s", project, values["instance_name"], values["table"])) return nil } @@ -133,13 +133,13 @@ func (u *BigtableTableIamUpdater) SetResourceIamPolicy(policy *cloudresourcemana } func (u *BigtableTableIamUpdater) GetResourceId() string { - return fmt.Sprintf("projects/%s/instances/%s/tables/%s", u.project, u.instance, u.table) + return fmt.Sprintf("projects/%s/instances/%s/tables/%s", u.project, u.instanceName, u.table) } func (u *BigtableTableIamUpdater) GetMutexKey() string { - return fmt.Sprintf("iam-bigtable-instance-%s-%s-%s", u.project, u.instance, u.table) + return fmt.Sprintf("iam-bigtable-instance-%s-%s-%s", u.project, u.instanceName, u.table) } func (u *BigtableTableIamUpdater) DescribeResource() string { - return fmt.Sprintf("Bigtable Table %s/%s-%s", u.project, u.instance, u.table) + return fmt.Sprintf("Bigtable Table %s/%s-%s", u.project, u.instanceName, u.table) } From ffc76c405d104025c0ffb459936336c6a365fc28 Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Fri, 15 Aug 2025 09:54:54 -0700 Subject: [PATCH 023/201] Fix partial match regexes (#14866) Co-authored-by: Shuya Ma <87669292+shuyama1@users.noreply.github.com> --- .../terraform/custom_import/extract_taxonomy.go.tmpl | 2 +- .../custom_import/vertex_ai_tensorboard_import.go.tmpl | 8 ++++---- .../terraform/services/apigee/resource_apigee_api.go | 4 ++-- .../terraform/services/apigee/resource_apigee_flowhook.go | 4 ++-- .../resource_apigee_keystores_aliases_key_cert_file.go | 4 ++-- .../apigee/resource_apigee_keystores_aliases_pkcs12.go | 4 ++-- .../services/apigee/resource_apigee_sharedflow.go | 4 ++-- .../apigee/resource_apigee_sharedflow_deployment.go | 5 +++-- .../services/bigquery/resource_bigquery_table.go.tmpl | 6 +++--- .../bigtable/resource_bigtable_authorized_view.go | 6 +++--- .../services/bigtable/resource_bigtable_instance.go | 6 +++--- .../services/bigtable/resource_bigtable_table.go | 6 +++--- .../cloudfunctions/resource_cloudfunctions_function.go | 6 +++--- .../services/compute/resource_compute_instance.go.tmpl | 6 +++--- .../compute/resource_compute_instance_group.go.tmpl | 6 +++--- .../resource_compute_project_metadata_item.go.tmpl | 4 ++-- .../services/compute/resource_compute_target_pool.go.tmpl | 8 ++++---- .../terraform/services/dns/resource_dns_record_set.go | 6 +++--- .../osconfig/resource_os_config_os_policy_assignment.go | 6 +++--- .../resource_google_folder_organization_policy.go | 6 +++--- .../resource_google_project_iam_custom_role.go | 6 +++--- .../resource_google_project_organization_policy.go | 6 +++--- .../resourcemanager/resource_google_service_account.go | 6 +++--- .../services/sql/resource_sql_database_instance.go.tmpl | 6 +++--- .../website/docs/guides/version_7_upgrade.html.markdown | 4 ++++ tpgtools/ignored_handwritten/custom_import.go | 8 
++++---- 26 files changed, 74 insertions(+), 69 deletions(-) diff --git a/mmv1/templates/terraform/custom_import/extract_taxonomy.go.tmpl b/mmv1/templates/terraform/custom_import/extract_taxonomy.go.tmpl index c99cf546f939..17d7a76b1bfb 100644 --- a/mmv1/templates/terraform/custom_import/extract_taxonomy.go.tmpl +++ b/mmv1/templates/terraform/custom_import/extract_taxonomy.go.tmpl @@ -1,7 +1,7 @@ config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "(?Pprojects/[^/]+/locations/[^/]+/taxonomies/[^/]+)/policyTags/(?P.+)"}, d, config); err != nil { + "^(?Pprojects/[^/]+/locations/[^/]+/taxonomies/[^/]+)/policyTags/(?P.+)$"}, d, config); err != nil { return nil, err } diff --git a/mmv1/templates/terraform/custom_import/vertex_ai_tensorboard_import.go.tmpl b/mmv1/templates/terraform/custom_import/vertex_ai_tensorboard_import.go.tmpl index be6cd588a588..7d834ceeba51 100644 --- a/mmv1/templates/terraform/custom_import/vertex_ai_tensorboard_import.go.tmpl +++ b/mmv1/templates/terraform/custom_import/vertex_ai_tensorboard_import.go.tmpl @@ -1,9 +1,9 @@ config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/locations/(?P[^/]+)/tensorboards/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)", + "^projects/(?P[^/]+)/locations/(?P[^/]+)/tensorboards/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_api.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_api.go index 57375e59f2ae..2921e19e0caa 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_api.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_api.go @@ -324,8 +324,8 @@ func resourceApigeeApiDelete(d *schema.ResourceData, meta interface{}) error { func resourceApigeeApiImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "organizations/(?P[^/]+)/apis/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", + "^organizations/(?P[^/]+)/apis/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_flowhook.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_flowhook.go index 8a5d6ac30957..635838873cd0 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_flowhook.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_flowhook.go @@ -223,8 +223,8 @@ func resourceApigeeFlowhookDelete(d *schema.ResourceData, meta interface{}) erro func resourceApigeeFlowhookImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "organizations/(?P[^/]+)/environments/(?P[^/]+)/flowhooks/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "^organizations/(?P[^/]+)/environments/(?P[^/]+)/flowhooks/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_key_cert_file.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_key_cert_file.go index 48747a85d5b9..03d842bfc305 100644 --- 
a/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_key_cert_file.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_key_cert_file.go @@ -362,8 +362,8 @@ func resourceApigeeKeystoresAliasesKeyCertFileDelete(d *schema.ResourceData, met func resourceApigeeKeystoresAliasesKeyCertFileImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "organizations/(?P[^/]+)/environments/(?P[^/]+)/keystores/(?P[^/]+)/aliases/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "^organizations/(?P[^/]+)/environments/(?P[^/]+)/keystores/(?P[^/]+)/aliases/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_pkcs12.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_pkcs12.go index 12b1f85fc0ef..ef6a2655cf39 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_pkcs12.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_pkcs12.go @@ -299,8 +299,8 @@ func ResourceApigeeKeystoresAliasesPkcs12Delete(d *schema.ResourceData, meta int func ResourceApigeeKeystoresAliasesPkcs12Import(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "organizations/(?P[^/]+)/environments/(?P[^/]+)/keystores/(?P[^/]+)/aliases/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "^organizations/(?P[^/]+)/environments/(?P[^/]+)/keystores/(?P[^/]+)/aliases/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow.go index 4820b95768d7..3b0eba665a4c 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow.go @@ -324,8 +324,8 @@ func resourceApigeeSharedFlowDelete(d *schema.ResourceData, meta interface{}) er func resourceApigeeSharedFlowImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "organizations/(?P[^/]+)/sharedflows/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", + "^organizations/(?P[^/]+)/sharedflows/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow_deployment.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow_deployment.go index 7cd90e8676b0..68f1c91d9ac2 100644 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow_deployment.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_sharedflow_deployment.go @@ -228,8 +228,9 @@ func resourceApigeeSharedflowDeploymentDelete(d *schema.ResourceData, meta inter func resourceApigeeSharedflowDeploymentImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "organizations/(?P[^/]+)/environments/(?P[^/]+)/sharedflows/(?P[^/]+)/revisions/(?P[^/]+)", - 
"(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "^organizations/(?P[^/]+)/environments/(?P[^/]+)/sharedflows/(?P[^/]+)/revisions/(?P[^/]+)$", + "^organizations/(?P[^/]+)/environments/(?P[^/]+)/sharedflows/(?P[^/]+)/revisions/(?P[^/]+)/deployments$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl index dce185ebebf6..bebe08d56791 100644 --- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl +++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl @@ -3501,9 +3501,9 @@ func flattenSerDeInfo(si *bigquery.SerDeInfo) []map[string]interface{} { func resourceBigQueryTableImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/datasets/(?P[^/]+)/tables/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", + "^projects/(?P[^/]+)/datasets/(?P[^/]+)/tables/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_authorized_view.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_authorized_view.go index 4a26b6e82a8b..b34ff48e2388 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_authorized_view.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_authorized_view.go @@ -355,9 +355,9 @@ func resourceBigtableAuthorizedViewDestroy(d *schema.ResourceData, meta interfac func resourceBigtableAuthorizedViewImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/instances/(?P[^/]+)/tables/(?P[^/]+)/authorizedViews/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "^projects/(?P[^/]+)/instances/(?P[^/]+)/tables/(?P[^/]+)/authorizedViews/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance.go index 1487ce288b7e..c51d6b8c68ff 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_instance.go @@ -829,9 +829,9 @@ func resourceBigtableInstanceClusterReorderTypeListFunc(diff tpgresource.Terrafo func resourceBigtableInstanceImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/instances/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)", + "^projects/(?P[^/]+)/instances/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table.go index 66df55fb8b2c..91cddb12b30d 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table.go +++ 
b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_table.go @@ -686,9 +686,9 @@ func FlattenColumnFamily(families []bigtable.FamilyInfo) ([]map[string]interface func resourceBigtableTableImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/instances/(?P[^/]+)/tables/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", + "^projects/(?P[^/]+)/instances/(?P[^/]+)/tables/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function.go b/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function.go index 5fbab6b22280..739b37d86a47 100644 --- a/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function.go +++ b/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function.go @@ -65,9 +65,9 @@ func (s *CloudFunctionId) locationId() string { func parseCloudFunctionId(d *schema.ResourceData, config *transport_tpg.Config) (*CloudFunctionId, error) { if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/locations/(?P[^/]+)/functions/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)", + "^projects/(?P[^/]+)/locations/(?P[^/]+)/functions/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl index 46ea496e1388..718a857dab8d 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl @@ -3436,9 +3436,9 @@ func resourceComputeInstanceDelete(d *schema.ResourceData, meta interface{}) err func resourceComputeInstanceImportState(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/zones/(?P[^/]+)/instances/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)", + "^projects/(?P[^/]+)/zones/(?P[^/]+)/instances/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_group.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group.go.tmpl index b5782de796cb..af13d60d0d6b 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_group.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group.go.tmpl @@ -455,9 +455,9 @@ func resourceComputeInstanceGroupDelete(d *schema.ResourceData, meta interface{} func resourceComputeInstanceGroupImportState(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/zones/(?P[^/]+)/instanceGroups/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", + "^projects/(?P[^/]+)/zones/(?P[^/]+)/instanceGroups/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff 
--git a/mmv1/third_party/terraform/services/compute/resource_compute_project_metadata_item.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_project_metadata_item.go.tmpl index fdf16116aa12..4f10ee9db007 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_project_metadata_item.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_project_metadata_item.go.tmpl @@ -184,8 +184,8 @@ func resourceComputeProjectMetadataItemDelete(d *schema.ResourceData, meta inter func resourceComputeProjectMetadataItemImportState(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/meta-data/(?P[^/]+)", - "(?P[^/]+)", + "^projects/(?P[^/]+)/meta-data/(?P[^/]+)$", + "^(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_target_pool.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_target_pool.go.tmpl index d5617b8e3e96..612ec7399dd1 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_target_pool.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_target_pool.go.tmpl @@ -571,10 +571,10 @@ func resourceComputeTargetPoolDelete(d *schema.ResourceData, meta interface{}) e func resourceTargetPoolStateImporter(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/regions/(?P[^/]+)/targetPools/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)", + "^projects/(?P[^/]+)/regions/(?P[^/]+)/targetPools/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/dns/resource_dns_record_set.go b/mmv1/third_party/terraform/services/dns/resource_dns_record_set.go index a9f713075b73..a8e74f92a90d 100644 --- a/mmv1/third_party/terraform/services/dns/resource_dns_record_set.go +++ b/mmv1/third_party/terraform/services/dns/resource_dns_record_set.go @@ -648,9 +648,9 @@ func resourceDnsRecordSetUpdate(d *schema.ResourceData, meta interface{}) error func resourceDnsRecordSetImportState(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/managedZones/(?P[^/]+)/rrsets/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + "^projects/(?P[^/]+)/managedZones/(?P[^/]+)/rrsets/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/osconfig/resource_os_config_os_policy_assignment.go b/mmv1/third_party/terraform/services/osconfig/resource_os_config_os_policy_assignment.go index 5ae751ff3792..ad832cd01496 100644 --- a/mmv1/third_party/terraform/services/osconfig/resource_os_config_os_policy_assignment.go +++ b/mmv1/third_party/terraform/services/osconfig/resource_os_config_os_policy_assignment.go @@ -1445,9 +1445,9 @@ func resourceOSConfigOSPolicyAssignmentDelete(d *schema.ResourceData, meta inter func resourceOSConfigOSPolicyAssignmentImport(d *schema.ResourceData, meta interface{}) 
([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/locations/(?P[^/]+)/osPolicyAssignments/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", + "^projects/(?P[^/]+)/locations/(?P[^/]+)/osPolicyAssignments/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder_organization_policy.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder_organization_policy.go index 18ec055b54d1..6e17b8d7603b 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder_organization_policy.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_folder_organization_policy.go @@ -47,9 +47,9 @@ func resourceFolderOrgPolicyImporter(d *schema.ResourceData, meta interface{}) ( config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "folders/(?P[^/]+)/constraints/(?P[^/]+)", - "folders/(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)"}, + "^folders/(?P[^/]+)/constraints/(?P[^/]+)$", + "^folders/(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$"}, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_custom_role.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_custom_role.go index ef27f2aacb9a..9b4828db0ab8 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_custom_role.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_iam_custom_role.go @@ -238,9 +238,9 @@ func resourceGoogleProjectIamCustomRoleDelete(d *schema.ResourceData, meta inter func resourceGoogleProjectIamCustomRoleImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/roles/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)", + "^projects/(?P[^/]+)/roles/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)$", }, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_organization_policy.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_organization_policy.go index 503117edf5b8..88ae902d0783 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_organization_policy.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_organization_policy.go @@ -47,9 +47,9 @@ func resourceProjectOrgPolicyImporter(d *schema.ResourceData, meta interface{}) config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+):constraints/(?P[^/]+)", - "(?P[^/]+):constraints/(?P[^/]+)", - "(?P[^/]+):(?P[^/]+)"}, + "^projects/(?P[^/]+):constraints/(?P[^/]+)$", + "^(?P[^/]+):constraints/(?P[^/]+)$", + "^(?P[^/]+):(?P[^/]+)$"}, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go index fd50b35de26e..21e829a6374a 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go +++ 
b/mmv1/third_party/terraform/services/resourcemanager/resource_google_service_account.go @@ -321,9 +321,9 @@ func resourceGoogleServiceAccountUpdate(d *schema.ResourceData, meta interface{} func resourceGoogleServiceAccountImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/serviceAccounts/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)"}, d, config); err != nil { + "^projects/(?P[^/]+)/serviceAccounts/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)$"}, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl index bd3c036f73f3..6ed39b1db93c 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl @@ -2367,9 +2367,9 @@ func resourceSqlDatabaseInstanceDelete(d *schema.ResourceData, meta interface{}) func resourceSqlDatabaseInstanceImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { config := meta.(*transport_tpg.Config) if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/instances/(?P[^/]+)", - "(?P[^/]+)/(?P[^/]+)", - "(?P[^/]+)"}, d, config); err != nil { + "^projects/(?P[^/]+)/instances/(?P[^/]+)$", + "^(?P[^/]+)/(?P[^/]+)$", + "^(?P[^/]+)$"}, d, config); err != nil { return nil, err } diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 9bb435d9169c..47707bfd4b45 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -92,6 +92,10 @@ terraform { Description of the change and how users should adjust their configuration (if needed). +### Resource import formats have improved validation + +Throughout the provider there were many resources which erroneously gave false positives to poorly formatted import input if a subset of the provided input was valid to their configured import formats. All GCP resource IDs supplied to "terraform import" must match the documentation specified import formats exactly. 
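+
+For example, importing a `google_service_account` now requires one of the exact formats documented for that resource, such as `projects/{project}/serviceAccounts/{email}`; IDs that only partially match a documented format are rejected. A minimal sketch using a Terraform `import` block (the project and account names are illustrative; the same ID rules apply to the `terraform import` CLI command):
+
+```hcl
+import {
+  to = google_service_account.default
+  id = "projects/my-project/serviceAccounts/my-sa@my-project.iam.gserviceaccount.com"
+}
+```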
+ ## Datasources ## Datasource: `google_product_datasource` diff --git a/tpgtools/ignored_handwritten/custom_import.go b/tpgtools/ignored_handwritten/custom_import.go index be5cfbc9c300..b50236160e00 100644 --- a/tpgtools/ignored_handwritten/custom_import.go +++ b/tpgtools/ignored_handwritten/custom_import.go @@ -10,8 +10,8 @@ import ( func sourceRepoImport(d *schema.ResourceData, config *transport_tpg.Config) error { if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/repos/(?P.+)", - "(?P.+)", + "^projects/(?P[^/]+)/repos/(?P.+)$", + "^(?P.+)$", }, d, config); err != nil { return err } @@ -28,8 +28,8 @@ func sourceRepoImport(d *schema.ResourceData, config *transport_tpg.Config) erro func runtimeconfigVariableImport(d *schema.ResourceData, config *transport_tpg.Config) error { if err := tpgresource.ParseImportId([]string{ - "projects/(?P[^/]+)/configs/(?P[^/]+)/variables/(?P.+)", - "(?P[^/]+)/(?P.+)", + "^projects/(?P[^/]+)/configs/(?P[^/]+)/variables/(?P.+)$", + "^(?P[^/]+)/(?P.+)$", }, d, config); err != nil { return err } From 216ba6393dbd079e1c80c34fb459d5be2dbb04f5 Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Fri, 15 Aug 2025 10:30:14 -0700 Subject: [PATCH 024/201] convert storage transfer job file from .tmpl (#14867) --- ...ransfer_job.go.tmpl => resource_storage_transfer_job.go} | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) rename mmv1/third_party/terraform/services/storagetransfer/{resource_storage_transfer_job.go.tmpl => resource_storage_transfer_job.go} (99%) diff --git a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go.tmpl b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go similarity index 99% rename from mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go.tmpl rename to mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go index 8187246ffdd2..38fbf29048af 100644 --- a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go.tmpl +++ b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go @@ -694,9 +694,9 @@ func gcsDataSchema() *schema.Resource { Description: `Google Cloud Storage bucket name.`, }, "path": { - Optional: true, - Type: schema.TypeString, - Description: `Google Cloud Storage path in bucket to transfer. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. As such, it should not begin with a '/'.`, + Optional: true, + Type: schema.TypeString, + Description: `Google Cloud Storage path in bucket to transfer. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. 
As such, it should not begin with a '/'.`, ValidateFunc: validateGCSDataPath, }, }, From bb28eed4e9e6ae4abcf4dd7c396f90d640d05dc0 Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Mon, 18 Aug 2025 09:45:40 -0700 Subject: [PATCH 025/201] changed mirrored_resources nested objects to set (#14871) Co-authored-by: Jonathan Greger <43762185+jmgreger@users.noreply.github.com> --- mmv1/products/compute/PacketMirroring.yaml | 6 ++++ .../compute_packet_mirroring_full.tf.tmpl | 3 ++ .../tpgresource/self_link_helpers.go | 30 +++++++++++++++++++ .../guides/version_7_upgrade.html.markdown | 6 ++++ 4 files changed, 45 insertions(+) diff --git a/mmv1/products/compute/PacketMirroring.yaml b/mmv1/products/compute/PacketMirroring.yaml index 123fcc99c203..d4bf994a3a73 100644 --- a/mmv1/products/compute/PacketMirroring.yaml +++ b/mmv1/products/compute/PacketMirroring.yaml @@ -144,6 +144,8 @@ properties: properties: - name: 'subnetworks' type: Array + is_set: true + set_hash_func: tpgresource.NestedUrlSetHashFunc description: | All instances in one of these subnetworks will be mirrored. at_least_one_of: @@ -161,10 +163,13 @@ properties: The URL of the subnetwork where this rule should be active. required: true custom_expand: 'templates/terraform/custom_expand/resourceref_with_validation.go.tmpl' + diff_suppress_func: 'tpgresource.CompareSelfLinkRelativePaths' resource: 'Subnetwork' imports: 'selfLink' - name: 'instances' type: Array + is_set: true + set_hash_func: tpgresource.NestedUrlSetHashFunc description: | All the listed instances will be mirrored. Specify at most 50. at_least_one_of: @@ -181,6 +186,7 @@ properties: The URL of the instances where this rule should be active. required: true custom_expand: 'templates/terraform/custom_expand/resourceref_with_validation.go.tmpl' + diff_suppress_func: 'tpgresource.CompareSelfLinkRelativePaths' resource: 'Instance' imports: 'selfLink' - name: 'tags' diff --git a/mmv1/templates/terraform/examples/compute_packet_mirroring_full.tf.tmpl b/mmv1/templates/terraform/examples/compute_packet_mirroring_full.tf.tmpl index 4a82a492c4cd..054f849bf010 100644 --- a/mmv1/templates/terraform/examples/compute_packet_mirroring_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/compute_packet_mirroring_full.tf.tmpl @@ -68,6 +68,9 @@ resource "google_compute_packet_mirroring" "{{$.PrimaryResourceId}}" { instances { url = google_compute_instance.mirror.id } + subnetworks { + url = google_compute_subnetwork.default.id + } } filter { ip_protocols = ["tcp"] diff --git a/mmv1/third_party/terraform/tpgresource/self_link_helpers.go b/mmv1/third_party/terraform/tpgresource/self_link_helpers.go index 0a1f3d9ff8be..93982ff5c0d1 100644 --- a/mmv1/third_party/terraform/tpgresource/self_link_helpers.go +++ b/mmv1/third_party/terraform/tpgresource/self_link_helpers.go @@ -1,8 +1,10 @@ package tpgresource import ( + "bytes" "errors" "fmt" + "log" "net/url" "regexp" "strings" @@ -92,6 +94,34 @@ func SelfLinkNameHash(selfLink interface{}) int { return Hashcode(name) } +// Hash based on relative url for a nested object containing a URL field. 
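+// Two URL values that reduce to the same relative path (for example the v1 and
+// beta self links of the same subnetwork) hash identically, keeping set
+// membership stable across equivalent self link formats.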
+func NestedUrlSetHashFunc(v interface{}) int { + if v == nil { + return 0 + } + + var buf bytes.Buffer + m := v.(map[string]interface{}) + log.Printf("[DEBUG] hashing %v", m) + + if v, ok := m["url"]; ok { + if v == nil { + v = "" + } else { + if relUrl, err := GetRelativePath(v.(string)); err != nil { + log.Printf("[WARN] Error on retrieving relative path of network url: %s", err) + } else { + v = relUrl + } + } + + buf.WriteString(fmt.Sprintf("%v-", v)) + } + + log.Printf("[DEBUG] computed hash value of %v from %v", Hashcode(buf.String()), buf.String()) + return Hashcode(buf.String()) +} + func ConvertSelfLinkToV1(link string) string { reg := regexp.MustCompile("/compute/[a-zA-Z0-9]*/projects/") return reg.ReplaceAllString(link, "/compute/v1/projects/") diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 47707bfd4b45..a913d137eefc 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -142,6 +142,12 @@ Use `google_beyondcorp_security_gateway_application` instead. `instance` has been removed in favor of `instance_name`. +## Resource: `google_compute_packet_mirroring` + +### `subnetworks` and `instances` fields have been converted to sets + +`subnetworks` and `instances` fields have been converted to sets. If you need to access values in their nested objects, it will need to be accessed via `for_each` or locally converting the field to a list/array in your configuration. + ## Resource: `google_compute_subnetwork` ### `enable_flow_logs`is now removed From 7b090acfc6e03a8874099823e9977540eb039e04 Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Mon, 18 Aug 2025 12:05:10 -0700 Subject: [PATCH 026/201] add deletion protection to alloydb cluster (#14796) --- mmv1/products/alloydb/Cluster.yaml | 21 +++ .../examples/alloydb_backup_basic.tf.tmpl | 2 + .../alloydb_backup_basic_test.tf.tmpl | 2 + .../examples/alloydb_backup_full.tf.tmpl | 2 + .../examples/alloydb_backup_full_test.tf.tmpl | 2 + .../alloydb_cluster_after_upgrade.tf.tmpl | 2 + .../examples/alloydb_cluster_basic.tf.tmpl | 2 + .../alloydb_cluster_before_upgrade.tf.tmpl | 2 + .../examples/alloydb_cluster_full.tf.tmpl | 2 + .../examples/alloydb_cluster_restore.tf.tmpl | 6 + .../examples/alloydb_instance_basic.tf.tmpl | 2 + .../alloydb_instance_basic_test.tf.tmpl | 2 + .../alloydb_instance_psc_test.tf.tmpl | 2 + .../alloydb_secondary_cluster_basic.tf.tmpl | 3 + ...loydb_secondary_cluster_basic_test.tf.tmpl | 3 + .../alloydb_secondary_instance_basic.tf.tmpl | 3 + ...oydb_secondary_instance_basic_test.tf.tmpl | 3 + .../examples/alloydb_user_builtin.tf.tmpl | 2 + .../alloydb_user_builtin_test.tf.tmpl | 2 + .../examples/alloydb_user_iam.tf.tmpl | 2 + .../examples/alloydb_user_iam_test.tf.tmpl | 2 + ...onnection_profile_existing_alloydb.tf.tmpl | 2 + ..._migration_job_postgres_to_alloydb.tf.tmpl | 2 + .../pre_delete/alloydb_cluster.go.tmpl | 4 + .../alloydb/data_source_alloydb_cluster.go | 5 + .../data_source_alloydb_cluster_test.go | 2 + ...a_source_alloydb_database_instance_test.go | 2 + .../alloydb/resource_alloydb_backup_test.go | 8 + .../resource_alloydb_cluster_restore_test.go | 44 ++++- .../alloydb/resource_alloydb_cluster_test.go | 165 ++++++++++++------ .../alloydb/resource_alloydb_instance_test.go | 34 ++++ ...resource_alloydb_secondary_cluster_test.go | 132 +++++++++++--- 
...esource_alloydb_secondary_instance_test.go | 28 +++ .../alloydb/resource_alloydb_user_test.go | 6 + .../guides/version_7_upgrade.html.markdown | 7 + .../tests/data/example_alloydb_instance.tf | 2 + 36 files changed, 429 insertions(+), 83 deletions(-) diff --git a/mmv1/products/alloydb/Cluster.yaml b/mmv1/products/alloydb/Cluster.yaml index 9a1341808086..d1754852121c 100644 --- a/mmv1/products/alloydb/Cluster.yaml +++ b/mmv1/products/alloydb/Cluster.yaml @@ -67,6 +67,8 @@ examples: primary_resource_id: 'default' vars: alloydb_cluster_name: 'alloydb-cluster' + ignore_read_extra: + - 'deletion_protection' - name: 'alloydb_cluster_before_upgrade' primary_resource_id: 'default' vars: @@ -75,6 +77,8 @@ examples: network_name: 'alloydb-network' test_vars_overrides: 'network_name': 'acctest.BootstrapSharedTestNetwork(t, "alloydb-1")' + ignore_read_extra: + - 'deletion_protection' - name: 'alloydb_cluster_after_upgrade' primary_resource_id: 'default' vars: @@ -83,10 +87,14 @@ examples: network_name: 'alloydb-network' test_vars_overrides: 'network_name': 'acctest.BootstrapSharedTestNetwork(t, "alloydb-1")' + ignore_read_extra: + - 'deletion_protection' - name: 'alloydb_cluster_full' primary_resource_id: 'full' vars: alloydb_cluster_name: 'alloydb-cluster-full' + ignore_read_extra: + - 'deletion_protection' - name: 'alloydb_cluster_restore' primary_resource_id: 'source' vars: @@ -99,6 +107,7 @@ examples: test_vars_overrides: 'network_name': 'acctest.BootstrapSharedTestNetwork(t, "alloydb-instance-basic")' ignore_read_extra: + - 'deletion_protection' - 'reconciling' - 'update_time' exclude_test: true @@ -108,6 +117,8 @@ examples: alloydb_primary_cluster_name: 'alloydb-primary-cluster' alloydb_primary_instance_name: 'alloydb-primary-instance' alloydb_secondary_cluster_name: 'alloydb-secondary-cluster' + ignore_read_extra: + - 'deletion_protection' exclude_test: true - name: 'alloydb_secondary_cluster_basic_test' primary_resource_id: 'secondary' @@ -118,6 +129,8 @@ examples: network_name: 'alloydb-network' test_vars_overrides: 'network_name': 'acctest.BootstrapSharedServiceNetworkingConnection(t, "alloydb-1")' + ignore_read_extra: + - 'deletion_protection' exclude_docs: true virtual_fields: - name: 'deletion_policy' @@ -128,6 +141,14 @@ virtual_fields: Possible values: DEFAULT, FORCE type: String default_value: "DEFAULT" + - name: 'deletion_protection' + description: | + Whether Terraform will be prevented from destroying the cluster. + When the field is set to true or unset in Terraform state, a `terraform apply` + or `terraform destroy` that would delete the cluster will fail. + When the field is set to false, deleting the cluster is allowed. 
+ type: Boolean + default_value: true - name: 'skip_await_major_version_upgrade' type: Boolean default_value: true diff --git a/mmv1/templates/terraform/examples/alloydb_backup_basic.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_backup_basic.tf.tmpl index a78ce7463f6a..933c5ec28e04 100644 --- a/mmv1/templates/terraform/examples/alloydb_backup_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_backup_basic.tf.tmpl @@ -12,6 +12,8 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { network_config { network = google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "{{$.PrimaryResourceId}}" { diff --git a/mmv1/templates/terraform/examples/alloydb_backup_basic_test.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_backup_basic_test.tf.tmpl index b09d5a4be7ef..aedf0a53d9ab 100644 --- a/mmv1/templates/terraform/examples/alloydb_backup_basic_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_backup_basic_test.tf.tmpl @@ -12,6 +12,8 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "{{$.PrimaryResourceId}}" { diff --git a/mmv1/templates/terraform/examples/alloydb_backup_full.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_backup_full.tf.tmpl index 563fdcbac298..6b4f0385861e 100644 --- a/mmv1/templates/terraform/examples/alloydb_backup_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_backup_full.tf.tmpl @@ -17,6 +17,8 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { network_config { network = google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "{{$.PrimaryResourceId}}" { diff --git a/mmv1/templates/terraform/examples/alloydb_backup_full_test.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_backup_full_test.tf.tmpl index 218bc7ee3bed..abb4e8618b2e 100644 --- a/mmv1/templates/terraform/examples/alloydb_backup_full_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_backup_full_test.tf.tmpl @@ -17,6 +17,8 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "{{$.PrimaryResourceId}}" { diff --git a/mmv1/templates/terraform/examples/alloydb_cluster_after_upgrade.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_cluster_after_upgrade.tf.tmpl index 672301dfc794..cb829f13414f 100644 --- a/mmv1/templates/terraform/examples/alloydb_cluster_after_upgrade.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_cluster_after_upgrade.tf.tmpl @@ -20,6 +20,8 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { initial_user { password = "{{index $.Vars "alloydb_cluster_name"}}" } + + deletion_protection = false } data "google_compute_network" "default" { diff --git a/mmv1/templates/terraform/examples/alloydb_cluster_basic.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_cluster_basic.tf.tmpl index c9bab8098b43..86f8d92fe2f9 100644 --- a/mmv1/templates/terraform/examples/alloydb_cluster_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_cluster_basic.tf.tmpl @@ -4,6 +4,8 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { network_config { network = google_compute_network.default.id } + + deletion_protection = false } data "google_project" "project" {} diff --git 
a/mmv1/templates/terraform/examples/alloydb_cluster_before_upgrade.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_cluster_before_upgrade.tf.tmpl index 9cc7adf3c6aa..319e346fd914 100644 --- a/mmv1/templates/terraform/examples/alloydb_cluster_before_upgrade.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_cluster_before_upgrade.tf.tmpl @@ -20,6 +20,8 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { initial_user { password = "{{index $.Vars "alloydb_cluster_name"}}" } + + deletion_protection = false } data "google_compute_network" "default" { diff --git a/mmv1/templates/terraform/examples/alloydb_cluster_full.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_cluster_full.tf.tmpl index 2b0c9e4c9f63..44f8e9faea1f 100644 --- a/mmv1/templates/terraform/examples/alloydb_cluster_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_cluster_full.tf.tmpl @@ -44,6 +44,8 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { labels = { test = "{{index $.Vars "alloydb_cluster_name"}}" } + + deletion_protection = false } data "google_project" "project" {} diff --git a/mmv1/templates/terraform/examples/alloydb_cluster_restore.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_cluster_restore.tf.tmpl index 7713276440e6..bd1ff8d2668d 100644 --- a/mmv1/templates/terraform/examples/alloydb_cluster_restore.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_cluster_restore.tf.tmpl @@ -6,6 +6,8 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { initial_user { password = "{{index $.Vars "alloydb_cluster_name"}}" } + + deletion_protection = false } resource "google_alloydb_instance" "{{$.PrimaryResourceId}}" { @@ -37,6 +39,8 @@ resource "google_alloydb_cluster" "restored_from_backup" { restore_backup_source { backup_name = google_alloydb_backup.{{$.PrimaryResourceId}}.name } + + deletion_protection = false } resource "google_alloydb_cluster" "restored_via_pitr" { @@ -49,6 +53,8 @@ resource "google_alloydb_cluster" "restored_via_pitr" { cluster = google_alloydb_cluster.{{$.PrimaryResourceId}}.name point_in_time = "2023-08-03T19:19:00.094Z" } + + deletion_protection = false } data "google_project" "project" {} diff --git a/mmv1/templates/terraform/examples/alloydb_instance_basic.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_instance_basic.tf.tmpl index 575c2a6ed790..31b0d9e40a0b 100644 --- a/mmv1/templates/terraform/examples/alloydb_instance_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_instance_basic.tf.tmpl @@ -20,6 +20,8 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { initial_user { password = "{{index $.Vars "alloydb_cluster_name"}}" } + + deletion_protection = false } data "google_project" "project" {} diff --git a/mmv1/templates/terraform/examples/alloydb_instance_basic_test.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_instance_basic_test.tf.tmpl index 0ca5146f0ac8..dd6c485736fe 100644 --- a/mmv1/templates/terraform/examples/alloydb_instance_basic_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_instance_basic_test.tf.tmpl @@ -17,6 +17,8 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { initial_user { password = "{{index $.Vars "alloydb_cluster_name"}}" } + + deletion_protection = false } data "google_project" "project" {} diff --git a/mmv1/templates/terraform/examples/alloydb_instance_psc_test.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_instance_psc_test.tf.tmpl index d2d4712d0ae7..78b6cdf4d91e 100644 --- 
a/mmv1/templates/terraform/examples/alloydb_instance_psc_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_instance_psc_test.tf.tmpl @@ -18,4 +18,6 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { psc_config { psc_enabled = true } + + deletion_protection = false } diff --git a/mmv1/templates/terraform/examples/alloydb_secondary_cluster_basic.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_secondary_cluster_basic.tf.tmpl index 6911d955d778..6d48cfa10e94 100644 --- a/mmv1/templates/terraform/examples/alloydb_secondary_cluster_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_secondary_cluster_basic.tf.tmpl @@ -4,6 +4,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -34,6 +36,7 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { primary_cluster_name = google_alloydb_cluster.primary.name } + deletion_protection = false depends_on = [google_alloydb_instance.primary] } diff --git a/mmv1/templates/terraform/examples/alloydb_secondary_cluster_basic_test.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_secondary_cluster_basic_test.tf.tmpl index 9b04de5953d4..87fa2ba37399 100644 --- a/mmv1/templates/terraform/examples/alloydb_secondary_cluster_basic_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_secondary_cluster_basic_test.tf.tmpl @@ -4,6 +4,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -32,6 +34,7 @@ resource "google_alloydb_cluster" "{{$.PrimaryResourceId}}" { primary_cluster_name = google_alloydb_cluster.primary.name } + deletion_protection = false depends_on = [google_alloydb_instance.primary] } diff --git a/mmv1/templates/terraform/examples/alloydb_secondary_instance_basic.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_secondary_instance_basic.tf.tmpl index 8e2eeb44e88d..a10d35521051 100644 --- a/mmv1/templates/terraform/examples/alloydb_secondary_instance_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_secondary_instance_basic.tf.tmpl @@ -4,6 +4,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -43,6 +45,7 @@ resource "google_alloydb_cluster" "secondary" { ignore_changes = [instance_type] } + deletion_protection = false depends_on = [google_alloydb_instance.primary] } diff --git a/mmv1/templates/terraform/examples/alloydb_secondary_instance_basic_test.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_secondary_instance_basic_test.tf.tmpl index 432fd4d91e80..f3c959d2baf0 100644 --- a/mmv1/templates/terraform/examples/alloydb_secondary_instance_basic_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_secondary_instance_basic_test.tf.tmpl @@ -4,6 +4,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -34,6 +36,7 @@ resource "google_alloydb_cluster" "secondary" { deletion_policy = "FORCE" + deletion_protection = false depends_on = [google_alloydb_instance.primary] } diff --git a/mmv1/templates/terraform/examples/alloydb_user_builtin.tf.tmpl 
b/mmv1/templates/terraform/examples/alloydb_user_builtin.tf.tmpl index b91e92abddce..5e3480f68a38 100644 --- a/mmv1/templates/terraform/examples/alloydb_user_builtin.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_user_builtin.tf.tmpl @@ -15,6 +15,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "{{index $.Vars "alloydb_cluster_pass"}}" } + + deletion_protection = false } data "google_project" "project" {} diff --git a/mmv1/templates/terraform/examples/alloydb_user_builtin_test.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_user_builtin_test.tf.tmpl index 01d0f92dd77c..2ccac4ee4918 100644 --- a/mmv1/templates/terraform/examples/alloydb_user_builtin_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_user_builtin_test.tf.tmpl @@ -13,6 +13,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "{{index $.Vars "alloydb_cluster_pass"}}" } + + deletion_protection = false } data "google_project" "project" {} diff --git a/mmv1/templates/terraform/examples/alloydb_user_iam.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_user_iam.tf.tmpl index 107cdaf25b3f..62a938a57ee8 100644 --- a/mmv1/templates/terraform/examples/alloydb_user_iam.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_user_iam.tf.tmpl @@ -16,6 +16,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "{{index $.Vars "alloydb_cluster_pass"}}" } + + deletion_protection = false } data "google_project" "project" {} diff --git a/mmv1/templates/terraform/examples/alloydb_user_iam_test.tf.tmpl b/mmv1/templates/terraform/examples/alloydb_user_iam_test.tf.tmpl index eebb03454761..1db1157c4b9f 100644 --- a/mmv1/templates/terraform/examples/alloydb_user_iam_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/alloydb_user_iam_test.tf.tmpl @@ -13,6 +13,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "{{index $.Vars "alloydb_cluster_pass"}}" } + + deletion_protection = false } data "google_project" "project" {} diff --git a/mmv1/templates/terraform/examples/database_migration_service_connection_profile_existing_alloydb.tf.tmpl b/mmv1/templates/terraform/examples/database_migration_service_connection_profile_existing_alloydb.tf.tmpl index 10b99197cb48..5e272f5be9d4 100644 --- a/mmv1/templates/terraform/examples/database_migration_service_connection_profile_existing_alloydb.tf.tmpl +++ b/mmv1/templates/terraform/examples/database_migration_service_connection_profile_existing_alloydb.tf.tmpl @@ -13,6 +13,8 @@ resource "google_alloydb_cluster" "destination_alloydb" { user = "{{index $.Vars "destination_alloydb"}}" password = "{{index $.Vars "destination_alloydb"}}" } + + deletion_protection = false } resource "google_alloydb_instance" "destination_alloydb_primary" { diff --git a/mmv1/templates/terraform/examples/database_migration_service_migration_job_postgres_to_alloydb.tf.tmpl b/mmv1/templates/terraform/examples/database_migration_service_migration_job_postgres_to_alloydb.tf.tmpl index 2e66858dec05..1b8b97fb8209 100644 --- a/mmv1/templates/terraform/examples/database_migration_service_migration_job_postgres_to_alloydb.tf.tmpl +++ b/mmv1/templates/terraform/examples/database_migration_service_migration_job_postgres_to_alloydb.tf.tmpl @@ -62,6 +62,8 @@ resource "google_alloydb_cluster" "destination_alloydb" { user = "{{index $.Vars "destination_alloydb"}}" password = "{{index $.Vars "destination_alloydb"}}" } + + deletion_protection = false } resource "google_alloydb_instance" "destination_alloydb_primary" { diff --git 
a/mmv1/templates/terraform/pre_delete/alloydb_cluster.go.tmpl b/mmv1/templates/terraform/pre_delete/alloydb_cluster.go.tmpl index 743d7e857912..b1543d5758c4 100644 --- a/mmv1/templates/terraform/pre_delete/alloydb_cluster.go.tmpl +++ b/mmv1/templates/terraform/pre_delete/alloydb_cluster.go.tmpl @@ -1,3 +1,7 @@ +if d.Get("deletion_protection").(bool) { + return fmt.Errorf("cannot destroy cluster without setting deletion_protection=false and running `terraform apply`") +} + // Forcefully delete the secondary cluster and the dependent instances because deletion of secondary instance is not supported. if deletionPolicy := d.Get("deletion_policy"); deletionPolicy == "FORCE" { url = url + "?force=true" diff --git a/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_cluster.go b/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_cluster.go index 4ccc3db083c3..3e289182ae8d 100644 --- a/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_cluster.go +++ b/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_cluster.go @@ -55,5 +55,10 @@ func dataSourceAlloydbDatabaseClusterRead(d *schema.ResourceData, meta interface if d.Id() == "" { return fmt.Errorf("%s not found", id) } + + if err := d.Set("deletion_protection", nil); err != nil { + return fmt.Errorf("Error setting deletion_protection: %s", err) + } + return nil } diff --git a/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_cluster_test.go b/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_cluster_test.go index e520602a99f2..c70906077b38 100644 --- a/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_cluster_test.go +++ b/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_cluster_test.go @@ -38,6 +38,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } + + deletion_protection = false } data "google_compute_network" "default" { diff --git a/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_database_instance_test.go b/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_database_instance_test.go index 5220263365cd..805a1cb8c1d3 100644 --- a/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_database_instance_test.go +++ b/mmv1/third_party/terraform/services/alloydb/data_source_alloydb_database_instance_test.go @@ -48,6 +48,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } + + deletion_protection = false } data "google_compute_network" "default" { diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_backup_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_backup_test.go index 271676fdc51e..4bff41abc781 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_backup_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_backup_test.go @@ -63,6 +63,8 @@ resource "google_alloydb_cluster" "default" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "default" { @@ -99,6 +101,8 @@ resource "google_alloydb_cluster" "default" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "default" { @@ -149,6 +153,8 @@ resource "google_alloydb_cluster" "default" { network_config { network = data.google_compute_network.default.id } + + 
deletion_protection = false } data "google_project" "project" { } @@ -218,6 +224,8 @@ resource "google_alloydb_cluster" "default" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "default" { diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_restore_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_restore_test.go index 9d12fba70b1b..b3ce46da17df 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_restore_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_restore_test.go @@ -34,7 +34,7 @@ func TestAccAlloydbCluster_restore(t *testing.T) { ResourceName: "google_alloydb_cluster.source", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { // Invalid input check - cannot pass in both sources @@ -54,7 +54,7 @@ func TestAccAlloydbCluster_restore(t *testing.T) { ResourceName: "google_alloydb_cluster.restored_from_backup", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location", "restore_backup_source"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location", "restore_backup_source"}, }, { // Validate PITR succeeds @@ -64,7 +64,7 @@ func TestAccAlloydbCluster_restore(t *testing.T) { ResourceName: "google_alloydb_cluster.restored_from_point_in_time", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location", "restore_continuous_backup_source"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location", "restore_continuous_backup_source"}, }, { // Make sure updates work without recreating the clusters @@ -89,6 +89,8 @@ resource "google_alloydb_cluster" "source" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "source" { @@ -122,6 +124,8 @@ resource "google_alloydb_cluster" "source" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "source" { @@ -155,6 +159,8 @@ resource "google_alloydb_cluster" "restored" { lifecycle { prevent_destroy = true } + + deletion_protection = false } data "google_project" "project" {} @@ -174,6 +180,8 @@ resource "google_alloydb_cluster" "source" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "source" { @@ -201,6 +209,8 @@ resource "google_alloydb_cluster" "restored" { cluster = google_alloydb_cluster.source.name } + deletion_protection = false + lifecycle { prevent_destroy = true } @@ -222,6 +232,8 @@ resource "google_alloydb_cluster" "source" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "source" { @@ -248,6 +260,8 @@ resource "google_alloydb_cluster" "restored_from_backup" { backup_name = google_alloydb_backup.default.name } + deletion_protection = false + lifecycle { prevent_destroy = true } @@ -271,6 +285,8 @@ resource "google_alloydb_cluster" "source" { network_config { network = 
data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "source" { @@ -297,6 +313,8 @@ resource "google_alloydb_cluster" "restored_from_backup" { backup_name = google_alloydb_backup.default.name } + deletion_protection = false + lifecycle { prevent_destroy = true } @@ -313,6 +331,8 @@ resource "google_alloydb_cluster" "restored_from_point_in_time" { point_in_time = google_alloydb_backup.default.update_time } + deletion_protection = false + lifecycle { prevent_destroy = true } @@ -336,6 +356,8 @@ resource "google_alloydb_cluster" "source" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "source" { @@ -367,6 +389,8 @@ resource "google_alloydb_cluster" "restored_from_backup" { recovery_window_days = 20 } + deletion_protection = false + lifecycle { prevent_destroy = true } @@ -388,6 +412,8 @@ resource "google_alloydb_cluster" "restored_from_point_in_time" { recovery_window_days = 20 } + deletion_protection = false + lifecycle { prevent_destroy = true } @@ -411,6 +437,8 @@ resource "google_alloydb_cluster" "source" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "source" { @@ -450,6 +478,8 @@ resource "google_alloydb_cluster" "restored_from_backup" { recovery_window_days = 20 } + deletion_protection = false + lifecycle { prevent_destroy = true } @@ -473,6 +503,8 @@ resource "google_alloydb_cluster" "restored_from_point_in_time" { recovery_window_days = 20 } + deletion_protection = false + lifecycle { prevent_destroy = true } @@ -496,6 +528,8 @@ resource "google_alloydb_cluster" "source" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "source" { @@ -521,6 +555,8 @@ resource "google_alloydb_cluster" "restored_from_backup" { restore_backup_source { backup_name = google_alloydb_backup.default.name } + + deletion_protection = false } resource "google_alloydb_cluster" "restored_from_point_in_time" { @@ -533,6 +569,8 @@ resource "google_alloydb_cluster" "restored_from_point_in_time" { cluster = google_alloydb_cluster.source.name point_in_time = google_alloydb_backup.default.update_time } + + deletion_protection = false } data "google_project" "project" {} diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_test.go index 3c7d9f433cae..d3f141b83b84 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_cluster_test.go @@ -30,7 +30,7 @@ func TestAccAlloydbCluster_update(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location", "labels", "terraform_labels"}, }, { Config: testAccAlloydbCluster_update(context), @@ -39,7 +39,7 @@ func TestAccAlloydbCluster_update(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location", "labels", "terraform_labels"}, + ImportStateVerifyIgnore: 
[]string{"deletion_protection", "initial_user", "cluster_id", "location", "labels", "terraform_labels"}, }, { Config: testAccAlloydbCluster_alloydbClusterBasicExample(context), @@ -56,6 +56,8 @@ resource "google_alloydb_cluster" "default" { network_config { network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.default.name}" } + + deletion_protection = false } data "google_project" "project" { @@ -87,7 +89,7 @@ func TestAccAlloydbCluster_upgrade(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location", "labels", "terraform_labels", "skip_await_major_version_upgrade"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location", "labels", "terraform_labels", "skip_await_major_version_upgrade"}, }, { Config: testAccAlloydbCluster_afterUpgrade(context), @@ -96,7 +98,7 @@ func TestAccAlloydbCluster_upgrade(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location", "labels", "terraform_labels", "skip_await_major_version_upgrade"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location", "labels", "terraform_labels", "skip_await_major_version_upgrade"}, }, }, }) @@ -112,6 +114,8 @@ resource "google_alloydb_cluster" "default" { network = data.google_compute_network.default.id } database_version = "POSTGRES_14" + + deletion_protection = false } resource "google_alloydb_instance" "default" { @@ -140,6 +144,8 @@ resource "google_alloydb_cluster" "default" { network = data.google_compute_network.default.id } database_version = "POSTGRES_15" + + deletion_protection = false } resource "google_alloydb_instance" "default" { @@ -192,6 +198,8 @@ resource "google_alloydb_cluster" "default" { network_config { network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.default.name}" } + + deletion_protection = false } data "google_project" "project" { @@ -235,6 +243,8 @@ resource "google_alloydb_cluster" "default" { network_config { network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.default.name}" } + + deletion_protection = false } data "google_project" "project" { @@ -269,7 +279,7 @@ func TestAccAlloydbCluster_addAutomatedBackupPolicyAndInitialUser(t *testing.T) ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_withInitialUserAndAutomatedBackupPolicy(context), @@ -278,7 +288,7 @@ func TestAccAlloydbCluster_addAutomatedBackupPolicyAndInitialUser(t *testing.T) ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_alloydbClusterBasicExample(context), @@ -310,7 +320,7 @@ func TestAccAlloydbCluster_deleteAutomatedBackupPolicyAndInitialUser(t *testing. 
ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_withoutInitialUserAndAutomatedBackupPolicy(context), @@ -319,7 +329,7 @@ func TestAccAlloydbCluster_deleteAutomatedBackupPolicyAndInitialUser(t *testing. ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_alloydbClusterBasicExample(context), @@ -350,7 +360,7 @@ func TestAccAlloydbCluster_AutomatedBackupPolicyHandlesMidnight(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_alloydbClusterBasicExample(context), @@ -397,6 +407,9 @@ resource "google_alloydb_cluster" "default" { test = "tf-test-alloydb-cluster%{random_suffix}" } } + + deletion_protection = false + lifecycle { prevent_destroy = true } @@ -419,6 +432,9 @@ resource "google_alloydb_cluster" "default" { network_config { network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.default.name}" } + + deletion_protection = false + lifecycle { prevent_destroy = true } @@ -450,9 +466,10 @@ func TestAccAlloydbCluster_missingWeeklySchedule(t *testing.T) { Config: testAccAlloydbCluster_missingWeeklySchedule(context), }, { - ResourceName: "google_alloydb_cluster.default", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_alloydb_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -474,9 +491,11 @@ resource "google_alloydb_cluster" "default" { count = 1 } labels = { - test = "tf-test-alloydb-cluster%{random_suffix}" - } + test = "tf-test-alloydb-cluster%{random_suffix}" + } } + + deletion_protection = false } data "google_project" "project" {} resource "google_compute_network" "default" { @@ -542,17 +561,19 @@ func TestAccAlloydbCluster_deleteTimeBasedRetentionPolicy(t *testing.T) { Config: testAccAlloydbCluster_withTimeBasedRetentionPolicy(context), }, { - ResourceName: "google_alloydb_cluster.default", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_alloydb_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { Config: testAccAlloydbCluster_withoutTimeBasedRetentionPolicy(context), }, { - ResourceName: "google_alloydb_cluster.default", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_alloydb_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { Config: testAccAlloydbCluster_alloydbClusterBasicExample(context), @@ -588,6 +609,9 @@ resource "google_alloydb_cluster" "default" { retention_period = "4.5s" } } + + deletion_protection = false + lifecycle { ignore_changes = [ automated_backup_policy[0].time_based_retention @@ -628,6 +652,9 @@ resource "google_alloydb_cluster" 
"default" { } } } + + deletion_protection = false + lifecycle { ignore_changes = [ automated_backup_policy[0].time_based_retention @@ -663,7 +690,7 @@ func TestAccAlloydbCluster_usingCMEK(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "cluster_id", "location"}, }, }, }) @@ -680,6 +707,8 @@ resource "google_alloydb_cluster" "default" { encryption_config { kms_key_name = "%{kms_key_name}" } + + deletion_protection = false depends_on = [google_kms_crypto_key_iam_member.crypto_key] } resource "google_compute_network" "default" { @@ -715,7 +744,7 @@ func TestAccAlloydbCluster_CMEKInAutomatedBackupIsUpdatable(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_updateCMEKInAutomatedBackup(context), @@ -724,7 +753,7 @@ func TestAccAlloydbCluster_CMEKInAutomatedBackupIsUpdatable(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_usingCMEKallowDeletion(context), @@ -733,7 +762,7 @@ func TestAccAlloydbCluster_CMEKInAutomatedBackupIsUpdatable(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "cluster_id", "location"}, }, }, }) @@ -761,8 +790,11 @@ resource "google_alloydb_cluster" "default" { retention_period = "510s" } } + + deletion_protection = false + lifecycle { - prevent_destroy = true + prevent_destroy = true } depends_on = [google_kms_crypto_key_iam_member.crypto_key] } @@ -803,6 +835,9 @@ resource "google_alloydb_cluster" "default" { retention_period = "510s" } } + + deletion_protection = false + lifecycle { prevent_destroy = true } @@ -851,6 +886,8 @@ resource "google_alloydb_cluster" "default" { retention_period = "510s" } } + + deletion_protection = false depends_on = [google_kms_crypto_key_iam_member.crypto_key] } @@ -898,7 +935,7 @@ func TestAccAlloydbCluster_continuousBackup_enabledByDefault(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_alloydbClusterBasicExample(context), @@ -934,7 +971,7 @@ func TestAccAlloydbCluster_continuousBackup_update_noChangeIfDefaultsSet(t *test ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_continuousBackupConfig(context), @@ -947,7 +984,7 @@ func TestAccAlloydbCluster_continuousBackup_update_noChangeIfDefaultsSet(t *test ResourceName: "google_alloydb_cluster.default", ImportState: true, 
ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_alloydbClusterBasicExample(context), @@ -983,7 +1020,7 @@ func TestAccAlloydbCluster_continuousBackup_noChangeIfRemoved(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_alloydbClusterBasicExample(context), @@ -1028,7 +1065,7 @@ func TestAccAlloydbCluster_continuousBackup_update(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_continuousBackupConfig(context), @@ -1041,7 +1078,7 @@ func TestAccAlloydbCluster_continuousBackup_update(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_continuousBackupConfig(context2), @@ -1054,7 +1091,7 @@ func TestAccAlloydbCluster_continuousBackup_update(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_alloydbClusterBasicExample(context), @@ -1071,6 +1108,9 @@ resource "google_alloydb_cluster" "default" { network_config { network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.default.name}" } + + deletion_protection = false + lifecycle { prevent_destroy = true } @@ -1098,6 +1138,9 @@ resource "google_alloydb_cluster" "default" { enabled = %{enabled} recovery_window_days = %{recovery_window_days} } + + deletion_protection = false + lifecycle { prevent_destroy = true } @@ -1142,7 +1185,7 @@ func TestAccAlloydbCluster_continuousBackup_CMEKIsUpdatable(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_usingCMEKInClusterAndContinuousBackup(context2), @@ -1151,7 +1194,7 @@ func TestAccAlloydbCluster_continuousBackup_CMEKIsUpdatable(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_continuousBackupUsingCMEKAllowDeletion(context2), @@ -1160,7 +1203,7 @@ func TestAccAlloydbCluster_continuousBackup_CMEKIsUpdatable(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - 
ImportStateVerifyIgnore: []string{"cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "cluster_id", "location"}, }, }, }) @@ -1181,8 +1224,11 @@ resource "google_alloydb_cluster" "default" { kms_key_name = "%{key_name}" } } + + deletion_protection = false + lifecycle { - prevent_destroy = true + prevent_destroy = true } depends_on = [google_kms_crypto_key_iam_member.crypto_key] } @@ -1216,6 +1262,8 @@ resource "google_alloydb_cluster" "default" { kms_key_name = "%{key_name}" } } + + deletion_protection = false depends_on = [google_kms_crypto_key_iam_member.crypto_key] } @@ -1250,9 +1298,10 @@ func TestAccAlloydbCluster_withNetworkConfig(t *testing.T) { Config: testAccAlloydbCluster_withNetworkConfig(context), }, { - ResourceName: "google_alloydb_cluster.default", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_alloydb_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -1266,6 +1315,8 @@ resource "google_alloydb_cluster" "default" { network_config { network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.default.name}" } + + deletion_protection = false } data "google_project" "project" {} resource "google_compute_network" "default" { @@ -1291,9 +1342,10 @@ func TestAccAlloydbCluster_withNetworkConfigAndAllocatedIPRange(t *testing.T) { Config: testAccAlloydbCluster_withNetworkConfigAndAllocatedIPRange(context), }, { - ResourceName: "google_alloydb_cluster.default", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_alloydb_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -1308,6 +1360,8 @@ resource "google_alloydb_cluster" "default" { network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.default.name}" allocated_ip_range = google_compute_global_address.private_ip_alloc.name } + + deletion_protection = false } data "google_project" "project" {} resource "google_compute_network" "default" { @@ -1341,9 +1395,10 @@ func TestAccAlloydbCluster_withMaintenanceWindows(t *testing.T) { Config: testAccAlloydbCluster_withMaintenanceWindows(context), }, { - ResourceName: "google_alloydb_cluster.default", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_alloydb_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, }, }, }) @@ -1368,6 +1423,8 @@ resource "google_alloydb_cluster" "default" { } } } + + deletion_protection = false } data "google_project" "project" {} resource "google_compute_network" "default" { @@ -1416,6 +1473,8 @@ resource "google_alloydb_cluster" "default" { day = "WEDNESDAY" } } + + deletion_protection = false } resource "google_compute_network" "default" { @@ -1445,6 +1504,8 @@ resource "google_alloydb_cluster" "default" { } } } + + deletion_protection = false } resource "google_compute_network" "default" { @@ -1487,6 +1548,8 @@ resource "google_alloydb_cluster" "default" { psc_config { psc_enabled = true } + + deletion_protection = false } data "google_project" "project" {} `, context) @@ -1515,7 +1578,7 @@ func TestAccAlloydbCluster_standardClusterUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + 
ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_withSubscriptionTypeStandard(context), @@ -1524,7 +1587,7 @@ func TestAccAlloydbCluster_standardClusterUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_withSubscriptionTypeStandard(context), @@ -1533,7 +1596,7 @@ func TestAccAlloydbCluster_standardClusterUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, }, }) @@ -1559,7 +1622,7 @@ func TestAccAlloydbCluster_trialClusterUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_withSubscriptionTypeTrial(context), @@ -1568,7 +1631,7 @@ func TestAccAlloydbCluster_trialClusterUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, { Config: testAccAlloydbCluster_withSubscriptionTypeStandard(context), @@ -1577,7 +1640,7 @@ func TestAccAlloydbCluster_trialClusterUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.default", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "cluster_id", "location"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "cluster_id", "location"}, }, }, }) diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go index 72cec88fa08c..9280badf632b 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_instance_test.go @@ -64,6 +64,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } + + deletion_protection = false } data "google_compute_network" "default" { @@ -99,6 +101,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } + + deletion_protection = false } data "google_compute_network" "default" { @@ -211,6 +215,8 @@ resource "google_alloydb_cluster" "default" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } data "google_project" "project" {} @@ -235,6 +241,8 @@ resource "google_alloydb_cluster" "default" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } data "google_project" "project" {} @@ -362,6 +370,8 @@ resource "google_alloydb_cluster" "default" { network_config { network = data.google_compute_network.default.id } + + 
deletion_protection = false } data "google_project" "project" {} @@ -445,6 +455,8 @@ resource "google_alloydb_cluster" "default" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } data "google_project" "project" {} @@ -496,6 +508,8 @@ resource "google_alloydb_cluster" "default" { network = data.google_compute_network.default.id allocated_ip_range = data.google_compute_global_address.private_ip_alloc.name } + + deletion_protection = false } data "google_compute_network" "default" { @@ -635,6 +649,8 @@ resource "google_alloydb_cluster" "default" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } data "google_project" "project" {} @@ -666,6 +682,8 @@ resource "google_alloydb_cluster" "default" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } data "google_project" "project" {} @@ -797,6 +815,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } + + deletion_protection = false } data "google_project" "project" {} @@ -835,6 +855,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } + + deletion_protection = false } data "google_project" "project" {} @@ -890,6 +912,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } + + deletion_protection = false } data "google_project" "project" {} `, context) @@ -917,6 +941,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } + + deletion_protection = false } data "google_project" "project" {} `, context) @@ -972,6 +998,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } + + deletion_protection = false } data "google_project" "project" {} `, context) @@ -1054,6 +1082,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } + + deletion_protection = false } data "google_project" "project" {} `, context) @@ -1078,6 +1108,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf-test-alloydb-cluster%{random_suffix}" } + + deletion_protection = false } data "google_project" "project" {} `, context) @@ -1132,6 +1164,8 @@ resource "google_alloydb_cluster" "default" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } data "google_project" "project" {} diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_cluster_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_cluster_test.go index 23c9340b6dc0..e3010a2c8454 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_cluster_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_cluster_test.go @@ -28,7 +28,7 @@ func TestAccAlloydbCluster_secondaryClusterMandatoryFields(t *testing.T) { ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", 
"restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -42,6 +42,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -70,6 +72,8 @@ resource "google_alloydb_cluster" "secondary" { primary_cluster_name = google_alloydb_cluster.primary.name } + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } @@ -111,6 +115,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -135,6 +141,8 @@ resource "google_alloydb_cluster" "secondary" { enabled = false } + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } @@ -176,6 +184,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -203,6 +213,8 @@ resource "google_alloydb_cluster" "secondary" { primary_cluster_name = google_alloydb_cluster.primary.name } + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } @@ -244,6 +256,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -272,6 +286,8 @@ resource "google_alloydb_cluster" "secondary" { primary_cluster_name = google_alloydb_cluster.primary.name } + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } @@ -304,7 +320,7 @@ func TestAccAlloydbCluster_secondaryClusterUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterUpdate(context), @@ -313,7 +329,7 @@ func TestAccAlloydbCluster_secondaryClusterUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -327,6 +343,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -359,6 +377,8 @@ resource "google_alloydb_cluster" "secondary" { foo = "bar" } + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } @@ -391,7 +411,7 @@ func TestAccAlloydbCluster_secondaryClusterUsingCMEK(t *testing.T) { ResourceName: 
"google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -405,6 +425,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -437,6 +459,8 @@ resource "google_alloydb_cluster" "secondary" { kms_key_name = "%{kms_key_name}" } + deletion_protection = false + depends_on = [ google_alloydb_instance.primary, google_kms_crypto_key_iam_member.crypto_key @@ -478,7 +502,7 @@ func TestAccAlloydbCluster_secondaryClusterWithNetworkConfig(t *testing.T) { ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -492,6 +516,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = "projects/${data.google_project.project.number}/global/networks/${data.google_compute_network.default.name}" } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -520,6 +546,8 @@ resource "google_alloydb_cluster" "secondary" { primary_cluster_name = google_alloydb_cluster.primary.name } + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } @@ -553,7 +581,7 @@ func TestAccAlloydbCluster_secondaryClusterWithNetworkConfigAndAllocatedIPRange( ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -568,6 +596,8 @@ resource "google_alloydb_cluster" "primary" { network = "projects/${data.google_project.project.number}/global/networks/${data.google_compute_network.default.name}" allocated_ip_range = data.google_compute_global_address.private_ip_alloc.name } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -597,6 +627,8 @@ resource "google_alloydb_cluster" "secondary" { primary_cluster_name = google_alloydb_cluster.primary.name } + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } @@ -634,7 +666,7 @@ func TestAccAlloydbCluster_secondaryClusterPromote(t *testing.T) { ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", 
"cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromote(context), @@ -643,7 +675,7 @@ func TestAccAlloydbCluster_secondaryClusterPromote(t *testing.T) { ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -657,6 +689,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -687,6 +721,8 @@ resource "google_alloydb_cluster" "secondary" { deletion_policy = "FORCE" + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } @@ -720,6 +756,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -743,6 +781,8 @@ resource "google_alloydb_cluster" "secondary" { continuous_backup_config { enabled = false } + + deletion_protection = false } resource "google_alloydb_instance" "secondary" { @@ -789,7 +829,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndSimultaneousUpdate(t *testi ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromoteAndSimultaneousUpdate(context), @@ -798,7 +838,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndSimultaneousUpdate(t *testi ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -812,6 +852,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -839,6 +881,8 @@ resource "google_alloydb_cluster" 
"secondary" { labels = { foo = "bar" } + + deletion_protection = false } resource "google_alloydb_instance" "secondary" { @@ -885,7 +929,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndDeleteOriginalPrimary(t *te ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromote(context), @@ -894,7 +938,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndDeleteOriginalPrimary(t *te ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromoteAndDeleteOriginalPrimary(context), @@ -903,7 +947,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndDeleteOriginalPrimary(t *te ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -922,6 +966,8 @@ resource "google_alloydb_cluster" "secondary" { continuous_backup_config { enabled = false } + + deletion_protection = false } resource "google_alloydb_instance" "secondary" { @@ -968,7 +1014,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromote(context), @@ -977,7 +1023,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", 
"terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromoteAndUpdate(context), @@ -986,7 +1032,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndUpdate(t *testing.T) { ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -1000,6 +1046,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -1028,6 +1076,7 @@ resource "google_alloydb_cluster" "secondary" { foo = "bar" } + deletion_protection = false } resource "google_alloydb_instance" "secondary" { @@ -1074,7 +1123,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteWithNetworkConfigAndAllocatedI ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromoteWithNetworkConfigAndAllocatedIPRange(context), @@ -1083,7 +1132,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteWithNetworkConfigAndAllocatedI ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -1098,6 +1147,8 @@ resource "google_alloydb_cluster" "primary" { network = "projects/${data.google_project.project.number}/global/networks/${data.google_compute_network.default.name}" allocated_ip_range = data.google_compute_global_address.private_ip_alloc.name } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -1129,6 +1180,8 @@ resource "google_alloydb_cluster" "secondary" { deletion_policy = "FORCE" + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } @@ -1167,6 +1220,8 @@ resource "google_alloydb_cluster" "primary" { network = "projects/${data.google_project.project.number}/global/networks/${data.google_compute_network.default.name}" 
allocated_ip_range = data.google_compute_global_address.private_ip_alloc.name } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -1191,6 +1246,8 @@ resource "google_alloydb_cluster" "secondary" { continuous_backup_config { enabled = false } + + deletion_protection = false } resource "google_alloydb_instance" "secondary" { @@ -1242,7 +1299,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndAddAndDeleteAutomatedBackup ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromote(context), @@ -1251,7 +1308,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndAddAndDeleteAutomatedBackup ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromoteAndAddAutomatedBackupPolicyAndInitialUser(context), @@ -1260,7 +1317,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndAddAndDeleteAutomatedBackup ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromote(context), @@ -1269,7 +1326,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndAddAndDeleteAutomatedBackup ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -1283,6 +1340,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -1336,6 +1395,8 @@ resource "google_alloydb_cluster" "secondary" { test = 
"tf-test-alloydb-secondary-cluster%{random_suffix}" } } + + deletion_protection = false } resource "google_alloydb_instance" "secondary" { @@ -1382,7 +1443,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndDeleteTimeBasedRetentionPol ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromote(context), @@ -1391,7 +1452,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndDeleteTimeBasedRetentionPol ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromoteWithTimeBasedRetentionPolicy(context), @@ -1400,7 +1461,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndDeleteTimeBasedRetentionPol ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromoteWithoutTimeBasedRetentionPolicy(context), @@ -1409,7 +1470,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndDeleteTimeBasedRetentionPol ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -1423,6 +1484,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -1471,6 +1534,9 @@ resource "google_alloydb_cluster" "secondary" { retention_period = "4.5s" } } + + deletion_protection = false + lifecycle { ignore_changes = [ automated_backup_policy[0].time_based_retention @@ -1508,6 +1574,8 @@ resource "google_alloydb_cluster" "primary" { 
network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -1553,6 +1621,9 @@ resource "google_alloydb_cluster" "secondary" { } } } + + deletion_protection = false + lifecycle { ignore_changes = [ automated_backup_policy[0].time_based_retention @@ -1604,7 +1675,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndAddContinuousBackupConfig(t ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromote(context), @@ -1613,7 +1684,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndAddContinuousBackupConfig(t ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, { Config: testAccAlloydbCluster_secondaryClusterPromoteAndAddContinuousBackupConfig(context), @@ -1622,7 +1693,7 @@ func TestAccAlloydbCluster_secondaryClusterPromoteAndAddContinuousBackupConfig(t ResourceName: "google_alloydb_cluster.secondary", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, + ImportStateVerifyIgnore: []string{"deletion_protection", "initial_user", "restore_backup_source", "restore_continuous_backup_source", "cluster_id", "location", "deletion_policy", "labels", "annotations", "terraform_labels", "reconciling"}, }, }, }) @@ -1636,6 +1707,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -1661,6 +1734,7 @@ resource "google_alloydb_cluster" "secondary" { recovery_window_days = 14 } + deletion_protection = false } resource "google_alloydb_instance" "secondary" { diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_instance_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_instance_test.go index 9f57d3cfb9fd..4554df032a82 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_instance_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_secondary_instance_test.go @@ -51,6 +51,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -81,6 +83,8 @@ resource 
"google_alloydb_cluster" "secondary" { deletion_policy = "FORCE" + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } @@ -110,6 +114,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -140,6 +146,8 @@ resource "google_alloydb_cluster" "secondary" { deletion_policy = "FORCE" + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } @@ -196,6 +204,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -226,6 +236,8 @@ resource "google_alloydb_cluster" "secondary" { deletion_policy = "FORCE" + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } @@ -294,6 +306,8 @@ resource "google_alloydb_cluster" "primary" { network = data.google_compute_network.default.id allocated_ip_range = data.google_compute_global_address.private_ip_alloc.name } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -325,6 +339,8 @@ resource "google_alloydb_cluster" "secondary" { deletion_policy = "FORCE" + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } @@ -394,6 +410,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -424,6 +442,8 @@ resource "google_alloydb_cluster" "secondary" { deletion_policy = "FORCE" + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } @@ -493,6 +513,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -523,6 +545,8 @@ resource "google_alloydb_cluster" "secondary" { deletion_policy = "FORCE" + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } @@ -586,6 +610,8 @@ resource "google_alloydb_cluster" "primary" { network_config { network = data.google_compute_network.default.id } + + deletion_protection = false } resource "google_alloydb_instance" "primary" { @@ -616,6 +642,8 @@ resource "google_alloydb_cluster" "secondary" { deletion_policy = "FORCE" + deletion_protection = false + depends_on = [google_alloydb_instance.primary] } diff --git a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_user_test.go b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_user_test.go index 846a0bc53f02..f7a41a81df3f 100644 --- a/mmv1/third_party/terraform/services/alloydb/resource_alloydb_user_test.go +++ b/mmv1/third_party/terraform/services/alloydb/resource_alloydb_user_test.go @@ -60,6 +60,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf_test_cluster_secret%{random_suffix}" } + + deletion_protection = false } data "google_project" "project" {} @@ -132,6 +134,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf_test_cluster_secret%{random_suffix}" } + + deletion_protection = false } data "google_project" "project" {} @@ -202,6 +206,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "tf_test_cluster_secret%{random_suffix}" } + + deletion_protection = false } data "google_project" "project" {} data "google_compute_network" 
"default" { diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index a913d137eefc..0995605c9c1b 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -112,6 +112,13 @@ Description of the change and how users should adjust their configuration (if ne ## Resources +## Resource: `google_alloydb_cluster` + +### Cluster deletion now prevented by default with `deletion_protection` + +The field `deletion_protection` has been added with a default value of `true`. This field prevents +Terraform from destroying or recreating the cluster during `terraform apply`. In 7.0.0, existing clusters will have +`deletion_protection` set to `true` during the next refresh unless otherwise set in configuration. ## Resource: `google_beyondcorp_application` is now removed diff --git a/mmv1/third_party/tgc/tests/data/example_alloydb_instance.tf b/mmv1/third_party/tgc/tests/data/example_alloydb_instance.tf index b6803e2986af..fbff8402bac3 100644 --- a/mmv1/third_party/tgc/tests/data/example_alloydb_instance.tf +++ b/mmv1/third_party/tgc/tests/data/example_alloydb_instance.tf @@ -22,6 +22,8 @@ resource "google_alloydb_cluster" "default" { initial_user { password = "alloydb-cluster" } + + deletion_protection = false } resource "google_alloydb_instance" "default" { From 40b7846650d554abde62ca7713d906ca509c6fef Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Mon, 18 Aug 2025 12:49:00 -0700 Subject: [PATCH 027/201] Remove post_startup_script_config field from google_colab_runtime_template resource (#14873) --- mmv1/products/colab/RuntimeTemplate.yaml | 18 ------------------ .../guides/version_7_upgrade.html.markdown | 6 ++++++ 2 files changed, 6 insertions(+), 18 deletions(-) diff --git a/mmv1/products/colab/RuntimeTemplate.yaml b/mmv1/products/colab/RuntimeTemplate.yaml index 02ee8dadfa26..e4008c309467 100644 --- a/mmv1/products/colab/RuntimeTemplate.yaml +++ b/mmv1/products/colab/RuntimeTemplate.yaml @@ -209,21 +209,3 @@ properties: If a variable cannot be resolved, the reference in the input string will be unchanged. The $(VAR_NAME) syntax can be escaped with a double $$, ie: $$(VAR_NAME). Escaped references will never be expanded, regardless of whether the variable exists or not.' - - name: 'postStartupScriptConfig' - deprecation_message: '`post_startup_script_config` is deprecated and will be removed in a future major release. New resource creation with this field is unavailable at this time.' - type: NestedObject - description: 'Post startup script config.' - properties: - - name: 'postStartupScript' - type: String - description: 'Post startup script to run after runtime is started.' - - name: 'postStartupScriptUrl' - type: String - description: 'Post startup script url to download. Example: https://bucket/script.sh.' - - name: 'postStartupScriptBehavior' - type: Enum - description: 'Post startup script behavior that defines download and execution behavior.' 
- enum_values: - - 'RUN_ONCE' - - 'RUN_EVERY_START' - - 'DOWNLOAD_AND_RUN_EVERY_START' diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 0995605c9c1b..a2d922354caa 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -181,6 +181,12 @@ To reflect the new type explicitly, surround the current integer value in quotes Remove `description` from your configuration after upgrade. +## Resource: `google_colab_runtime_template` + +### `post_startup_script_config` is now removed. + +Remove `post_startup_script_config` from your configuration after upgrade. + ## Resource: `google_network_services_lb_traffic_extension` ### `load_balancing_scheme` is now required From eb796f473665acf0648df99ae2f3f6783208a258 Mon Sep 17 00:00:00 2001 From: Aman Mahendroo <30946991+amanMahendroo@users.noreply.github.com> Date: Tue, 19 Aug 2025 05:35:41 +0530 Subject: [PATCH 028/201] Convert advertised_ip_ranges from List to Set (#14854) Co-authored-by: Chris Hawk --- mmv1/products/compute/Router.yaml | 2 +- .../terraform/constants/router.go.tmpl | 2 +- .../compute_router_range.go.tmpl | 42 ------------------- .../resource_compute_router_peer.go.tmpl | 41 ++++++++++-------- 4 files changed, 25 insertions(+), 62 deletions(-) delete mode 100644 mmv1/templates/terraform/custom_flatten/compute_router_range.go.tmpl diff --git a/mmv1/products/compute/Router.yaml b/mmv1/products/compute/Router.yaml index 846c992203d5..a800a8b0a04b 100644 --- a/mmv1/products/compute/Router.yaml +++ b/mmv1/products/compute/Router.yaml @@ -171,7 +171,7 @@ properties: ranges will be advertised in addition to any specified groups. Leave this field blank to advertise no custom IP ranges. send_empty_value: true - custom_flatten: 'templates/terraform/custom_flatten/compute_router_range.go.tmpl' + is_set: true item_type: type: NestedObject properties: diff --git a/mmv1/templates/terraform/constants/router.go.tmpl b/mmv1/templates/terraform/constants/router.go.tmpl index f4f93dfb5b2f..c4c387b890d6 100644 --- a/mmv1/templates/terraform/constants/router.go.tmpl +++ b/mmv1/templates/terraform/constants/router.go.tmpl @@ -16,7 +16,7 @@ func resourceComputeRouterCustomDiff(_ context.Context, diff *schema.ResourceDif block := diff.Get("bgp.0").(map[string]interface{}) advertiseMode := block["advertise_mode"] advertisedGroups := block["advertised_groups"].([]interface{}) - advertisedIPRanges := block["advertised_ip_ranges"].([]interface{}) + advertisedIPRanges := block["advertised_ip_ranges"].(*schema.Set).List() if advertiseMode == "DEFAULT" && len(advertisedGroups) != 0 { return fmt.Errorf("Error in bgp: advertised_groups cannot be specified when using advertise_mode DEFAULT") diff --git a/mmv1/templates/terraform/custom_flatten/compute_router_range.go.tmpl b/mmv1/templates/terraform/custom_flatten/compute_router_range.go.tmpl deleted file mode 100644 index a4afe3f63d8a..000000000000 --- a/mmv1/templates/terraform/custom_flatten/compute_router_range.go.tmpl +++ /dev/null @@ -1,42 +0,0 @@ -{{/* - The license inside this block applies to this file - Copyright 2024 Google Inc. - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. 
- You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -*/ -}} -func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - if v == nil { - return v - } - l := v.([]interface{}) - apiData := make([]map[string]interface{}, 0, len(l)) - for _, raw := range l { - original := raw.(map[string]interface{}) - if len(original) < 1 { - // Do not include empty json objects coming back from the api - continue - } - apiData = append(apiData, map[string]interface{}{ - "description": original["description"], - "range": original["range"], - }) - } - configData := []map[string]interface{}{} - if v, ok := d.GetOk("bgp.0.advertised_ip_ranges"); ok { - for _, item := range v.([]interface{}) { - configData = append(configData, item.(map[string]interface{})) - } - } - sorted, err := tpgresource.SortMapsByConfigOrder(configData, apiData, "range") - if err != nil { - log.Printf("[ERROR] Could not support API response for advertisedIpRanges.0.range: %s", err) - return apiData - } - return sorted -} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_router_peer.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_router_peer.go.tmpl index c5755139504b..197639163cd3 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_router_peer.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_router_peer.go.tmpl @@ -112,28 +112,14 @@ Leave this field blank to advertise no custom groups.`, }, }, "advertised_ip_ranges": { - Type: schema.TypeList, + Type: schema.TypeSet, Optional: true, Description: `User-specified list of individual IP ranges to advertise in custom mode. This field can only be populated if advertiseMode is 'CUSTOM' and is advertised to all peers of the router. These IP ranges will be advertised in addition to any specified groups. Leave this field blank to advertise no custom IP ranges.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "range": { - Type: schema.TypeString, - Required: true, - Description: `The IP range to advertise. The value must be a -CIDR-formatted string.`, - }, - "description": { - Type: schema.TypeString, - Optional: true, - Description: `User-specified description for the IP range.`, - }, - }, - }, + Elem: computeRouterBgpPeerAdvertisedIpRangesSchema(), }, "advertised_route_priority": { Type: schema.TypeInt, @@ -390,6 +376,24 @@ Must be unique within a router. Must be referenced by exactly one bgpPeer. Must } } +func computeRouterBgpPeerAdvertisedIpRangesSchema() *schema.Resource { + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + "range": { + Type: schema.TypeString, + Required: true, + Description: `The IP range to advertise. 
The value must be a +CIDR-formatted string.`, + }, + "description": { + Type: schema.TypeString, + Optional: true, + Description: `User-specified description for the IP range.`, + }, + }, + } +} + func resourceComputeRouterBgpPeerCreate(d *schema.ResourceData, meta interface{}) error { config := meta.(*transport_tpg.Config) userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) @@ -1150,14 +1154,14 @@ func flattenNestedComputeRouterBgpPeerAdvertisedIpRanges(v interface{}, d *schem return v } l := v.([]interface{}) - transformed := make([]interface{}, 0, len(l)) + transformed := schema.NewSet(schema.HashResource(computeRouterBgpPeerAdvertisedIpRangesSchema()), []interface{}{}) for _, raw := range l { original := raw.(map[string]interface{}) if len(original) < 1 { // Do not include empty json objects coming back from the api continue } - transformed = append(transformed, map[string]interface{}{ + transformed.Add(map[string]interface{}{ "range": flattenNestedComputeRouterBgpPeerAdvertisedIpRangesRange(original["range"], d, config), "description": flattenNestedComputeRouterBgpPeerAdvertisedIpRangesDescription(original["description"], d, config), }) @@ -1388,6 +1392,7 @@ func expandNestedComputeRouterBgpPeerAdvertisedGroups(v interface{}, d tpgresour } func expandNestedComputeRouterBgpPeerAdvertisedIpRanges(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + v = v.(*schema.Set).List() l := v.([]interface{}) req := make([]interface{}, 0, len(l)) for _, raw := range l { From 6593c622d2b9ca4bbfaa212ec4b2cfe793e0d536 Mon Sep 17 00:00:00 2001 From: Shrishty Chandra <3104562+shrishty@users.noreply.github.com> Date: Wed, 20 Aug 2025 20:19:22 +0530 Subject: [PATCH 029/201] Update go.mod and go.sum (#14894) Co-authored-by: Shrishty Chandra --- mmv1/third_party/terraform/go.mod | 22 +++++++------- mmv1/third_party/terraform/go.sum | 50 +++++++++++++++---------------- 2 files changed, 35 insertions(+), 37 deletions(-) diff --git a/mmv1/third_party/terraform/go.mod b/mmv1/third_party/terraform/go.mod index 9282c683f316..57a7b1520a17 100644 --- a/mmv1/third_party/terraform/go.mod +++ b/mmv1/third_party/terraform/go.mod @@ -3,7 +3,7 @@ module github.com/hashicorp/terraform-provider-google go 1.23.0 require ( - cloud.google.com/go/auth v0.16.3 + cloud.google.com/go/auth v0.16.4 cloud.google.com/go/auth/oauth2adapt v0.2.8 cloud.google.com/go/bigtable v1.37.0 github.com/GoogleCloudPlatform/declarative-resource-client-library v1.81.0 @@ -32,12 +32,12 @@ require ( github.com/stretchr/testify v1.10.0 go4.org/netipx v0.0.0-20231129151722-fdeea329fbba golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 - golang.org/x/net v0.42.0 + golang.org/x/net v0.43.0 golang.org/x/oauth2 v0.30.0 - google.golang.org/api v0.245.0 - google.golang.org/genproto/googleapis/rpc v0.0.0-20250728155136-f173205681a0 + google.golang.org/api v0.247.0 + google.golang.org/genproto/googleapis/rpc v0.0.0-20250804133106-a7a43d27e69b google.golang.org/grpc v1.74.2 - google.golang.org/protobuf v1.36.6 + google.golang.org/protobuf v1.36.7 gopkg.in/yaml.v2 v2.4.0 ) @@ -45,7 +45,7 @@ require ( bitbucket.org/creachadair/stringset v0.0.8 // indirect cel.dev/expr v0.24.0 // indirect cloud.google.com/go v0.121.0 // indirect - cloud.google.com/go/compute/metadata v0.7.0 // indirect + cloud.google.com/go/compute/metadata v0.8.0 // indirect cloud.google.com/go/iam v1.5.2 // indirect cloud.google.com/go/longrunning v0.6.7 // indirect cloud.google.com/go/monitoring v1.24.2 // indirect 
@@ -108,13 +108,13 @@ require ( go.opentelemetry.io/otel/sdk v1.36.0 // indirect go.opentelemetry.io/otel/sdk/metric v1.36.0 // indirect go.opentelemetry.io/otel/trace v1.36.0 // indirect - golang.org/x/crypto v0.40.0 // indirect - golang.org/x/mod v0.25.0 // indirect + golang.org/x/crypto v0.41.0 // indirect + golang.org/x/mod v0.26.0 // indirect golang.org/x/sync v0.16.0 // indirect - golang.org/x/sys v0.34.0 // indirect - golang.org/x/text v0.27.0 // indirect + golang.org/x/sys v0.35.0 // indirect + golang.org/x/text v0.28.0 // indirect golang.org/x/time v0.12.0 // indirect - golang.org/x/tools v0.34.0 // indirect + golang.org/x/tools v0.35.0 // indirect google.golang.org/appengine v1.6.8 // indirect google.golang.org/genproto v0.0.0-20250603155806-513f23925822 // indirect google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 // indirect diff --git a/mmv1/third_party/terraform/go.sum b/mmv1/third_party/terraform/go.sum index 2ed259c385dc..7afea121c03c 100644 --- a/mmv1/third_party/terraform/go.sum +++ b/mmv1/third_party/terraform/go.sum @@ -5,14 +5,14 @@ cel.dev/expr v0.24.0/go.mod h1:hLPLo1W4QUmuYdA72RBX06QTs6MXw941piREPl3Yfiw= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.121.0 h1:pgfwva8nGw7vivjZiRfrmglGWiCJBP+0OmDpenG/Fwg= cloud.google.com/go v0.121.0/go.mod h1:rS7Kytwheu/y9buoDmu5EIpMMCI4Mb8ND4aeN4Vwj7Q= -cloud.google.com/go/auth v0.16.3 h1:kabzoQ9/bobUmnseYnBO6qQG7q4a/CffFRlJSxv2wCc= -cloud.google.com/go/auth v0.16.3/go.mod h1:NucRGjaXfzP1ltpcQ7On/VTZ0H4kWB5Jy+Y9Dnm76fA= +cloud.google.com/go/auth v0.16.4 h1:fXOAIQmkApVvcIn7Pc2+5J8QTMVbUGLscnSVNl11su8= +cloud.google.com/go/auth v0.16.4/go.mod h1:j10ncYwjX/g3cdX7GpEzsdM+d+ZNsXAbb6qXA7p1Y5M= cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc= cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c= cloud.google.com/go/bigtable v1.37.0 h1:Q+x7y04lQ0B+WXp03wc1/FLhFt4CwcQdkwWT0M4Jp3w= cloud.google.com/go/bigtable v1.37.0/go.mod h1:HXqddP6hduwzrtiTCqZPpj9ij4hGZb4Zy1WF/dT+yaU= -cloud.google.com/go/compute/metadata v0.7.0 h1:PBWF+iiAerVNe8UCHxdOt6eHLVc3ydFeOCw78U8ytSU= -cloud.google.com/go/compute/metadata v0.7.0/go.mod h1:j5MvL9PprKL39t166CoB1uVHfQMs4tFQZZcKwksXUjo= +cloud.google.com/go/compute/metadata v0.8.0 h1:HxMRIbao8w17ZX6wBnjhcDkW6lTFpgcaobyVfZWqRLA= +cloud.google.com/go/compute/metadata v0.8.0/go.mod h1:sYOGTp851OV9bOFJ9CH7elVvyzopvWQFNNghtDQ/Biw= cloud.google.com/go/iam v1.5.2 h1:qgFRAGEmd8z6dJ/qyEchAuL9jpswyODjA2lS+w234g8= cloud.google.com/go/iam v1.5.2/go.mod h1:SE1vg0N81zQqLzQEwxL2WI6yhetBdbNQuTvIKCSkUHE= cloud.google.com/go/longrunning v0.6.7 h1:IGtfDWHhQCgCjwQjV9iiLnUta9LBCo8R9QmAFsS/PrE= @@ -22,8 +22,6 @@ cloud.google.com/go/monitoring v1.24.2/go.mod h1:x7yzPWcgDRnPEv3sI+jJGBkwl5qINf+ dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.80.0 h1:ZpQrm5i+ppVxTQjp6lU2APyAejavB/d7G2gZNu2RxsU= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.80.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= github.com/GoogleCloudPlatform/declarative-resource-client-library v1.81.0 h1:zTRBYNu7nk3TMbiRfkBcRNzw4cOeym0z1GduDYNyRyE= github.com/GoogleCloudPlatform/declarative-resource-client-library 
v1.81.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= @@ -300,8 +298,8 @@ golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACk golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.40.0 h1:r4x+VvoG5Fm+eJcxMaY8CQM7Lb0l1lsmjGBQ6s8BfKM= -golang.org/x/crypto v0.40.0/go.mod h1:Qr1vMER5WyS2dfPHAlsOj01wgLbsyWtFn/aY+5+ZdxY= +golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4= +golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 h1:2dVuKD2vS7b0QIHQbpyTISPd0LeHDbnYEryqj5Q1ug8= golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56/go.mod h1:M4RDyNAINzryxdtnbRXRL/OHtkFuWGRjvuhBJpk2IlY= @@ -311,8 +309,8 @@ golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHl golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/mod v0.25.0 h1:n7a+ZbQKQA/Ysbyb0/6IbB1H/X41mKgbhfv7AfG/44w= -golang.org/x/mod v0.25.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= +golang.org/x/mod v0.26.0 h1:EGMPT//Ezu+ylkCijjPc+f4Aih7sZvaAr+O3EHBxvZg= +golang.org/x/mod v0.26.0/go.mod h1:/j6NAhSk8iQ723BGAUyoAcn7SlD7s15Dp9Nd/SfeaFQ= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -323,8 +321,8 @@ golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLL golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.42.0 h1:jzkYrhi3YQWD6MLBJcsklgQsoAcw89EcZbJw8Z614hs= -golang.org/x/net v0.42.0/go.mod h1:FF1RA5d3u7nAYA4z2TkclSCKh68eSXtiFwcWQpPXdt8= +golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE= +golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI= golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU= @@ -353,18 +351,18 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys 
v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.34.0 h1:H5Y5sJ2L2JRdyv7ROF1he/lPdvFsd0mJHFw2ThKHxLA= -golang.org/x/sys v0.34.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI= +golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.33.0 h1:NuFncQrRcaRvVmgRkvM3j/F00gWIAlcmlB8ACEKmGIg= -golang.org/x/term v0.33.0/go.mod h1:s18+ql9tYWp1IfpV9DmCtQDDSRBUjKaw9M1eAv5UeF0= +golang.org/x/term v0.34.0 h1:O/2T7POpk0ZZ7MAzMeWFSg6S5IpWd/RXDlM9hgM3DR4= +golang.org/x/term v0.34.0/go.mod h1:5jC53AEywhIVebHgPVeg0mj8OD3VO9OzclacVrqpaAw= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= -golang.org/x/text v0.27.0 h1:4fGWRpyh641NLlecmyl4LOe6yDdfaYNrGb2zdfo4JV4= -golang.org/x/text v0.27.0/go.mod h1:1D28KMCvyooCX9hBiosv5Tz/+YLxj0j7XhWjpSUF7CU= +golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng= +golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU= golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE= golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -376,14 +374,14 @@ golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/tools v0.34.0 h1:qIpSLOxeCYGg9TrcJokLBG4KFA6d795g0xkBkiESGlo= -golang.org/x/tools v0.34.0/go.mod h1:pAP9OwEaY1CAW3HOmg3hLZC5Z0CCmzjAF2UQMSqNARg= +golang.org/x/tools v0.35.0 h1:mBffYraMEf7aa0sB+NuKnuCy8qI/9Bughn8dC2Gu5r0= +golang.org/x/tools v0.35.0/go.mod h1:NKdj5HkL/73byiZSJjqJgKn3ep7KjFkBOkR/Hps3VPw= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.245.0 h1:YliGvz1rjXB+sTLNIST6Ffeji9WlRdLQ+LPl9ruSa5Y= -google.golang.org/api v0.245.0/go.mod h1:dMVhVcylamkirHdzEBAIQWUCgqY885ivNeZYd7VAVr8= +google.golang.org/api v0.247.0 h1:tSd/e0QrUlLsrwMKmkbQhYVa109qIintOls2Wh6bngc= +google.golang.org/api v0.247.0/go.mod h1:r1qZOPmxXffXg6xS5uhx16Fa/UFY8QU/K4bfKrnvovM= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.8 
h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= @@ -395,8 +393,8 @@ google.golang.org/genproto v0.0.0-20250603155806-513f23925822 h1:rHWScKit0gvAPuO google.golang.org/genproto v0.0.0-20250603155806-513f23925822/go.mod h1:HubltRL7rMh0LfnQPkMH4NPDFEWp0jw3vixw7jEM53s= google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 h1:oWVWY3NzT7KJppx2UKhKmzPq4SRe0LdCijVRwvGeikY= google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822/go.mod h1:h3c4v36UTKzUiuaOKQ6gr3S+0hovBtUrXzTG/i3+XEc= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250728155136-f173205681a0 h1:MAKi5q709QWfnkkpNQ0M12hYJ1+e8qYVDyowc4U1XZM= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250728155136-f173205681a0/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250804133106-a7a43d27e69b h1:zPKJod4w6F1+nRGDI9ubnXYhU9NSWoFAijkHkUXeTK8= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250804133106-a7a43d27e69b/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= @@ -406,8 +404,8 @@ google.golang.org/grpc v1.74.2 h1:WoosgB65DlWVC9FqI82dGsZhWFNBSLjQ84bjROOpMu4= google.golang.org/grpc v1.74.2/go.mod h1:CtQ+BGjaAIXHs/5YS3i473GqwBBa1zGQNevxdeBEXrM= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= -google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= +google.golang.org/protobuf v1.36.7 h1:IgrO7UwFQGJdRNXH/sQux4R1Dj1WAKcLElzeeRaXV2A= +google.golang.org/protobuf v1.36.7/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= From 6a26598f66d56d8c5883dc4eb2091b629d1c0fc7 Mon Sep 17 00:00:00 2001 From: Daniel Liao Date: Thu, 21 Aug 2025 01:47:11 +1000 Subject: [PATCH 030/201] Add support to pause google_cloud_tasks_queue resources (#14278) Signed-off-by: Daniel Liao --- mmv1/products/cloudtasks/Queue.yaml | 23 +++++++ .../cloud_tasks_queue_state.go.tmpl | 16 +++++ .../cloud_tasks_queue_state.go.tmpl | 35 ++++++++++ .../resource_cloud_tasks_queue_test.go | 68 ++++++++++++++++++- 4 files changed, 141 insertions(+), 1 deletion(-) create mode 100644 mmv1/templates/terraform/post_create/cloud_tasks_queue_state.go.tmpl create mode 100644 mmv1/templates/terraform/post_update/cloud_tasks_queue_state.go.tmpl diff --git a/mmv1/products/cloudtasks/Queue.yaml b/mmv1/products/cloudtasks/Queue.yaml index ed7b41ca2ec8..d1f4cff5abab 100644 --- a/mmv1/products/cloudtasks/Queue.yaml +++ b/mmv1/products/cloudtasks/Queue.yaml @@ -34,6 +34,8 @@ iam_policy: - '{{name}}' custom_code: constants: 'templates/terraform/constants/cloud_tasks_retry_config_custom_diff.go.tmpl' + post_create: 'templates/terraform/post_create/cloud_tasks_queue_state.go.tmpl' + post_update: 'templates/terraform/post_update/cloud_tasks_queue_state.go.tmpl' examples: - 
name: 'queue_basic' primary_resource_id: 'default' @@ -56,6 +58,18 @@ examples: primary_resource_id: 'http_target_oauth' vars: name: 'cloud-tasks-queue-http-target-oauth' +virtual_fields: + - name: 'desired_state' + type: Enum + description: | + The desired state of the queue. Use this to pause and resume the queue. + + * RUNNING: The queue is running. Tasks can be dispatched. + * PAUSED: The queue is paused. Tasks are not dispatched but can be added to the queue. + default_value: 'RUNNING' + enum_values: + - 'RUNNING' + - 'PAUSED' parameters: - name: 'location' type: String @@ -207,6 +221,15 @@ properties: This field may contain any value between 0.0 and 1.0, inclusive. 0.0 is the default and means that no operations are logged. required: true + - name: 'state' + type: Enum + description: | + The current state of the queue. + output: true + enum_values: + - 'RUNNING' + - 'PAUSED' + - 'DISABLED' - name: 'httpTarget' type: NestedObject description: Modifies HTTP target for HTTP tasks. diff --git a/mmv1/templates/terraform/post_create/cloud_tasks_queue_state.go.tmpl b/mmv1/templates/terraform/post_create/cloud_tasks_queue_state.go.tmpl new file mode 100644 index 000000000000..7b21cc1c06f1 --- /dev/null +++ b/mmv1/templates/terraform/post_create/cloud_tasks_queue_state.go.tmpl @@ -0,0 +1,16 @@ +// Handle desired state after queue creation +if v, ok := d.GetOk("desired_state"); ok && v.(string) == "PAUSED" { + pauseUrl := fmt.Sprintf("%s%s:pause", config.CloudTasksBasePath, id) + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "POST", + Project: billingProject, + RawURL: pauseUrl, + UserAgent: userAgent, + }) + + if err != nil { + return fmt.Errorf("Error pausing queue %q: %s", d.Id(), err) + } +} \ No newline at end of file diff --git a/mmv1/templates/terraform/post_update/cloud_tasks_queue_state.go.tmpl b/mmv1/templates/terraform/post_update/cloud_tasks_queue_state.go.tmpl new file mode 100644 index 000000000000..59e65255432c --- /dev/null +++ b/mmv1/templates/terraform/post_update/cloud_tasks_queue_state.go.tmpl @@ -0,0 +1,35 @@ +// Handle desired state changes +if d.HasChange("desired_state") { + old, new := d.GetChange("desired_state") + + if old.(string) != new.(string) { + var action string + + actionUrl, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}CloudTasksBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}location{{"}}"}}/queues/{{"{{"}}name{{"}}"}}") + if err != nil { + return err + } + + if new.(string) == "PAUSED" { + actionUrl = fmt.Sprintf("%s:pause", actionUrl) + action = "pausing" + } else if new.(string) == "RUNNING" { + actionUrl = fmt.Sprintf("%s:resume", actionUrl) + action = "resuming" + } + + if actionUrl != "" { + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "POST", + Project: billingProject, + RawURL: actionUrl, + UserAgent: userAgent, + }) + + if err != nil { + return fmt.Errorf("Error %s queue %q: %s", action, d.Id(), err) + } + } + } +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/cloudtasks/resource_cloud_tasks_queue_test.go b/mmv1/third_party/terraform/services/cloudtasks/resource_cloud_tasks_queue_test.go index 707509e9033e..eb7d61fe65ec 100644 --- a/mmv1/third_party/terraform/services/cloudtasks/resource_cloud_tasks_queue_test.go +++ b/mmv1/third_party/terraform/services/cloudtasks/resource_cloud_tasks_queue_test.go @@ -2,9 +2,10 @@ package cloudtasks_test import ( "fmt" - 
"github.com/hashicorp/terraform-provider-google/google/acctest" "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) @@ -173,6 +174,37 @@ func TestAccCloudTasksQueue_HttpTargetOAuth_update(t *testing.T) { }) } +func TestAccCloudTasksQueue_paused(t *testing.T) { + t.Parallel() + + name := "cloudtasksqueuetest-" + acctest.RandString(t, 10) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccCloudTasksQueue_full(name), + }, + { + ResourceName: "google_cloud_tasks_queue.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"app_engine_routing_override.0.service", "app_engine_routing_override.0.version", "app_engine_routing_override.0.instance"}, + }, + { + Config: testAccCloudTasksQueue_paused(name), + }, + { + ResourceName: "google_cloud_tasks_queue.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"app_engine_routing_override.0.service", "app_engine_routing_override.0.version", "app_engine_routing_override.0.instance", "desired_state"}, + }, + }, + }) +} + func testAccCloudTasksQueue_basic(name string) string { return fmt.Sprintf(` resource "google_cloud_tasks_queue" "default" { @@ -192,6 +224,7 @@ func testAccCloudTasksQueue_full(name string) string { resource "google_cloud_tasks_queue" "default" { name = "%s" location = "us-central1" + desired_state = "RUNNING" app_engine_routing_override { service = "worker" @@ -378,3 +411,36 @@ resource "google_service_account" "test" { `, name, serviceAccountID) } + +func testAccCloudTasksQueue_paused(name string) string { + return fmt.Sprintf(` +resource "google_cloud_tasks_queue" "default" { + name = "%s" + location = "us-central1" + desired_state = "PAUSED" + + app_engine_routing_override { + service = "main" + version = "2.0" + instance = "beta" + } + + rate_limits { + max_concurrent_dispatches = 4 + max_dispatches_per_second = 3 + } + + retry_config { + max_attempts = 6 + max_retry_duration = "5s" + max_backoff = "4s" + min_backoff = "3s" + max_doublings = 2 + } + + stackdriver_logging_config { + sampling_ratio = 0.1 + } +} +`, name) +} From 1de36cd16aac7ee3c96fd95f0d309f58c10a0681 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Wed, 20 Aug 2025 09:25:49 -0700 Subject: [PATCH 031/201] tgc-revival: add google_backup_dr_backup_vault and google_backup_dr_backup_plan (#14907) --- mmv1/products/backupdr/BackupPlan.yaml | 3 +++ mmv1/products/backupdr/BackupVault.yaml | 4 ++++ .../decoders/backup_dr_backup_plan.go.tmpl | 20 +++++++++++++++++++ 3 files changed, 27 insertions(+) create mode 100644 mmv1/templates/tgc_next/decoders/backup_dr_backup_plan.go.tmpl diff --git a/mmv1/products/backupdr/BackupPlan.yaml b/mmv1/products/backupdr/BackupPlan.yaml index c3bc7c401f27..c60029f8850a 100644 --- a/mmv1/products/backupdr/BackupPlan.yaml +++ b/mmv1/products/backupdr/BackupPlan.yaml @@ -29,6 +29,9 @@ timeouts: insert_minutes: 60 update_minutes: 60 delete_minutes: 60 +custom_code: + tgc_decoder: 'templates/tgc_next/decoders/backup_dr_backup_plan.go.tmpl' +include_in_tgc_next_DO_NOT_USE: true examples: - name: 'backup_dr_backup_plan_simple' primary_resource_id: 'my-backup-plan-1' diff --git a/mmv1/products/backupdr/BackupVault.yaml b/mmv1/products/backupdr/BackupVault.yaml index e9c4cc5efa4b..0caeb7dc6f73 100644 --- 
a/mmv1/products/backupdr/BackupVault.yaml +++ b/mmv1/products/backupdr/BackupVault.yaml @@ -39,6 +39,7 @@ async: resource_inside_response: true custom_code: pre_delete: 'templates/terraform/pre_delete/backup_dr_backup_vault.go.tmpl' +include_in_tgc_next_DO_NOT_USE: true examples: - name: 'backup_dr_backup_vault_full' primary_resource_id: 'backup-vault-test' @@ -113,6 +114,7 @@ properties: - name: 'labels' type: KeyValueLabels description: "Optional. Resource labels to represent user provided metadata. " + is_missing_in_cai: true - name: 'createTime' type: String description: 'Output only. The time when the instance was created. ' @@ -167,6 +169,7 @@ properties: type: KeyValueAnnotations description: "Optional. User annotations. See https://google.aip.dev/128#annotations\nStores small amounts of arbitrary data. " + is_missing_in_cai: true - name: 'accessRestriction' type: Enum description: | @@ -182,6 +185,7 @@ properties: - name: 'backupRetentionInheritance' type: Enum ignore_read: true + is_missing_in_cai: true description: | How a backup's enforced retention end time is inherited. Default value is `INHERIT_VAULT_RETENTION` if not provided during creation. enum_values: diff --git a/mmv1/templates/tgc_next/decoders/backup_dr_backup_plan.go.tmpl b/mmv1/templates/tgc_next/decoders/backup_dr_backup_plan.go.tmpl new file mode 100644 index 000000000000..996b7f1a1cbd --- /dev/null +++ b/mmv1/templates/tgc_next/decoders/backup_dr_backup_plan.go.tmpl @@ -0,0 +1,20 @@ +// startHourOfDay is missing in CAI, but has default value 0 in API object +if rules, ok := res["backupRules"].([]interface{}); ok { + for _, raw := range rules { + if rule, ok := raw.(map[string]interface{}); ok { + if raw, ok := rule["standardSchedule"]; ok { + if ss, ok := raw.(map[string]interface{}); ok { + if raw, ok := ss["backupWindow"]; ok { + if bw, ok := raw.(map[string]interface{}); ok { + if _, ok := ss["startHourOfDay"]; !ok { + bw["startHourOfDay"] = 0 + } + } + } + } + } + } + } +} + +return res, nil \ No newline at end of file From 0dd2f1cb08b636481946d17d431ef8f3815764f0 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Wed, 20 Aug 2025 18:29:34 +0200 Subject: [PATCH 032/201] write-only: use `CamelizeProperty` for the expander property variable name instead of `ApiName` (#14906) --- mmv1/api/type.go | 4 +++ .../interconnect_attachment.go.tmpl | 2 +- .../terraform/post_create/labels.tmpl | 6 ++-- mmv1/templates/terraform/resource.go.tmpl | 32 +++++++++---------- mmv1/templates/tgc/resource_converter.go.tmpl | 10 +++--- .../tfplan2cai/resource_converter.go.tmpl | 10 +++--- 6 files changed, 34 insertions(+), 30 deletions(-) diff --git a/mmv1/api/type.go b/mmv1/api/type.go index 078775772585..3e5fad14352e 100644 --- a/mmv1/api/type.go +++ b/mmv1/api/type.go @@ -508,6 +508,10 @@ func (t Type) TitlelizeProperty() string { return google.Camelize(t.Name, "upper") } +func (t Type) CamelizeProperty() string { + return google.Camelize(t.Name, "lower") +} + // If the Prefix field is already set, returns the value. // Otherwise, set the Prefix field and returns the value. 
func (t *Type) GetPrefix() string { diff --git a/mmv1/templates/terraform/post_create/interconnect_attachment.go.tmpl b/mmv1/templates/terraform/post_create/interconnect_attachment.go.tmpl index 6b3b0459d63c..34811f75dd02 100644 --- a/mmv1/templates/terraform/post_create/interconnect_attachment.go.tmpl +++ b/mmv1/templates/terraform/post_create/interconnect_attachment.go.tmpl @@ -1,6 +1,6 @@ {{- if $.HasLabelsField }} -if v, ok := d.GetOkExists("effective_labels"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, labelsProp)) { +if v, ok := d.GetOkExists("effective_labels"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, effectiveLabelsProp)) { labels := d.Get("labels") terraformLables := d.Get("terraform_labels") diff --git a/mmv1/templates/terraform/post_create/labels.tmpl b/mmv1/templates/terraform/post_create/labels.tmpl index 427d911cee87..907acab5277f 100644 --- a/mmv1/templates/terraform/post_create/labels.tmpl +++ b/mmv1/templates/terraform/post_create/labels.tmpl @@ -1,6 +1,6 @@ {{- if $.HasLabelsField }} -if v, ok := d.GetOkExists("effective_labels"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, labelsProp)) { - labels := d.Get("labels") +if v, ok := d.GetOkExists("effective_labels"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, effectiveLabelsProp)) { + userLabels := d.Get("labels") terraformLables := d.Get("terraform_labels") // Labels cannot be set in a create. We'll have to set them here. @@ -50,7 +50,7 @@ if v, ok := d.GetOkExists("effective_labels"); !tpgresource.IsEmptyValue(reflect } // Set back the labels field, as it is needed to decide the value of "labels" in the state in the read function. - if err := d.Set("labels", labels); err != nil { + if err := d.Set("labels", userLabels); err != nil { return fmt.Errorf("Error setting back labels: %s", err) } diff --git a/mmv1/templates/terraform/resource.go.tmpl b/mmv1/templates/terraform/resource.go.tmpl index f209f8c9cbfe..6bed9ba5b917 100644 --- a/mmv1/templates/terraform/resource.go.tmpl +++ b/mmv1/templates/terraform/resource.go.tmpl @@ -186,19 +186,19 @@ func resource{{ $.ResourceName -}}Create(d *schema.ResourceData, meta interface{ obj := make(map[string]interface{}) {{- range $prop := $.SettableProperties }} - {{ $prop.ApiName -}}Prop, err := expand{{ if $.NestedQuery -}}Nested{{ end }}{{ $.ResourceName -}}{{ camelize $prop.Name "upper" -}}({{ if $prop.FlattenObject }}nil{{ else }}d.Get("{{ underscore $prop.Name }}"){{ end }}, d, config) + {{ $prop.CamelizeProperty -}}Prop, err := expand{{ if $.NestedQuery -}}Nested{{ end }}{{ $.ResourceName -}}{{ camelize $prop.Name "upper" -}}({{ if $prop.FlattenObject }}nil{{ else }}d.Get("{{ underscore $prop.Name }}"){{ end }}, d, config) if err != nil { return err -{{- if $prop.SendEmptyValue -}} - } else if v, ok := d.GetOkExists("{{ underscore $prop.Name -}}"); ok || !reflect.DeepEqual(v, {{ $prop.ApiName -}}Prop) { -{{- else if $prop.FlattenObject -}} - } else if !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.ApiName -}}Prop)) { -{{- else -}} - } else if v, ok := d.GetOkExists("{{ underscore $prop.Name -}}"); !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.ApiName -}}Prop)) && (ok || !reflect.DeepEqual(v, {{ $prop.ApiName -}}Prop)) { -{{- end}} - obj["{{ $prop.ApiName -}}"] = {{ $prop.ApiName -}}Prop + {{- if $prop.SendEmptyValue -}} + } else if v, ok := d.GetOkExists("{{ underscore $prop.Name -}}"); ok || !reflect.DeepEqual(v, {{ 
$prop.CamelizeProperty -}}Prop) { + {{- else if $prop.FlattenObject -}} + } else if !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.CamelizeProperty -}}Prop)) { + {{- else -}} + } else if v, ok := d.GetOkExists("{{ underscore $prop.Name -}}"); !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.CamelizeProperty -}}Prop)) && (ok || !reflect.DeepEqual(v, {{ $prop.CamelizeProperty -}}Prop)) { + {{- end}} + obj["{{ $prop.ApiName -}}"] = {{ $prop.CamelizeProperty -}}Prop } -{{- end}} + {{- end}} {{if $.CustomCode.Encoder -}} obj, err = resource{{ $.ResourceName -}}Encoder(d, meta, obj) @@ -723,17 +723,17 @@ func resource{{ $.ResourceName -}}Update(d *schema.ResourceData, meta interface{ obj := make(map[string]interface{}) {{- range $prop := $.UpdateBodyProperties }} {{/* flattened $s won't have something stored in state so instead nil is passed to the next expander. */}} - {{- $prop.ApiName -}}Prop, err := expand{{ if $.NestedQuery -}}Nested{{end}}{{ $.ResourceName -}}{{ camelize $prop.Name "upper" -}}({{ if $prop.FlattenObject }}nil{{else}}d.Get("{{underscore $prop.Name}}"){{ end }}, d, config) + {{- $prop.CamelizeProperty -}}Prop, err := expand{{ if $.NestedQuery -}}Nested{{end}}{{ $.ResourceName -}}{{ camelize $prop.Name "upper" -}}({{ if $prop.FlattenObject }}nil{{else}}d.Get("{{underscore $prop.Name}}"){{ end }}, d, config) if err != nil { return err {{- if $prop.SendEmptyValue -}} - } else if v, ok := d.GetOkExists("{{ underscore $prop.Name -}}"); ok || !reflect.DeepEqual(v, {{ $prop.ApiName -}}Prop) { + } else if v, ok := d.GetOkExists("{{ underscore $prop.Name -}}"); ok || !reflect.DeepEqual(v, {{ $prop.CamelizeProperty -}}Prop) { {{- else if $prop.FlattenObject -}} - } else if !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.ApiName -}}Prop)) { + } else if !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.CamelizeProperty -}}Prop)) { {{- else -}} - } else if v, ok := d.GetOkExists("{{ underscore $prop.Name -}}"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, {{ $prop.ApiName -}}Prop)) { + } else if v, ok := d.GetOkExists("{{ underscore $prop.Name -}}"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, {{ $prop.CamelizeProperty -}}Prop)) { {{- end}} - obj["{{ $prop.ApiName -}}"] = {{ $prop.ApiName -}}Prop + obj["{{ $prop.ApiName -}}"] = {{ $prop.CamelizeProperty -}}Prop } {{- end}} @@ -1276,4 +1276,4 @@ func resource{{ $.ResourceName -}}PostCreateSetComputedFields(d *schema.Resource {{- end }}{{/* range */}} return nil } -{{- end }} \ No newline at end of file +{{- end }} diff --git a/mmv1/templates/tgc/resource_converter.go.tmpl b/mmv1/templates/tgc/resource_converter.go.tmpl index 09087598c84d..a5fd4ec23e0b 100644 --- a/mmv1/templates/tgc/resource_converter.go.tmpl +++ b/mmv1/templates/tgc/resource_converter.go.tmpl @@ -81,18 +81,18 @@ func Get{{ $.ResourceName -}}ApiObject(d tpgresource.TerraformResourceData, conf obj := make(map[string]interface{}) {{- range $prop := $.SettableProperties }} {{- if $prop.FlattenObject }} - {{ $prop.ApiName -}}Prop, err := expand{{ $.ResourceName -}}{{$prop.TitlelizeProperty}}(nil, d, config) + {{ $prop.CamelizeProperty -}}Prop, err := expand{{ $.ResourceName -}}{{$prop.TitlelizeProperty}}(nil, d, config) {{- else }} - {{ $prop.ApiName -}}Prop, err := expand{{ $.ResourceName -}}{{$prop.TitlelizeProperty}}(d.Get("{{underscore $prop.Name}}"), d, config) + {{ $prop.CamelizeProperty -}}Prop, err := expand{{ $.ResourceName -}}{{$prop.TitlelizeProperty}}(d.Get("{{underscore $prop.Name}}"), d, 
config) {{- end}} if err != nil { return nil, err {{- if not $prop.SendEmptyValue }} - } else if v, ok := d.GetOkExists("{{underscore $prop.Name}}"); !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.ApiName -}}Prop)) && (ok || !reflect.DeepEqual(v, {{ $prop.ApiName -}}Prop)) { + } else if v, ok := d.GetOkExists("{{underscore $prop.Name}}"); !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.CamelizeProperty -}}Prop)) && (ok || !reflect.DeepEqual(v, {{ $prop.CamelizeProperty -}}Prop)) { {{- else }} - } else if v, ok := d.GetOkExists("{{underscore $prop.Name}}"); ok || !reflect.DeepEqual(v, {{ $prop.ApiName -}}Prop) { + } else if v, ok := d.GetOkExists("{{underscore $prop.Name}}"); ok || !reflect.DeepEqual(v, {{ $prop.CamelizeProperty -}}Prop) { {{- end }} - obj["{{ $prop.ApiName -}}"] = {{ $prop.ApiName -}}Prop + obj["{{ $prop.ApiName -}}"] = {{ $prop.CamelizeProperty -}}Prop } {{- end}} diff --git a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl index be1a1e4e3eef..58b3ee417f09 100644 --- a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl +++ b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl @@ -84,18 +84,18 @@ func Get{{ $.ResourceName -}}CaiObject(d tpgresource.TerraformResourceData, conf obj := make(map[string]interface{}) {{- range $prop := $.SettableProperties }} {{- if $prop.FlattenObject }} - {{ $prop.ApiName -}}Prop, err := expand{{ $.ResourceName -}}{{$prop.TitlelizeProperty}}(nil, d, config) + {{ $prop.CamelizeProperty -}}Prop, err := expand{{ $.ResourceName -}}{{$prop.TitlelizeProperty}}(nil, d, config) {{- else }} - {{ $prop.ApiName -}}Prop, err := expand{{ $.ResourceName -}}{{$prop.TitlelizeProperty}}(d.Get("{{underscore $prop.Name}}"), d, config) + {{ $prop.CamelizeProperty -}}Prop, err := expand{{ $.ResourceName -}}{{$prop.TitlelizeProperty}}(d.Get("{{underscore $prop.Name}}"), d, config) {{- end}} if err != nil { return nil, err {{- if and (not $prop.SendEmptyValue) (not $prop.TGCSendEmptyValue) }} - } else if v, ok := d.GetOkExists("{{underscore $prop.Name}}"); !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.ApiName -}}Prop)) && (ok || !reflect.DeepEqual(v, {{ $prop.ApiName -}}Prop)) { + } else if v, ok := d.GetOkExists("{{underscore $prop.Name}}"); !tpgresource.IsEmptyValue(reflect.ValueOf({{ $prop.CamelizeProperty -}}Prop)) && (ok || !reflect.DeepEqual(v, {{ $prop.CamelizeProperty -}}Prop)) { {{- else }} - } else if v, ok := d.GetOkExists("{{underscore $prop.Name}}"); ok || !reflect.DeepEqual(v, {{ $prop.ApiName -}}Prop) { + } else if v, ok := d.GetOkExists("{{underscore $prop.Name}}"); ok || !reflect.DeepEqual(v, {{ $prop.CamelizeProperty -}}Prop) { {{- end }} - obj["{{ $prop.ApiName -}}"] = {{ $prop.ApiName -}}Prop + obj["{{ $prop.ApiName -}}"] = {{ $prop.CamelizeProperty -}}Prop } {{- end}} From de01f82e8550fee15350f4910de647a8b01e58e4 Mon Sep 17 00:00:00 2001 From: karolgorc Date: Wed, 20 Aug 2025 18:33:00 +0200 Subject: [PATCH 033/201] Remove hardcoded values that are handled by the API (#11423) Co-authored-by: Cameron Thornton Co-authored-by: Nick Elliot --- .../resource_compute_instance_template.go.tmpl | 7 ------- ...source_compute_instance_template_internal_test.go | 11 ++++++----- ...te_region_instance_template_internal_test.go.tmpl | 9 +++++---- .../docs/guides/version_7_upgrade.html.markdown | 12 ++++++++++++ 4 files changed, 23 insertions(+), 16 deletions(-) diff --git 
a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.tmpl index 88d85a142384..ba87f811432b 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.tmpl @@ -1376,9 +1376,6 @@ func buildDisks(d *schema.ResourceData, config *transport_tpg.Config) ([]*comput // Build the disk var disk compute.AttachedDisk - disk.Type = "PERSISTENT" - disk.Mode = "READ_WRITE" - disk.Interface = "SCSI" disk.Boot = i == 0 disk.AutoDelete = d.Get(prefix + ".auto_delete").(bool) @@ -1833,10 +1830,6 @@ func reorderDisks(configDisks []interface{}, apiDisks []map[string]interface{}) disksByDeviceName[v.(string)] = i } else if v := disk["type"]; v.(string) == "SCRATCH" { iface := disk["interface"].(string) - if iface == "" { - // apply-time default - iface = "SCSI" - } scratchDisksByInterface[iface] = append(scratchDisksByInterface[iface], i) } else if v := disk["source"]; v.(string) != "" { attachedDisksBySource[v.(string)] = i diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_internal_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_internal_test.go index 1ff79ef03e2a..f917b6b601e9 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_internal_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_internal_test.go @@ -19,8 +19,9 @@ func TestComputeInstanceTemplate_reorderDisks(t *testing.T) { cDeviceName := map[string]interface{}{ "device_name": "disk-1", } - cScratch := map[string]interface{}{ - "type": "SCRATCH", + cScratchScsi := map[string]interface{}{ + "type": "SCRATCH", + "interface": "SCSI", } cSource := map[string]interface{}{ "source": "disk-source", @@ -78,7 +79,7 @@ func TestComputeInstanceTemplate_reorderDisks(t *testing.T) { aBoot, aScratchNvme, aSource, aScratchScsi, aFallThrough, aDeviceName, }, ConfigDisks: []interface{}{ - cBoot, cFallThrough, cDeviceName, cScratch, cSource, cScratchNvme, + cBoot, cFallThrough, cDeviceName, cScratchScsi, cSource, cScratchNvme, }, ExpectedResult: []map[string]interface{}{ aBoot, aFallThrough, aDeviceName, aScratchScsi, aSource, aScratchNvme, @@ -89,7 +90,7 @@ func TestComputeInstanceTemplate_reorderDisks(t *testing.T) { aBoot, aNoMatch, aScratchNvme, aScratchScsi, aFallThrough, aDeviceName, }, ConfigDisks: []interface{}{ - cBoot, cFallThrough, cDeviceName, cScratch, cSource, cScratchNvme, + cBoot, cFallThrough, cDeviceName, cScratchScsi, cSource, cScratchNvme, }, ExpectedResult: []map[string]interface{}{ aBoot, aFallThrough, aDeviceName, aScratchScsi, aScratchNvme, aNoMatch, @@ -100,7 +101,7 @@ func TestComputeInstanceTemplate_reorderDisks(t *testing.T) { aBoot, aScratchNvme, aFallThrough, aSource, aScratchScsi, aFallThrough2, aDeviceName, }, ConfigDisks: []interface{}{ - cBoot, cFallThrough, cDeviceName, cScratch, cFallThrough, cSource, cScratchNvme, + cBoot, cFallThrough, cDeviceName, cScratchScsi, cFallThrough, cSource, cScratchNvme, }, ExpectedResult: []map[string]interface{}{ aBoot, aFallThrough, aDeviceName, aScratchScsi, aFallThrough2, aSource, aScratchNvme, diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_internal_test.go.tmpl 
b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_internal_test.go.tmpl index a643e4719836..bb235b036588 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_internal_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_internal_test.go.tmpl @@ -22,8 +22,9 @@ func TestComputeRegionInstanceTemplate_reorderDisks(t *testing.T) { cDeviceName := map[string]interface{}{ "device_name": "disk-1", } - cScratch := map[string]interface{}{ + cScratchScsi := map[string]interface{}{ "type": "SCRATCH", + "interface": "SCSI", } cSource := map[string]interface{}{ "source": "disk-source", @@ -81,7 +82,7 @@ func TestComputeRegionInstanceTemplate_reorderDisks(t *testing.T) { aBoot, aScratchNvme, aSource, aScratchScsi, aFallThrough, aDeviceName, }, ConfigDisks: []interface{}{ - cBoot, cFallThrough, cDeviceName, cScratch, cSource, cScratchNvme, + cBoot, cFallThrough, cDeviceName, cScratchScsi, cSource, cScratchNvme, }, ExpectedResult: []map[string]interface{}{ aBoot, aFallThrough, aDeviceName, aScratchScsi, aSource, aScratchNvme, @@ -92,7 +93,7 @@ func TestComputeRegionInstanceTemplate_reorderDisks(t *testing.T) { aBoot, aNoMatch, aScratchNvme, aScratchScsi, aFallThrough, aDeviceName, }, ConfigDisks: []interface{}{ - cBoot, cFallThrough, cDeviceName, cScratch, cSource, cScratchNvme, + cBoot, cFallThrough, cDeviceName, cScratchScsi, cSource, cScratchNvme, }, ExpectedResult: []map[string]interface{}{ aBoot, aFallThrough, aDeviceName, aScratchScsi, aScratchNvme, aNoMatch, @@ -103,7 +104,7 @@ func TestComputeRegionInstanceTemplate_reorderDisks(t *testing.T) { aBoot, aScratchNvme, aFallThrough, aSource, aScratchScsi, aFallThrough2, aDeviceName, }, ConfigDisks: []interface{}{ - cBoot, cFallThrough, cDeviceName, cScratch, cFallThrough, cSource, cScratchNvme, + cBoot, cFallThrough, cDeviceName, cScratchScsi, cFallThrough, cSource, cScratchNvme, }, ExpectedResult: []map[string]interface{}{ aBoot, aFallThrough, aDeviceName, aScratchScsi, aFallThrough2, aSource, aScratchNvme, diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index a2d922354caa..3d1477190388 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -161,6 +161,18 @@ Use `google_beyondcorp_security_gateway_application` instead. `enable_flow_logs` has been removed in favor of `log_config`. +## Resource: `google_compute_instance_template` + +### The resource will no longer use hardcoded values + +`disk.type`, `disk.mode` and `disk.interface` will no longer use provider configured default values and instead will be set by the API. This shouldn't have any effect on the functionality of the resource. + +## Resource: `google_compute_region_instance_template` + +### The resource will no longer use hardcoded values + +`disk.type`, `disk.mode` and `disk.interface` will no longer use provider configured default values and instead will be set by the API. This shouldn't have any effect on the functionality of the resource. + ## Resource: `google_notebooks_location` is now removed This resource is not functional. 
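For configurations that depended on the provider's previous implicit disk defaults (the removed `buildDisks` code set `PERSISTENT`, `READ_WRITE`, and `SCSI`), the same values can be pinned explicitly in configuration. A minimal sketch, assuming the standard `google_compute_instance_template` schema; the resource name, image, and machine type below are illustrative placeholders:

```hcl
resource "google_compute_instance_template" "example" {
  name_prefix  = "example-template-"
  machine_type = "e2-medium"

  disk {
    source_image = "debian-cloud/debian-12"
    boot         = true
    type         = "PERSISTENT" # value the provider previously hard-coded
    mode         = "READ_WRITE" # value the provider previously hard-coded
    interface    = "SCSI"       # value the provider previously hard-coded
  }

  network_interface {
    network = "default"
  }
}
```

The same pattern applies to `google_compute_region_instance_template`; leaving these fields unset now defers the values to the API rather than to provider-side defaults.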
From 0c59673e59a77f50d8cd9b5fd7fb9a7c952917d2 Mon Sep 17 00:00:00 2001 From: Stephane Charite Date: Wed, 20 Aug 2025 10:20:47 -0700 Subject: [PATCH 034/201] Fix Lustre timeout failures (#14878) --- mmv1/products/lustre/Instance.yaml | 2 +- .../services/lustre/data_source_lustre_instance_test.go | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mmv1/products/lustre/Instance.yaml b/mmv1/products/lustre/Instance.yaml index 69e865e18e68..5d12dbee9ad7 100644 --- a/mmv1/products/lustre/Instance.yaml +++ b/mmv1/products/lustre/Instance.yaml @@ -44,7 +44,7 @@ autogen_async: true async: operation: timeouts: - insert_minutes: 20 + insert_minutes: 120 update_minutes: 20 delete_minutes: 20 base_url: '{{op_id}}' diff --git a/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance_test.go b/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance_test.go index fd4618b72873..e369beca67db 100644 --- a/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance_test.go +++ b/mmv1/third_party/terraform/services/lustre/data_source_lustre_instance_test.go @@ -44,7 +44,7 @@ resource "google_lustre_instance" "instance" { capacity_gib = 18000 network = data.google_compute_network.lustre-network.id gke_support_enabled = false - per_unit_storage_throughput = 1000 + per_unit_storage_throughput = 1000 } // This example assumes this network already exists. From 3d112f0851d25e313b084896b893140815287dc1 Mon Sep 17 00:00:00 2001 From: William Yardley Date: Wed, 20 Aug 2025 10:29:28 -0700 Subject: [PATCH 035/201] container: Removed instance type from resourceManagerTags test (#14834) --- .../container/resource_container_node_pool_test.go.tmpl | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl index d03b5db01290..bcebd1f3dc62 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl @@ -5559,7 +5559,6 @@ resource "google_container_node_pool" "primary_nodes" { node_count = 1 node_config { - machine_type = "n1-standard-1" // can't be e2 because of local-ssd disk_size_gb = 15 resource_manager_tags = { @@ -5606,12 +5605,11 @@ resource "google_container_node_pool" "primary_nodes" { node_count = 1 node_config { - machine_type = "n1-standard-1" // can't be e2 because of local-ssd disk_size_gb = 15 resource_manager_tags = { "%{pid}/%{tagKey1}" = "%{tagValue1}" - "%{pid}/%{tagKey2}" = "%{tagValue2}" + "%{pid}/%{tagKey2}" = "%{tagValue2}" } } } @@ -5654,7 +5652,6 @@ resource "google_container_node_pool" "primary_nodes" { node_count = 1 node_config { - machine_type = "n1-standard-1" // can't be e2 because of local-ssd disk_size_gb = 15 } } From 343c1d7f9762d72f2c62b900706abf1a1f624ffd Mon Sep 17 00:00:00 2001 From: Max Portocarrero CI&T <105444618+maxi-cit@users.noreply.github.com> Date: Wed, 20 Aug 2025 14:53:35 -0500 Subject: [PATCH 036/201] updated organization security policies (#14914) --- .../compute/OrganizationSecurityPolicy.yaml | 17 +++++++--- ..._organization_security_policy_test.go.tmpl | 34 +++++++++++++++++++ 2 files changed, 46 insertions(+), 5 deletions(-) diff --git a/mmv1/products/compute/OrganizationSecurityPolicy.yaml b/mmv1/products/compute/OrganizationSecurityPolicy.yaml index 966188adf897..f3b0a7039a4e 100644 --- 
a/mmv1/products/compute/OrganizationSecurityPolicy.yaml +++ b/mmv1/products/compute/OrganizationSecurityPolicy.yaml @@ -58,15 +58,20 @@ properties: - name: 'displayName' type: String description: | - A textual name of the security policy. + User-provided name of the organization security policy. The name should be unique in the organization in which the security policy is created. This should only be used when SecurityPolicyType is FIREWALL. min_version: 'beta' - required: true immutable: true - name: 'description' type: String description: | A textual description for the organization security policy. min_version: 'beta' + - name: 'shortName' + type: String + description: | + User-provided name of the organization security policy. The name should be unique in the organization in which the security policy is created. This should only be used when SecurityPolicyType is CLOUD_ARMOR. + min_version: 'beta' + immutable: true - name: 'fingerprint' type: Fingerprint description: | @@ -84,11 +89,13 @@ properties: - name: 'type' type: Enum description: | - The type indicates the intended use of the security policy. - For organization security policies, the only supported type - is "FIREWALL". + The type indicates the intended use of the security policy. This field can be set only at resource creation time. min_version: 'beta' immutable: true default_value: "FIREWALL" enum_values: - 'FIREWALL' + - 'CLOUD_ARMOR' + - 'CLOUD_ARMOR_EDGE' + - 'CLOUD_ARMOR_INTERNAL_SERVICE' + - 'CLOUD_ARMOR_NETWORK' diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_organization_security_policy_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_organization_security_policy_test.go.tmpl index cdfe1e9894a7..56bbcb25e466 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_organization_security_policy_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_organization_security_policy_test.go.tmpl @@ -42,6 +42,29 @@ func TestAccComputeOrganizationSecurityPolicy_organizationSecurityPolicyUpdateEx }) } +func TestAccComputeOrganizationSecurityPolicy_organizationSecurityPolicyShortName(t *testing.T) { + context := map[string]interface{}{ + "org_id": envvar.GetTestOrgFromEnv(t), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeOrganizationSecurityPolicyDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeOrganizationSecurityPolicy_organizationSecurityPolicyShortName(context), + }, + { + ResourceName: "google_compute_organization_security_policy.policy", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + func testAccComputeOrganizationSecurityPolicy_organizationSecurityPolicyPreUpdateExample(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_compute_organization_security_policy" "policy" { @@ -60,4 +83,15 @@ resource "google_compute_organization_security_policy" "policy" { } `, context) } + +func testAccComputeOrganizationSecurityPolicy_organizationSecurityPolicyShortName(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_organization_security_policy" "policy" { + short_name = "tf-test%{random_suffix}" + parent = "organizations/%{org_id}" + description = "org security policy description" + type = "CLOUD_ARMOR" +} +`, context) +} {{- 
end }} From 6374aaf29951e0e3f9a75254ae264e471bd5934d Mon Sep 17 00:00:00 2001 From: Scott Suarez Date: Wed, 20 Aug 2025 13:34:54 -0700 Subject: [PATCH 037/201] Revert " provider: eliminated the need to manually add `*_wo` and `*_wo_version` for write-only properties" (#14917) --- docs/content/reference/field.md | 9 +- mmv1/api/resource.go | 86 +---- mmv1/api/resource_test.go | 330 ------------------ mmv1/api/type.go | 49 +-- mmv1/api/type_test.go | 2 +- mmv1/main.go | 4 +- .../products/bigquerydatatransfer/Config.yaml | 22 +- .../monitoring/UptimeCheckConfig.yaml | 21 +- .../products/secretmanager/SecretVersion.yaml | 16 + .../terraform/flatten_property_method.go.tmpl | 2 +- 10 files changed, 67 insertions(+), 474 deletions(-) diff --git a/docs/content/reference/field.md b/docs/content/reference/field.md index c07098e02f0c..1f24ec8abc06 100644 --- a/docs/content/reference/field.md +++ b/docs/content/reference/field.md @@ -108,11 +108,10 @@ sensitive: true ``` ### `write_only` -Set to true to enable write-only functionality for this field. -If true, the write-only fields will be automatically generated by the code generator (`_wo` and `_wo_version`). -When the write-only variant of a field is used, it means that its value will be obscured in Terraform output as well as not be stored in state. -This field is meant to replace `sensitive` as it doesn't store the value in state. -See [Ephemerality in Resources - Use Write-only arguments](https://developer.hashicorp.com/terraform/language/resources/ephemeral/write-only) for more information. +If true, the field is considered "write-only", which means that its value will +be obscured in Terraform output as well as not be stored in state. This field is meant to replace `sensitive` as it doesn't store the value in state. +See [Ephemerality in Resources - Use Write-only arguments](https://developer.hashicorp.com/terraform/language/resources/ephemeral/write-only) +for more information. Write-only fields are only supported in Terraform v1.11+. Because the provider supports earlier Terraform versions, write only fields must be paired with (mutually exclusive) `sensitive` fields covering the same functionality for compatibility with those older versions. This field cannot be used in conjuction with `immutable` or `sensitive`. diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index 95c10c201f4f..afa8b929d722 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -737,94 +737,14 @@ func (r Resource) GetIdentity() []*Type { }) } -func buildWriteOnlyField(name string, versionFieldName string, originalField *Type, originalFieldLineage string) *Type { - description := fmt.Sprintf("%s Note: This property is write-only and will not be read from the API. 
For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes)", originalField.Description) - fieldPathOriginalField := originalFieldLineage - fieldPathCurrentField := strings.ReplaceAll(originalFieldLineage, google.Underscore(originalField.Name), google.Underscore(name)) - requiredWith := strings.ReplaceAll(originalFieldLineage, google.Underscore(originalField.Name), google.Underscore(versionFieldName)) - - apiName := originalField.ApiName - if apiName == "" { - apiName = originalField.Name - } - - options := []func(*Type){ - propertyWithType("String"), - propertyWithRequired(false), - propertyWithDescription(description), - propertyWithWriteOnly(true), - propertyWithApiName(apiName), - propertyWithIgnoreRead(true), - propertyWithRequiredWith([]string{requiredWith}), - } - - if originalField.Required { - exactlyOneOf := append(originalField.ExactlyOneOf, fieldPathOriginalField, fieldPathCurrentField) - options = append(options, propertyWithExactlyOneOf(exactlyOneOf)) - } else { - conflicts := append(originalField.Conflicts, fieldPathOriginalField) - options = append(options, propertyWithConflicts(conflicts)) - } - - if len(originalField.AtLeastOneOf) > 0 { - atLeastOneOf := append(originalField.AtLeastOneOf, fieldPathCurrentField) - options = append(options, propertyWithAtLeastOneOf(atLeastOneOf)) - } - - return NewProperty(name, originalField.ApiName, options) -} - -func buildWriteOnlyVersionField(name string, originalField *Type, writeOnlyField *Type, originalFieldLineage string) *Type { - description := fmt.Sprintf("Triggers update of %s write-only. For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes)", google.Underscore(writeOnlyField.Name)) - requiredWith := strings.ReplaceAll(originalFieldLineage, google.Underscore(originalField.Name), google.Underscore(writeOnlyField.Name)) - - options := []func(*Type){ - propertyWithType("String"), - propertyWithImmutable(originalField.Immutable), - propertyWithDescription(description), - propertyWithRequiredWith([]string{requiredWith}), - propertyWithClientSide(true), - } - - return NewProperty(name, name, options) -} - -func (r *Resource) addWriteOnlyFields(props []*Type, propWithWoConfigured *Type, propWithWoConfiguredLineagePath string) []*Type { - if len(propWithWoConfigured.RequiredWith) > 0 { - log.Fatalf("WriteOnly property '%s' in resource '%s' cannot have RequiredWith set. This combination is not supported.", propWithWoConfigured.Name, r.Name) - } - woFieldName := fmt.Sprintf("%sWo", propWithWoConfigured.Name) - woVersionFieldName := fmt.Sprintf("%sVersion", woFieldName) - writeOnlyField := buildWriteOnlyField(woFieldName, woVersionFieldName, propWithWoConfigured, propWithWoConfiguredLineagePath) - writeOnlyVersionField := buildWriteOnlyVersionField(woVersionFieldName, propWithWoConfigured, writeOnlyField, propWithWoConfiguredLineagePath) - props = append(props, writeOnlyField, writeOnlyVersionField) - return props -} - -func (r *Resource) buildCurrentPropLineage(p *Type, lineage string) string { - underscoreName := google.Underscore(p.Name) - if lineage == "" { - return underscoreName - } - return fmt.Sprintf("%s.0.%s", lineage, underscoreName) -} - -// AddExtraFields processes properties and adds supplementary fields based on property types. -// It handles write-only properties, labels, and annotations. 
-func (r *Resource) AddExtraFields(props []*Type, parent *Type, lineage string) []*Type { +func (r *Resource) AddLabelsRelatedFields(props []*Type, parent *Type) []*Type { for _, p := range props { - currentPropLineage := r.buildCurrentPropLineage(p, lineage) - if p.WriteOnly && !strings.HasSuffix(p.Name, "Wo") { - props = r.addWriteOnlyFields(props, p, currentPropLineage) - p.WriteOnly = false - p.Required = false - } if p.IsA("KeyValueLabels") { props = r.addLabelsFields(props, parent, p) } else if p.IsA("KeyValueAnnotations") { props = r.addAnnotationsFields(props, parent, p) } else if p.IsA("NestedObject") && len(p.AllProperties()) > 0 { - p.Properties = r.AddExtraFields(p.AllProperties(), p, currentPropLineage) + p.Properties = r.AddLabelsRelatedFields(p.AllProperties(), p) } } return props @@ -843,7 +763,6 @@ func (r *Resource) addLabelsFields(props []*Type, parent *Type, labels *Type) [] terraformLabelsField := buildTerraformLabelsField("labels", parent, labels) effectiveLabelsField := buildEffectiveLabelsField("labels", labels) - props = append(props, terraformLabelsField, effectiveLabelsField) // The effective_labels field is used to write to API, instead of the labels field. @@ -880,7 +799,6 @@ func (r *Resource) addAnnotationsFields(props []*Type, parent *Type, annotations } effectiveAnnotationsField := buildEffectiveLabelsField("annotations", annotations) - props = append(props, effectiveAnnotationsField) return props } diff --git a/mmv1/api/resource_test.go b/mmv1/api/resource_test.go index 46a34b8b8036..ad7dd327b288 100644 --- a/mmv1/api/resource_test.go +++ b/mmv1/api/resource_test.go @@ -4,7 +4,6 @@ import ( "os" "path/filepath" "reflect" - "slices" "strings" "testing" @@ -504,332 +503,3 @@ func TestHasPostCreateComputedFields(t *testing.T) { }) } } - -func TestResourceAddExtraFields(t *testing.T) { - t.Parallel() - - createTestResource := func(name string) *Resource { - return &Resource{ - Name: name, - ProductMetadata: &Product{ - Name: "testproduct", - }, - } - } - - createTestType := func(name, typeStr string, options ...func(*Type)) *Type { - t := &Type{ - Name: name, - Type: typeStr, - } - for _, option := range options { - option(t) - } - return t - } - - withWriteOnly := func(writeOnly bool) func(*Type) { - return func(t *Type) { t.WriteOnly = writeOnly } - } - withRequired := func(required bool) func(*Type) { - return func(t *Type) { t.Required = required } - } - withDescription := func(desc string) func(*Type) { - return func(t *Type) { t.Description = desc } - } - withProperties := func(props []*Type) func(*Type) { - return func(t *Type) { t.Properties = props } - } - - t.Run("WriteOnly property adds companion fields", func(t *testing.T) { - t.Parallel() - - resource := createTestResource("testresource") - writeOnlyProp := createTestType("password", "String", - withWriteOnly(true), - withRequired(true), - withDescription("A password field"), - ) - - props := []*Type{writeOnlyProp} - result := resource.AddExtraFields(props, nil, "") - - if len(result) != 3 { - t.Errorf("Expected 3 properties after adding WriteOnly fields, got %d", len(result)) - } - - if writeOnlyProp.WriteOnly { - t.Error("Original WriteOnly property should have WriteOnly set to false after processing") - } - if writeOnlyProp.Required { - t.Error("Original WriteOnly property should have Required set to false after processing") - } - - var foundWoField, foundVersionField bool - for _, prop := range result { - if prop.Name == "passwordWo" { - foundWoField = true - if !prop.WriteOnly { - 
t.Error("passwordWo field should have WriteOnly=true") - } - } - if prop.Name == "passwordWoVersion" { - foundVersionField = true - if !prop.ClientSide { - t.Error("passwordWoVersion field should have ClientSide=true") - } - } - } - - if !foundWoField { - t.Error("Expected to find passwordWo field") - } - if !foundVersionField { - t.Error("Expected to find passwordWoVersion field") - } - }) - - t.Run("KeyValueLabels property adds terraform and effective labels", func(t *testing.T) { - t.Parallel() - - resource := createTestResource("testresource") - labelsType := &Type{ - Name: "labels", - Type: "KeyValueLabels", - Description: "Resource labels", - } - - props := []*Type{labelsType} - result := resource.AddExtraFields(props, nil, "") - - if len(result) != 3 { - t.Errorf("Expected 3 properties after adding labels fields, got %d", len(result)) - } - - if !labelsType.IgnoreWrite { - t.Error("Original labels field should have IgnoreWrite=true after processing") - } - if !strings.Contains(labelsType.Description, "**Note**") { - t.Error("Original labels field description should contain note after processing") - } - - var foundTerraformLabels, foundEffectiveLabels bool - for _, prop := range result { - if prop.Name == "terraformLabels" { - foundTerraformLabels = true - if prop.Type != "KeyValueTerraformLabels" { - t.Errorf("terraformLabels should have type KeyValueTerraformLabels, got %s", prop.Type) - } - } - if prop.Name == "effectiveLabels" { - foundEffectiveLabels = true - if prop.Type != "KeyValueEffectiveLabels" { - t.Errorf("effectiveLabels should have type KeyValueEffectiveLabels, got %s", prop.Type) - } - } - } - - if !foundTerraformLabels { - t.Error("Expected to find terraformLabels field") - } - if !foundEffectiveLabels { - t.Error("Expected to find effectiveLabels field") - } - - expectedDiff := "tpgresource.SetLabelsDiff" - if !slices.Contains(resource.CustomDiff, expectedDiff) { - t.Errorf("Expected CustomDiff to contain %s", expectedDiff) - } - }) - - t.Run("KeyValueLabels with ExcludeAttributionLabel adds different CustomDiff", func(t *testing.T) { - t.Parallel() - - resource := createTestResource("testresource") - resource.ExcludeAttributionLabel = true - - labelsType := &Type{ - Name: "labels", - Type: "KeyValueLabels", - } - - props := []*Type{labelsType} - resource.AddExtraFields(props, nil, "") - - expectedDiff := "tpgresource.SetLabelsDiffWithoutAttributionLabel" - if !slices.Contains(resource.CustomDiff, expectedDiff) { - t.Errorf("Expected CustomDiff to contain %s", expectedDiff) - } - }) - - t.Run("KeyValueLabels with metadata parent adds metadata CustomDiff", func(t *testing.T) { - t.Parallel() - - resource := createTestResource("testresource") - parent := &Type{Name: "metadata"} - - labelsType := &Type{ - Name: "labels", - Type: "KeyValueLabels", - } - - props := []*Type{labelsType} - resource.AddExtraFields(props, parent, "") - - expectedDiff := "tpgresource.SetMetadataLabelsDiff" - if !slices.Contains(resource.CustomDiff, expectedDiff) { - t.Errorf("Expected CustomDiff to contain %s", expectedDiff) - } - }) - - t.Run("KeyValueAnnotations property adds effective annotations", func(t *testing.T) { - t.Parallel() - - resource := createTestResource("testresource") - annotationsType := &Type{ - Name: "annotations", - Type: "KeyValueAnnotations", - Description: "Resource annotations", - } - - props := []*Type{annotationsType} - result := resource.AddExtraFields(props, nil, "") - - if len(result) != 2 { - t.Errorf("Expected 2 properties after adding annotations fields, got 
%d", len(result)) - } - - if !annotationsType.IgnoreWrite { - t.Error("Original annotations field should have IgnoreWrite=true after processing") - } - - var foundEffectiveAnnotations bool - for _, prop := range result { - if prop.Name == "effectiveAnnotations" { - foundEffectiveAnnotations = true - if prop.Type != "KeyValueEffectiveLabels" { - t.Errorf("effectiveAnnotations should have type KeyValueEffectiveLabels, got %s", prop.Type) - } - } - } - - if !foundEffectiveAnnotations { - t.Error("Expected to find effectiveAnnotations field") - } - - expectedDiff := "tpgresource.SetAnnotationsDiff" - if !slices.Contains(resource.CustomDiff, expectedDiff) { - t.Errorf("Expected CustomDiff to contain %s", expectedDiff) - } - }) - - t.Run("NestedObject with properties processes recursively", func(t *testing.T) { - t.Parallel() - - resource := createTestResource("testresource") - - nestedWriteOnly := createTestType("nestedPassword", "String", withWriteOnly(true)) - nestedObject := createTestType("config", "NestedObject", withProperties([]*Type{nestedWriteOnly})) - - props := []*Type{nestedObject} - result := resource.AddExtraFields(props, nil, "") - - if len(result) != 1 { - t.Errorf("Expected 1 top-level property, got %d", len(result)) - } - - if len(nestedObject.Properties) != 3 { - t.Errorf("Expected 3 nested properties after recursive processing, got %d", len(nestedObject.Properties)) - } - - if nestedWriteOnly.WriteOnly { - t.Error("Nested WriteOnly property should have WriteOnly=false after processing") - } - }) - - t.Run("Empty NestedObject properties are not processed", func(t *testing.T) { - t.Parallel() - - resource := createTestResource("testresource") - emptyNestedObject := createTestType("config", "NestedObject", withProperties([]*Type{})) - - props := []*Type{emptyNestedObject} - result := resource.AddExtraFields(props, nil, "") - - if len(result) != 1 { - t.Errorf("Expected 1 property, got %d", len(result)) - } - if len(emptyNestedObject.Properties) != 0 { - t.Errorf("Expected 0 nested properties, got %d", len(emptyNestedObject.Properties)) - } - }) - - t.Run("WriteOnly property already ending with Wo is skipped", func(t *testing.T) { - t.Parallel() - - resource := createTestResource("testresource") - woProperty := createTestType("passwordWo", "String", withWriteOnly(true)) - - props := []*Type{woProperty} - result := resource.AddExtraFields(props, nil, "") - - if len(result) != 1 { - t.Errorf("Expected 1 property for Wo-suffixed field, got %d", len(result)) - } - - if !woProperty.WriteOnly { - t.Error("Wo-suffixed property should remain WriteOnly=true") - } - }) - - t.Run("Regular properties are passed through unchanged", func(t *testing.T) { - t.Parallel() - - resource := createTestResource("testresource") - regularProp := createTestType("name", "String", withRequired(true)) - - props := []*Type{regularProp} - result := resource.AddExtraFields(props, nil, "") - - if len(result) != 1 { - t.Errorf("Expected 1 property for regular field, got %d", len(result)) - } - - if result[0] != regularProp { - t.Error("Regular property should be passed through unchanged") - } - if !regularProp.Required { - t.Error("Regular property Required should be unchanged") - } - }) - - t.Run("Multiple property types processed correctly", func(t *testing.T) { - t.Parallel() - - resource := createTestResource("testresource") - - regularProp := createTestType("name", "String") - writeOnlyProp := createTestType("password", "String", withWriteOnly(true)) - labelsType := &Type{Name: "labels", Type: 
"KeyValueLabels"} - - props := []*Type{regularProp, writeOnlyProp, labelsType} - result := resource.AddExtraFields(props, nil, "") - - // Should have: name + password + passwordWo + passwordWoVersion + labels + terraformLabels + effectiveLabels = 7 - if len(result) != 7 { - t.Errorf("Expected 7 properties total, got %d", len(result)) - } - - names := make(map[string]bool) - for _, prop := range result { - names[prop.Name] = true - } - - expectedNames := []string{"name", "password", "passwordWo", "passwordWoVersion", "labels", "terraformLabels", "effectiveLabels"} - for _, expected := range expectedNames { - if !names[expected] { - t.Errorf("Expected to find property named %s", expected) - } - } - }) -} diff --git a/mmv1/api/type.go b/mmv1/api/type.go index 3e5fad14352e..ba180ceafd5f 100644 --- a/mmv1/api/type.go +++ b/mmv1/api/type.go @@ -628,6 +628,7 @@ func (t Type) ExactlyOneOfList() []string { if t.ResourceMetadata == nil { return []string{} } + return t.ExactlyOneOf } @@ -1017,54 +1018,6 @@ func propertyWithIgnoreWrite(ignoreWrite bool) func(*Type) { } } -func propertyWithRequired(required bool) func(*Type) { - return func(p *Type) { - p.Required = required - } -} - -func propertyWithWriteOnly(writeOnly bool) func(*Type) { - return func(p *Type) { - p.WriteOnly = writeOnly - } -} - -func propertyWithIgnoreRead(ignoreRead bool) func(*Type) { - return func(p *Type) { - p.IgnoreRead = ignoreRead - } -} - -func propertyWithConflicts(conflicts []string) func(*Type) { - return func(p *Type) { - p.Conflicts = conflicts - } -} - -func propertyWithRequiredWith(requiredWith []string) func(*Type) { - return func(p *Type) { - p.RequiredWith = requiredWith - } -} - -func propertyWithExactlyOneOf(exactlyOneOf []string) func(*Type) { - return func(p *Type) { - p.ExactlyOneOf = exactlyOneOf - } -} - -func propertyWithAtLeastOneOf(atLeastOneOf []string) func(*Type) { - return func(p *Type) { - p.AtLeastOneOf = atLeastOneOf - } -} - -func propertyWithApiName(apiName string) func(*Type) { - return func(p *Type) { - p.ApiName = apiName - } -} - func (t *Type) validateLabelsField() { productName := t.ResourceMetadata.ProductMetadata.Name resourceName := t.ResourceMetadata.Name diff --git a/mmv1/api/type_test.go b/mmv1/api/type_test.go index f6a738d248f2..3d46d120a31f 100644 --- a/mmv1/api/type_test.go +++ b/mmv1/api/type_test.go @@ -361,7 +361,7 @@ func TestProviderOnly(t *testing.T) { }, }, } - labeled.Properties = labeled.AddExtraFields(labeled.PropertiesWithExcluded(), nil, "") + labeled.Properties = labeled.AddLabelsRelatedFields(labeled.PropertiesWithExcluded(), nil) labeled.SetDefault(nil) cases := []struct { diff --git a/mmv1/main.go b/mmv1/main.go index 9e7c0fc88612..ef62e7ba5f88 100644 --- a/mmv1/main.go +++ b/mmv1/main.go @@ -235,7 +235,7 @@ func GenerateProduct(version, providerName, productName, outputPath string, prod resource.SourceYamlFile = resourceYamlPath resource.TargetVersionName = version - resource.Properties = resource.AddExtraFields(resource.PropertiesWithExcluded(), nil, "") + resource.Properties = resource.AddLabelsRelatedFields(resource.PropertiesWithExcluded(), nil) resource.SetDefault(productApi) resource.Validate() resources = append(resources, resource) @@ -268,7 +268,7 @@ func GenerateProduct(version, providerName, productName, outputPath string, prod } resource.TargetVersionName = version - resource.Properties = resource.AddExtraFields(resource.PropertiesWithExcluded(), nil, "") + resource.Properties = resource.AddLabelsRelatedFields(resource.PropertiesWithExcluded(), nil) 
resource.SetDefault(productApi) resource.Validate() resources = append(resources, resource) diff --git a/mmv1/products/bigquerydatatransfer/Config.yaml b/mmv1/products/bigquerydatatransfer/Config.yaml index b9bb9cf4b137..df88222fc7b7 100644 --- a/mmv1/products/bigquerydatatransfer/Config.yaml +++ b/mmv1/products/bigquerydatatransfer/Config.yaml @@ -222,10 +222,30 @@ properties: to a different credential configuration in the config will require an apply to update state. url_param_only: true properties: + - name: 'secretAccessKeyWoVersion' + type: Integer + url_param_only: true + required_with: + - 'sensitive_params.0.secretAccessKeyWo' + description: | + The version of the sensitive params - used to trigger updates of the write-only params. For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes) - name: 'secretAccessKey' type: String description: | The Secret Access Key of the AWS account transferring data from. sensitive: true + at_least_one_of: + - 'sensitive_params.0.secretAccessKey' + - 'sensitive_params.0.secretAccessKeyWo' + conflicts: + - 'sensitive_params.0.secretAccessKeyWo' + - name: 'secretAccessKeyWo' # Wo is convention for write-only properties + type: String + description: | + The Secret Access Key of the AWS account transferring data from. write_only: true - required: true + at_least_one_of: + - 'sensitive_params.0.secretAccessKeyWo' + - 'sensitive_params.0.secretAccessKey' + conflicts: + - 'sensitive_params.0.secretAccessKey' diff --git a/mmv1/products/monitoring/UptimeCheckConfig.yaml b/mmv1/products/monitoring/UptimeCheckConfig.yaml index 9affd60788a1..67f7fdd0e791 100644 --- a/mmv1/products/monitoring/UptimeCheckConfig.yaml +++ b/mmv1/products/monitoring/UptimeCheckConfig.yaml @@ -246,10 +246,27 @@ properties: - name: 'password' type: String description: The password to authenticate. - required: true - write_only: true + exactly_one_of: + - 'password' + - 'password_wo' sensitive: true custom_flatten: 'templates/terraform/custom_flatten/uptime_check_http_password.tmpl' + - name: 'passwordWo' + type: String + description: The password to authenticate. + exactly_one_of: + - 'passwordWo' + - 'password' + required_with: + - 'http_check.0.auth_info.0.password_wo_version' + write_only: true + - name: 'passwordWoVersion' + type: String + immutable: true + ignore_read: true + description: The password write-only version. + required_with: + - 'http_check.0.auth_info.0.password_wo' - name: 'username' type: String description: The username to authenticate. diff --git a/mmv1/products/secretmanager/SecretVersion.yaml b/mmv1/products/secretmanager/SecretVersion.yaml index ac840f29e772..d3e0335ee2bd 100644 --- a/mmv1/products/secretmanager/SecretVersion.yaml +++ b/mmv1/products/secretmanager/SecretVersion.yaml @@ -160,6 +160,22 @@ properties: type: String description: The secret data. Must be no larger than 64KiB. api_name: data + conflicts: + - 'secretDataWo' immutable: true sensitive: true + - name: 'secretDataWo' + type: String + description: The secret data. Must be no larger than 64KiB. For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes) + api_name: data + required_with: + - 'SecretDataWoVersion' + conflicts: + - 'payload.0.secretData' write_only: true + - name: 'SecretDataWoVersion' + type: Integer + default_value: 0 + url_param_only: true + description: Triggers update of secret data write-only. 
For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes) + immutable: true diff --git a/mmv1/templates/terraform/flatten_property_method.go.tmpl b/mmv1/templates/terraform/flatten_property_method.go.tmpl index 92387f432fff..cb0fbb7a76a8 100644 --- a/mmv1/templates/terraform/flatten_property_method.go.tmpl +++ b/mmv1/templates/terraform/flatten_property_method.go.tmpl @@ -18,7 +18,7 @@ {{- $.CustomTemplate $.CustomFlatten false -}} {{- else -}} func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - {{- if or (and $.IgnoreRead (not $.ResourceMetadata.IsTgcCompiler)) $.ClientSide }} + {{- if and $.IgnoreRead (not $.ResourceMetadata.IsTgcCompiler) }} return d.Get("{{ $.TerraformLineage }}") {{- else if $.IsA "NestedObject" }} if v == nil { From c657f7b664ca6176513609d7ce43998c3e4df33c Mon Sep 17 00:00:00 2001 From: Axel Kossek Date: Wed, 20 Aug 2025 23:21:12 +0200 Subject: [PATCH 038/201] Add resource_manager_tags support to Backend Service api (#14902) --- mmv1/products/compute/BackendService.yaml | 16 ++++++ ...ource_compute_backend_service_test.go.tmpl | 51 +++++++++++++++++++ 2 files changed, 67 insertions(+) diff --git a/mmv1/products/compute/BackendService.yaml b/mmv1/products/compute/BackendService.yaml index 7ddc4ee360b4..351b20feb43d 100644 --- a/mmv1/products/compute/BackendService.yaml +++ b/mmv1/products/compute/BackendService.yaml @@ -1688,3 +1688,19 @@ properties: description: | A boolean flag enabling IP:PORT based dynamic forwarding. immutable: true + - name: 'params' + type: NestedObject + ignore_read: true + immutable: true + description: | + Additional params passed with the request, but not persisted as part of resource payload + properties: + - name: 'resourceManagerTags' + type: KeyValuePairs + description: | + Resource manager tags to be bound to the backend service. Tag keys and values have the + same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, + and values are in the format tagValues/456. 
+ api_name: resourceManagerTags + ignore_read: true + immutable: true diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_backend_service_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_backend_service_test.go.tmpl index c0e3c2dc9aac..5bb447cf1b70 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_backend_service_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_backend_service_test.go.tmpl @@ -10,6 +10,7 @@ import ( "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/plancheck" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" ) func TestAccComputeBackendService_basic(t *testing.T) { @@ -1163,6 +1164,35 @@ func TestAccComputeBackendService_withNetworkPassThroughLbTrafficPolicy(t *testi } {{- end }} +func TestAccComputeBackendService_resourceManagerTags(t *testing.T) { + t.Parallel() + + org := envvar.GetTestOrgFromEnv(t) + + serviceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + checkName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + tagKeyResult := acctest.BootstrapSharedTestTagKeyDetails(t, "crm-bs-tagkey", "organizations/"+org, make(map[string]interface{})) + sharedTagkey,_ := tagKeyResult["shared_tag_key"] + tagValueResult := acctest.BootstrapSharedTestTagValueDetails(t, "crm-bs-tagvalue", sharedTagkey, org) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeBackendServiceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeBackendService_withTags(serviceName, checkName, tagKeyResult["name"], tagValueResult["name"]), + }, + { + ResourceName: "google_compute_backend_service.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"params"}, + }, + }, + }) +} + func testAccComputeBackendService_trafficDirectorBasic(serviceName, checkName string) string { return fmt.Sprintf(` resource "google_compute_backend_service" "foobar" { @@ -2972,3 +3002,24 @@ resource "google_compute_health_check" "default" { `, namePrefix, spillover, ratio, namePrefix, namePrefix, namePrefix, namePrefix) } {{- end }} + +func testAccComputeBackendService_withTags(serviceName, checkName string, tagKey string, tagValue string) string { + return fmt.Sprintf(` +resource "google_compute_backend_service" "foobar" { + name = "%s" + health_checks = [google_compute_http_health_check.zero.self_link] + params { + resource_manager_tags = { + "%s" = "%s" + } + } +} + +resource "google_compute_http_health_check" "zero" { + name = "%s" + request_path = "/" + check_interval_sec = 1 + timeout_sec = 1 +} +`, serviceName, tagKey, tagValue, checkName) +} From 776b3e465ee41cc613cf3c50e30c3defa138ca11 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Wed, 20 Aug 2025 14:34:33 -0700 Subject: [PATCH 039/201] tgc-revival: add TGCIgnoreTerraformEncoder and TGCIgnoreTerraformCustomFlatten (#14788) Co-authored-by: Thomas Rodgers --- mmv1/api/resource.go | 3 +++ mmv1/api/type.go | 7 ++++++ mmv1/products/alloydb/Cluster.yaml | 1 - mmv1/products/pubsub/Subscription.yaml | 2 +- mmv1/products/pubsub/Topic.yaml | 2 +- .../terraform/flatten_property_method.go.tmpl | 2 +- ...alloydb_cluster_input_user_flatten.go.tmpl | 24 ------------------- .../tgc_next/encoders/pubsub_add_name.go.tmpl | 
10 -------- .../tfplan2cai/resource_converter.go.tmpl | 4 ++-- 9 files changed, 15 insertions(+), 40 deletions(-) delete mode 100644 mmv1/templates/tgc_next/custom_flatten/alloydb_cluster_input_user_flatten.go.tmpl delete mode 100644 mmv1/templates/tgc_next/encoders/pubsub_add_name.go.tmpl diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index afa8b929d722..194ae8ede6ad 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -379,6 +379,9 @@ type TGCResource struct { // and compute.googleapis.com/GlobalAddress has GlobalAddress for CaiResourceKind. // But they have the same api resource type: address CaiResourceKind string `yaml:"cai_resource_kind,omitempty"` + + // If true, the Terraform custom encoder is not applied during tfplan2cai + TGCIgnoreTerraformEncoder bool `yaml:"tgc_ignore_terraform_encoder,omitempty"` } func (r *Resource) UnmarshalYAML(unmarshal func(any) error) error { diff --git a/mmv1/api/type.go b/mmv1/api/type.go index ba180ceafd5f..2ab006fcda8f 100644 --- a/mmv1/api/type.go +++ b/mmv1/api/type.go @@ -322,6 +322,9 @@ type Type struct { // If a property is missing in CAI asset, use `is_missing_in_cai: true` // and `exclude_false_in_cai: true` is not needed ExcludeFalseInCai bool `yaml:"exclude_false_in_cai,omitempty"` + + // If true, the custom flatten function is not applied during cai2hcl + TGCIgnoreTerraformCustomFlatten bool `yaml:"tgc_ignore_terraform_custom_flatten,omitempty"` } const MAX_NAME = 20 @@ -1311,3 +1314,7 @@ func (t Type) TGCSendEmptyValue() bool { return false } + +func (t Type) ShouldIgnoreCustomFlatten() bool { + return t.ResourceMetadata.IsTgcCompiler() && (t.IgnoreRead || t.TGCIgnoreTerraformCustomFlatten) +} diff --git a/mmv1/products/alloydb/Cluster.yaml b/mmv1/products/alloydb/Cluster.yaml index 9a1341808086..0c5da97675cd 100644 --- a/mmv1/products/alloydb/Cluster.yaml +++ b/mmv1/products/alloydb/Cluster.yaml @@ -303,7 +303,6 @@ properties: Initial user to setup during cluster creation. 
ignore_read: true custom_flatten: 'templates/terraform/custom_flatten/alloydb_cluster_input_user_flatten.go.tmpl' - custom_tgc_flatten: 'templates/tgc_next/custom_flatten/alloydb_cluster_input_user_flatten.go.tmpl' properties: - name: 'user' type: String diff --git a/mmv1/products/pubsub/Subscription.yaml b/mmv1/products/pubsub/Subscription.yaml index a770ea6048b5..1624f57df84e 100644 --- a/mmv1/products/pubsub/Subscription.yaml +++ b/mmv1/products/pubsub/Subscription.yaml @@ -45,7 +45,7 @@ custom_code: constants: 'templates/terraform/constants/subscription.go.tmpl' encoder: 'templates/terraform/encoders/no_send_name.go.tmpl' update_encoder: 'templates/terraform/update_encoder/pubsub_subscription.tmpl' - tgc_encoder: 'templates/tgc_next/encoders/pubsub_add_name.go.tmpl' +tgc_ignore_terraform_encoder: true examples: - name: 'pubsub_subscription_push' primary_resource_id: 'example' diff --git a/mmv1/products/pubsub/Topic.yaml b/mmv1/products/pubsub/Topic.yaml index 9043692921cb..a47c9f935ae5 100644 --- a/mmv1/products/pubsub/Topic.yaml +++ b/mmv1/products/pubsub/Topic.yaml @@ -52,7 +52,7 @@ iam_policy: custom_code: encoder: 'templates/terraform/encoders/no_send_name.go.tmpl' update_encoder: 'templates/terraform/update_encoder/pubsub_topic.tmpl' - tgc_encoder: 'templates/tgc_next/encoders/pubsub_add_name.go.tmpl' +tgc_ignore_terraform_encoder: true error_retry_predicates: - 'transport_tpg.PubsubTopicProjectNotReady' include_in_tgc_next_DO_NOT_USE: true diff --git a/mmv1/templates/terraform/flatten_property_method.go.tmpl b/mmv1/templates/terraform/flatten_property_method.go.tmpl index cb0fbb7a76a8..7c24debc4096 100644 --- a/mmv1/templates/terraform/flatten_property_method.go.tmpl +++ b/mmv1/templates/terraform/flatten_property_method.go.tmpl @@ -14,7 +14,7 @@ limitations under the License. */ -}} {{- define "flattenPropertyMethod" }} {{- if $.WriteOnly }} -{{- else if $.CustomFlatten }} +{{- else if and $.CustomFlatten (not $.ShouldIgnoreCustomFlatten) }} {{- $.CustomTemplate $.CustomFlatten false -}} {{- else -}} func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { diff --git a/mmv1/templates/tgc_next/custom_flatten/alloydb_cluster_input_user_flatten.go.tmpl b/mmv1/templates/tgc_next/custom_flatten/alloydb_cluster_input_user_flatten.go.tmpl deleted file mode 100644 index fd462707e678..000000000000 --- a/mmv1/templates/tgc_next/custom_flatten/alloydb_cluster_input_user_flatten.go.tmpl +++ /dev/null @@ -1,24 +0,0 @@ -{{/* - The license inside this block applies to this file - Copyright 2025 Google Inc. - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
-*/ -}} -func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - if v == nil || len(v.([]interface{})) == 0 { - return nil - } - - return []interface{}{ - map[string]interface{}{ - "user": d.Get("initial_user.0.user"), - "password": d.Get("initial_user.0.password"), - }, - } -} diff --git a/mmv1/templates/tgc_next/encoders/pubsub_add_name.go.tmpl b/mmv1/templates/tgc_next/encoders/pubsub_add_name.go.tmpl deleted file mode 100644 index 68fc32a4e971..000000000000 --- a/mmv1/templates/tgc_next/encoders/pubsub_add_name.go.tmpl +++ /dev/null @@ -1,10 +0,0 @@ -config := meta.(*transport_tpg.Config) - -nameProp, err := expandPubsubTopicName(d.Get("name"), d, config) -if err != nil { - return nil, err -} else if v, ok := d.GetOkExists("name"); !tpgresource.IsEmptyValue(reflect.ValueOf(nameProp)) && (ok || !reflect.DeepEqual(v, nameProp)) { - obj["name"] = nameProp -} - -return obj, nil \ No newline at end of file diff --git a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl index 58b3ee417f09..ad2153a20dca 100644 --- a/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl +++ b/mmv1/templates/tgc_next/tfplan2cai/resource_converter.go.tmpl @@ -99,7 +99,7 @@ func Get{{ $.ResourceName -}}CaiObject(d tpgresource.TerraformResourceData, conf } {{- end}} -{{ if $.CustomCode.Encoder -}} +{{ if and $.CustomCode.Encoder (not $.TGCIgnoreTerraformEncoder) -}} obj, err = resource{{ $.ResourceName -}}Encoder(d, config, obj) if err != nil { return nil, err @@ -113,7 +113,7 @@ func Get{{ $.ResourceName -}}CaiObject(d tpgresource.TerraformResourceData, conf {{- end}} } -{{if $.CustomCode.Encoder -}} +{{if and $.CustomCode.Encoder (not $.TGCIgnoreTerraformEncoder) -}} func resource{{ $.ResourceName -}}Encoder(d tpgresource.TerraformResourceData, meta interface{}, obj map[string]interface{}) (map[string]interface{}, error) { {{ $.CustomTemplate $.CustomCode.Encoder false -}} } From 239b775a87352d383b2bd52ffa80db90b38adc10 Mon Sep 17 00:00:00 2001 From: William Yardley Date: Wed, 20 Aug 2025 14:49:40 -0700 Subject: [PATCH 040/201] container: Used zonal cluster for network config test (#14847) --- .../resource_container_node_pool_test.go.tmpl | 83 ++++++++++--------- 1 file changed, 42 insertions(+), 41 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl index bcebd1f3dc62..571f99164a7a 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl @@ -4100,7 +4100,8 @@ resource "google_compute_subnetwork" "container_subnetwork" { resource "google_container_cluster" "cluster" { name = "%s" - location = "us-central1" + # Zonal rather than regional to reduce setup time and node count per zone. 
+ location = "us-central1-c" initial_node_count = 1 network = google_compute_network.container_network.name @@ -4110,82 +4111,82 @@ resource "google_container_cluster" "cluster" { services_secondary_range_name = google_compute_subnetwork.container_subnetwork.secondary_ip_range[1].range_name } release_channel { - channel = "RAPID" + channel = "RAPID" } deletion_protection = false } resource "google_container_node_pool" "with_manual_pod_cidr" { - name = "%s-manual" - location = "us-central1" - cluster = google_container_cluster.cluster.name + name = "%s-manual" + location = google_container_cluster.cluster.location + cluster = google_container_cluster.cluster.name node_count = 1 network_config { create_pod_range = false - pod_range = google_compute_subnetwork.container_subnetwork.secondary_ip_range[2].range_name + pod_range = google_compute_subnetwork.container_subnetwork.secondary_ip_range[2].range_name } node_config { - oauth_scopes = [ - "https://www.googleapis.com/auth/cloud-platform", - ] + oauth_scopes = [ + "https://www.googleapis.com/auth/cloud-platform", + ] } } resource "google_container_node_pool" "with_auto_pod_cidr" { - name = "%s-auto" - location = "us-central1" - cluster = google_container_cluster.cluster.name + name = "%s-auto" + location = google_container_cluster.cluster.location + cluster = google_container_cluster.cluster.name node_count = 1 network_config { - create_pod_range = true - pod_range = "auto-pod-range" - pod_ipv4_cidr_block = "10.2.0.0/20" + create_pod_range = true + pod_range = "auto-pod-range" + pod_ipv4_cidr_block = "10.2.0.0/20" } node_config { - oauth_scopes = [ - "https://www.googleapis.com/auth/cloud-platform", - ] + oauth_scopes = [ + "https://www.googleapis.com/auth/cloud-platform", + ] } } resource "google_container_node_pool" "with_pco_disabled" { - name = "%s-pco" - location = "us-central1" - cluster = google_container_cluster.cluster.name + name = "%s-pco" + location = google_container_cluster.cluster.location + cluster = google_container_cluster.cluster.name node_count = 1 network_config { - pod_cidr_overprovision_config { - disabled = true - } + pod_cidr_overprovision_config { + disabled = true + } } node_config { - oauth_scopes = [ - "https://www.googleapis.com/auth/cloud-platform", - ] + oauth_scopes = [ + "https://www.googleapis.com/auth/cloud-platform", + ] } } resource "google_container_node_pool" "with_tier1_net" { - name = "%s-tier1" - location = "us-central1" - cluster = google_container_cluster.cluster.name + name = "%s-tier1" + location = google_container_cluster.cluster.location + cluster = google_container_cluster.cluster.name node_count = 1 node_locations = [ - "us-central1-a", + "us-central1-c", ] network_config { - network_performance_config { - total_egress_bandwidth_tier = "%s" - } + network_performance_config { + total_egress_bandwidth_tier = "%s" + } } node_config { - machine_type = "n2-standard-32" - gvnic { - enabled = true - } - oauth_scopes = [ - "https://www.googleapis.com/auth/cloud-platform", - ] + machine_type = "n2-standard-32" + gvnic { + enabled = true + } + oauth_scopes = [ + "https://www.googleapis.com/auth/cloud-platform", + ] } } From f31b254e45eaa17fbab148846b4a88b660d7bfb7 Mon Sep 17 00:00:00 2001 From: wj-chen Date: Wed, 20 Aug 2025 14:51:03 -0700 Subject: [PATCH 041/201] Remove the default value of view.use_legacy_sql in google_bigquery_table (#14751) --- .../bigquery/resource_bigquery_table.go.tmpl | 24 +++++++++++-------- .../guides/version_7_upgrade.html.markdown | 6 +++++ 
.../docs/r/bigquery_table.html.markdown | 5 +++- 3 files changed, 24 insertions(+), 11 deletions(-) diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl index bebe08d56791..e6abc6db1021 100644 --- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl +++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl @@ -14,6 +14,7 @@ import ( "golang.org/x/exp/slices" + "github.com/hashicorp/go-cty/cty" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/structure" @@ -1051,13 +1052,13 @@ func ResourceBigQueryTable() *schema.Resource { }, // UseLegacySQL: [Optional] Specifies whether to use BigQuery's - // legacy SQL for this view. The default value is true. If set to - // false, the view will use BigQuery's standard SQL: + // legacy SQL for this view. If set to false, the view will use + // BigQuery's standard SQL: "use_legacy_sql": { Type: schema.TypeBool, Optional: true, - Default: true, - Description: `Specifies whether to use BigQuery's legacy SQL for this view. The default value is true. If set to false, the view will use BigQuery's standard SQL`, + Computed: true, + Description: `Specifies whether to use BigQuery's legacy SQL for this view. If set to false, the view will use BigQuery's standard SQL`, }, }, }, @@ -1668,8 +1669,8 @@ func resourceTable(d *schema.ResourceData, meta interface{}) (*bigquery.Table, e }, } - if v, ok := d.GetOk("view"); ok { - table.View = expandView(v) + if _, ok := d.GetOk("view"); ok { + table.View = expandView(d) } if v, ok := d.GetOk("materialized_view"); ok { @@ -3051,12 +3052,15 @@ func flattenRangePartitioning(rp *bigquery.RangePartitioning) []map[string]inter return []map[string]interface{}{result} } -func expandView(configured interface{}) *bigquery.ViewDefinition { - raw := configured.([]interface{})[0].(map[string]interface{}) +func expandView(d *schema.ResourceData) *bigquery.ViewDefinition { + v, _ := d.GetOk("view") + raw := v.([]interface{})[0].(map[string]interface{}) vd := &bigquery.ViewDefinition{Query: raw["query"].(string)} - if v, ok := raw["use_legacy_sql"]; ok { - vd.UseLegacySql = v.(bool) + configValue := d.GetRawConfig().GetAttr("view").Index(cty.NumberIntVal(0)).AsValueMap() + useLegacySQLValue := configValue["use_legacy_sql"] + if !useLegacySQLValue.IsNull() { + vd.UseLegacySql = useLegacySQLValue.RawEquals(cty.True) vd.ForceSendFields = append(vd.ForceSendFields, "UseLegacySql") } diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 3d1477190388..877e95cd6649 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -131,6 +131,12 @@ Use `google_beyondcorp_security_gateway_application` instead. `public_repository` fields have had their default values removed. If your state has been reliant on them, they will need to be manually included into your configuration now. +## Resource: `google_bigquery_table` + +### `view.use_legacy_sql` no longer has a default value of `True` + +The `view.use_legacy_sql` field no longer has a default value. 
Configurations that relied on the old default will show no diff in the plan, and there will be no change to existing views. For a new view, leaving this field unspecified in the configuration will result in the view being created with no `use_legacy_sql` value, which the API interprets as a `true` and assumes the legacy SQL dialect for its query. See the [API documentation](https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#ViewDefinition) for more details. + ## Resource: `google_bigtable_table_iam_binding` ### `instance` is now removed diff --git a/mmv1/third_party/terraform/website/docs/r/bigquery_table.html.markdown b/mmv1/third_party/terraform/website/docs/r/bigquery_table.html.markdown index b92ced3e90e2..56d2430cc7d0 100644 --- a/mmv1/third_party/terraform/website/docs/r/bigquery_table.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/bigquery_table.html.markdown @@ -425,7 +425,10 @@ The following arguments are supported: * `query` - (Required) A query that BigQuery executes when the view is referenced. * `use_legacy_sql` - (Optional) Specifies whether to use BigQuery's legacy SQL for this view. - The default value is true. If set to false, the view will use BigQuery's standard SQL. + If set to `false`, the view will use BigQuery's standard SQL. If set to + `true`, the view will use BigQuery's legacy SQL. If unset, the API will + interpret it as a `true` and assumes the legacy SQL dialect for its query + according to the [API documentation](https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#ViewDefinition). -> **Note**: Starting in provider version `7.0.0`, no default value is provided for this field unless explicitly set in the configuration. From 35f331ec9908d0bf4717e3946ac4b189122d8028 Mon Sep 17 00:00:00 2001 From: aditikumarii-google Date: Thu, 21 Aug 2025 03:21:35 +0530 Subject: [PATCH 042/201] Adding restoreBackup support for sql db instance using backupdr_backup (#14653) --- .../acctest/bootstrap_test_utils.go.tmpl | 88 ++++++ .../resource_sql_database_instance.go.tmpl | 35 ++- .../resource_sql_database_instance_meta.yaml | 1 + ...esource_sql_database_instance_test.go.tmpl | 284 +++++++++++++++++- .../terraform/transport/config.go.tmpl | 15 + .../r/sql_database_instance.html.markdown | 5 + 6 files changed, 421 insertions(+), 7 deletions(-) diff --git a/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl b/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl index 6b8fbb575510..2c158386e146 100644 --- a/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl +++ b/mmv1/third_party/terraform/acctest/bootstrap_test_utils.go.tmpl @@ -34,6 +34,7 @@ import ( "google.golang.org/api/servicenetworking/v1" "google.golang.org/api/serviceusage/v1" sqladmin "google.golang.org/api/sqladmin/v1beta4" + backupdr "google.golang.org/api/backupdr/v1" ) var SharedKeyRing = "tftest-shared-keyring-1" @@ -1159,6 +1160,93 @@ func BootstrapSharedSQLInstanceBackupRun(t *testing.T) string { return bootstrapInstance.Name } +// waitForBackupdrOperation polls the operation until it is done or times out. 
+func waitForBackupdrOperation(ctx context.Context, t *testing.T, backupdrService *backupdr.Service, op *backupdr.Operation) (*backupdr.Operation, error) { + t.Helper() + opService := backupdr.NewProjectsLocationsOperationsService(backupdrService) + ticker := time.NewTicker(5 * time.Second) // Poll every 5 seconds + defer ticker.Stop() + + const timeout = 5 * time.Minute // Maximum time to wait + ctx, cancel := context.WithTimeout(ctx, timeout) + defer cancel() + + for { + select { + case <-ctx.Done(): + return nil, fmt.Errorf("timed out waiting for operation %s to complete", op.Name) + case <-ticker.C: + latestOp, err := opService.Get(op.Name).Context(ctx).Do() + if err != nil { + // Retry on transient errors if necessary, fail on others. + return nil, fmt.Errorf("error getting operation %s: %w", op.Name, err) + } + op = latestOp + t.Logf("Operation %s status: Done=%v", op.Name, op.Done) + + if op.Done { + if op.Error != nil { + return op, fmt.Errorf("operation %s failed: %v (code %d)", op.Name, op.Error.Message, op.Error.Code) + } + t.Logf("Operation %s completed successfully.", op.Name) + return op, nil + } + } + } +} + +// BootstrapBackupDRVault creates or gets a BackupDR backup vault for testing. +func BootstrapBackupDRVault(t *testing.T, vaultID, location string) string { + ctx := context.Background() + project := envvar.GetTestProjectFromEnv() + config := BootstrapConfig(t) + if config == nil { + t.Fatal("Could not bootstrap config.") + } + + // Create a backupdr client and check if the vault exists, if not create a vault + // backupdrClient := config.NewBackupDRClient(config.UserAgent) + vaultName := fmt.Sprintf("projects/%s/locations/%s/backupVaults/%s", project, location, vaultID) + projectAndLocation := fmt.Sprintf("projects/%s/locations/%s", project, location) + + log.Printf("[DEBUG] Getting BackupDR vault %q", vaultName) + backupdrService := config.NewBackupDRClient(config.UserAgent) + _, err := backupdrService.Projects.Locations.BackupVaults.Get(vaultName).Do() + if err != nil && transport_tpg.IsGoogleApiErrorWithCode(err, 404) { + log.Printf("[DEBUG] BackupDR vault %q not found, bootstrapping", vaultName) + // Prepare the request body for BackupVault creation + enforcedRetentionDays := 1 + effectiveDays := 1 + + retentionDuration := time.Duration(enforcedRetentionDays) * 24 * time.Hour + effectiveTime := time.Now().Add(time.Duration(effectiveDays) * 24 * time.Hour) + + backupVault := &backupdr.BackupVault{ + BackupMinimumEnforcedRetentionDuration: fmt.Sprintf("%ds", int(retentionDuration.Seconds())), + EffectiveTime: effectiveTime.Format(time.RFC3339), + Description: "Created by BootstrapBackupDRVault function", + } + {{/* _, err = config.NewBackupDRClient(config.UserAgent).Projects.Locations.BackupVaults.Create(projectAndLocation, backupVault).Do() */}} + createCall := backupdrService.Projects.Locations.BackupVaults.Create(projectAndLocation, backupVault) + createCall.BackupVaultId(vaultID) // *** This is REQUIRED for the query parameter *** + // createCall.ValidateOnly(false) // Optional: explicit validate only flag + op, err := createCall.Do() + if err != nil { + t.Fatalf("Error calling Create BackupDR vault %q: %s", vaultName, err) + } + fmt.Printf("Successfully initiated creation of BackupDR vault %q (Operation: %s)\n", vaultName, op.Name) + + // *** WAIT FOR COMPLETION *** + if _, err := waitForBackupdrOperation(ctx, t, backupdrService, op); err != nil { + t.Fatalf("Create operation for %s failed: %v", vaultName, err) + } + fmt.Printf("Successfully created BackupDR 
vault %q\n", vaultName) + + } + + return vaultName +} + func BootstrapSharedCaPoolInLocation(t *testing.T, location string) string { project := envvar.GetTestProjectFromEnv() poolName := "static-ca-pool" diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl index bd3c036f73f3..361d3cc1d9fb 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl @@ -1131,6 +1131,11 @@ API (for read pools, effective_availability_type may differ from availability_ty }, }, }, + "backupdr_backup": { + Type: schema.TypeString, + Optional: true, + Description: `The name of the BackupDR backup to restore from.`, + }, "clone": { Type: schema.TypeList, Optional: true, @@ -1437,7 +1442,14 @@ func resourceSqlDatabaseInstanceCreate(d *schema.ResourceData, meta interface{}) // Perform a backup restore if the backup context exists if r, ok := d.GetOk("restore_backup_context"); ok { - err = sqlDatabaseInstanceRestoreFromBackup(d, config, userAgent, project, name, r) + log.Printf("[DEBUG] Restoring instance %s from backup context: %v", name, r) + err = sqlDatabaseInstanceRestoreFromBackup(d, config, userAgent, project, name, r, "") + if err != nil { + return err + } + } else if b, ok := d.GetOk("backupdr_backup"); ok && b.(string) != "" { + log.Printf("[DEBUG] Restoring instance %s from BackupDR backup: %s", name, b.(string)) + err = sqlDatabaseInstanceRestoreFromBackup(d, config, userAgent, project, name, nil, b) if err != nil { return err } @@ -2260,7 +2272,14 @@ func resourceSqlDatabaseInstanceUpdate(d *schema.ResourceData, meta interface{}) // Perform a backup restore if the backup context exists and has changed if r, ok := d.GetOk("restore_backup_context"); ok { if d.HasChange("restore_backup_context") { - err = sqlDatabaseInstanceRestoreFromBackup(d, config, userAgent, project, d.Get("name").(string), r) + err = sqlDatabaseInstanceRestoreFromBackup(d, config, userAgent, project, d.Get("name").(string), r, "") + if err != nil { + return err + } + } + } else if b, ok := d.GetOk("backupdr_backup"); ok && b.(string) != "" { + if d.HasChange("backupdr_backup") { + err = sqlDatabaseInstanceRestoreFromBackup(d, config, userAgent, project, d.Get("name").(string), nil, b) if err != nil { return err } @@ -2905,12 +2924,16 @@ func expandRestoreBackupContext(configured []interface{}) *sqladmin.RestoreBacku } } -func sqlDatabaseInstanceRestoreFromBackup(d *schema.ResourceData, config *transport_tpg.Config, userAgent, project, instanceId string, r interface{}) error { +func sqlDatabaseInstanceRestoreFromBackup(d *schema.ResourceData, config *transport_tpg.Config, userAgent, project, instanceId string, r interface{}, backupdrBackup interface{}) error { log.Printf("[DEBUG] Initiating SQL database instance backup restore") - restoreContext := r.([]interface{}) - backupRequest := &sqladmin.InstancesRestoreBackupRequest{ - RestoreBackupContext: expandRestoreBackupContext(restoreContext), + backupRequest := &sqladmin.InstancesRestoreBackupRequest{} + + if r != nil { + restoreContext := r.([]interface{}) + backupRequest.RestoreBackupContext = expandRestoreBackupContext(restoreContext) + } else if backupdrBackup != nil && backupdrBackup.(string) != "" { + backupRequest.BackupdrBackup = backupdrBackup.(string) } var op *sqladmin.Operation diff --git 
a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_meta.yaml b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_meta.yaml index 39cca9f8f62b..df9d0644270c 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_meta.yaml +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_meta.yaml @@ -5,6 +5,7 @@ api_version: 'v1beta4' api_resource_type_kind: 'DatabaseInstance' fields: - field: 'available_maintenance_versions' + - field: 'backupdr_backup' - field: 'clone.allocated_ip_range' - field: 'clone.database_names' - field: 'clone.point_in_time' diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl index 2a86ad8c74b8..87a8ab904fa4 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl @@ -1400,6 +1400,43 @@ func TestAccSqlDatabaseInstance_createFromBackup(t *testing.T) { }) } +func TestAccSqlDatabaseInstance_createFromBackupDR(t *testing.T) { + t.Parallel() + + // Bootstrap the BackupDR vault + backupVaultID := "bv-test" + location := "us-central1" + project := envvar.GetTestProjectFromEnv() + backupvault := acctest.BootstrapBackupDRVault(t, backupVaultID, location) + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "project": project, + "backup_vault_id": backupVaultID, + "backup_vault": backupvault, + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccSqlDatabaseInstance_createFromBackupDR(context), + }, + { + ResourceName: "google_sql_database_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection", "backupdr_backup"}, + }, + }, + }) +} + func TestAccSqlDatabaseInstance_backupUpdate(t *testing.T) { // Sqladmin client acctest.SkipIfVcr(t) @@ -1407,6 +1444,7 @@ func TestAccSqlDatabaseInstance_backupUpdate(t *testing.T) { context := map[string]interface{}{ "random_suffix": acctest.RandString(t, 10), + "db_version": "POSTGRES_11", "original_db_name": acctest.BootstrapSharedSQLInstanceBackupRun(t), } @@ -1437,6 +1475,53 @@ func TestAccSqlDatabaseInstance_backupUpdate(t *testing.T) { }) } +func TestAccSqlDatabaseInstance_BackupDRUpdate(t *testing.T) { + t.Parallel() + + // Bootstrap the BackupDR vault + backupVaultID := "bv-test" + location := "us-central1" + project := envvar.GetTestProjectFromEnv() + backupvault := acctest.BootstrapBackupDRVault(t, backupVaultID, location) + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "project": project, + "backup_vault_id": backupVaultID, + "backup_vault": backupvault, + "db_version": "MYSQL_8_0_41", + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: 
testAccSqlDatabaseInstance_beforeBackup(context), + }, + { + ResourceName: "google_sql_database_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + { + Config: testAccSqlDatabaseInstance_updateFromBackupDR(context), + }, + { + ResourceName: "google_sql_database_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection", "backupdr_backup"}, + }, + }, + }) +} + func TestAccSqlDatabaseInstance_basicClone(t *testing.T) { // Sqladmin client acctest.SkipIfVcr(t) @@ -6351,7 +6436,7 @@ func testAccSqlDatabaseInstance_beforeBackup(context map[string]interface{}) str return acctest.Nprintf(` resource "google_sql_database_instance" "instance" { name = "tf-test-%{random_suffix}" - database_version = "POSTGRES_11" + database_version = "%{db_version}" region = "us-central1" settings { @@ -6400,6 +6485,203 @@ data "google_sql_backup_run" "backup" { `, context) } +func testAccSqlDatabaseInstance_createFromBackupDR(context map[string]interface{}) string { + return acctest.Nprintf(` +// Create service account +resource "google_service_account" "bkdr_sa" { + account_id = "tf-test-bkdr-sa-%{random_suffix}" + display_name = "Backup DR Service Account" +} + +// Create a backup plan +resource "google_backup_dr_backup_plan" "plan" { + location = "us-central1" + backup_plan_id = "tf-test-bp-test-%{random_suffix}" + resource_type = "sqladmin.googleapis.com/Instance" + backup_vault = "%{backup_vault}" + + backup_rules { + rule_id = "rule-1" + backup_retention_days = 7 + + standard_schedule { + recurrence_type = "DAILY" + hourly_frequency = 6 + time_zone = "UTC" + + backup_window { + start_hour_of_day = 0 + end_hour_of_day = 23 + } + } + } +} + +// Create source SQL instance to backup +resource "google_sql_database_instance" "source" { + name = "tf-test-source-%{random_suffix}" + database_version = "MYSQL_8_0_41" + region = "us-central1" + project = "%{project}" + settings { + tier = "db-f1-micro" + backup_configuration { + enabled = true + } + } + lifecycle { + ignore_changes = [ + settings[0].backup_configuration[0].enabled, + ] + } + deletion_protection = false +} + +// Associate backup plan with SQL instance +resource "google_backup_dr_backup_plan_association" "association" { + location = "us-central1" + backup_plan_association_id = "tf-test-bpa-test-%{random_suffix}" + resource = "projects/${google_sql_database_instance.source.project}/instances/${google_sql_database_instance.source.name}" + resource_type = "sqladmin.googleapis.com/Instance" + backup_plan = google_backup_dr_backup_plan.plan.name +} + +// Wait for the first backup to be created +resource "time_sleep" "wait_10_mins" { + depends_on = [google_backup_dr_backup_plan_association.association] + + create_duration = "600s" +} + +data "google_backup_dr_backup" "sql_backups" { + project = "%{project}" + location = "us-central1" + backup_vault_id = "%{backup_vault_id}" + data_source_id = element(split("/", google_backup_dr_backup_plan_association.association.data_source), length(split("/", google_backup_dr_backup_plan_association.association.data_source)) - 1) + + depends_on = [time_sleep.wait_10_mins] +} + +resource "google_sql_database_instance" "instance" { + name = "tf-test-%{random_suffix}" + database_version = "MYSQL_8_0_41" + region = "us-central1" + + settings { + tier = "db-g1-small" + backup_configuration { + enabled = true + } + } + + backupdr_backup = 
data.google_backup_dr_backup.sql_backups.backups[0].name + + deletion_protection = false +} +`, context) +} + +func testAccSqlDatabaseInstance_updateFromBackupDR(context map[string]interface{}) string { + return acctest.Nprintf(` +// Create service account +resource "google_service_account" "bkdr_sa" { + account_id = "tf-test-bkdr-sa-%{random_suffix}" + display_name = "Backup DR Service Account" +} + +// Create a backup plan +resource "google_backup_dr_backup_plan" "plan" { + location = "us-central1" + backup_plan_id = "tf-test-bp-test-%{random_suffix}" + resource_type = "sqladmin.googleapis.com/Instance" + backup_vault = "%{backup_vault}" + + backup_rules { + rule_id = "rule-1" + backup_retention_days = 7 + + standard_schedule { + recurrence_type = "DAILY" + hourly_frequency = 6 + time_zone = "UTC" + + backup_window { + start_hour_of_day = 0 + end_hour_of_day = 23 + } + } + } +} + +// Create source SQL instance to backup +resource "google_sql_database_instance" "source" { + name = "tf-test-source-%{random_suffix}" + database_version = "MYSQL_8_0_41" + region = "us-central1" + project = "%{project}" + settings { + tier = "db-f1-micro" + backup_configuration { + enabled = true + } + } + lifecycle { + ignore_changes = [ + settings[0].backup_configuration[0].enabled, + ] + } + deletion_protection = false +} + +// Associate backup plan with SQL instance +resource "google_backup_dr_backup_plan_association" "association" { + location = "us-central1" + backup_plan_association_id = "tf-test-bpa-test-%{random_suffix}" + resource = "projects/${google_sql_database_instance.source.project}/instances/${google_sql_database_instance.source.name}" + resource_type = "sqladmin.googleapis.com/Instance" + backup_plan = google_backup_dr_backup_plan.plan.name +} + +// Wait for the first backup to be created +resource "time_sleep" "wait_10_mins" { + depends_on = [google_backup_dr_backup_plan_association.association] + + create_duration = "600s" +} + +data "google_backup_dr_backup" "sql_backups" { + project = "%{project}" + location = "us-central1" + backup_vault_id = "%{backup_vault_id}" + data_source_id = element(split("/", google_backup_dr_backup_plan_association.association.data_source), length(split("/", google_backup_dr_backup_plan_association.association.data_source)) - 1) + + depends_on = [time_sleep.wait_10_mins] +} + +resource "google_sql_database_instance" "instance" { + name = "tf-test-%{random_suffix}" + database_version = "MYSQL_8_0_41" + region = "us-central1" + + settings { + tier = "db-g1-small" + backup_configuration { + enabled = true + } + } + lifecycle { + ignore_changes = [ + settings[0].backup_configuration[0].enabled, + ] + } + + backupdr_backup = data.google_backup_dr_backup.sql_backups.backups[0].name + + deletion_protection = false +} +`, context) +} + func testAccSqlDatabaseInstance_basicClone(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_sql_database_instance" "instance" { diff --git a/mmv1/third_party/terraform/transport/config.go.tmpl b/mmv1/third_party/terraform/transport/config.go.tmpl index 7d9b54ede5e6..15deef0d53fb 100644 --- a/mmv1/third_party/terraform/transport/config.go.tmpl +++ b/mmv1/third_party/terraform/transport/config.go.tmpl @@ -87,6 +87,7 @@ import ( "google.golang.org/api/sourcerepo/v1" "google.golang.org/api/spanner/v1" sqladmin "google.golang.org/api/sqladmin/v1beta4" + backupdr "google.golang.org/api/backupdr/v1" "google.golang.org/api/storage/v1" "google.golang.org/api/storagetransfer/v1" "google.golang.org/api/transport" @@ 
-903,6 +904,20 @@ func (c *Config) NewSqlAdminClient(userAgent string) *sqladmin.Service {
 	return clientSqlAdmin
 }
 
+func (c *Config) NewBackupDRClient(userAgent string) *backupdr.Service {
+	backupdrClientBasePath := RemoveBasePathVersion(RemoveBasePathVersion(c.BackupDRBasePath))
+	log.Printf("[INFO] Instantiating Google BackupDR client for path %s", backupdrClientBasePath)
+	clientBackupdrAdmin, err := backupdr.NewService(c.Context, option.WithHTTPClient(c.Client))
+	if err != nil {
+		log.Printf("[WARN] Error creating BackupDR client: %s", err)
+		return nil
+	}
+	clientBackupdrAdmin.UserAgent = userAgent
+	clientBackupdrAdmin.BasePath = backupdrClientBasePath
+
+	return clientBackupdrAdmin
+}
+
 func (c *Config) NewPubsubClient(userAgent string) *pubsub.Service {
 	pubsubClientBasePath := RemoveBasePathVersion(c.PubsubBasePath)
 	log.Printf("[INFO] Instantiating Google Pubsub client for path %s", pubsubClientBasePath)
diff --git a/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown b/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown
index 5d3abe5dc5ed..b0675085eaf1 100644
--- a/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown
+++ b/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown
@@ -334,6 +334,11 @@ includes an up-to-date reference of supported versions.
  **NOTE:** Restoring from a backup is an imperative action and not recommended via Terraform. Adding or modifying this
  block during resource creation/update will trigger the restore action after the resource is created/updated.
 
+* `backupdr_backup` - (Optional) The full resource name of the Backup and DR backup to restore this instance from. Setting
+  or changing this field will cause Terraform to trigger a restore from the indicated backup.
+  **NOTE:** Restoring from a backup is an imperative action and not recommended via Terraform. Adding or modifying this
+  field during resource creation/update will trigger the restore action after the resource is created/updated.
+
 * `clone` - (Optional) The context needed to create this instance as a clone of another instance. When this field is set
  during resource creation, Terraform will attempt to clone another instance as indicated in the context. The
  configuration is detailed below.
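For illustration, a minimal configuration that restores a new instance from a Backup and DR backup might look like the sketch below. The `backupdr_backup` reference follows the pattern used in the acceptance test above; the instance name, tier, and data source reference are placeholders, not a definitive recipe.

```hcl
# Illustrative sketch only: create a Cloud SQL instance restored from a
# Backup and DR backup, referenced by its full resource name.
resource "google_sql_database_instance" "restored" {
  name             = "restored-instance"
  database_version = "MYSQL_8_0_41"
  region           = "us-central1"

  settings {
    tier = "db-g1-small"
  }

  # Full resource name of the Backup and DR backup to restore from,
  # e.g. taken from the google_backup_dr_backup data source.
  backupdr_backup = data.google_backup_dr_backup.sql_backups.backups[0].name

  deletion_protection = false
}
```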
From 17fa04ed2ea422312ba677649fd13e878e95c1a2 Mon Sep 17 00:00:00 2001 From: Cameron Thornton Date: Wed, 20 Aug 2025 17:31:58 -0500 Subject: [PATCH 043/201] upgrade dcl to 1.82.0 gkehub binauthz removal (#14916) --- mmv1/third_party/terraform/go.mod | 2 +- mmv1/third_party/terraform/go.sum | 2 ++ tpgtools/go.mod | 2 +- tpgtools/go.sum | 2 ++ 4 files changed, 6 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/go.mod b/mmv1/third_party/terraform/go.mod index 9282c683f316..5d14fa4cdddb 100644 --- a/mmv1/third_party/terraform/go.mod +++ b/mmv1/third_party/terraform/go.mod @@ -6,7 +6,7 @@ require ( cloud.google.com/go/auth v0.16.3 cloud.google.com/go/auth/oauth2adapt v0.2.8 cloud.google.com/go/bigtable v1.37.0 - github.com/GoogleCloudPlatform/declarative-resource-client-library v1.81.0 + github.com/GoogleCloudPlatform/declarative-resource-client-library v1.82.0 github.com/apparentlymart/go-cidr v1.1.0 github.com/davecgh/go-spew v1.1.1 github.com/dnaeon/go-vcr v1.0.1 diff --git a/mmv1/third_party/terraform/go.sum b/mmv1/third_party/terraform/go.sum index 2ed259c385dc..a56b9444c47a 100644 --- a/mmv1/third_party/terraform/go.sum +++ b/mmv1/third_party/terraform/go.sum @@ -26,6 +26,8 @@ github.com/GoogleCloudPlatform/declarative-resource-client-library v1.80.0 h1:Zp github.com/GoogleCloudPlatform/declarative-resource-client-library v1.80.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= github.com/GoogleCloudPlatform/declarative-resource-client-library v1.81.0 h1:zTRBYNu7nk3TMbiRfkBcRNzw4cOeym0z1GduDYNyRyE= github.com/GoogleCloudPlatform/declarative-resource-client-library v1.81.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.82.0 h1:58Vw+qpPWX4JGAB/DfuDwEg6dGp0+q6raXqjs52qRik= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.82.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= github.com/ProtonMail/go-crypto v1.1.6 h1:ZcV+Ropw6Qn0AX9brlQLAUXfqLBc7Bl+f/DmNxpLfdw= diff --git a/tpgtools/go.mod b/tpgtools/go.mod index 7b2747548e1e..d7a556fbec12 100644 --- a/tpgtools/go.mod +++ b/tpgtools/go.mod @@ -4,7 +4,7 @@ go 1.23 require ( bitbucket.org/creachadair/stringset v0.0.11 - github.com/GoogleCloudPlatform/declarative-resource-client-library v1.81.0 + github.com/GoogleCloudPlatform/declarative-resource-client-library v1.82.0 github.com/golang/glog v1.1.2 github.com/hashicorp/hcl v1.0.0 github.com/kylelemons/godebug v1.1.0 diff --git a/tpgtools/go.sum b/tpgtools/go.sum index e727756f3b61..4c05f97caca1 100644 --- a/tpgtools/go.sum +++ b/tpgtools/go.sum @@ -10,6 +10,8 @@ github.com/GoogleCloudPlatform/declarative-resource-client-library v1.80.0 h1:Zp github.com/GoogleCloudPlatform/declarative-resource-client-library v1.80.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= github.com/GoogleCloudPlatform/declarative-resource-client-library v1.81.0 h1:zTRBYNu7nk3TMbiRfkBcRNzw4cOeym0z1GduDYNyRyE= github.com/GoogleCloudPlatform/declarative-resource-client-library v1.81.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.82.0 h1:58Vw+qpPWX4JGAB/DfuDwEg6dGp0+q6raXqjs52qRik= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.82.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= 
github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4= github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= From b9d7b73ad0e712129c882f9466ea35452ad6656f Mon Sep 17 00:00:00 2001 From: Axel Kossek Date: Thu, 21 Aug 2025 02:44:46 +0200 Subject: [PATCH 044/201] Add resource_manager_tags support to Backend Bucket api (#14901) --- mmv1/products/compute/BackendBucket.yaml | 16 ++++++ .../resource_compute_backend_bucket_test.go | 49 +++++++++++++++++++ 2 files changed, 65 insertions(+) diff --git a/mmv1/products/compute/BackendBucket.yaml b/mmv1/products/compute/BackendBucket.yaml index a92ae63a5127..21ad6c671212 100644 --- a/mmv1/products/compute/BackendBucket.yaml +++ b/mmv1/products/compute/BackendBucket.yaml @@ -298,3 +298,19 @@ properties: enum_values: - 'INTERNAL_MANAGED' send_empty_value: true + - name: 'params' + type: NestedObject + ignore_read: true + immutable: true + description: | + Additional params passed with the request, but not persisted as part of resource payload + properties: + - name: 'resourceManagerTags' + type: KeyValuePairs + description: | + Resource manager tags to be bound to the backend bucket. Tag keys and values have the + same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, + and values are in the format tagValues/456. + api_name: resourceManagerTags + ignore_read: true + immutable: true diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_backend_bucket_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_backend_bucket_test.go index f515811428fd..dd2ff2fea9f3 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_backend_bucket_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_backend_bucket_test.go @@ -6,6 +6,7 @@ import ( "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" ) func TestAccComputeBackendBucket_basicModified(t *testing.T) { @@ -205,6 +206,35 @@ func TestAccComputeBackendBucket_withCdnCacheMode_update(t *testing.T) { }) } +func TestAccComputeBackendBucket_withTags(t *testing.T) { + t.Parallel() + + org := envvar.GetTestOrgFromEnv(t) + + backendName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + storageName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + tagKeyResult := acctest.BootstrapSharedTestTagKeyDetails(t, "crm-bb-tagkey", "organizations/"+org, make(map[string]interface{})) + sharedTagkey, _ := tagKeyResult["shared_tag_key"] + tagValueResult := acctest.BootstrapSharedTestTagValueDetails(t, "crm-bb-tagvalue", sharedTagkey, org) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeBackendBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeBackendBucket_withTags(backendName, storageName, tagKeyResult["name"], tagValueResult["name"]), + }, + { + ResourceName: "google_compute_backend_bucket.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"params"}, + }, + }, + }) +} + func testAccComputeBackendBucket_basic(backendName, storageName string) string { return 
fmt.Sprintf(` resource "google_compute_backend_bucket" "foobar" { @@ -414,3 +444,22 @@ resource "google_storage_bucket" "bucket" { } `, backendName, default_ttl, storageName) } + +func testAccComputeBackendBucket_withTags(backendName, storageName string, tagKey string, tagValue string) string { + return fmt.Sprintf(` +resource "google_compute_backend_bucket" "foobar" { + name = "%s" + bucket_name = google_storage_bucket.bucket_one.name + params { + resource_manager_tags = { + "%s" = "%s" + } + } +} + +resource "google_storage_bucket" "bucket_one" { + name = "%s" + location = "EU" +} +`, backendName, tagKey, tagValue, storageName) +} From 4b274d48a5d8ead47b5006fc37439b237f518cbc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B6rn?= <81525627+bestefreund@users.noreply.github.com> Date: Thu, 21 Aug 2025 18:10:16 +0200 Subject: [PATCH 045/201] Add singular data source for retrieving an NPM package from an Artifact Registry repository (#14804) --- .../provider/provider_mmv1_resources.go.tmpl | 1 + ...ta_source_artifact_registry_npm_package.go | 295 ++++++++++++++++++ ...urce_artifact_registry_npm_package_test.go | 67 ++++ ...rtifact_registry_npm_package.html.markdown | 65 ++++ 4 files changed, 428 insertions(+) create mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_npm_package.go create mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_npm_package_test.go create mode 100644 mmv1/third_party/terraform/website/docs/d/artifact_registry_npm_package.html.markdown diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index 48732c2c9254..47b8c759ad9d 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -29,6 +29,7 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_artifact_registry_docker_image": artifactregistry.DataSourceArtifactRegistryDockerImage(), "google_artifact_registry_docker_images": artifactregistry.DataSourceArtifactRegistryDockerImages(), "google_artifact_registry_locations": artifactregistry.DataSourceGoogleArtifactRegistryLocations(), + "google_artifact_registry_npm_package": artifactregistry.DataSourceArtifactRegistryNpmPackage(), "google_artifact_registry_package": artifactregistry.DataSourceArtifactRegistryPackage(), "google_artifact_registry_repositories": artifactregistry.DataSourceArtifactRegistryRepositories(), "google_artifact_registry_repository": artifactregistry.DataSourceArtifactRegistryRepository(), diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_npm_package.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_npm_package.go new file mode 100644 index 000000000000..ca0355700f8c --- /dev/null +++ b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_npm_package.go @@ -0,0 +1,295 @@ +package artifactregistry + +import ( + "fmt" + "net/url" + "sort" + "strings" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +type NpmPackage struct { + name string + packageName string + version string + tags []string + createTime time.Time + updateTime time.Time +} + +func 
DataSourceArtifactRegistryNpmPackage() *schema.Resource { + return &schema.Resource{ + Read: DataSourceArtifactRegistryNpmPackageRead, + + Schema: map[string]*schema.Schema{ + "project": { + Type: schema.TypeString, + Optional: true, + Description: "Project ID of the project.", + }, + "location": { + Type: schema.TypeString, + Required: true, + Description: "The region of the Artifact Registry repository.", + }, + "repository_id": { + Type: schema.TypeString, + Required: true, + Description: "The repository ID containing the Npm package.", + }, + "package_name": { + Type: schema.TypeString, + Required: true, + Description: "The name of the Npm package.", + }, + "version": { + Type: schema.TypeString, + Computed: true, + Description: "The version of the Npm package.", + }, + "tags": { + Type: schema.TypeList, + Computed: true, + Description: "The tags associated with the Npm package.", + Elem: &schema.Schema{Type: schema.TypeString}, + }, + "name": { + Type: schema.TypeString, + Computed: true, + Description: "The fully qualified name of the Npm package.", + }, + "create_time": { + Type: schema.TypeString, + Computed: true, + Description: "The time the package was created.", + }, + "update_time": { + Type: schema.TypeString, + Computed: true, + Description: "The time the package was last updated.", + }, + }, + } +} + +func DataSourceArtifactRegistryNpmPackageRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + + var res NpmPackage + + packageName, version := parseNpmPackage(d.Get("package_name").(string)) + + if version != "" { + // fetch package by version + // https://cloud.google.com/artifact-registry/docs/reference/rest/v1/projects.locations.repositories.npmPackages/get + packageUrlSafe := url.QueryEscape(packageName) + urlRequest, err := tpgresource.ReplaceVars(d, config, fmt.Sprintf("{{ArtifactRegistryBasePath}}projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/npmPackages/%s:%s", packageUrlSafe, version)) + if err != nil { + return fmt.Errorf("Error setting api endpoint") + } + + resGet, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + RawURL: urlRequest, + UserAgent: userAgent, + }) + if err != nil { + return err + } + + res = convertNpmPackageResponseToStruct(resGet) + } else { + // fetch the list of packages, ordered by update time + // https://cloud.google.com/artifact-registry/docs/reference/rest/v1/projects.locations.repositories.npmPackages/list + urlRequest, err := tpgresource.ReplaceVars(d, config, "{{ArtifactRegistryBasePath}}projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/npmPackages") + if err != nil { + return fmt.Errorf("Error setting api endpoint") + } + + // to reduce the number of pages we need to fetch, we set the pageSize to 1000(max) + urlRequest, err = transport_tpg.AddQueryParams(urlRequest, map[string]string{"pageSize": "1000"}) + if err != nil { + return err + } + + res, err = retrieveAndFilterNpmPackages(d, config, urlRequest, userAgent, packageName, version) + if err != nil { + return err + } + } + + // Set Terraform schema fields + if err := d.Set("project", project); err != nil { + return err + } + if err := d.Set("package_name", packageName); err != nil { + return err + } + if err := d.Set("name", res.name); 
err != nil { + return err + } + if err := d.Set("version", res.version); err != nil { + return err + } + if err := d.Set("tags", res.tags); err != nil { + return err + } + if err := d.Set("create_time", res.createTime.Format(time.RFC3339Nano)); err != nil { + return err + } + if err := d.Set("update_time", res.updateTime.Format(time.RFC3339Nano)); err != nil { + return err + } + + d.SetId(res.name) + + return nil +} + +func parseNpmPackage(pkg string) (packageName string, version string) { + splitByColon := strings.Split(pkg, ":") + + if len(splitByColon) == 2 { + packageName = splitByColon[0] + version = splitByColon[1] + } else { + packageName = pkg + } + + return packageName, version +} + +func retrieveAndFilterNpmPackages(d *schema.ResourceData, config *transport_tpg.Config, urlRequest string, userAgent string, packageName string, version string) (NpmPackage, error) { + // Paging through the list method until either: + // if a version was provided, the matching package name and version pair + // otherwise, return the first matching package name + + var allPackages []NpmPackage + + for { + resListNpmPackages, token, err := retrieveListOfNpmPackages(config, urlRequest, userAgent) + if err != nil { + return NpmPackage{}, err + } + + for _, pkg := range resListNpmPackages { + if strings.Contains(pkg.name, "/"+url.QueryEscape(packageName)+":") { + allPackages = append(allPackages, pkg) + } + } + + if token == "" { + break + } + + urlRequest, err = transport_tpg.AddQueryParams(urlRequest, map[string]string{"pageToken": token}) + if err != nil { + return NpmPackage{}, err + } + } + + if len(allPackages) == 0 { + return NpmPackage{}, fmt.Errorf("Requested Npm package was not found.") + } + + // Client-side sort by updateTime descending and createTime descending + sort.Slice(allPackages, func(i, j int) bool { + if !allPackages[i].updateTime.Equal(allPackages[j].updateTime) { + return allPackages[i].updateTime.After(allPackages[j].updateTime) + } + return allPackages[i].createTime.After(allPackages[j].createTime) + }) + + if version != "" { + for _, pkg := range allPackages { + if pkg.version == version { + return pkg, nil + } + } + return NpmPackage{}, fmt.Errorf("Requested version was not found.") + } + + // Return the latest package if no version specified + return allPackages[0], nil +} + +func retrieveListOfNpmPackages(config *transport_tpg.Config, urlRequest string, userAgent string) ([]NpmPackage, string, error) { + resList, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + RawURL: urlRequest, + UserAgent: userAgent, + }) + if err != nil { + return make([]NpmPackage, 0), "", err + } + + if nextPageToken, ok := resList["nextPageToken"].(string); ok { + return flattenNpmPackageDataSourceListResponse(resList), nextPageToken, nil + } else { + return flattenNpmPackageDataSourceListResponse(resList), "", nil + } +} + +func flattenNpmPackageDataSourceListResponse(res map[string]interface{}) []NpmPackage { + var npmPackages []NpmPackage + + resNpmPackages, _ := res["npmPackages"].([]interface{}) + + for _, resPackage := range resNpmPackages { + pkg, _ := resPackage.(map[string]interface{}) + npmPackages = append(npmPackages, convertNpmPackageResponseToStruct(pkg)) + } + + return npmPackages +} + +func convertNpmPackageResponseToStruct(res map[string]interface{}) NpmPackage { + var npmPackage NpmPackage + + if name, ok := res["name"].(string); ok { + npmPackage.name = name + } + + if packageName, ok := res["packageName"].(string); ok { + 
npmPackage.packageName = packageName + } + + if version, ok := res["version"].(string); ok { + npmPackage.version = version + } + + var tags []string + if rawTags, ok := res["tags"].([]interface{}); ok { + for _, tag := range rawTags { + if tagStr, ok := tag.(string); ok { + tags = append(tags, tagStr) + } + } + } + npmPackage.tags = tags + + if createTimeStr, ok := res["createTime"].(string); ok { + npmPackage.createTime, _ = time.Parse(time.RFC3339, createTimeStr) + } + + if updateTimeStr, ok := res["updateTime"].(string); ok { + npmPackage.updateTime, _ = time.Parse(time.RFC3339, updateTimeStr) + } + + return npmPackage +} diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_npm_package_test.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_npm_package_test.go new file mode 100644 index 000000000000..ae9b112b192e --- /dev/null +++ b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_npm_package_test.go @@ -0,0 +1,67 @@ +package artifactregistry_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccDataSourceArtifactRegistryNpmPackage_basic(t *testing.T) { + acctest.SkipIfVcr(t) + t.Parallel() + + // At the moment there are no public Npm packages available in Artifact Registry. + // This test is skipped to avoid unnecessary failures. + // As soon as there are public packages available, this test can be enabled by removing the skip and adjusting the configuration accordingly. + t.Skip("No public Npm packages available in Artifact Registry") + + resourceName := "data.google_artifact_registry_npm_package.test" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceArtifactRegistryNpmPackageConfig, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet(resourceName, "project"), + resource.TestCheckResourceAttrSet(resourceName, "location"), + resource.TestCheckResourceAttrSet(resourceName, "repository_id"), + resource.TestCheckResourceAttrSet(resourceName, "package_name"), + resource.TestCheckResourceAttrSet(resourceName, "name"), + resource.TestCheckResourceAttrSet(resourceName, "version"), + validateNpmPackageTimestamps(resourceName), + ), + }, + }, + }) +} + +const testAccDataSourceArtifactRegistryNpmPackageConfig = ` +data "google_artifact_registry_npm_package" "test" { + project = "example-project" + location = "us" + repository_id = "example-repo" + package_name = "example-package" +} +` + +func validateNpmPackageTimestamps(dataSourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + res, ok := s.RootModule().Resources[dataSourceName] + if !ok { + return fmt.Errorf("can't find %s in state", dataSourceName) + } + + for _, attr := range []string{"create_time", "update_time"} { + if ts, ok := res.Primary.Attributes[attr]; !ok || !isRFC3339(ts) { + return fmt.Errorf("%s is not RFC3339: %s", attr, ts) + } + } + + return nil + } +} diff --git a/mmv1/third_party/terraform/website/docs/d/artifact_registry_npm_package.html.markdown b/mmv1/third_party/terraform/website/docs/d/artifact_registry_npm_package.html.markdown new file mode 100644 index 000000000000..b6e23d7d2829 --- 
/dev/null
+++ b/mmv1/third_party/terraform/website/docs/d/artifact_registry_npm_package.html.markdown
@@ -0,0 +1,65 @@
+---
+subcategory: "Artifact Registry"
+description: |-
+  Get information about an NPM package within a Google Artifact Registry Repository.
+---
+
+# google_artifact_registry_npm_package
+
+This data source fetches information about an NPM package from a provided Artifact Registry repository, based on the package name and an optional version. If no version is given, the latest version of the package is used.
+
+## Example Usage
+
+```hcl
+resource "google_artifact_registry_repository" "npm_repo" {
+  location      = "us-central1"
+  repository_id = "my-npm-repo"
+  format        = "NPM"
+}
+
+data "google_artifact_registry_npm_package" "latest" {
+  location      = google_artifact_registry_repository.npm_repo.location
+  repository_id = google_artifact_registry_repository.npm_repo.repository_id
+  package_name  = "example-pkg"
+}
+
+data "google_artifact_registry_npm_package" "with_version" {
+  location      = google_artifact_registry_repository.npm_repo.location
+  repository_id = google_artifact_registry_repository.npm_repo.repository_id
+  package_name  = "example-pkg:1.0.0"
+}
+```
+
+## Argument Reference
+
+The following arguments are supported:
+
+* `location` – (Required) The location of the Artifact Registry repository.
+
+* `repository_id` – (Required) The ID of the repository containing the NPM package.
+
+* `package_name` – (Required) The name of the package to fetch. Can optionally include a specific version (e.g., `my_pkg:1.2.3`). If no version is provided, the latest version is used.
+
+* `project` – (Optional) The ID of the project that owns the repository. If not provided, the provider-level project is used.
+
+## Attributes Reference
+
+The following computed attributes are exported:
+
+* `id` – The fully qualified name of the fetched package. Format:
+  ```
+  projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/npmPackages/{{package}}:{{version}}
+  ```
+
+* `name` – The fully qualified name of the fetched package. Format:
+  ```
+  projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/npmPackages/{{package}}:{{version}}
+  ```
+
+* `version` – The version of the NPM package.
+
+* `tags` – A list of all tags attached to this package.
+
+* `create_time` – The time the package was created.
+
+* `update_time` – The time the package was last updated.
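As a brief follow-on sketch (assuming the `latest` data source block from the example above), the documented attributes can be consumed like any other data source outputs:

```hcl
# Illustrative only: expose the resolved version and fully qualified name
# of the package returned by the data source.
output "npm_package_version" {
  value = data.google_artifact_registry_npm_package.latest.version
}

output "npm_package_name" {
  value = data.google_artifact_registry_npm_package.latest.name
}
```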
From bc461dd7ef33d2e5095fcc3b92c692b5087b318d Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Thu, 21 Aug 2025 09:25:02 -0700 Subject: [PATCH 046/201] tgc-revival: add beyondcorp resources (#14919) --- mmv1/products/beyondcorp/AppConnection.yaml | 1 + mmv1/products/beyondcorp/AppConnector.yaml | 1 + mmv1/products/beyondcorp/AppGateway.yaml | 1 + mmv1/products/binaryauthorization/Attestor.yaml | 1 + mmv1/third_party/tgc_next/test/hcl.go | 2 +- 5 files changed, 5 insertions(+), 1 deletion(-) diff --git a/mmv1/products/beyondcorp/AppConnection.yaml b/mmv1/products/beyondcorp/AppConnection.yaml index b18a67eae6ac..bc495bc95b0d 100644 --- a/mmv1/products/beyondcorp/AppConnection.yaml +++ b/mmv1/products/beyondcorp/AppConnection.yaml @@ -45,6 +45,7 @@ async: result: resource_inside_response: true custom_code: +include_in_tgc_next_DO_NOT_USE: true examples: - name: 'beyondcorp_app_connection_basic' primary_resource_id: 'app_connection' diff --git a/mmv1/products/beyondcorp/AppConnector.yaml b/mmv1/products/beyondcorp/AppConnector.yaml index e6d813017782..d8fd86d53cc8 100644 --- a/mmv1/products/beyondcorp/AppConnector.yaml +++ b/mmv1/products/beyondcorp/AppConnector.yaml @@ -42,6 +42,7 @@ async: result: resource_inside_response: true custom_code: +include_in_tgc_next_DO_NOT_USE: true examples: - name: 'beyondcorp_app_connector_basic' primary_resource_id: 'app_connector' diff --git a/mmv1/products/beyondcorp/AppGateway.yaml b/mmv1/products/beyondcorp/AppGateway.yaml index 1efdaf1504d3..808be9e4f2a6 100644 --- a/mmv1/products/beyondcorp/AppGateway.yaml +++ b/mmv1/products/beyondcorp/AppGateway.yaml @@ -45,6 +45,7 @@ async: result: resource_inside_response: true custom_code: +include_in_tgc_next_DO_NOT_USE: true schema_version: 1 state_upgraders: true examples: diff --git a/mmv1/products/binaryauthorization/Attestor.yaml b/mmv1/products/binaryauthorization/Attestor.yaml index d0862a41eb03..ab271659cc74 100644 --- a/mmv1/products/binaryauthorization/Attestor.yaml +++ b/mmv1/products/binaryauthorization/Attestor.yaml @@ -34,6 +34,7 @@ iam_policy: example_config_body: 'templates/terraform/iam/iam_attributes.go.tmpl' custom_code: constants: 'templates/terraform/constants/binaryauthorization_attestor.go.tmpl' +include_in_tgc_next_DO_NOT_USE: true examples: - name: 'binary_authorization_attestor_basic' primary_resource_id: 'attestor' diff --git a/mmv1/third_party/tgc_next/test/hcl.go b/mmv1/third_party/tgc_next/test/hcl.go index 6702e53cfec7..8748e33ce1fd 100644 --- a/mmv1/third_party/tgc_next/test/hcl.go +++ b/mmv1/third_party/tgc_next/test/hcl.go @@ -86,7 +86,7 @@ func parseHCLBody(body hcl.Body) ( func insert(data any, key string, parent map[string]any) { if existing, ok := parent[key]; ok { if existingSlice, ok := existing.([]any); ok { - existingSlice = append(existingSlice, data) + parent[key] = append(existingSlice, data) } else { // Until we see a second instance of a repeated block or attribute, it will look non-repeated. 
parent[key] = []any{existing, data} From 201fe03af9f3fe50bf6f778e90c99fd72b778669 Mon Sep 17 00:00:00 2001 From: NA2047 <12290725+NA2047@users.noreply.github.com> Date: Thu, 21 Aug 2025 11:26:16 -0700 Subject: [PATCH 047/201] Deprecation of Field allow_fewer_zone_deployment (#14887) --- mmv1/products/memorystore/Instance.yaml | 1 + mmv1/products/redis/Cluster.yaml | 1 + 2 files changed, 2 insertions(+) diff --git a/mmv1/products/memorystore/Instance.yaml b/mmv1/products/memorystore/Instance.yaml index 4689f7d3e36f..73b440df94af 100644 --- a/mmv1/products/memorystore/Instance.yaml +++ b/mmv1/products/memorystore/Instance.yaml @@ -525,6 +525,7 @@ properties: the instance creation, the instance will only be deployed in 2 zones, and stay within the 2 zones for its lifecycle. immutable: true + deprecation_message: 'allow_fewer_zone_deployment flag will no longer be a user settable field, default behaviour will be as if set to true' - name: 'deletionProtectionEnabled' type: Boolean description: "Optional. If set to true deletion of the instance will fail. " diff --git a/mmv1/products/redis/Cluster.yaml b/mmv1/products/redis/Cluster.yaml index 39ea126efb16..7f546ada66d1 100644 --- a/mmv1/products/redis/Cluster.yaml +++ b/mmv1/products/redis/Cluster.yaml @@ -386,6 +386,7 @@ properties: cluster in less than 3 zones. Once set, if there is a zonal outage during the cluster creation, the cluster will only be deployed in 2 zones, and stay within the 2 zones for its lifecycle. + deprecation_message: 'allow_fewer_zone_deployment flag will no longer be a user settable field, default behaviour will be as if set to true' - name: 'pscConfigs' type: Array description: | From 0756cee2b27fb6729e964d488152fc2611db60ff Mon Sep 17 00:00:00 2001 From: haiyanmeng Date: Thu, 21 Aug 2025 15:05:21 -0400 Subject: [PATCH 048/201] Remove `configmanagement.binauthz` field in google_gke_hub_feature_membership (#14531) Co-authored-by: Cameron Thornton --- ...ource_gke_hub_feature_membership_meta.yaml.tmpl | 1 - .../docs/guides/version_7_upgrade.html.markdown | 6 ++++++ .../r/gke_hub_feature_membership.html.markdown | 14 +------------- 3 files changed, 7 insertions(+), 14 deletions(-) diff --git a/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_meta.yaml.tmpl b/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_meta.yaml.tmpl index 778b7227674b..bb160974eefd 100644 --- a/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_meta.yaml.tmpl +++ b/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_meta.yaml.tmpl @@ -8,7 +8,6 @@ api_version: 'v1' {{- end }} api_resource_type_kind: 'Feature' fields: - - field: 'configmanagement.binauthz.enabled' - field: 'configmanagement.config_sync.enabled' - field: 'configmanagement.config_sync.git.gcp_service_account_email' - field: 'configmanagement.config_sync.git.https_proxy' diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 877e95cd6649..b5e6f043714e 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -193,6 +193,12 @@ Terraform [Type Conversion](https://developer.hashicorp.com/terraform/language/e To reflect the new type explicitly, surround the current integer value in quotes, i.e. 
`retention_period = 10` -> `retention_period = "10"`. +## Resource: `google_gke_hub_feature_membership` + +### `configmanagement.binauthz` is now removed + +Remove `configmanagement.binauthz` from your configuration after upgrade. + ## Resource: `google_gke_hub_membership` ### `description` is now removed diff --git a/mmv1/third_party/terraform/website/docs/r/gke_hub_feature_membership.html.markdown b/mmv1/third_party/terraform/website/docs/r/gke_hub_feature_membership.html.markdown index b053b083c696..5bbe489b5019 100644 --- a/mmv1/third_party/terraform/website/docs/r/gke_hub_feature_membership.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/gke_hub_feature_membership.html.markdown @@ -4,7 +4,7 @@ description: |- Contains information about a GKEHub Feature Memberships. --- -# google_gkehub_feature_membership +# google_gke_hub_feature_membership Contains information about a GKEHub Feature Memberships. Feature Memberships configure GKEHub Features that apply to specific memberships rather than the project as a whole. The google_gke_hub is the Fleet API. @@ -426,11 +426,6 @@ The following arguments are supported: (Optional) Version of Config Sync installed. -* `binauthz` - - (Optional, Deprecated) - Binauthz configuration for the cluster. Structure is [documented below](#nested_binauthz). - This field will be ignored and should not be set. - * `hierarchy_controller` - (Optional) Hierarchy Controller configuration for the cluster. Structure is [documented below](#nested_hierarchy_controller). @@ -444,13 +439,6 @@ The following arguments are supported: Policy Controller configuration for the cluster. Structure is [documented below](#nested_policy_controller). Configuring Policy Controller through the configmanagement feature is no longer recommended. Use the policycontroller feature instead. - - -The `binauthz` block supports: - -* `enabled` - - (Optional) - Whether binauthz is enabled in this cluster. 
The `config_sync` block supports: From 00d53eb3a5b11e27279239c7f0352296539302be Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Thu, 21 Aug 2025 13:25:59 -0700 Subject: [PATCH 049/201] Update membership_data.go (#14940) --- .ci/magician/github/membership_data.go | 1 - 1 file changed, 1 deletion(-) diff --git a/.ci/magician/github/membership_data.go b/.ci/magician/github/membership_data.go index 390d48b868f4..488dd5c70d74 100644 --- a/.ci/magician/github/membership_data.go +++ b/.ci/magician/github/membership_data.go @@ -155,7 +155,6 @@ var ( // This is for new team members who are onboarding trustedContributors = map[string]struct{}{ "bbasata": struct{}{}, - "jaylonmcshan03": struct{}{}, "malhotrasagar2212": struct{}{}, } ) From 4efcbb9201946c26b761b2f8ddf0da98ddb10aab Mon Sep 17 00:00:00 2001 From: Scott Suarez Date: Thu, 21 Aug 2025 13:51:08 -0700 Subject: [PATCH 050/201] fixed issue where a failed creation on container_node_pool would result in an unrecoverable tainted state (#14937) --- .../container/resource_container_node_pool.go.tmpl | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl index 2531ce078380..f324f5dc45a8 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl @@ -702,6 +702,7 @@ func resourceContainerNodePoolCreate(d *schema.ResourceData, meta interface{}) e return nil }) if err != nil { + d.SetId("") return fmt.Errorf("error creating NodePool: %s", err) } timeout -= time.Since(startTime) @@ -788,13 +789,19 @@ func resourceContainerNodePoolRead(d *schema.ResourceData, meta interface{}) err clusterNodePoolsGetCall.Header().Add("X-Goog-User-Project", nodePoolInfo.project) } nodePool, err := clusterNodePoolsGetCall.Do() + if err != nil { + return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("NodePool %q from cluster %q", name, nodePoolInfo.cluster)) + } + {{- else }} npCache.refreshIfNeeded(d, config, userAgent, nodePoolInfo, name) nodePool, err := npCache.get(nodePoolInfo.fullyQualifiedName(name)) -{{- end }} if err != nil { - return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("NodePool %q from cluster %q", name, nodePoolInfo.cluster)) + log.Printf("[WARN] Removing %s because it's gone", fmt.Sprintf("NodePool %q from cluster %q", name, nodePoolInfo.cluster)) + d.SetId("") + return nil } +{{- end }} npMap, err := flattenNodePool(d, config, nodePool, "") if err != nil { From e2e1d5150ba90ddcfa9db0a99815399602fceb80 Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Thu, 21 Aug 2025 13:57:34 -0700 Subject: [PATCH 051/201] remove default_from_api from credit_types and subaccounts in google_billing_budget (#14938) --- mmv1/products/billingbudget/Budget.yaml | 6 - .../billing/resource_billing_budget_test.go | 132 +++++++++--------- .../guides/version_7_upgrade.html.markdown | 6 + 3 files changed, 71 insertions(+), 73 deletions(-) diff --git a/mmv1/products/billingbudget/Budget.yaml b/mmv1/products/billingbudget/Budget.yaml index 13b0d5d889fd..8fa30a93ce35 100644 --- a/mmv1/products/billingbudget/Budget.yaml +++ b/mmv1/products/billingbudget/Budget.yaml @@ -213,9 +213,6 @@ properties: Optional. 
If creditTypesTreatment is INCLUDE_SPECIFIED_CREDITS, this is a list of credit types to be subtracted from gross cost to determine the spend for threshold calculations. See a list of acceptable credit type values. If creditTypesTreatment is not INCLUDE_SPECIFIED_CREDITS, this field must be empty. - - **Note:** If the field has a value in the config and needs to be removed, the field has to be an empty array in the config. - default_from_api: true at_least_one_of: - 'budget_filter.0.projects' - 'budget_filter.0.resource_ancestors' @@ -236,9 +233,6 @@ properties: the parent account, usage from the parent account will be included. If the field is omitted, the report will include usage from the parent account and all subaccounts, if they exist. - - **Note:** If the field has a value in the config and needs to be removed, the field has to be an empty array in the config. - default_from_api: true at_least_one_of: - 'budget_filter.0.projects' - 'budget_filter.0.resource_ancestors' diff --git a/mmv1/third_party/terraform/services/billing/resource_billing_budget_test.go b/mmv1/third_party/terraform/services/billing/resource_billing_budget_test.go index ffc59734ae29..9d2f4659f818 100644 --- a/mmv1/third_party/terraform/services/billing/resource_billing_budget_test.go +++ b/mmv1/third_party/terraform/services/billing/resource_billing_budget_test.go @@ -221,7 +221,6 @@ resource "google_billing_budget" "budget" { labels = { label = "bar" } - subaccounts = [] } amount { @@ -412,7 +411,7 @@ resource "google_billing_budget" "budget" { labels = { label1 = "bar2" } - calendar_period = "YEAR" + calendar_period = "YEAR" } amount { @@ -460,19 +459,18 @@ resource "google_billing_budget" "budget" { labels = { label1 = "bar2" } - custom_period { - start_date { - year = 2022 - month = 1 - day = 1 - } - end_date { - year = 2023 - month = 12 - day = 31 - } - } - credit_types = [] + custom_period { + start_date { + year = 2022 + month = 1 + day = 1 + } + end_date { + year = 2023 + month = 12 + day = 31 + } + } } amount { @@ -584,43 +582,43 @@ func testAccBillingBudget_budgetFilterProjectsOrdering1(context map[string]inter return acctest.Nprintf(` data "google_billing_account" "account" { - billing_account = "%{billing_acct}" + billing_account = "%{billing_acct}" } resource "google_project" "project1" { - project_id = "tf-test-%{random_suffix_1}" - name = "tf-test-%{random_suffix_1}" - org_id = "%{org}" - billing_account = "%{project_billing_acct}" - deletion_policy = "DELETE" + project_id = "tf-test-%{random_suffix_1}" + name = "tf-test-%{random_suffix_1}" + org_id = "%{org}" + billing_account = "%{project_billing_acct}" + deletion_policy = "DELETE" } resource "google_project" "project2" { - project_id = "tf-test-%{random_suffix_2}" - name = "tf-test-%{random_suffix_2}" - org_id = "%{org}" - billing_account = "%{project_billing_acct}" - deletion_policy = "DELETE" + project_id = "tf-test-%{random_suffix_2}" + name = "tf-test-%{random_suffix_2}" + org_id = "%{org}" + billing_account = "%{project_billing_acct}" + deletion_policy = "DELETE" } resource "google_billing_budget" "budget" { - billing_account = data.google_billing_account.account.id - display_name = "Example Billing Budget" - - budget_filter { - projects = [ - "projects/${google_project.project1.number}", - "projects/${google_project.project2.number}", - ] - } + billing_account = data.google_billing_account.account.id + display_name = "Example Billing Budget" - amount { - last_period_amount = true - } + budget_filter { + projects = [ + 
"projects/${google_project.project1.number}", + "projects/${google_project.project2.number}", + ] + } - threshold_rules { - threshold_percent = 10.0 - } + amount { + last_period_amount = true + } + + threshold_rules { + threshold_percent = 10.0 + } } `, context) @@ -630,43 +628,43 @@ func testAccBillingBudget_budgetFilterProjectsOrdering2(context map[string]inter return acctest.Nprintf(` data "google_billing_account" "account" { - billing_account = "%{billing_acct}" + billing_account = "%{billing_acct}" } resource "google_project" "project1" { - project_id = "tf-test-%{random_suffix_1}" - name = "tf-test-%{random_suffix_1}" - org_id = "%{org}" - billing_account = "%{project_billing_acct}" - deletion_policy = "DELETE" + project_id = "tf-test-%{random_suffix_1}" + name = "tf-test-%{random_suffix_1}" + org_id = "%{org}" + billing_account = "%{project_billing_acct}" + deletion_policy = "DELETE" } resource "google_project" "project2" { - project_id = "tf-test-%{random_suffix_2}" - name = "tf-test-%{random_suffix_2}" - org_id = "%{org}" - billing_account = "%{project_billing_acct}" - deletion_policy = "DELETE" + project_id = "tf-test-%{random_suffix_2}" + name = "tf-test-%{random_suffix_2}" + org_id = "%{org}" + billing_account = "%{project_billing_acct}" + deletion_policy = "DELETE" } resource "google_billing_budget" "budget" { - billing_account = data.google_billing_account.account.id - display_name = "Example Billing Budget" - - budget_filter { - projects = [ - "projects/${google_project.project2.number}", - "projects/${google_project.project1.number}", - ] - } + billing_account = data.google_billing_account.account.id + display_name = "Example Billing Budget" - amount { - last_period_amount = true - } + budget_filter { + projects = [ + "projects/${google_project.project2.number}", + "projects/${google_project.project1.number}", + ] + } - threshold_rules { - threshold_percent = 10.0 - } + amount { + last_period_amount = true + } + + threshold_rules { + threshold_percent = 10.0 + } } `, context) diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index b5e6f043714e..f69a98efdc99 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -155,6 +155,12 @@ The `view.use_legacy_sql` field no longer has a default value. Configurations th `instance` has been removed in favor of `instance_name`. +## Resource: `google_billing_budget` + +### `budget_filter.credit types` and `budget_filter.subaccounts` are no longer optional+computed, only optional + +`budget_filter.credit types` and `budget_filter.subaccounts` are no longer O+C. These fields already did not export any API-default values, so no change to your configuration should be necessary. 
+ ## Resource: `google_compute_packet_mirroring` ### `subnetworks` and `instances` fields have been converted to sets From eed48c10c21e44544dc2385b27aa675cec5088b2 Mon Sep 17 00:00:00 2001 From: NA2047 <12290725+NA2047@users.noreply.github.com> Date: Thu, 21 Aug 2025 14:05:14 -0700 Subject: [PATCH 052/201] Breaking Change: Remove allow_fewer_zones_deployment from Memorystore and Redis Cluster (#14889) Co-authored-by: Stephen Lewis (Burrows) --- mmv1/products/memorystore/Instance.yaml | 8 -------- mmv1/products/redis/Cluster.yaml | 8 -------- .../terraform/examples/memorystore_instance_full.tf.tmpl | 1 - .../terraform/examples/redis_cluster_aof.tf.tmpl | 1 - .../website/docs/guides/version_7_upgrade.html.markdown | 8 ++++++++ 5 files changed, 8 insertions(+), 18 deletions(-) diff --git a/mmv1/products/memorystore/Instance.yaml b/mmv1/products/memorystore/Instance.yaml index 4689f7d3e36f..ec40528f2814 100644 --- a/mmv1/products/memorystore/Instance.yaml +++ b/mmv1/products/memorystore/Instance.yaml @@ -517,14 +517,6 @@ properties: enum_values: - 'MULTI_ZONE' - 'SINGLE_ZONE' - - name: 'allowFewerZonesDeployment' - type: Boolean - description: | - Allows customers to specify if they are okay with deploying a multi-zone - instance in less than 3 zones. Once set, if there is a zonal outage during - the instance creation, the instance will only be deployed in 2 zones, and - stay within the 2 zones for its lifecycle. - immutable: true - name: 'deletionProtectionEnabled' type: Boolean description: "Optional. If set to true deletion of the instance will fail. " diff --git a/mmv1/products/redis/Cluster.yaml b/mmv1/products/redis/Cluster.yaml index 39ea126efb16..5b26b4639464 100644 --- a/mmv1/products/redis/Cluster.yaml +++ b/mmv1/products/redis/Cluster.yaml @@ -378,14 +378,6 @@ properties: type: String description: | Immutable. The zone for single zone Memorystore Redis cluster. - - name: 'allowFewerZonesDeployment' - type: Boolean - immutable: true - description: | - Allows customers to specify if they are okay with deploying a multi-zone - cluster in less than 3 zones. Once set, if there is a zonal outage during - the cluster creation, the cluster will only be deployed in 2 zones, and - stay within the 2 zones for its lifecycle. 
- name: 'pscConfigs' type: Array description: | diff --git a/mmv1/templates/terraform/examples/memorystore_instance_full.tf.tmpl b/mmv1/templates/terraform/examples/memorystore_instance_full.tf.tmpl index 30d2cafe722b..742450575aaf 100644 --- a/mmv1/templates/terraform/examples/memorystore_instance_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/memorystore_instance_full.tf.tmpl @@ -14,7 +14,6 @@ resource "google_memorystore_instance" "{{$.PrimaryResourceId}}" { engine_configs = { maxmemory-policy = "volatile-ttl" } - allow_fewer_zones_deployment = true zone_distribution_config { mode = "SINGLE_ZONE" zone = "us-central1-b" diff --git a/mmv1/templates/terraform/examples/redis_cluster_aof.tf.tmpl b/mmv1/templates/terraform/examples/redis_cluster_aof.tf.tmpl index 789bf86028d6..e5abf31f38d4 100644 --- a/mmv1/templates/terraform/examples/redis_cluster_aof.tf.tmpl +++ b/mmv1/templates/terraform/examples/redis_cluster_aof.tf.tmpl @@ -13,7 +13,6 @@ resource "google_redis_cluster" "{{$.PrimaryResourceId}}" { maxmemory-policy = "volatile-ttl" } deletion_protection_enabled = {{index $.Vars "deletion_protection_enabled"}} - allow_fewer_zones_deployment = true zone_distribution_config { mode = "MULTI_ZONE" } diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index f69a98efdc99..19b0f73ef806 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -268,3 +268,11 @@ Remove `template.containers.depends_on` from your configuration after upgrade. The default value for `disable_on_destroy` has been changed to `false`. The previous default (`true`) created a risk of unintended service disruptions, as destroying a single `google_project_service` resource would disable the API for the entire project. Now, destroying the resource will only remove it from Terraform's state and leave the service enabled. To disable a service when the resource is destroyed, you must now make an explicit decision by setting `disable_on_destroy = true`. + +## Resource: `google_memorystore_instance` + + `allow_fewer_zones_deployment` has been removed because it isn't user-configurable. + +## Resource: `google_redis_cluster` + + `allow_fewer_zones_deployment` has been removed because it isn't user-configurable. 
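+
+As an illustrative sketch only (the name, region, and other values are placeholders, and some required arguments are omitted for brevity), the upgrade change is limited to deleting the removed argument; the same deletion applies to `google_memorystore_instance`:
+
+```hcl
+resource "google_redis_cluster" "example" {
+  name        = "example-cluster" # placeholder
+  region      = "us-central1"     # placeholder
+  shard_count = 1
+
+  # Delete this argument before upgrading; 7.0.0 no longer accepts it.
+  # allow_fewer_zones_deployment = true
+
+  zone_distribution_config {
+    mode = "MULTI_ZONE"
+  }
+}
+```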
\ No newline at end of file From 63cb1950f3a3a7663be8fe0d9b56447b85d4df80 Mon Sep 17 00:00:00 2001 From: bcreddy-gcp <123543489+bcreddy-gcp@users.noreply.github.com> Date: Thu, 21 Aug 2025 14:19:27 -0700 Subject: [PATCH 053/201] workbench: Make install-monitoring-agent settable but unmodifiable (#14918) --- mmv1/templates/terraform/constants/workbench_instance.go.tmpl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/templates/terraform/constants/workbench_instance.go.tmpl b/mmv1/templates/terraform/constants/workbench_instance.go.tmpl index 09e63d4fc722..817474e89de7 100644 --- a/mmv1/templates/terraform/constants/workbench_instance.go.tmpl +++ b/mmv1/templates/terraform/constants/workbench_instance.go.tmpl @@ -24,6 +24,7 @@ func WorkbenchInstanceLabelsDiffSuppress(k, old, new string, d *schema.ResourceD var WorkbenchInstanceSettableUnmodifiableDefaultMetadata = []string{ + "install-monitoring-agent", "serial-port-logging-enable", } @@ -67,7 +68,6 @@ var WorkbenchInstanceProvidedMetadata = []string{ "generate-diagnostics-options", "google-logging-enabled", "image-url", - "install-monitoring-agent", "install-nvidia-driver", "installed-extensions", "instance-region", From c4a23a80951f1790dee041960a96646eed28ed44 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Thu, 21 Aug 2025 14:29:27 -0700 Subject: [PATCH 054/201] =?UTF-8?q?Revert=20"=20provider:=20eliminated=20t?= =?UTF-8?q?he=20need=20to=20manually=20add=20`*=5Fwo`=20and=20`*=5F?= =?UTF-8?q?=E2=80=A6=20(#14942)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Scott Suarez --- docs/content/reference/field.md | 9 +- mmv1/api/resource.go | 86 +---- mmv1/api/resource_test.go | 330 ------------------ mmv1/api/type.go | 49 +-- mmv1/api/type_test.go | 2 +- mmv1/main.go | 4 +- .../products/bigquerydatatransfer/Config.yaml | 22 +- .../monitoring/UptimeCheckConfig.yaml | 21 +- .../products/secretmanager/SecretVersion.yaml | 16 + .../terraform/flatten_property_method.go.tmpl | 2 +- 10 files changed, 67 insertions(+), 474 deletions(-) diff --git a/docs/content/reference/field.md b/docs/content/reference/field.md index c07098e02f0c..1f24ec8abc06 100644 --- a/docs/content/reference/field.md +++ b/docs/content/reference/field.md @@ -108,11 +108,10 @@ sensitive: true ``` ### `write_only` -Set to true to enable write-only functionality for this field. -If true, the write-only fields will be automatically generated by the code generator (`_wo` and `_wo_version`). -When the write-only variant of a field is used, it means that its value will be obscured in Terraform output as well as not be stored in state. -This field is meant to replace `sensitive` as it doesn't store the value in state. -See [Ephemerality in Resources - Use Write-only arguments](https://developer.hashicorp.com/terraform/language/resources/ephemeral/write-only) for more information. +If true, the field is considered "write-only", which means that its value will +be obscured in Terraform output as well as not be stored in state. This field is meant to replace `sensitive` as it doesn't store the value in state. +See [Ephemerality in Resources - Use Write-only arguments](https://developer.hashicorp.com/terraform/language/resources/ephemeral/write-only) +for more information. Write-only fields are only supported in Terraform v1.11+. 
Because the provider supports earlier Terraform versions, write only fields must be paired with (mutually exclusive) `sensitive` fields covering the same functionality for compatibility with those older versions. This field cannot be used in conjuction with `immutable` or `sensitive`. diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index 95c10c201f4f..afa8b929d722 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -737,94 +737,14 @@ func (r Resource) GetIdentity() []*Type { }) } -func buildWriteOnlyField(name string, versionFieldName string, originalField *Type, originalFieldLineage string) *Type { - description := fmt.Sprintf("%s Note: This property is write-only and will not be read from the API. For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes)", originalField.Description) - fieldPathOriginalField := originalFieldLineage - fieldPathCurrentField := strings.ReplaceAll(originalFieldLineage, google.Underscore(originalField.Name), google.Underscore(name)) - requiredWith := strings.ReplaceAll(originalFieldLineage, google.Underscore(originalField.Name), google.Underscore(versionFieldName)) - - apiName := originalField.ApiName - if apiName == "" { - apiName = originalField.Name - } - - options := []func(*Type){ - propertyWithType("String"), - propertyWithRequired(false), - propertyWithDescription(description), - propertyWithWriteOnly(true), - propertyWithApiName(apiName), - propertyWithIgnoreRead(true), - propertyWithRequiredWith([]string{requiredWith}), - } - - if originalField.Required { - exactlyOneOf := append(originalField.ExactlyOneOf, fieldPathOriginalField, fieldPathCurrentField) - options = append(options, propertyWithExactlyOneOf(exactlyOneOf)) - } else { - conflicts := append(originalField.Conflicts, fieldPathOriginalField) - options = append(options, propertyWithConflicts(conflicts)) - } - - if len(originalField.AtLeastOneOf) > 0 { - atLeastOneOf := append(originalField.AtLeastOneOf, fieldPathCurrentField) - options = append(options, propertyWithAtLeastOneOf(atLeastOneOf)) - } - - return NewProperty(name, originalField.ApiName, options) -} - -func buildWriteOnlyVersionField(name string, originalField *Type, writeOnlyField *Type, originalFieldLineage string) *Type { - description := fmt.Sprintf("Triggers update of %s write-only. For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes)", google.Underscore(writeOnlyField.Name)) - requiredWith := strings.ReplaceAll(originalFieldLineage, google.Underscore(originalField.Name), google.Underscore(writeOnlyField.Name)) - - options := []func(*Type){ - propertyWithType("String"), - propertyWithImmutable(originalField.Immutable), - propertyWithDescription(description), - propertyWithRequiredWith([]string{requiredWith}), - propertyWithClientSide(true), - } - - return NewProperty(name, name, options) -} - -func (r *Resource) addWriteOnlyFields(props []*Type, propWithWoConfigured *Type, propWithWoConfiguredLineagePath string) []*Type { - if len(propWithWoConfigured.RequiredWith) > 0 { - log.Fatalf("WriteOnly property '%s' in resource '%s' cannot have RequiredWith set. 
This combination is not supported.", propWithWoConfigured.Name, r.Name) - } - woFieldName := fmt.Sprintf("%sWo", propWithWoConfigured.Name) - woVersionFieldName := fmt.Sprintf("%sVersion", woFieldName) - writeOnlyField := buildWriteOnlyField(woFieldName, woVersionFieldName, propWithWoConfigured, propWithWoConfiguredLineagePath) - writeOnlyVersionField := buildWriteOnlyVersionField(woVersionFieldName, propWithWoConfigured, writeOnlyField, propWithWoConfiguredLineagePath) - props = append(props, writeOnlyField, writeOnlyVersionField) - return props -} - -func (r *Resource) buildCurrentPropLineage(p *Type, lineage string) string { - underscoreName := google.Underscore(p.Name) - if lineage == "" { - return underscoreName - } - return fmt.Sprintf("%s.0.%s", lineage, underscoreName) -} - -// AddExtraFields processes properties and adds supplementary fields based on property types. -// It handles write-only properties, labels, and annotations. -func (r *Resource) AddExtraFields(props []*Type, parent *Type, lineage string) []*Type { +func (r *Resource) AddLabelsRelatedFields(props []*Type, parent *Type) []*Type { for _, p := range props { - currentPropLineage := r.buildCurrentPropLineage(p, lineage) - if p.WriteOnly && !strings.HasSuffix(p.Name, "Wo") { - props = r.addWriteOnlyFields(props, p, currentPropLineage) - p.WriteOnly = false - p.Required = false - } if p.IsA("KeyValueLabels") { props = r.addLabelsFields(props, parent, p) } else if p.IsA("KeyValueAnnotations") { props = r.addAnnotationsFields(props, parent, p) } else if p.IsA("NestedObject") && len(p.AllProperties()) > 0 { - p.Properties = r.AddExtraFields(p.AllProperties(), p, currentPropLineage) + p.Properties = r.AddLabelsRelatedFields(p.AllProperties(), p) } } return props @@ -843,7 +763,6 @@ func (r *Resource) addLabelsFields(props []*Type, parent *Type, labels *Type) [] terraformLabelsField := buildTerraformLabelsField("labels", parent, labels) effectiveLabelsField := buildEffectiveLabelsField("labels", labels) - props = append(props, terraformLabelsField, effectiveLabelsField) // The effective_labels field is used to write to API, instead of the labels field. 
@@ -880,7 +799,6 @@ func (r *Resource) addAnnotationsFields(props []*Type, parent *Type, annotations } effectiveAnnotationsField := buildEffectiveLabelsField("annotations", annotations) - props = append(props, effectiveAnnotationsField) return props } diff --git a/mmv1/api/resource_test.go b/mmv1/api/resource_test.go index 46a34b8b8036..ad7dd327b288 100644 --- a/mmv1/api/resource_test.go +++ b/mmv1/api/resource_test.go @@ -4,7 +4,6 @@ import ( "os" "path/filepath" "reflect" - "slices" "strings" "testing" @@ -504,332 +503,3 @@ func TestHasPostCreateComputedFields(t *testing.T) { }) } } - -func TestResourceAddExtraFields(t *testing.T) { - t.Parallel() - - createTestResource := func(name string) *Resource { - return &Resource{ - Name: name, - ProductMetadata: &Product{ - Name: "testproduct", - }, - } - } - - createTestType := func(name, typeStr string, options ...func(*Type)) *Type { - t := &Type{ - Name: name, - Type: typeStr, - } - for _, option := range options { - option(t) - } - return t - } - - withWriteOnly := func(writeOnly bool) func(*Type) { - return func(t *Type) { t.WriteOnly = writeOnly } - } - withRequired := func(required bool) func(*Type) { - return func(t *Type) { t.Required = required } - } - withDescription := func(desc string) func(*Type) { - return func(t *Type) { t.Description = desc } - } - withProperties := func(props []*Type) func(*Type) { - return func(t *Type) { t.Properties = props } - } - - t.Run("WriteOnly property adds companion fields", func(t *testing.T) { - t.Parallel() - - resource := createTestResource("testresource") - writeOnlyProp := createTestType("password", "String", - withWriteOnly(true), - withRequired(true), - withDescription("A password field"), - ) - - props := []*Type{writeOnlyProp} - result := resource.AddExtraFields(props, nil, "") - - if len(result) != 3 { - t.Errorf("Expected 3 properties after adding WriteOnly fields, got %d", len(result)) - } - - if writeOnlyProp.WriteOnly { - t.Error("Original WriteOnly property should have WriteOnly set to false after processing") - } - if writeOnlyProp.Required { - t.Error("Original WriteOnly property should have Required set to false after processing") - } - - var foundWoField, foundVersionField bool - for _, prop := range result { - if prop.Name == "passwordWo" { - foundWoField = true - if !prop.WriteOnly { - t.Error("passwordWo field should have WriteOnly=true") - } - } - if prop.Name == "passwordWoVersion" { - foundVersionField = true - if !prop.ClientSide { - t.Error("passwordWoVersion field should have ClientSide=true") - } - } - } - - if !foundWoField { - t.Error("Expected to find passwordWo field") - } - if !foundVersionField { - t.Error("Expected to find passwordWoVersion field") - } - }) - - t.Run("KeyValueLabels property adds terraform and effective labels", func(t *testing.T) { - t.Parallel() - - resource := createTestResource("testresource") - labelsType := &Type{ - Name: "labels", - Type: "KeyValueLabels", - Description: "Resource labels", - } - - props := []*Type{labelsType} - result := resource.AddExtraFields(props, nil, "") - - if len(result) != 3 { - t.Errorf("Expected 3 properties after adding labels fields, got %d", len(result)) - } - - if !labelsType.IgnoreWrite { - t.Error("Original labels field should have IgnoreWrite=true after processing") - } - if !strings.Contains(labelsType.Description, "**Note**") { - t.Error("Original labels field description should contain note after processing") - } - - var foundTerraformLabels, foundEffectiveLabels bool - for _, prop := range result { - 
if prop.Name == "terraformLabels" { - foundTerraformLabels = true - if prop.Type != "KeyValueTerraformLabels" { - t.Errorf("terraformLabels should have type KeyValueTerraformLabels, got %s", prop.Type) - } - } - if prop.Name == "effectiveLabels" { - foundEffectiveLabels = true - if prop.Type != "KeyValueEffectiveLabels" { - t.Errorf("effectiveLabels should have type KeyValueEffectiveLabels, got %s", prop.Type) - } - } - } - - if !foundTerraformLabels { - t.Error("Expected to find terraformLabels field") - } - if !foundEffectiveLabels { - t.Error("Expected to find effectiveLabels field") - } - - expectedDiff := "tpgresource.SetLabelsDiff" - if !slices.Contains(resource.CustomDiff, expectedDiff) { - t.Errorf("Expected CustomDiff to contain %s", expectedDiff) - } - }) - - t.Run("KeyValueLabels with ExcludeAttributionLabel adds different CustomDiff", func(t *testing.T) { - t.Parallel() - - resource := createTestResource("testresource") - resource.ExcludeAttributionLabel = true - - labelsType := &Type{ - Name: "labels", - Type: "KeyValueLabels", - } - - props := []*Type{labelsType} - resource.AddExtraFields(props, nil, "") - - expectedDiff := "tpgresource.SetLabelsDiffWithoutAttributionLabel" - if !slices.Contains(resource.CustomDiff, expectedDiff) { - t.Errorf("Expected CustomDiff to contain %s", expectedDiff) - } - }) - - t.Run("KeyValueLabels with metadata parent adds metadata CustomDiff", func(t *testing.T) { - t.Parallel() - - resource := createTestResource("testresource") - parent := &Type{Name: "metadata"} - - labelsType := &Type{ - Name: "labels", - Type: "KeyValueLabels", - } - - props := []*Type{labelsType} - resource.AddExtraFields(props, parent, "") - - expectedDiff := "tpgresource.SetMetadataLabelsDiff" - if !slices.Contains(resource.CustomDiff, expectedDiff) { - t.Errorf("Expected CustomDiff to contain %s", expectedDiff) - } - }) - - t.Run("KeyValueAnnotations property adds effective annotations", func(t *testing.T) { - t.Parallel() - - resource := createTestResource("testresource") - annotationsType := &Type{ - Name: "annotations", - Type: "KeyValueAnnotations", - Description: "Resource annotations", - } - - props := []*Type{annotationsType} - result := resource.AddExtraFields(props, nil, "") - - if len(result) != 2 { - t.Errorf("Expected 2 properties after adding annotations fields, got %d", len(result)) - } - - if !annotationsType.IgnoreWrite { - t.Error("Original annotations field should have IgnoreWrite=true after processing") - } - - var foundEffectiveAnnotations bool - for _, prop := range result { - if prop.Name == "effectiveAnnotations" { - foundEffectiveAnnotations = true - if prop.Type != "KeyValueEffectiveLabels" { - t.Errorf("effectiveAnnotations should have type KeyValueEffectiveLabels, got %s", prop.Type) - } - } - } - - if !foundEffectiveAnnotations { - t.Error("Expected to find effectiveAnnotations field") - } - - expectedDiff := "tpgresource.SetAnnotationsDiff" - if !slices.Contains(resource.CustomDiff, expectedDiff) { - t.Errorf("Expected CustomDiff to contain %s", expectedDiff) - } - }) - - t.Run("NestedObject with properties processes recursively", func(t *testing.T) { - t.Parallel() - - resource := createTestResource("testresource") - - nestedWriteOnly := createTestType("nestedPassword", "String", withWriteOnly(true)) - nestedObject := createTestType("config", "NestedObject", withProperties([]*Type{nestedWriteOnly})) - - props := []*Type{nestedObject} - result := resource.AddExtraFields(props, nil, "") - - if len(result) != 1 { - t.Errorf("Expected 1 
top-level property, got %d", len(result)) - } - - if len(nestedObject.Properties) != 3 { - t.Errorf("Expected 3 nested properties after recursive processing, got %d", len(nestedObject.Properties)) - } - - if nestedWriteOnly.WriteOnly { - t.Error("Nested WriteOnly property should have WriteOnly=false after processing") - } - }) - - t.Run("Empty NestedObject properties are not processed", func(t *testing.T) { - t.Parallel() - - resource := createTestResource("testresource") - emptyNestedObject := createTestType("config", "NestedObject", withProperties([]*Type{})) - - props := []*Type{emptyNestedObject} - result := resource.AddExtraFields(props, nil, "") - - if len(result) != 1 { - t.Errorf("Expected 1 property, got %d", len(result)) - } - if len(emptyNestedObject.Properties) != 0 { - t.Errorf("Expected 0 nested properties, got %d", len(emptyNestedObject.Properties)) - } - }) - - t.Run("WriteOnly property already ending with Wo is skipped", func(t *testing.T) { - t.Parallel() - - resource := createTestResource("testresource") - woProperty := createTestType("passwordWo", "String", withWriteOnly(true)) - - props := []*Type{woProperty} - result := resource.AddExtraFields(props, nil, "") - - if len(result) != 1 { - t.Errorf("Expected 1 property for Wo-suffixed field, got %d", len(result)) - } - - if !woProperty.WriteOnly { - t.Error("Wo-suffixed property should remain WriteOnly=true") - } - }) - - t.Run("Regular properties are passed through unchanged", func(t *testing.T) { - t.Parallel() - - resource := createTestResource("testresource") - regularProp := createTestType("name", "String", withRequired(true)) - - props := []*Type{regularProp} - result := resource.AddExtraFields(props, nil, "") - - if len(result) != 1 { - t.Errorf("Expected 1 property for regular field, got %d", len(result)) - } - - if result[0] != regularProp { - t.Error("Regular property should be passed through unchanged") - } - if !regularProp.Required { - t.Error("Regular property Required should be unchanged") - } - }) - - t.Run("Multiple property types processed correctly", func(t *testing.T) { - t.Parallel() - - resource := createTestResource("testresource") - - regularProp := createTestType("name", "String") - writeOnlyProp := createTestType("password", "String", withWriteOnly(true)) - labelsType := &Type{Name: "labels", Type: "KeyValueLabels"} - - props := []*Type{regularProp, writeOnlyProp, labelsType} - result := resource.AddExtraFields(props, nil, "") - - // Should have: name + password + passwordWo + passwordWoVersion + labels + terraformLabels + effectiveLabels = 7 - if len(result) != 7 { - t.Errorf("Expected 7 properties total, got %d", len(result)) - } - - names := make(map[string]bool) - for _, prop := range result { - names[prop.Name] = true - } - - expectedNames := []string{"name", "password", "passwordWo", "passwordWoVersion", "labels", "terraformLabels", "effectiveLabels"} - for _, expected := range expectedNames { - if !names[expected] { - t.Errorf("Expected to find property named %s", expected) - } - } - }) -} diff --git a/mmv1/api/type.go b/mmv1/api/type.go index 078775772585..690687db966d 100644 --- a/mmv1/api/type.go +++ b/mmv1/api/type.go @@ -624,6 +624,7 @@ func (t Type) ExactlyOneOfList() []string { if t.ResourceMetadata == nil { return []string{} } + return t.ExactlyOneOf } @@ -1013,54 +1014,6 @@ func propertyWithIgnoreWrite(ignoreWrite bool) func(*Type) { } } -func propertyWithRequired(required bool) func(*Type) { - return func(p *Type) { - p.Required = required - } -} - -func 
propertyWithWriteOnly(writeOnly bool) func(*Type) { - return func(p *Type) { - p.WriteOnly = writeOnly - } -} - -func propertyWithIgnoreRead(ignoreRead bool) func(*Type) { - return func(p *Type) { - p.IgnoreRead = ignoreRead - } -} - -func propertyWithConflicts(conflicts []string) func(*Type) { - return func(p *Type) { - p.Conflicts = conflicts - } -} - -func propertyWithRequiredWith(requiredWith []string) func(*Type) { - return func(p *Type) { - p.RequiredWith = requiredWith - } -} - -func propertyWithExactlyOneOf(exactlyOneOf []string) func(*Type) { - return func(p *Type) { - p.ExactlyOneOf = exactlyOneOf - } -} - -func propertyWithAtLeastOneOf(atLeastOneOf []string) func(*Type) { - return func(p *Type) { - p.AtLeastOneOf = atLeastOneOf - } -} - -func propertyWithApiName(apiName string) func(*Type) { - return func(p *Type) { - p.ApiName = apiName - } -} - func (t *Type) validateLabelsField() { productName := t.ResourceMetadata.ProductMetadata.Name resourceName := t.ResourceMetadata.Name diff --git a/mmv1/api/type_test.go b/mmv1/api/type_test.go index f6a738d248f2..3d46d120a31f 100644 --- a/mmv1/api/type_test.go +++ b/mmv1/api/type_test.go @@ -361,7 +361,7 @@ func TestProviderOnly(t *testing.T) { }, }, } - labeled.Properties = labeled.AddExtraFields(labeled.PropertiesWithExcluded(), nil, "") + labeled.Properties = labeled.AddLabelsRelatedFields(labeled.PropertiesWithExcluded(), nil) labeled.SetDefault(nil) cases := []struct { diff --git a/mmv1/main.go b/mmv1/main.go index 9e7c0fc88612..ef62e7ba5f88 100644 --- a/mmv1/main.go +++ b/mmv1/main.go @@ -235,7 +235,7 @@ func GenerateProduct(version, providerName, productName, outputPath string, prod resource.SourceYamlFile = resourceYamlPath resource.TargetVersionName = version - resource.Properties = resource.AddExtraFields(resource.PropertiesWithExcluded(), nil, "") + resource.Properties = resource.AddLabelsRelatedFields(resource.PropertiesWithExcluded(), nil) resource.SetDefault(productApi) resource.Validate() resources = append(resources, resource) @@ -268,7 +268,7 @@ func GenerateProduct(version, providerName, productName, outputPath string, prod } resource.TargetVersionName = version - resource.Properties = resource.AddExtraFields(resource.PropertiesWithExcluded(), nil, "") + resource.Properties = resource.AddLabelsRelatedFields(resource.PropertiesWithExcluded(), nil) resource.SetDefault(productApi) resource.Validate() resources = append(resources, resource) diff --git a/mmv1/products/bigquerydatatransfer/Config.yaml b/mmv1/products/bigquerydatatransfer/Config.yaml index b9bb9cf4b137..df88222fc7b7 100644 --- a/mmv1/products/bigquerydatatransfer/Config.yaml +++ b/mmv1/products/bigquerydatatransfer/Config.yaml @@ -222,10 +222,30 @@ properties: to a different credential configuration in the config will require an apply to update state. url_param_only: true properties: + - name: 'secretAccessKeyWoVersion' + type: Integer + url_param_only: true + required_with: + - 'sensitive_params.0.secretAccessKeyWo' + description: | + The version of the sensitive params - used to trigger updates of the write-only params. For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes) - name: 'secretAccessKey' type: String description: | The Secret Access Key of the AWS account transferring data from. 
sensitive: true + at_least_one_of: + - 'sensitive_params.0.secretAccessKey' + - 'sensitive_params.0.secretAccessKeyWo' + conflicts: + - 'sensitive_params.0.secretAccessKeyWo' + - name: 'secretAccessKeyWo' # Wo is convention for write-only properties + type: String + description: | + The Secret Access Key of the AWS account transferring data from. write_only: true - required: true + at_least_one_of: + - 'sensitive_params.0.secretAccessKeyWo' + - 'sensitive_params.0.secretAccessKey' + conflicts: + - 'sensitive_params.0.secretAccessKey' diff --git a/mmv1/products/monitoring/UptimeCheckConfig.yaml b/mmv1/products/monitoring/UptimeCheckConfig.yaml index 9affd60788a1..67f7fdd0e791 100644 --- a/mmv1/products/monitoring/UptimeCheckConfig.yaml +++ b/mmv1/products/monitoring/UptimeCheckConfig.yaml @@ -246,10 +246,27 @@ properties: - name: 'password' type: String description: The password to authenticate. - required: true - write_only: true + exactly_one_of: + - 'password' + - 'password_wo' sensitive: true custom_flatten: 'templates/terraform/custom_flatten/uptime_check_http_password.tmpl' + - name: 'passwordWo' + type: String + description: The password to authenticate. + exactly_one_of: + - 'passwordWo' + - 'password' + required_with: + - 'http_check.0.auth_info.0.password_wo_version' + write_only: true + - name: 'passwordWoVersion' + type: String + immutable: true + ignore_read: true + description: The password write-only version. + required_with: + - 'http_check.0.auth_info.0.password_wo' - name: 'username' type: String description: The username to authenticate. diff --git a/mmv1/products/secretmanager/SecretVersion.yaml b/mmv1/products/secretmanager/SecretVersion.yaml index ac840f29e772..d3e0335ee2bd 100644 --- a/mmv1/products/secretmanager/SecretVersion.yaml +++ b/mmv1/products/secretmanager/SecretVersion.yaml @@ -160,6 +160,22 @@ properties: type: String description: The secret data. Must be no larger than 64KiB. api_name: data + conflicts: + - 'secretDataWo' immutable: true sensitive: true + - name: 'secretDataWo' + type: String + description: The secret data. Must be no larger than 64KiB. For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes) + api_name: data + required_with: + - 'SecretDataWoVersion' + conflicts: + - 'payload.0.secretData' write_only: true + - name: 'SecretDataWoVersion' + type: Integer + default_value: 0 + url_param_only: true + description: Triggers update of secret data write-only. 
For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes) + immutable: true diff --git a/mmv1/templates/terraform/flatten_property_method.go.tmpl b/mmv1/templates/terraform/flatten_property_method.go.tmpl index 92387f432fff..cb0fbb7a76a8 100644 --- a/mmv1/templates/terraform/flatten_property_method.go.tmpl +++ b/mmv1/templates/terraform/flatten_property_method.go.tmpl @@ -18,7 +18,7 @@ {{- $.CustomTemplate $.CustomFlatten false -}} {{- else -}} func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - {{- if or (and $.IgnoreRead (not $.ResourceMetadata.IsTgcCompiler)) $.ClientSide }} + {{- if and $.IgnoreRead (not $.ResourceMetadata.IsTgcCompiler) }} return d.Get("{{ $.TerraformLineage }}") {{- else if $.IsA "NestedObject" }} if v == nil { From 4cf8df05be6723917d4c13a29c7808037654c54c Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Thu, 21 Aug 2025 15:32:38 -0700 Subject: [PATCH 055/201] tgc-revival: add google_apigee_instance (#14924) Co-authored-by: Thomas Rodgers --- mmv1/products/apigee/Instance.yaml | 1 + .../ancestrymanager/ancestrymanager.go | 8 ++++- .../tgc_next/test/assert_test_files.go | 34 +++++++++++++------ 3 files changed, 31 insertions(+), 12 deletions(-) diff --git a/mmv1/products/apigee/Instance.yaml b/mmv1/products/apigee/Instance.yaml index b51712e9f528..219d6223d8f9 100644 --- a/mmv1/products/apigee/Instance.yaml +++ b/mmv1/products/apigee/Instance.yaml @@ -47,6 +47,7 @@ custom_code: error_retry_predicates: - 'transport_tpg.IsApigeeRetryableError' exclude_sweeper: true +include_in_tgc_next_DO_NOT_USE: true examples: - name: 'apigee_instance_basic' vars: diff --git a/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestrymanager.go b/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestrymanager.go index 1d70d9619bff..1c408a7ab36f 100644 --- a/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestrymanager.go +++ b/mmv1/third_party/tgc_next/pkg/tfplan2cai/ancestrymanager/ancestrymanager.go @@ -196,7 +196,13 @@ func (m *manager) fetchAncestors(config *transport_tpg.Config, tfData tpgresourc return []string{unknownOrg}, nil } key = projectKey - + case "apigee.googleapis.com/Instance": + // Project is used to find the ancestors. + // org_id in resource `google_apigee_instance` is the apigee org id under a project. + if projectKey == "" { + return []string{unknownOrg}, nil + } + key = projectKey default: switch { case orgOK: diff --git a/mmv1/third_party/tgc_next/test/assert_test_files.go b/mmv1/third_party/tgc_next/test/assert_test_files.go index fd33ed62dc1b..9c278dc867b4 100644 --- a/mmv1/third_party/tgc_next/test/assert_test_files.go +++ b/mmv1/third_party/tgc_next/test/assert_test_files.go @@ -188,8 +188,8 @@ func testSingleResource(t *testing.T, testName string, testData ResourceTestData // Compare roundtrip_config with export_config to ensure they are identical. 
// Convert the export config to roundtrip assets and then convert the roundtrip assets back to roundtrip config - ancestryCache := getAncestryCache(assets) - roundtripAssets, roundtripConfigData, err := getRoundtripConfig(t, testName, tfDir, ancestryCache, logger, ignoredAssetFields) + ancestryCache, defaultProject := getAncestryCache(assets) + roundtripAssets, roundtripConfigData, err := getRoundtripConfig(t, testName, tfDir, ancestryCache, defaultProject, logger, ignoredAssetFields) if err != nil { return fmt.Errorf("error when converting the round-trip config: %#v", err) } @@ -249,9 +249,10 @@ func testSingleResource(t *testing.T, testName string, testData ResourceTestData return nil } -// Gets the ancestry cache for tfplan2cai conversion -func getAncestryCache(assets []caiasset.Asset) map[string]string { +// Gets the ancestry cache for tfplan2cai conversion and the default project +func getAncestryCache(assets []caiasset.Asset) (map[string]string, string) { ancestryCache := make(map[string]string, 0) + defaultProject := "" for _, asset := range assets { ancestors := asset.Ancestors @@ -268,18 +269,29 @@ func getAncestryCache(assets []caiasset.Asset) map[string]string { if _, ok := ancestryCache[ancestors[0]]; !ok { ancestryCache[ancestors[0]] = path + if defaultProject == "" { + if s, hasPrefix := strings.CutPrefix(ancestors[0], "projects/"); hasPrefix { + defaultProject = s + } + } } project := utils.ParseFieldValue(asset.Name, "projects") - projectKey := fmt.Sprintf("projects/%s", project) - if strings.HasPrefix(ancestors[0], "projects") && ancestors[0] != projectKey { - if _, ok := ancestryCache[projectKey]; !ok { - ancestryCache[projectKey] = path + if project != "" { + projectKey := fmt.Sprintf("projects/%s", project) + if strings.HasPrefix(ancestors[0], "projects") && ancestors[0] != projectKey { + if _, ok := ancestryCache[projectKey]; !ok { + ancestryCache[projectKey] = path + } + } + + if defaultProject == "" { + defaultProject = project } } } } - return ancestryCache + return ancestryCache, defaultProject } // Compares HCL and finds all of the keys in map1 that are not in map2 @@ -331,7 +343,7 @@ func isIgnored(key string, ignoredFields map[string]struct{}) bool { } // Converts a tfplan to CAI asset, and then converts the CAI asset into HCL -func getRoundtripConfig(t *testing.T, testName string, tfDir string, ancestryCache map[string]string, logger *zap.Logger, ignoredAssetFields []string) ([]caiasset.Asset, []byte, error) { +func getRoundtripConfig(t *testing.T, testName string, tfDir string, ancestryCache map[string]string, defaultProject string, logger *zap.Logger, ignoredAssetFields []string) ([]caiasset.Asset, []byte, error) { fileName := fmt.Sprintf("%s_export", testName) // Run terraform init and terraform apply to generate tfplan.json files @@ -348,7 +360,7 @@ func getRoundtripConfig(t *testing.T, testName string, tfDir string, ancestryCac roundtripAssets, err := tfplan2cai.Convert(ctx, jsonPlan, &tfplan2cai.Options{ ErrorLogger: logger, Offline: true, - DefaultProject: "ci-test-project-nightly-beta", + DefaultProject: defaultProject, DefaultRegion: "", DefaultZone: "", UserAgent: "", From 7b15bdcb5ac006445419f2ec4a7fcd4b7291fdd6 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Thu, 21 Aug 2025 15:54:55 -0700 Subject: [PATCH 056/201] Standardized required_with behavior for write-only fields (#14941) --- .../products/bigquerydatatransfer/Config.yaml | 28 ++++++++++--------- .../monitoring/UptimeCheckConfig.yaml | 8 +++--- 
.../products/secretmanager/SecretVersion.yaml | 10 ++++--- .../services/sql/resource_sql_user.go | 1 + .../services/sql/resource_sql_user_test.go | 3 +- .../guides/version_7_upgrade.html.markdown | 18 ++++++++++++ 6 files changed, 46 insertions(+), 22 deletions(-) diff --git a/mmv1/products/bigquerydatatransfer/Config.yaml b/mmv1/products/bigquerydatatransfer/Config.yaml index df88222fc7b7..138d8b327e66 100644 --- a/mmv1/products/bigquerydatatransfer/Config.yaml +++ b/mmv1/products/bigquerydatatransfer/Config.yaml @@ -222,30 +222,32 @@ properties: to a different credential configuration in the config will require an apply to update state. url_param_only: true properties: - - name: 'secretAccessKeyWoVersion' - type: Integer - url_param_only: true - required_with: - - 'sensitive_params.0.secretAccessKeyWo' - description: | - The version of the sensitive params - used to trigger updates of the write-only params. For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes) - name: 'secretAccessKey' type: String description: | The Secret Access Key of the AWS account transferring data from. sensitive: true at_least_one_of: - - 'sensitive_params.0.secretAccessKey' - - 'sensitive_params.0.secretAccessKeyWo' + - 'sensitive_params.0.secret_access_key' + - 'sensitive_params.0.secret_access_key_wo' conflicts: - - 'sensitive_params.0.secretAccessKeyWo' + - 'sensitive_params.0.secret_access_key_wo' - name: 'secretAccessKeyWo' # Wo is convention for write-only properties type: String description: | The Secret Access Key of the AWS account transferring data from. write_only: true at_least_one_of: - - 'sensitive_params.0.secretAccessKeyWo' - - 'sensitive_params.0.secretAccessKey' + - 'sensitive_params.0.secret_access_key_wo' + - 'sensitive_params.0.secret_access_key' conflicts: - - 'sensitive_params.0.secretAccessKey' + - 'sensitive_params.0.secret_access_key' + required_with: + - 'sensitive_params.0.secret_access_key_wo_version' + - name: 'secretAccessKeyWoVersion' + type: Integer + url_param_only: true + required_with: + - 'sensitive_params.0.secret_access_key_wo' + description: | + The version of the sensitive params - used to trigger updates of the write-only params. For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes) diff --git a/mmv1/products/monitoring/UptimeCheckConfig.yaml b/mmv1/products/monitoring/UptimeCheckConfig.yaml index 67f7fdd0e791..7f6bf290d24e 100644 --- a/mmv1/products/monitoring/UptimeCheckConfig.yaml +++ b/mmv1/products/monitoring/UptimeCheckConfig.yaml @@ -247,16 +247,16 @@ properties: type: String description: The password to authenticate. exactly_one_of: - - 'password' - - 'password_wo' + - 'http_check.0.auth_info.0.password_wo' + - 'http_check.0.auth_info.0.password' sensitive: true custom_flatten: 'templates/terraform/custom_flatten/uptime_check_http_password.tmpl' - name: 'passwordWo' type: String description: The password to authenticate. 
exactly_one_of: - - 'passwordWo' - - 'password' + - 'http_check.0.auth_info.0.password_wo' + - 'http_check.0.auth_info.0.password' required_with: - 'http_check.0.auth_info.0.password_wo_version' write_only: true diff --git a/mmv1/products/secretmanager/SecretVersion.yaml b/mmv1/products/secretmanager/SecretVersion.yaml index d3e0335ee2bd..ab26d83a4bda 100644 --- a/mmv1/products/secretmanager/SecretVersion.yaml +++ b/mmv1/products/secretmanager/SecretVersion.yaml @@ -161,7 +161,7 @@ properties: description: The secret data. Must be no larger than 64KiB. api_name: data conflicts: - - 'secretDataWo' + - 'payload.0.secret_data_wo' immutable: true sensitive: true - name: 'secretDataWo' @@ -169,13 +169,15 @@ properties: description: The secret data. Must be no larger than 64KiB. For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes) api_name: data required_with: - - 'SecretDataWoVersion' + - 'payload.0.secret_data_wo_version' conflicts: - - 'payload.0.secretData' + - 'payload.0.secret_data' write_only: true - - name: 'SecretDataWoVersion' + - name: 'secretDataWoVersion' type: Integer default_value: 0 url_param_only: true description: Triggers update of secret data write-only. For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes) immutable: true + required_with: + - 'payload.0.secret_data_wo' diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_user.go b/mmv1/third_party/terraform/services/sql/resource_sql_user.go index 5fec5c13ceb2..7273955d0a40 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_user.go +++ b/mmv1/third_party/terraform/services/sql/resource_sql_user.go @@ -103,6 +103,7 @@ func ResourceSqlUser() *schema.Resource { Optional: true, WriteOnly: true, ConflictsWith: []string{"password"}, + RequiredWith: []string{"password_wo_version"}, Description: `The password for the user. Can be updated. For Postgres instances this is a Required field, unless type is set to either CLOUD_IAM_USER or CLOUD_IAM_SERVICE_ACCOUNT.`, }, diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_user_test.go b/mmv1/third_party/terraform/services/sql/resource_sql_user_test.go index b392d8ffd6b7..c4e5cc4404b7 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_user_test.go +++ b/mmv1/third_party/terraform/services/sql/resource_sql_user_test.go @@ -439,6 +439,7 @@ resource "google_sql_user" "user1" { instance = google_sql_database_instance.instance.name host = "gmail.com" password_wo = "%s" + password_wo_version = 1 } `, instance, password) } @@ -460,7 +461,7 @@ resource "google_sql_user" "user1" { instance = google_sql_database_instance.instance.name host = "gmail.com" password_wo = "%s" - password_wo_version = 1 + password_wo_version = 2 } `, instance, password) } diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 19b0f73ef806..fdd64c56c4ca 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -217,6 +217,12 @@ Remove `description` from your configuration after upgrade. Remove `post_startup_script_config` from your configuration after upgrade. 
+## Resource: `google_monitoring_uptime_check_config` + +### Exactly one of `http_check.auth_info.password` and `http_check.auth_info.password_wo` must be set + +At least one must be set, and setting both would make it unclear which was being used. + ## Resource: `google_network_services_lb_traffic_extension` ### `load_balancing_scheme` is now required @@ -249,6 +255,18 @@ Remove `service_config.service` from your configuration after upgrade. Remove `template.containers.depends_on` from your configuration after upgrade. +## Resource: `google_secret_manager_secret_version` + +### `secret_data_wo` and `secret_data_wo_version` must be set together + +This standardizes the behavior of write-only fields across the provider and makes it easier to remember to update the fields together. + +## Resource: `google_sql_user` + +### `password_wo_version` is now required when `password_wo` is set + +This standardizes the behavior of write-only fields across the provider and makes it easier to remember to update the fields together. + ## Resource: `google_vertex_ai_endpoint` ### `enable_secure_private_service_connect` is removed as it is not available in the GA version of the API, only in the beta version. From c42046fafa04e10c408ec3f84742f1c4fef6d06b Mon Sep 17 00:00:00 2001 From: Scott Suarez Date: Thu, 21 Aug 2025 15:57:43 -0700 Subject: [PATCH 057/201] fix other scenario for node_pool not exist (#14943) --- .../container/resource_container_node_pool.go.tmpl | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl index f324f5dc45a8..1356a211a42e 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl @@ -968,11 +968,6 @@ func resourceContainerNodePoolExists(d *schema.ResourceData, meta interface{}) ( clusterNodePoolsGetCall.Header().Add("X-Goog-User-Project", nodePoolInfo.project) } _, err = clusterNodePoolsGetCall.Do() -{{- else }} - npCache.refreshIfNeeded(d, config, userAgent, nodePoolInfo, name) - _, err = npCache.get(nodePoolInfo.fullyQualifiedName(name)) -{{- end }} - if err != nil { if err = transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("Container NodePool %s", name)); err == nil { return false, nil @@ -980,6 +975,15 @@ func resourceContainerNodePoolExists(d *schema.ResourceData, meta interface{}) ( // There was some other error in reading the resource return true, err } +{{- else }} + npCache.refreshIfNeeded(d, config, userAgent, nodePoolInfo, name) + _, err = npCache.get(nodePoolInfo.fullyQualifiedName(name)) + if err != nil { + log.Printf("[WARN] Removing %s because it's gone", fmt.Sprintf("NodePool %q from cluster %q", name, nodePoolInfo.cluster)) + d.SetId("") + return false, nil + } +{{- end }} return true, nil } From 9ab437d1fd15c51d062e395d7c65ee9c0f14c4a4 Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Thu, 21 Aug 2025 16:04:20 -0700 Subject: [PATCH 058/201] Improve test data ingestion (#14944) --- .../cmd/collect_nightly_test_status.go | 47 ++++++++++++------- .../cmd/create_test_failure_ticket.go | 6 +-- 2 files changed, 34 insertions(+), 19 deletions(-) diff --git a/.ci/magician/cmd/collect_nightly_test_status.go b/.ci/magician/cmd/collect_nightly_test_status.go index 31de8b6373f1..ad20ddf56213 100644 --- 
a/.ci/magician/cmd/collect_nightly_test_status.go +++ b/.ci/magician/cmd/collect_nightly_test_status.go @@ -30,7 +30,8 @@ import ( ) const ( - NightlyDataBucket = "nightly-test-data" + nightlyDataBucket = "nightly-test-data" + tcTimeFormat = "20060102T150405Z0700" ) var cntsRequiredEnvironmentVariables = [...]string{ @@ -38,16 +39,16 @@ var cntsRequiredEnvironmentVariables = [...]string{ } type TestInfo struct { - Name string `json:"name"` - Status string `json:"status"` - Service string `json:"service"` - ErrorMessage string `json:"error_message"` - LogLink string `json"log_link` - ProviderVersion string `json:"provider_version"` - QueuedDate string `json:"queuedDate"` - StartDate string `json:"startDate"` - FinishDate string `json:"finishDate"` - Duration int `json:"duration"` + Name string `json:"name"` + Status string `json:"status"` + Service string `json:"service"` + ErrorMessage string `json:"error_message"` + LogLink string `json:"log_link"` + ProviderVersion string `json:"provider_version"` + QueuedDate time.Time `json:"queued_date"` + StartDate time.Time `json:"start_date"` + FinishDate time.Time `json:"finish_date"` + Duration int `json:"duration"` } // collectNightlyTestStatusCmd represents the collectNightlyTestStatus command @@ -168,17 +169,31 @@ func createTestReport(pVersion provider.Version, tc TeamcityClient, gcs Cloudsto if testResult.Status == "FAILURE" || testResult.Status == "UNKNOWN" { errorMessage = convertErrorMessage(testResult.ErrorMessage) } + + queuedTime, err := time.Parse(tcTimeFormat, build.QueuedDate) + if err != nil { + return fmt.Errorf("failed to parse QueuedDate: %v", err) + } + startTime, err := time.Parse(tcTimeFormat, build.StartDate) + if err != nil { + return fmt.Errorf("failed to parse StartDate: %v", err) + } + finishTime, err := time.Parse(tcTimeFormat, build.FinishDate) + if err != nil { + return fmt.Errorf("failed to parse FinishDate: %v", err) + } + testInfoList = append(testInfoList, TestInfo{ Name: testResult.Name, Status: testResult.Status, Service: serviceName, ErrorMessage: errorMessage, LogLink: logLink, - ProviderVersion: pVersion.String(), + ProviderVersion: strings.ToUpper(pVersion.String()), Duration: testResult.Duration, - QueuedDate: build.QueuedDate, - StartDate: build.StartDate, - FinishDate: build.FinishDate, + QueuedDate: queuedTime, + StartDate: startTime, + FinishDate: finishTime, }) } } @@ -193,7 +208,7 @@ func createTestReport(pVersion provider.Version, tc TeamcityClient, gcs Cloudsto // Upload test status data file to gcs bucket objectName := fmt.Sprintf("test-metadata/%s/%s", pVersion.String(), testStatusFileName) - err = gcs.WriteToGCSBucket(NightlyDataBucket, objectName, testStatusFileName) + err = gcs.WriteToGCSBucket(nightlyDataBucket, objectName, testStatusFileName) if err != nil { return err } diff --git a/.ci/magician/cmd/create_test_failure_ticket.go b/.ci/magician/cmd/create_test_failure_ticket.go index 41b4716794cb..ab60d1aa7ed0 100644 --- a/.ci/magician/cmd/create_test_failure_ticket.go +++ b/.ci/magician/cmd/create_test_failure_ticket.go @@ -268,7 +268,7 @@ func getTestInfoList(pVersion provider.Version, date time.Time, gcs Cloudstorage objectName := fmt.Sprintf("test-metadata/%s/%s", pVersion.String(), testStatusFileName) var testInfoList []TestInfo - err := gcs.DownloadFile(NightlyDataBucket, objectName, testStatusFileName) + err := gcs.DownloadFile(nightlyDataBucket, objectName, testStatusFileName) if err != nil { return testInfoList, err } @@ -506,13 +506,13 @@ func storeErrorMessage(pVersion 
provider.Version, gcs CloudstorageClient, errorM // upload file to GCS objectName := fmt.Sprintf("test-errors/%s/%s/%s", pVersion.String(), date, fileName) - err = gcs.WriteToGCSBucket(NightlyDataBucket, objectName, fileName) + err = gcs.WriteToGCSBucket(nightlyDataBucket, objectName, fileName) if err != nil { return "", fmt.Errorf("failed to upload error message file %s to GCS bucket: %w", objectName, err) } // compute object view path - link := fmt.Sprintf("https://storage.cloud.google.com/%s/%s", NightlyDataBucket, objectName) + link := fmt.Sprintf("https://storage.cloud.google.com/%s/%s", nightlyDataBucket, objectName) return link, nil } From 87b4ef0a14e7b29a18b81105f3ccd7e2d894a278 Mon Sep 17 00:00:00 2001 From: Lakshman Swaminathan Date: Fri, 22 Aug 2025 07:04:00 -0700 Subject: [PATCH 059/201] allows difftest tests to be skipped if they fail, not pass (#14915) --- .../terraform/scripts/teamcitytestscripts/teamcity.go | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/mmv1/third_party/terraform/scripts/teamcitytestscripts/teamcity.go b/mmv1/third_party/terraform/scripts/teamcitytestscripts/teamcity.go index 4e169e4ceffd..93675c2e5384 100644 --- a/mmv1/third_party/terraform/scripts/teamcitytestscripts/teamcity.go +++ b/mmv1/third_party/terraform/scripts/teamcitytestscripts/teamcity.go @@ -22,7 +22,8 @@ const ( ) var ( - end = regexp.MustCompile(`--- (PASS|SKIP|FAIL):\s+([a-zA-Z_]\S*) \(([\.\d]+)\)`) + // Looks for the final status line, accommodating both simple and full summaries. + end = regexp.MustCompile(`\n(PASS|SKIP|FAIL)(?:[\t\s]+(.*)\s+([0-9\.]+[a-z]+))?\s*$`) diff = regexp.MustCompile(`\[Diff\] (.*)`) paniced = regexp.MustCompile(`panic:\s+(.*)\s+\[recovered\]\n`) //suite = regexp.MustCompile("^(ok|FAIL)\\s+([^\\s]+)\\s+([\\.\\d]+)s") @@ -86,7 +87,8 @@ func (test *TeamCityTest) FormatTestOutput() string { } if test.Fail { - output.WriteString(fmt.Sprintf(TeamCityTestFailed, now, test.Name)) + // skip failures for diff tests + output.WriteString(fmt.Sprintf(TeamCityTestIgnored, now, test.Name)) output.WriteString(fmt.Sprintf(TeamCityTestFinished, now, test.Name)) return output.String() } @@ -107,8 +109,8 @@ func (test *TeamCityTest) FormatTestOutput() string { return output.String() } - // test passes if no diff, even if failure (failure artifacts will be in regular_failure_file.log) - output.WriteString(fmt.Sprintf(TeamCityTestFinished, now, test.Name)) + // instead of failing when something unexpected happens, we skip the test now + output.WriteString(fmt.Sprintf(TeamCityTestIgnored, now, test.Name)) return output.String() } From 1edf717f8b3f283b0f7f7e7c3277966599982d3a Mon Sep 17 00:00:00 2001 From: Cameron Thornton Date: Fri, 22 Aug 2025 09:34:32 -0500 Subject: [PATCH 060/201] last merge conflict --- mmv1/products/securesourcemanager/Instance.yaml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/mmv1/products/securesourcemanager/Instance.yaml b/mmv1/products/securesourcemanager/Instance.yaml index abb3c22885d8..b8078c6d244d 100644 --- a/mmv1/products/securesourcemanager/Instance.yaml +++ b/mmv1/products/securesourcemanager/Instance.yaml @@ -81,9 +81,7 @@ examples: 'deletion_policy': '"DELETE"' ignore_read_extra: - 'update_time' -<<<<<<< HEAD - 'deletion_policy' -======= - name: 'secure_source_manager_instance_private_trusted_cert' primary_resource_id: 'default' primary_resource_name: 'fmt.Sprintf("tf-test-my-instance%s", context["random_suffix"])' @@ -91,8 +89,8 @@ examples: instance_id: 'my-instance' ignore_read_extra: - 
'update_time' + - 'deletion_policy' exclude_docs: true ->>>>>>> 87b4ef0a14e7b29a18b81105f3ccd7e2d894a278 - name: 'secure_source_manager_instance_private' primary_resource_id: 'default' primary_resource_name: 'fmt.Sprintf("tf-test-my-instance%s", context["random_suffix"])' From fd62c38302087df4545b6a3496df8aef38a9e02a Mon Sep 17 00:00:00 2001 From: Cameron Thornton Date: Mon, 25 Aug 2025 15:45:56 -0500 Subject: [PATCH 061/201] Plugin Framework feature branch merge (#14977) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: drfaust92 Signed-off-by: Cezary Sobczak Signed-off-by: James Alseth Signed-off-by: Misha Efimov Co-authored-by: Nick Elliot Co-authored-by: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Co-authored-by: paridhishah18 <166548459+paridhishah18@users.noreply.github.com> Co-authored-by: William Yardley Co-authored-by: Stephen Lewis (Burrows) Co-authored-by: shantstepanian <17996546+shantstepanian@users.noreply.github.com> Co-authored-by: Zhenhua Li Co-authored-by: Yanwei Guo Co-authored-by: Scott Suarez Co-authored-by: skysarthak Co-authored-by: Sarthak Tandon Co-authored-by: Ramon Vermeulen Co-authored-by: bcreddy-gcp <123543489+bcreddy-gcp@users.noreply.github.com> Co-authored-by: Stephen Lewis (Burrows) Co-authored-by: govardhanitallam Co-authored-by: xuebaoZ Co-authored-by: Michael Turgeman Co-authored-by: gurusai-voleti Co-authored-by: jkrish-c <31221535+jkrish-c@users.noreply.github.com> Co-authored-by: Wiktor Niesiobędzki Co-authored-by: dorianverna Co-authored-by: Andras Kerekes Co-authored-by: Raj Anand <88097156+raazanand@users.noreply.github.com> Co-authored-by: ML Co-authored-by: Marek Lipert Co-authored-by: James Cherry Co-authored-by: Hengfeng Li Co-authored-by: Jeremie Stordeur Co-authored-by: abhilashsamgoogle Co-authored-by: ArtoriaRen Co-authored-by: Nandini Agrawal Co-authored-by: Aiden Grossman Co-authored-by: Rohan Chawla <73727454+rohanchawla23@users.noreply.github.com> Co-authored-by: ma-g-22 <123424520+ma-g-22@users.noreply.github.com> Co-authored-by: Rajesh Guptha Co-authored-by: Parker DeWilde Co-authored-by: Thomas Rodgers Co-authored-by: sachin purohit Co-authored-by: karolgorc Co-authored-by: Rachel Thornton Co-authored-by: Alex Morozov Co-authored-by: Alex Morozov Co-authored-by: Lingkai Shen Co-authored-by: Ilia Lazebnik Co-authored-by: Betto Cerrillos <32439055+Berro321@users.noreply.github.com> Co-authored-by: Riley Karson Co-authored-by: Ron Gal <125445217+ron-gal@users.noreply.github.com> Co-authored-by: Akshat Jindal <67505646+akshat-jindal-nit@users.noreply.github.com> Co-authored-by: Swamita Gupta <55314843+swamitagupta@users.noreply.github.com> Co-authored-by: Bob "Wombat" Hogg Co-authored-by: hao-nan-li <100219545+hao-nan-li@users.noreply.github.com> Co-authored-by: NA2047 <12290725+NA2047@users.noreply.github.com> Co-authored-by: sahil-mahajan-google Co-authored-by: Arpit Gupta Co-authored-by: ahmed-laiq Co-authored-by: stevenyang72 Co-authored-by: Sam Levenick Co-authored-by: Haoting.C <34197666+plus-1s@users.noreply.github.com> Co-authored-by: Daniel Dubnikov Co-authored-by: Pawel Jasinski Co-authored-by: Sachin_R Co-authored-by: Cezary Sobczak <57288981+Cezarus27@users.noreply.github.com> Co-authored-by: wj-chen Co-authored-by: Kian Jones <11655409+kianjones9@users.noreply.github.com> Co-authored-by: uaditya70 Co-authored-by: pujawadare Co-authored-by: Sachin_R Co-authored-by: Jaylon McShan Co-authored-by: tulika-aakriti Co-authored-by: anthonyrtong Co-authored-by: Chris 
Hawk Co-authored-by: Shrishty Chandra <3104562+shrishty@users.noreply.github.com> Co-authored-by: Shrishty Chandra Co-authored-by: Sharan Teja M Co-authored-by: James Alseth Co-authored-by: stevenyang72 Co-authored-by: oferhandel-google Co-authored-by: Jatin Miglani Co-authored-by: translucens Co-authored-by: Sing Co-authored-by: Ronson Xaviour <50081163+ronsonx@users.noreply.github.com> Co-authored-by: Ronson Xaviour Co-authored-by: Iris Chen <10179943+iyabchen@users.noreply.github.com> Co-authored-by: Or Sela Co-authored-by: Samir Ribeiro <42391123+Samir-Cit@users.noreply.github.com> Co-authored-by: Mauricio Alvarez Leon <65101411+BBBmau@users.noreply.github.com> Co-authored-by: kigesui Co-authored-by: Meng Yang Co-authored-by: Ashwin G Co-authored-by: Allison Fisher Co-authored-by: mihhalj Co-authored-by: Guy Bidkar <5646214+gbidkar@users.noreply.github.com> Co-authored-by: Dawid212 Co-authored-by: Michael Lopez Co-authored-by: kautikdk <144651627+kautikdk@users.noreply.github.com> Co-authored-by: harshithpatte-g Co-authored-by: ML Co-authored-by: James Alseth Co-authored-by: Madhura Phadnis Co-authored-by: YashTayal04 <47032845+YashTayal04@users.noreply.github.com> Co-authored-by: Misha Efimov Co-authored-by: Aiden Grossman Co-authored-by: MatthewVu-dev Co-authored-by: Madhu Suraj Co-authored-by: Matheus Guilherme Souza Aleixo <82680416+matheusaleixo-cit@users.noreply.github.com> Co-authored-by: Jun Luo Co-authored-by: Tommy Reddad --- .ci/magician/cmd/test_terraform_vcr.go | 1 - mmv1/api/resource.go | 3 + mmv1/api/type.go | 48 ++ mmv1/products/datafusion/Instance.yaml | 1 + mmv1/provider/template_data.go | 9 + mmv1/provider/terraform.go | 9 +- .../fw_datafusion_instance_update.go.tmpl | 35 + mmv1/templates/terraform/resource_fw.go.tmpl | 764 ++++++++++++++++++ .../terraform/schema_property_fw.go.tmpl | 52 ++ .../terraform/update_mask_fw.go.tmpl | 27 + .../terraform/acctest/vcr_utils.go | 12 + .../fwprovider/framework_provider.go.tmpl | 8 +- .../terraform/fwresource/field_helpers.go | 23 +- .../terraform/fwresource/framework_import.go | 192 +++++ .../fwresource/framework_import_test.go | 183 +++++ .../terraform/fwtransport/framework_utils.go | 321 +++++++- .../fwvalidators/framework_validators.go | 80 ++ .../fwvalidators/framework_validators_test.go | 138 ++++ mmv1/third_party/terraform/go.mod | 1 + mmv1/third_party/terraform/go.sum | 8 + .../provider/provider_mmv1_resources.go.tmpl | 2 - ..._apigee_keystores_aliases_key_cert_file.go | 533 ++++++++++++ ..._apigee_keystores_aliases_key_cert_file.go | 692 ---------------- ...data_source_google_compute_network.go.tmpl | 187 +++++ ...data_source_google_compute_network_test.go | 86 ++ .../terraform/services/compute/image.go | 1 - ...ce_dataflow_flex_template_job_test.go.tmpl | 2 +- .../resource_dns_managed_zone_test.go.tmpl | 2 +- ...google_firebase_android_app_config.go.tmpl | 2 +- ...e_google_firebase_apple_app_config.go.tmpl | 2 +- ...rce_google_firebase_web_app_config.go.tmpl | 2 +- .../fw_resource_pubsub_lite_reservation.go | 383 +++++++++ ...w_resource_pubsub_lite_reservation_test.go | 56 ++ .../services/sql/fw_resource_sql_user.go | 507 ++++++++++++ .../services/sql/fw_resource_sql_user_test.go | 90 +++ .../fw_resource_storage_notification.go | 325 ++++++++ ...fw_storage_notification_state_upgraders.go | 100 +++ .../storage/resource_storage_notification.go | 196 ----- .../resource_storage_notification_test.go | 16 +- ...stores_aliases_key_cert_file.html.markdown | 2 +- 40 files changed, 4190 insertions(+), 911 deletions(-) create 
mode 100644 mmv1/templates/terraform/pre_update/fw_datafusion_instance_update.go.tmpl create mode 100644 mmv1/templates/terraform/resource_fw.go.tmpl create mode 100644 mmv1/templates/terraform/schema_property_fw.go.tmpl create mode 100644 mmv1/templates/terraform/update_mask_fw.go.tmpl create mode 100644 mmv1/third_party/terraform/fwresource/framework_import.go create mode 100644 mmv1/third_party/terraform/fwresource/framework_import_test.go create mode 100644 mmv1/third_party/terraform/services/apigee/fw_resource_apigee_keystores_aliases_key_cert_file.go delete mode 100644 mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_key_cert_file.go create mode 100644 mmv1/third_party/terraform/services/compute/fw_data_source_google_compute_network.go.tmpl create mode 100644 mmv1/third_party/terraform/services/compute/fw_data_source_google_compute_network_test.go create mode 100644 mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation.go create mode 100644 mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation_test.go create mode 100644 mmv1/third_party/terraform/services/sql/fw_resource_sql_user.go create mode 100644 mmv1/third_party/terraform/services/sql/fw_resource_sql_user_test.go create mode 100644 mmv1/third_party/terraform/services/storage/fw_resource_storage_notification.go create mode 100644 mmv1/third_party/terraform/services/storage/fw_storage_notification_state_upgraders.go delete mode 100644 mmv1/third_party/terraform/services/storage/resource_storage_notification.go diff --git a/.ci/magician/cmd/test_terraform_vcr.go b/.ci/magician/cmd/test_terraform_vcr.go index 74f7673744d7..1cca9513652e 100644 --- a/.ci/magician/cmd/test_terraform_vcr.go +++ b/.ci/magician/cmd/test_terraform_vcr.go @@ -237,7 +237,6 @@ func execTestTerraformVCR(prNumber, mmCommitSha, buildID, projectID, buildStep, } notRunBeta, notRunGa := notRunTests(tpgRepo.UnifiedZeroDiff, tpgbRepo.UnifiedZeroDiff, replayingResult) - postReplayData := postReplay{ RunFullVCR: runFullVCR, AffectedServices: sort.StringSlice(servicesArr), diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index 194ae8ede6ad..386487ac696f 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -308,6 +308,9 @@ type Resource struct { // control if a resource is continuously generated from public OpenAPI docs AutogenStatus string `yaml:"autogen_status"` + // If true, this resource generates with the new plugin framework resource template + FrameworkResource bool `yaml:"plugin_framework,omitempty"` + // The three groups of []*Type fields are expected to be strictly ordered within a yaml file // in the sequence of Virtual Fields -> Parameters -> Properties diff --git a/mmv1/api/type.go b/mmv1/api/type.go index 2ab006fcda8f..ca664c774a8c 100644 --- a/mmv1/api/type.go +++ b/mmv1/api/type.go @@ -551,6 +551,15 @@ func (t Type) ResourceType() string { return path[len(path)-1] } +func (t Type) FWResourceType() string { + r := t.ResourceRef() + if r == nil { + return "" + } + path := strings.Split(r.BaseUrl, "/") + return path[len(path)-1] +} + // TODO rewrite: validation // func (t *Type) check_default_value_property() { // return if @default_value.nil? 
@@ -821,6 +830,45 @@ func (t Type) TFType(s string) string { return "schema.TypeString" } +func (t Type) GetFWType() string { + switch t.Type { + case "Boolean": + return "Bool" + case "Double": + return "Float64" + case "Integer": + return "Int64" + case "String": + return "String" + case "Time": + return "String" + case "Enum": + return "String" + case "ResourceRef": + return "String" + case "NestedObject": + return "Nested" + case "Array": + return "List" + case "KeyValuePairs": + return "Map" + case "KeyValueLabels": + return "Map" + case "KeyValueTerraformLabels": + return "Map" + case "KeyValueEffectiveLabels": + return "Map" + case "KeyValueAnnotations": + return "Map" + case "Map": + return "Map" + case "Fingerprint": + return "String" + } + + return "String" +} + // TODO rewrite: validation // // Represents an enum, and store is valid values // class Enum < Primitive diff --git a/mmv1/products/datafusion/Instance.yaml b/mmv1/products/datafusion/Instance.yaml index 665c7e1f7a31..2809e987d1ed 100644 --- a/mmv1/products/datafusion/Instance.yaml +++ b/mmv1/products/datafusion/Instance.yaml @@ -13,6 +13,7 @@ --- name: 'Instance' +# plugin_framework: true description: | Represents a Data Fusion instance. references: diff --git a/mmv1/provider/template_data.go b/mmv1/provider/template_data.go index 645da0d1463f..6964d5de5f9b 100644 --- a/mmv1/provider/template_data.go +++ b/mmv1/provider/template_data.go @@ -85,6 +85,15 @@ func (td *TemplateData) GenerateResourceFile(filePath string, resource api.Resou td.GenerateFile(filePath, templatePath, resource, true, templates...) } +func (td *TemplateData) GenerateFWResourceFile(filePath string, resource api.Resource) { + templatePath := "templates/terraform/resource_fw.go.tmpl" + templates := []string{ + templatePath, + "templates/terraform/schema_property_fw.go.tmpl", + } + td.GenerateFile(filePath, templatePath, resource, true, templates...) +} + func (td *TemplateData) GenerateMetadataFile(filePath string, resource api.Resource) { templatePath := "templates/terraform/metadata.yaml.tmpl" templates := []string{ diff --git a/mmv1/provider/terraform.go b/mmv1/provider/terraform.go index 17abcb4d4def..3e96ce7f58ea 100644 --- a/mmv1/provider/terraform.go +++ b/mmv1/provider/terraform.go @@ -128,8 +128,13 @@ func (t *Terraform) GenerateResource(object api.Resource, templateData TemplateD if err := os.MkdirAll(targetFolder, os.ModePerm); err != nil { log.Println(fmt.Errorf("error creating parent directory %v: %v", targetFolder, err)) } - targetFilePath := path.Join(targetFolder, fmt.Sprintf("resource_%s.go", t.ResourceGoFilename(object))) - templateData.GenerateResourceFile(targetFilePath, object) + if object.FrameworkResource { + targetFilePath := path.Join(targetFolder, fmt.Sprintf("resource_fw_%s.go", t.ResourceGoFilename(object))) + templateData.GenerateFWResourceFile(targetFilePath, object) + } else { + targetFilePath := path.Join(targetFolder, fmt.Sprintf("resource_%s.go", t.ResourceGoFilename(object))) + templateData.GenerateResourceFile(targetFilePath, object) + } } if generateDocs { diff --git a/mmv1/templates/terraform/pre_update/fw_datafusion_instance_update.go.tmpl b/mmv1/templates/terraform/pre_update/fw_datafusion_instance_update.go.tmpl new file mode 100644 index 000000000000..8b3b701a19b7 --- /dev/null +++ b/mmv1/templates/terraform/pre_update/fw_datafusion_instance_update.go.tmpl @@ -0,0 +1,35 @@ +{{/* + The license inside this block applies to this file + Copyright 2024 Google Inc. 
+ Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ -}} +updateMask := []string{} + +if !plan.EnableStackdriverLogging.Equal(state.EnableStackdriverLogging) { + updateMask = append(updateMask, "enableStackdriverLogging") +} + +if !plan.EnableStackdriverMonitoring.Equal(state.EnableStackdriverMonitoring) { + updateMask = append(updateMask, "enableStackdriverMonitoring") +} + +if !plan.EnableRbac.Equal(state.EnableRbac) { + updateMask = append(updateMask, "enableRbac") +} + + +// updateMask is a URL parameter but not present in the schema, so ReplaceVars +// won't set it + +url, err := transport_tpg.AddQueryParams(url, map[string]string{"updateMask": strings.Join(updateMask, ",")}) +if err != nil { + resp.Diagnostics.AddError("Error, failure building update mask query parameters in {{ $.Name -}}", err.Error()) + return +} \ No newline at end of file diff --git a/mmv1/templates/terraform/resource_fw.go.tmpl b/mmv1/templates/terraform/resource_fw.go.tmpl new file mode 100644 index 000000000000..c73454a0e4b3 --- /dev/null +++ b/mmv1/templates/terraform/resource_fw.go.tmpl @@ -0,0 +1,764 @@ +{{/* The license inside this block applies to this file + Copyright 2025 Google LLC. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. */ -}} +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +{{/*{{$.CodeHeader TemplatePath}}*/}} + +package {{ lower $.ProductMetadata.Name }} + +import ( + + "fmt" + "log" + "net/http" + "reflect" +{{- if $.SupportsIndirectUserProjectOverride }} + "regexp" +{{- end }} +{{- if or (and (not $.Immutable) ($.UpdateMask)) $.LegacyLongFormProject }} + "strings" +{{- end }} + "time" + +{{/* # We list all the v2 imports here, because we run 'goimports' to guess the correct */}} +{{/* # set of imports, which will never guess the major version correctly. 
*/}} +{{/* + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/structure" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/logging" + */}} + "github.com/hashicorp/go-cty/cty" + + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" + + "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" + "{{ $.ImportPath }}/fwmodels" + "{{ $.ImportPath }}/fwresource" + "{{ $.ImportPath }}/fwtransport" + + "{{ $.ImportPath }}/tpgresource" + transport_tpg "{{ $.ImportPath }}/transport" + "{{ $.ImportPath }}/verify" + +{{ if $.FlattenedProperties }} + "google.golang.org/api/googleapi" +{{- end}} +) + +{{if $.CustomCode.Constants -}} + {{- $.CustomTemplate $.CustomCode.Constants true -}} +{{- end}} + +var ( + _ resource.Resource = &{{$.ResourceName}}FWResource{} + _ resource.ResourceWithConfigure = &{{$.ResourceName}}FWResource{} +) + +func New{{$.ResourceName}}FWResource() resource.Resource { + return &{{$.ResourceName}}FWResource{} +} + +type {{$.ResourceName}}FWResource struct { + {{/*client *sqladmin.Service*/}} + providerConfig *transport_tpg.Config +} + +type {{$.ResourceName}}FWModel struct { + {{- range $prop := $.OrderProperties $.AllUserProperties }} + {{camelize $prop.Name "upper"}} types.{{$prop.GetFWType}} `tfsdk:"{{underscore $prop.Name}}"` + {{- end }} + {{ if $.HasProject -}} + Project types.String `tfsdk:"project"` + {{- end }} + + Id types.String `tfsdk:"id"`{{/* TODO should this be gated behind a greenfield/brownfield flag? */}} + Timeouts timeouts.Value `tfsdk:"timeouts"` +} + +// Metadata returns the resource type name. +func (d *{{$.ResourceName}}FWResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_fw_{{ underscore $.ResourceName}}" +} + +func (r *{{$.ResourceName}}FWResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { + // Prevent panic if the provider has not been configured. + if req.ProviderData == nil { + return + } + + p, ok := req.ProviderData.(*transport_tpg.Config) + if !ok { + resp.Diagnostics.AddError( + "Unexpected Resource Configure Type", + fmt.Sprintf("Expected *transport_tpg.Config, got: %T. Please report this issue to the provider developers.", req.ProviderData), + ) + return + } + + {{/* TODO non-client equivalent? 
*/}} + {{/* + r.client = p.NewSqlAdminClient(p.UserAgent) + if resp.Diagnostics.HasError() { + return + }*/}} + r.providerConfig = p +} + +func (d *{{$.ResourceName}}FWResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = schema.Schema{ + MarkdownDescription: "A resource to represent a SQL User object.", + + Attributes: map[string]schema.Attribute{ +{{- range $prop := $.OrderProperties $.AllUserProperties }} + {{template "SchemaFieldsFW" $prop -}} +{{- end }} +{{- range $prop := $.VirtualFields }} + {{template "SchemaFieldsFW" $prop -}} +{{- end }} +{{- if $.CustomCode.ExtraSchemaEntry }} + {{ $.CustomTemplate $.CustomCode.ExtraSchemaEntry false -}} +{{- end}} +{{ if $.HasProject -}} + "project": schema.StringAttribute{ + Optional: true, + Computed: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + stringplanmodifier.UseStateForUnknown(), + }, + }, +{{- end}} +{{- if $.HasSelfLink }} + "self_link": schema.StringAttribute{ + Computed: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.UseStateForUnknown(), + }, + }, +{{- end}} + // This is included for backwards compatibility with the original, SDK-implemented resource. + "id": schema.StringAttribute{ + Description: "Project identifier", + MarkdownDescription: "Project identifier", + Computed: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.UseStateForUnknown(), + }, + }, + }, + } +} + +func (r *{{$.ResourceName}}FWResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + var data {{$.ResourceName}}FWModel + var metaData *fwmodels.ProviderMetaModel +{{ if $.CustomCode.CustomCreate -}} + {{ $.CustomTemplate $.CustomCode.CustomCreate false -}} +{{ else -}} + + // Read Provider meta into the meta model + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + + // Read Terraform plan data into the model + resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + var project, billingProject types.String +{{ if $.HasProject -}} + project = fwresource.GetProjectFramework(data.Project, types.StringValue(r.providerConfig.Project), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } +{{ if $.LegacyLongFormProject -}} + billingProject = strings.TrimPrefix(project, "projects/") +{{ else -}} + billingProject = project +{{- end }} +{{- end }} +{{ if $.HasRegion -}} + region := fwresource.GetRegionFramework(data.Region, types.StringValue(r.providerConfig.Region), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } +{{- end }} +{{ if $.HasZone -}} + zone := fwresource.GetZoneFramework(data.Zone, types.StringValue(r.providerConfig.Zone), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } +{{- end }} + + var schemaDefaultVals fwtransport.DefaultVars +{{ if $.HasProject -}} + schemaDefaultVals.Project = project +{{- end }} +{{ if $.HasRegion -}} + schemaDefaultVals.Region = region +{{- end }} +{{ if $.HasZone -}} + schemaDefaultVals.Zone = zone +{{- end }} + + // Use provider_meta to set User-Agent + userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.providerConfig.UserAgent) + + obj := make(map[string]interface{}) + +{{- range $prop := $.OrderProperties $.AllUserProperties }} + {{$prop.ApiName}}Prop, diags := data.{{camelize $prop.Name "upper"}}.To{{$prop.GetFWType}}Value(ctx) + resp.Diagnostics.Append(diags...) 
+ if resp.Diagnostics.HasError() { + return + } + obj["{{ $prop.ApiName -}}"] = {{ $prop.ApiName -}}Prop +{{- end }} + + + {{/* TODO default timeouts */}} + createTimeout, diags := data.Timeouts.Create(ctx, 20*time.Minute) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, r.providerConfig, "{{"{{"}}{{$.ProductMetadata.Name}}BasePath{{"}}"}}{{$.CreateUri}}") + if resp.Diagnostics.HasError() { + return + } + + log.Printf("[DEBUG] Creating new {{ $.Name -}}: %#v", obj) + + {{/* Nested Query block */}} + + headers := make(http.Header) +{{- if $.CustomCode.PreCreate }} + {{ $.CustomTemplate $.CustomCode.PreCreate false -}} +{{- end}} + res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ + Config: r.providerConfig, + Method: "{{ upper $.CreateVerb -}}", + Project: billingProject.ValueString(), + RawURL: url, + UserAgent: userAgent, + Body: obj, + Timeout: createTimeout, + Headers: headers, +{{- if $.ErrorRetryPredicates }} + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{{"{"}}{{ join $.ErrorRetryPredicates "," -}}{{"}"}}, +{{- end}} +{{- if $.ErrorAbortPredicates }} + ErrorAbortPredicates: []transport_tpg.RetryErrorPredicateFunc{{"{"}}{{ join $.ErrorAbortPredicates "," -}}{{"}"}}, +{{- end}} + }, &resp.Diagnostics) + if resp.Diagnostics.HasError() { +{{- if and ($.CustomCode.PostCreateFailure) (not $.GetAsync) -}} + resource{{ $.ResourceName -}}PostCreateFailure(d, meta) +{{- end}} + return + } + + tflog.Trace(ctx, "created {{$.Name}} resource") + + data.Id = types.StringValue("{{ $.IdFormat -}}") +{{ if $.HasProject -}} + data.Project = project +{{- end }} +{{ if $.HasRegion -}} + data.Region = region +{{- end }} +{{ if $.HasZone -}} + data.Zone = zone +{{- end }} + +{{if and $.GetAsync ($.GetAsync.Allow "Create") -}} +{{ if ($.GetAsync.IsA "OpAsync") -}} +{{ if and $.GetAsync.Result.ResourceInsideResponse $.HasPostCreateComputedFields -}} + // Use the resource in the operation response to populate + // identity fields and d.Id() before read + var opRes map[string]interface{} + err = {{ $.ClientNamePascal -}}OperationWaitTimeWithResponse( + r.providerConfig, res, &opRes, {{if or $.HasProject $.GetAsync.IncludeProject -}} {{if $.LegacyLongFormProject -}}tpgresource.GetResourceNameFromSelfLink(project.ValueString()){{ else }}project.ValueString(){{ end }}, {{ end -}} "Creating {{ $.Name -}}", userAgent, + createTimeout) + if err != nil { + {{/* Postcreate Failure */}} +{{- if not $.TaintResourceOnFailedCreate -}} + // The resource didn't actually create + resp.State.RemoveResource(ctx){{/* TODO verify this works */}} +{{ end -}} + resp.Diagnostics.AddError("Error, failure waiting to create {{ $.Name -}}", err.Error()) + return + } + + {{/* CustomCode.Decoder */}} + {{/* NestedQuery */}} + {{/* if $.HasPostCreateComputedFields */}} + {{/* This may have caused the ID to update - update it if so. 
*/}} +{{ else -}}{{/* $.GetAsync.Result.ResourceInsideResponse */}} + err := {{ $.ClientNamePascal -}}OperationWaitTime( + r.providerConfig, res, {{if or $.HasProject $.GetAsync.IncludeProject -}} {{if $.LegacyLongFormProject -}}tpgresource.GetResourceNameFromSelfLink(project.ValueString()){{ else }}project.ValueString(){{ end }}, {{ end -}} "Creating {{ $.Name -}}", userAgent, + createTimeout) + + if err != nil { + + {{/* Postcreate Failure */}} +{{- if not $.TaintResourceOnFailedCreate -}} + // The resource didn't actually create + resp.State.RemoveResource(ctx){{/* TODO verify this works */}} +{{ end -}} + resp.Diagnostics.AddError("Error, failure waiting to create {{ $.Name -}}", err.Error()) + return + } + +{{ end -}}{{/* $.GetAsync.Result.ResourceInsideResponse */}} +{{ end -}}{{/*if ($.GetAsync.IsA "OpAsync")*/}} +{{end -}}{{/*if and $.GetAsync ($.GetAsync.Allow "Create")*/}} +{{if $.CustomCode.PostCreate -}} + {{- $.CustomTemplate $.CustomCode.PostCreate false -}} +{{- end}} + + + // read back {{$.Name}} + r.{{$.ResourceName}}FWRefresh(ctx, &data, &resp.State, req, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + // Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) + + log.Printf("[DEBUG] Finished creating {{ $.Name }} %q: %#v", data.Id.ValueString(), res) +{{ end }} {{/* if CustomCreate */}} +} + + +func (r *{{$.ResourceName}}FWResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + var data {{$.ResourceName}}FWModel + var metaData *fwmodels.ProviderMetaModel + + // Read Provider meta into the meta model + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + + // Read Terraform configuration data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + tflog.Trace(ctx, "read {{$.Name}} resource") + + // read back {{$.Name}} + r.{{$.ResourceName}}FWRefresh(ctx, &data, &resp.State, req, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + // Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) +} + + +func (r *{{$.ResourceName}}FWResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + var state, plan {{$.ResourceName}}FWModel + var metaData *fwmodels.ProviderMetaModel + // Read Provider meta into the meta model + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(req.State.Get(ctx, &state)...) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) 
+ if resp.Diagnostics.HasError() { + return + } + + var project, billingProject types.String +{{ if $.HasProject -}} + project = fwresource.GetProjectFramework(data.Project, types.StringValue(r.providerConfig.Project), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } +{{ if $.LegacyLongFormProject -}} + billingProject = strings.TrimPrefix(project, "projects/") +{{ else -}} + billingProject = project +{{- end }} +{{- end }} +{{ if $.HasRegion -}} + region := fwresource.GetRegionFramework(plan.Region, types.StringValue(r.providerConfig.Region), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } +{{- end }} +{{ if $.HasZone -}} + zone := fwresource.GetZoneFramework(plan.Zone, types.StringValue(r.providerConfig.Zone), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } +{{- end }} + + var schemaDefaultVals fwtransport.DefaultVars +{{ if $.HasProject -}} + schemaDefaultVals.Project = project +{{- end }} +{{ if $.HasRegion -}} + schemaDefaultVals.Region = region +{{- end }} +{{ if $.HasZone -}} + schemaDefaultVals.Zone = zone +{{- end }} + + // Use provider_meta to set User-Agent + userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.providerConfig.UserAgent) + + obj := make(map[string]interface{}) + +{{- range $prop := $.OrderProperties $.UpdateBodyProperties }} + if !plan.{{camelize $prop.Name "upper"}}.Equal(state.{{camelize $prop.Name "upper"}}) { + {{$prop.ApiName}}Prop, diags := plan.{{camelize $prop.Name "upper"}}.To{{$prop.GetFWType}}Value(ctx) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + obj["{{ $prop.ApiName -}}"] = {{ $prop.ApiName -}}Prop + } +{{- end }} + + {{/* TODO default timeouts */}} + updateTimeout, diags := data.Timeouts.Update(ctx, 20*time.Minute) + resp.Diagnostics.Append(diags...) 
+ if resp.Diagnostics.HasError() { + return + } + + url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, r.providerConfig, "{{"{{"}}{{$.ProductMetadata.Name}}BasePath{{"}}"}}{{$.CreateUri}}") + if resp.Diagnostics.HasError() { + return + } + + log.Printf("[DEBUG] Updating {{ $.Name -}}: %#v", obj) + + headers := make(http.Header) + +{{- if $.UpdateMask }} +{{ $.CustomTemplate "templates/terraform/update_mask_fw.go.tmpl" false -}} +{{ end}} + +{{- if $.CustomCode.PreUpdate }} + {{ $.CustomTemplate $.CustomCode.PreUpdate false -}} +{{- end}} + res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ + Config: r.providerConfig, + Method: "{{ upper $.UpdateVerb -}}", + Project: billingProject.ValueString(), + RawURL: url, + UserAgent: userAgent, + Body: obj, + Timeout: updateTimeout, + Headers: headers, +{{- if $.ErrorRetryPredicates }} + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{{"{"}}{{ join $.ErrorRetryPredicates "," -}}{{"}"}}, +{{- end}} +{{- if $.ErrorAbortPredicates }} + ErrorAbortPredicates: []transport_tpg.RetryErrorPredicateFunc{{"{"}}{{ join $.ErrorAbortPredicates "," -}}{{"}"}}, +{{- end}} + }, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + +{{if and ($.GetAsync) ($.GetAsync.Allow "update") -}} +{{ if $.GetAsync.IsA "OpAsync" -}} + err = {{ $.ClientNamePascal -}}OperationWaitTime( + r.providerConfig, res, {{if or $.HasProject $.GetAsync.IncludeProject -}} {{if $.LegacyLongFormProject -}}tpgresource.GetResourceNameFromSelfLink(project.ValueString()){{ else }}project.ValueString(){{ end }}, {{ end -}} "Updating {{ $.Name -}}", userAgent, + updateTimeout) + if err != nil { + resp.Diagnostics.AddError("Error, failure waiting to update {{ $.Name -}}", err.Error()) + return + } +{{- else if $.GetAsync.IsA "PollAsync" -}} + err = transport_tpg.PollingWaitTime(resource{{ $.ResourceName -}}PollRead(d, meta), {{ $.GetAsync.CheckResponseFuncExistence -}}, "Updating {{ $.Name -}}", d.Timeout(schema.TimeoutUpdate), {{ $.GetAsync.TargetOccurrences -}}) + if err != nil { +{{- if $.GetAsync.SuppressError -}} + log.Printf("[ERROR] Unable to confirm eventually consistent {{ $.Name -}} %q finished updating: %q", data.Id.ValueString(), err) +{{- else -}} + resp.Diagnostics.AddError("Error, failure polling for update in {{ $.Name -}}", err.Error()) + return +{{- end}} + } +{{- end}}{{/* if $.GetAsync.IsA "OpAsync" */}} +{{- end}}{{/* if and ($.GetAsync) ($.GetAsync.Allow "update") */}} + + tflog.Trace(ctx, "updated {{$.Name}} resource") + + // read back {{$.Name}} + r.{{$.ResourceName}}FWRefresh(ctx, &plan, &resp.State, req, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + // Save updated data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...) +} + + +func (r *{{$.ResourceName}}FWResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + var data {{$.ResourceName}}FWModel + var metaData *fwmodels.ProviderMetaModel + // Read Provider meta into the meta model + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } +{{- if $.ExcludeDelete }} + log.Printf("[WARNING] {{ $.ProductMetadata.Name }}{{" "}}{{ $.Name }} resources" + + " cannot be deleted from Google Cloud. 
The resource %s will be removed from Terraform" + + " state, but will still be present on Google Cloud.", data.Id.ValueString()) + r.SetId("") + + return nil +{{- else }} + + // Read Terraform prior state data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + var project, billingProject types.String +{{ if $.HasProject -}} + project = fwresource.GetProjectFramework(data.Project, types.StringValue(r.providerConfig.Project), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } +{{ if $.LegacyLongFormProject -}} + billingProject = strings.TrimPrefix(project, "projects/") +{{ else -}} + billingProject = project +{{- end }} +{{- end }} +{{ if $.HasRegion -}} + region := fwresource.GetRegionFramework(data.Region, types.StringValue(r.providerConfig.Region), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } +{{- end }} +{{ if $.HasZone -}} + zone := fwresource.GetZoneFramework(data.Zone, types.StringValue(r.providerConfig.Zone), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } +{{- end }} + + var schemaDefaultVals fwtransport.DefaultVars +{{ if $.HasProject -}} + schemaDefaultVals.Project = project +{{- end }} +{{ if $.HasRegion -}} + schemaDefaultVals.Region = region +{{- end }} +{{ if $.HasZone -}} + schemaDefaultVals.Zone = zone +{{- end }} + + // Use provider_meta to set User-Agent + userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.providerConfig.UserAgent) + + obj := make(map[string]interface{}) + + deleteTimeout, diags := data.Timeouts.Delete(ctx, 20*time.Minute) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, r.providerConfig, "{{"{{"}}{{$.ProductMetadata.Name}}BasePath{{"}}"}}{{$.DeleteUri}}") + if resp.Diagnostics.HasError() { + return + } + +{{ if $.CustomCode.CustomDelete }} +{{ $.CustomTemplate $.CustomCode.CustomDelete false -}} +{{- else }} + headers := make(http.Header) + {{- if $.CustomCode.PreDelete }} + {{ $.CustomTemplate $.CustomCode.PreDelete false -}} + {{- end }} + + log.Printf("[DEBUG] Deleting {{ $.Name }} %q", data.Id.ValueString()) + res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ + Config: r.providerConfig, + Method: "{{ upper $.DeleteVerb -}}", + Project: billingProject.ValueString(), + RawURL: url, + UserAgent: userAgent, + Body: obj, + Timeout: deleteTimeout, + Headers: headers, +{{- if $.ErrorRetryPredicates }} + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{{"{"}}{{ join $.ErrorRetryPredicates "," -}}{{"}"}}, +{{- end}} +{{- if $.ErrorAbortPredicates }} + ErrorAbortPredicates: []transport_tpg.RetryErrorPredicateFunc{{"{"}}{{ join $.ErrorAbortPredicates "," -}}{{"}"}}, +{{- end}} + }, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + diags.AddError(fmt.Sprintf("Error deleting {{ $.Name -}}: %s", data.Id.ValueString()), err.Error()) + return + } +{{if and $.GetAsync ($.GetAsync.Allow "Delete") -}} +{{ if $.GetAsync.IsA "PollAsync" }} + err := transport_tpg.PollingWaitTime(resource{{ $.ResourceName }}PollRead(d, meta), {{ $.GetAsync.CheckResponseFuncAbsence }}, "Deleting {{ $.Name }}", d.Timeout(schema.TimeoutCreate), {{ $.Async.TargetOccurrences }}) + if err != nil { +{{- if $.Async.SuppressError }} + log.Printf("[ERROR] Unable to confirm eventually consistent {{ $.Name -}} %q finished updating: %q", data.Id.ValueString(), err) +{{- else }} + 
resp.Diagnostics.AddError("Error, failure polling for delete in {{ $.Name -}}", err.Error()) + return +{{- end }} + } +{{- else }} + err := {{ $.ClientNamePascal }}OperationWaitTime( + r.providerConfig, res, {{if or $.HasProject $.GetAsync.IncludeProject -}} {{if $.LegacyLongFormProject -}}tpgresource.GetResourceNameFromSelfLink(project.ValueString()){{ else }}project.ValueString(){{ end }}, {{ end -}} "Deleting {{ $.Name -}}", userAgent, + deleteTimeout) + + if err != nil { + resp.Diagnostics.AddError("Error, failure waiting to delete {{ $.Name -}}", err.Error()) + return + } +{{- end }}{{/* if $.GetAsync.IsA "PollAsync" */}} +{{- end }}{{/* if and $.GetAsync ($.GetAsync.Allow "Delete") */}} + +{{- if $.CustomCode.PostDelete }} + {{ $.CustomTemplate $.CustomCode.PostDelete false -}} +{{- end }} + + log.Printf("[DEBUG] Finished deleting {{ $.Name }} %q: %#v", data.Id.ValueString(), res) + + +{{- end }}{{/* if CustomCode.CustomDelete */}} +{{- end }}{{/* if ExcludeDelete */}} +} + +func (r *{{$.ResourceName}}FWResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { + resource.ImportStatePassthroughID(ctx, path.Root("id"), req, resp) +} + +func (r *{{$.ResourceName}}FWResource) {{$.ResourceName}}FWRefresh(ctx context.Context, data *{{$.ResourceName}}FWModel, state *tfsdk.State, req interface{}, diag *diag.Diagnostics) { + var metaData *fwmodels.ProviderMetaModel + //load default values +{{ if $.HasProject -}} + project := fwresource.GetProjectFramework(data.Project, types.StringValue(r.providerConfig.Project), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } +{{- end }} +{{ if $.HasRegion -}} + region := fwresource.GetRegionFramework(data.Region, types.StringValue(r.providerConfig.Region), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } +{{- end }} +{{ if $.HasZone -}} + zone := fwresource.GetZoneFramework(data.Zone, types.StringValue(r.providerConfig.Zone), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } +{{- end }} + + var schemaDefaultVals fwtransport.DefaultVars +{{ if $.HasProject -}} + schemaDefaultVals.Project = project +{{- end }} +{{ if $.HasRegion -}} + schemaDefaultVals.Region = region +{{- end }} +{{ if $.HasZone -}} + schemaDefaultVals.Zone = zone +{{- end }} + + // Use provider_meta to set User-Agent + userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.providerConfig.UserAgent) + + url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, r.providerConfig, "{{"{{"}}{{$.ProductMetadata.Name}}BasePath{{"}}"}}{{$.SelfLinkUri}}{{$.ReadQueryParams}}") + if resp.Diagnostics.HasError() { + return + } + + log.Printf("[DEBUG] Refreshing {{ $.Name -}} data: %s", data.Id.ValueString()) + + headers := make(http.Header) +{{- if $.CustomCode.PreRead }} + {{ $.CustomTemplate $.CustomCode.PreRead false -}} +{{- end}} + res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ + Config: r.providerConfig, + Method: "{{ upper $.ReadVerb -}}", + Project: billingProject.ValueString(), + RawURL: url, + UserAgent: userAgent, + Timeout: createTimeout, + Headers: headers, +{{- if $.ErrorRetryPredicates }} + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{{"{"}}{{ join $.ErrorRetryPredicates "," -}}{{"}"}}, +{{- end}} +{{- if $.ErrorAbortPredicates }} + ErrorAbortPredicates: []transport_tpg.RetryErrorPredicateFunc{{"{"}}{{ join $.ErrorAbortPredicates "," -}}{{"}"}}, +{{- end}} + }, &resp.Diagnostics) + if resp.Diagnostics.HasError() 
{ + fwtransport.HandleNotFoundError(ctx, err, &resp.State, fmt.Sprintf("{{ $.ResourceName }} %s", data.Id.ValueString()), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + } + +{{ range $prop := $.OrderProperties $.AllUserProperties }} + data.{{camelize $prop.Name "upper"}} = res["{{ $prop.ApiName -}}"] + {{$prop.ApiName}}Prop, diags := data.{{camelize $prop.Name "upper"}}.To{{$prop.GetFWType}}Value(ctx) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } +{{ end }} + + tflog.Trace(ctx, "refreshed {{$.Name}} resource data") + + +} \ No newline at end of file diff --git a/mmv1/templates/terraform/schema_property_fw.go.tmpl b/mmv1/templates/terraform/schema_property_fw.go.tmpl new file mode 100644 index 000000000000..03bcbb82e896 --- /dev/null +++ b/mmv1/templates/terraform/schema_property_fw.go.tmpl @@ -0,0 +1,52 @@ +{{/*# The license inside this block applies to this file. + # Copyright 2024 Google Inc. + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. +*/}} +{{- define "SchemaFieldsFW"}} +{{- if .FlattenObject -}} + {{- range $prop := .ResourceMetadata.OrderProperties .UserProperties -}} + {{ template "SchemaFieldsFW" $prop }} + {{ end -}} +{{- else -}} +"{{underscore .Name -}}": schema.{{.GetFWType}}Attribute{ +{{ if .DefaultFromApi -}} + Optional: true, + Computed: true, +{{ else if .Required -}} + Required: true, +{{ else if .Output -}} + Computed: true, +{{ else -}} + Optional: true, +{{ end -}} +{{ if .DeprecationMessage -}} + DeprecationMessage: "{{ .DeprecationMessage }}", +{{ end -}} +{{ if .Sensitive -}} + Sensitive: true, +{{ end -}} +{{ if or .IsForceNew .DefaultFromApi -}} + PlanModifiers: []planmodifier.{{.GetFWType}}{ + + {{ if .IsForceNew -}} + {{lower .GetFWType}}planmodifier.RequiresReplace(), + {{ end -}} + + {{ if .DefaultFromApi -}} + {{lower .GetFWType}}planmodifier.UseStateForUnknown(), + {{ end -}} + }, +{{ end -}} +}, +{{- end -}} +{{- end -}} \ No newline at end of file diff --git a/mmv1/templates/terraform/update_mask_fw.go.tmpl b/mmv1/templates/terraform/update_mask_fw.go.tmpl new file mode 100644 index 000000000000..8ad689215da2 --- /dev/null +++ b/mmv1/templates/terraform/update_mask_fw.go.tmpl @@ -0,0 +1,27 @@ +{{- /* + The license inside this block applies to this file + Copyright 2025 Google Inc. + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ -}} +updateMask := []string{} +{{- $maskGroups := $.GetPropertyUpdateMasksGroups $.UpdateBodyProperties "" }} +{{- range $key := $.GetPropertyUpdateMasksGroupKeys $.UpdateBodyProperties }} + +if !plan.{{camelize $key "upper"}}.Equal(state.{{camelize $key "upper"}}) { + updateMask = append(updateMask, "{{ join (index $maskGroups $key) "\",\n\""}}") +} +{{- end }} +// updateMask is a URL parameter but not present in the schema, so ReplaceVars +// won't set it +url, err := transport_tpg.AddQueryParams(url, map[string]string{"updateMask": strings.Join(updateMask, ",")}) +if err != nil { + resp.Diagnostics.AddError("Error, failure building update mask query parameters in {{ $.Name -}}", err.Error()) + return +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/acctest/vcr_utils.go b/mmv1/third_party/terraform/acctest/vcr_utils.go index a726982a3a5d..117aca3b4169 100644 --- a/mmv1/third_party/terraform/acctest/vcr_utils.go +++ b/mmv1/third_party/terraform/acctest/vcr_utils.go @@ -22,6 +22,9 @@ import ( "github.com/hashicorp/terraform-provider-google/google/fwprovider" tpgprovider "github.com/hashicorp/terraform-provider-google/google/provider" + "github.com/hashicorp/terraform-provider-google/google/services/compute" + "github.com/hashicorp/terraform-provider-google/google/services/pubsublite" + "github.com/hashicorp/terraform-provider-google/google/services/sql" "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" @@ -29,6 +32,8 @@ import ( "github.com/dnaeon/go-vcr/recorder" "github.com/hashicorp/terraform-plugin-framework/datasource" + fwResource "github.com/hashicorp/terraform-plugin-framework/resource" + fwDiags "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/provider" "github.com/hashicorp/terraform-plugin-log/tflog" @@ -339,9 +344,16 @@ func (p *frameworkTestProvider) Configure(ctx context.Context, req provider.Conf func (p *frameworkTestProvider) DataSources(ctx context.Context) []func() datasource.DataSource { ds := p.FrameworkProvider.DataSources(ctx) ds = append(ds, fwprovider.NewGoogleProviderConfigPluginFrameworkDataSource) // google_provider_config_plugin_framework + ds = append(ds, compute.NewComputeNetworkFWDataSource) // google_fw_compute_network return ds } +func (p *frameworkTestProvider) Resources(ctx context.Context) []func() fwResource.Resource { + r := p.FrameworkProvider.Resources(ctx) + r = append(r, pubsublite.NewGooglePubsubLiteReservationFWResource, sql.NewSQLUserFWResource) // google_fwprovider_pubsub_lite_reservation + return r +} + // GetSDKProvider gets the SDK provider for use in acceptance tests // If VCR is in use, the configure function is overwritten. 
// See usage in MuxedProviders diff --git a/mmv1/third_party/terraform/fwprovider/framework_provider.go.tmpl b/mmv1/third_party/terraform/fwprovider/framework_provider.go.tmpl index f72d12124f68..3ceb0f566e7d 100644 --- a/mmv1/third_party/terraform/fwprovider/framework_provider.go.tmpl +++ b/mmv1/third_party/terraform/fwprovider/framework_provider.go.tmpl @@ -21,10 +21,13 @@ import ( "github.com/hashicorp/terraform-provider-google/google/functions" "github.com/hashicorp/terraform-provider-google/google/fwmodels" "github.com/hashicorp/terraform-provider-google/google/services/resourcemanager" + "github.com/hashicorp/terraform-provider-google/google/services/apigee" + "github.com/hashicorp/terraform-provider-google/version" {{- if ne $.TargetVersionName "ga" }} "github.com/hashicorp/terraform-provider-google/google/services/firebase" {{- end }} + "github.com/hashicorp/terraform-provider-google/google/services/storage" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" ) @@ -356,7 +359,10 @@ func (p *FrameworkProvider) DataSources(_ context.Context) []func() datasource.D // Resources defines the resources implemented in the provider. func (p *FrameworkProvider) Resources(_ context.Context) []func() resource.Resource { - return nil + return []func() resource.Resource{ + apigee.NewApigeeKeystoresAliasesKeyCertFileResource, + storage.NewStorageNotificationResource, + } } // Functions defines the provider functions implemented in the provider. diff --git a/mmv1/third_party/terraform/fwresource/field_helpers.go b/mmv1/third_party/terraform/fwresource/field_helpers.go index 54788d8346e7..40c170a85935 100644 --- a/mmv1/third_party/terraform/fwresource/field_helpers.go +++ b/mmv1/third_party/terraform/fwresource/field_helpers.go @@ -17,10 +17,18 @@ import ( // back to the provider's value if not given. If the provider's value is not // given, an error is returned. 
func GetProjectFramework(rVal, pVal types.String, diags *diag.Diagnostics) types.String { - return getProjectFromFrameworkSchema("project", rVal, pVal, diags) + return getProviderDefaultFromFrameworkSchema("project", rVal, pVal, diags) } -func getProjectFromFrameworkSchema(projectSchemaField string, rVal, pVal types.String, diags *diag.Diagnostics) types.String { +func GetRegionFramework(rVal, pVal types.String, diags *diag.Diagnostics) types.String { + return getProviderDefaultFromFrameworkSchema("region", rVal, pVal, diags) +} + +func GetZoneFramework(rVal, pVal types.String, diags *diag.Diagnostics) types.String { + return getProviderDefaultFromFrameworkSchema("zone", rVal, pVal, diags) +} + +func getProviderDefaultFromFrameworkSchema(schemaField string, rVal, pVal types.String, diags *diag.Diagnostics) types.String { if !rVal.IsNull() && rVal.ValueString() != "" { return rVal } @@ -29,7 +37,7 @@ func getProjectFromFrameworkSchema(projectSchemaField string, rVal, pVal types.S return pVal } - diags.AddError("required field is not set", fmt.Sprintf("%s is not set", projectSchemaField)) + diags.AddError("required field is not set", fmt.Sprintf("%s is not set", schemaField)) return types.String{} } @@ -54,7 +62,7 @@ func ParseProjectFieldValueFramework(resourceType, fieldValue, projectSchemaFiel } } - project := getProjectFromFrameworkSchema(projectSchemaField, rVal, pVal, diags) + project := getProviderDefaultFromFrameworkSchema(projectSchemaField, rVal, pVal, diags) if diags.HasError() { return nil } @@ -111,3 +119,10 @@ func ReplaceVarsForFrameworkTest(prov *transport_tpg.Config, rs *terraform.Resou return re.ReplaceAllStringFunc(linkTmpl, replaceFunc), nil } + +func FlattenStringEmptyToNull(configuredValue types.String, apiValue string) types.String { + if configuredValue.IsNull() && apiValue == "" { + return types.StringNull() + } + return types.StringValue(apiValue) +} diff --git a/mmv1/third_party/terraform/fwresource/framework_import.go b/mmv1/third_party/terraform/fwresource/framework_import.go new file mode 100644 index 000000000000..3e344332c0ff --- /dev/null +++ b/mmv1/third_party/terraform/fwresource/framework_import.go @@ -0,0 +1,192 @@ +package fwresource + +import ( + "context" + "fmt" + "regexp" + "strconv" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +// ParseImportId uses a list of regular expressions to parse a resource's import ID. +// It extracts named capture groups from the regex and converts them to their +// corresponding type-safe attribute values based on the provided resource schema. +// It also handles setting default values (project, region, etc) if they are not +// present in the import ID. 
+func ParseImportId( + ctx context.Context, + req resource.ImportStateRequest, + resourceSchema schema.Schema, + providerConfig *transport_tpg.Config, + idRegexes []string, +) (map[string]attr.Value, diag.Diagnostics) { + var diags diag.Diagnostics + parsedAttributes := make(map[string]attr.Value) + + var matchFound bool + for _, idFormat := range idRegexes { + re, err := regexp.Compile(idFormat) + if err != nil { + diags.AddError( + "Invalid Import Regex", + fmt.Sprintf("Provider developer error: could not compile regex %q. Please report this issue. Error: %s", idFormat, err), + ) + // This is a developer error, so we stop immediately. + return nil, diags + } + + if match := re.FindStringSubmatch(req.ID); match != nil { + matchFound = true + subexpNames := re.SubexpNames() + for i, valueStr := range match { + // Index 0 is the full match, so we skip it. + if i == 0 { + continue + } + + fieldName := subexpNames[i] + if fieldName == "" { + continue + } + + // Look up the attribute in the resource's schema. + attribute, ok := resourceSchema.Attributes[fieldName] + if !ok { + diags.AddWarning( + "Unknown Import Field", + fmt.Sprintf("Parsed field %q from import ID but it is not defined in the resource schema.", fieldName), + ) + continue + } + + // Convert the parsed string value to the correct attr.Value type. + attrVal, conversionDiags := convertToAttrValue(valueStr, attribute) + diags.Append(conversionDiags...) + if conversionDiags.HasError() { + continue + } + parsedAttributes[fieldName] = attrVal + } + // Once a match is found, we stop. The most specific regex should be first. + break + } + } + + if !matchFound { + diags.AddError( + "Invalid Import ID", + fmt.Sprintf("Import ID %q doesn't match any of the accepted formats: %v", req.ID, idRegexes), + ) + return nil, diags + } + + // Handle default values like project, region, and zone. + defaultDiags := addDefaultValues(ctx, parsedAttributes, providerConfig, resourceSchema, idRegexes[0]) + diags.Append(defaultDiags...) + + return parsedAttributes, diags +} + +// convertToAttrValue converts a string to the appropriate attr.Value based on the schema attribute type. +func convertToAttrValue(valueStr string, attr schema.Attribute) (attr.Value, diag.Diagnostics) { + var diags diag.Diagnostics + + switch attr.(type) { + case schema.StringAttribute: + return types.StringValue(valueStr), nil + case schema.Int64Attribute: + intVal, err := strconv.ParseInt(valueStr, 10, 64) + if err != nil { + diags.AddError( + "Import Value Conversion Error", + fmt.Sprintf("Failed to parse %q as an integer: %s", valueStr, err), + ) + return nil, diags + } + return types.Int64Value(intVal), nil + case schema.BoolAttribute: + boolVal, err := strconv.ParseBool(valueStr) + if err != nil { + diags.AddError( + "Import Value Conversion Error", + fmt.Sprintf("Failed to parse %q as a boolean: %s", valueStr, err), + ) + return nil, diags + } + return types.BoolValue(boolVal), nil + case schema.Float64Attribute: + floatVal, err := strconv.ParseFloat(valueStr, 64) + if err != nil { + diags.AddError( + "Import Value Conversion Error", + fmt.Sprintf("Failed to parse %q as a float: %s", valueStr, err), + ) + return nil, diags + } + return types.Float64Value(floatVal), nil + default: + // For complex types like List, Object, etc., a simple string conversion is not feasible. + // The assumption is that import IDs will only contain primitive types. + diags.AddError( + "Unsupported Import Attribute Type", + fmt.Sprintf("Importing attributes of type %T is not supported. 
This is a provider developer issue.", attr),
+		)
+		return nil, diags
+	}
+}
+
+// addDefaultValues checks for common provider-level defaults (project, region, zone)
+// and adds them to the parsed attributes map if they were not already set from the import ID.
+func addDefaultValues(
+	ctx context.Context,
+	parsedAttributes map[string]attr.Value,
+	config *transport_tpg.Config,
+	resourceSchema schema.Schema,
+	primaryRegex string,
+) diag.Diagnostics {
+	var diags diag.Diagnostics
+
+	defaults := map[string]func(*transport_tpg.Config) (string, error){
+		"project": func(c *transport_tpg.Config) (string, error) { return c.Project, nil },
+		"region":  func(c *transport_tpg.Config) (string, error) { return c.Region, nil },
+		"zone":    func(c *transport_tpg.Config) (string, error) { return c.Zone, nil },
+	}
+
+	for field, getDefault := range defaults {
+		// Check if the primary regex expects this field.
+		if !strings.Contains(primaryRegex, fmt.Sprintf("(?P<%s>", field)) {
+			continue
+		}
+		// Check if the resource schema actually has this attribute.
+		if _, ok := resourceSchema.Attributes[field]; !ok {
+			continue
+		}
+		// Check if the value was already parsed from the import ID.
+		if _, ok := parsedAttributes[field]; ok {
+			continue
+		}
+
+		// Get the default value from the provider configuration.
+		value, err := getDefault(config)
+		if err != nil {
+			diags.AddError(
+				fmt.Sprintf("Failed to get default value for %s", field),
+				err.Error(),
+			)
+			continue
+		}
+
+		if value != "" {
+			parsedAttributes[field] = types.StringValue(value)
+		}
+	}
+
+	return diags
+}
diff --git a/mmv1/third_party/terraform/fwresource/framework_import_test.go b/mmv1/third_party/terraform/fwresource/framework_import_test.go
new file mode 100644
index 000000000000..278b55dd9a19
--- /dev/null
+++ b/mmv1/third_party/terraform/fwresource/framework_import_test.go
@@ -0,0 +1,183 @@
+package fwresource
+
+import (
+	"context"
+	"reflect"
+	"strings"
+	"testing"
+
+	"github.com/hashicorp/terraform-plugin-framework/attr"
+	"github.com/hashicorp/terraform-plugin-framework/resource"
+	"github.com/hashicorp/terraform-plugin-framework/resource/schema"
+	"github.com/hashicorp/terraform-plugin-framework/resource/schema/int64planmodifier"
+	"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+	"github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
+	"github.com/hashicorp/terraform-plugin-framework/types"
+	transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport"
+)
+
+func TestParseImportId(t *testing.T) {
+	testSchema := schema.Schema{
+		Attributes: map[string]schema.Attribute{
+			"project": schema.StringAttribute{
+				Optional: true,
+				Computed: true,
+				PlanModifiers: []planmodifier.String{
+					stringplanmodifier.UseStateForUnknown(),
+				},
+			},
+			"name": schema.StringAttribute{
+				Required: true,
+			},
+			"zone": schema.StringAttribute{
+				Required: true,
+			},
+			"instance_id": schema.Int64Attribute{
+				Required: true,
+				PlanModifiers: []planmodifier.Int64{
+					int64planmodifier.RequiresReplace(),
+				},
+			},
+		},
+	}
+
+	cases := map[string]struct {
+		importId           string
+		idRegexes          []string
+		resourceSchema     schema.Schema
+		providerConfig     *transport_tpg.Config
+		expectedAttributes map[string]attr.Value
+		expectError        bool
+		errorContains      string
+	}{
+		"successfully parses full resource ID format": {
+			importId: "projects/my-project/zones/us-central1-a/instances/12345",
+			idRegexes: []string{
+				"projects/(?P<project>[^/]+)/zones/(?P<zone>[^/]+)/instances/(?P<instance_id>[^/]+)",
+				"(?P<project>[^/]+)/(?P<zone>[^/]+)/(?P<instance_id>[^/]+)",
+			},
+			resourceSchema: testSchema,
+			providerConfig: &transport_tpg.Config{},
+			expectedAttributes: map[string]attr.Value{
+				"project":     types.StringValue("my-project"),
+				"zone":        types.StringValue("us-central1-a"),
+				"instance_id": types.Int64Value(12345),
+			},
+		},
+		"successfully parses shorter ID format": {
+			importId: "my-project/us-central1-a/12345",
+			idRegexes: []string{
+				"projects/(?P<project>[^/]+)/zones/(?P<zone>[^/]+)/instances/(?P<instance_id>[^/]+)",
+				"(?P<project>[^/]+)/(?P<zone>[^/]+)/(?P<instance_id>[^/]+)",
+			},
+			resourceSchema: testSchema,
+			providerConfig: &transport_tpg.Config{},
+			expectedAttributes: map[string]attr.Value{
+				"project":     types.StringValue("my-project"),
+				"zone":        types.StringValue("us-central1-a"),
+				"instance_id": types.Int64Value(12345),
+			},
+		},
+		"successfully uses provider default for project": {
+			importId: "us-central1-a/my-instance/12345",
+			idRegexes: []string{
+				"projects/(?P<project>[^/]+)/zones/(?P<zone>[^/]+)/instances/(?P<name>[^/]+)/(?P<instance_id>[^/]+)", // Most specific
+				"(?P<zone>[^/]+)/(?P<name>[^/]+)/(?P<instance_id>[^/]+)",
+			},
+			resourceSchema: testSchema,
+			providerConfig: &transport_tpg.Config{
+				Project: "default-provider-project",
+			},
+			expectedAttributes: map[string]attr.Value{
+				"project":     types.StringValue("default-provider-project"),
+				"zone":        types.StringValue("us-central1-a"),
+				"name":        types.StringValue("my-instance"),
+				"instance_id": types.Int64Value(12345),
+			},
+		},
+		"returns error for non-matching ID": {
+			importId: "invalid-id-format",
+			idRegexes: []string{
+				"projects/(?P<project>[^/]+)/zones/(?P<zone>[^/]+)/instances/(?P<instance_id>[^/]+)",
+			},
+			resourceSchema: testSchema,
+			providerConfig: &transport_tpg.Config{},
+			expectError: true,
+			errorContains: "doesn't match any of the accepted formats",
+		},
+		"returns error for value that cannot be converted to type": {
+			importId: "projects/my-project/zones/us-central1-a/instances/not-an-integer",
+			idRegexes: []string{
+				"projects/(?P<project>[^/]+)/zones/(?P<zone>[^/]+)/instances/(?P<instance_id>[^/]+)",
+			},
+			resourceSchema: testSchema,
+			providerConfig: &transport_tpg.Config{},
+			expectError: true,
+			errorContains: "Failed to parse \"not-an-integer\" as an integer",
+		},
+		"returns error for invalid regex pattern": {
+			importId: "any/id",
+			idRegexes: []string{
+				"projects/(?P<project>[^/]+)/zones/(?P<zone>[^/+", // Invalid regex with unclosed bracket
+			},
+			resourceSchema: testSchema,
+			providerConfig: &transport_tpg.Config{},
+			expectError: true,
+			errorContains: "could not compile regex",
+		},
+		"warns about field in regex not present in schema": {
+			importId: "projects/my-project/zones/us-central1-a/instances/12345/extra/field",
+			idRegexes: []string{
+				"projects/(?P<project>[^/]+)/zones/(?P<zone>[^/]+)/instances/(?P<instance_id>[^/]+)/extra/(?P<extra_field>[^/]+)",
+			},
+			resourceSchema: testSchema,
+			providerConfig: &transport_tpg.Config{},
+			// We expect success, but with a warning diagnostic. The valid fields should still be parsed.
+			expectedAttributes: map[string]attr.Value{
+				"project":     types.StringValue("my-project"),
+				"zone":        types.StringValue("us-central1-a"),
+				"instance_id": types.Int64Value(12345),
+			},
+		},
+	}
+
+	for name, tc := range cases {
+		t.Run(name, func(t *testing.T) {
+			ctx := context.Background()
+			req := resource.ImportStateRequest{
+				ID: tc.importId,
+			}
+
+			parsedAttributes, diags := ParseImportId(ctx, req, tc.resourceSchema, tc.providerConfig, tc.idRegexes)
+
+			if diags.HasError() {
+				if tc.expectError {
+					// Check if the error message contains the expected substring.
+ if tc.errorContains != "" { + found := false + for _, d := range diags.Errors() { + if strings.Contains(d.Detail(), tc.errorContains) { + found = true + break + } + } + if !found { + t.Fatalf("expected error to contain %q, but it did not. Got: %v", tc.errorContains, diags.Errors()) + } + } + // Correctly handled an expected error. + return + } + t.Fatalf("unexpected error: %v", diags) + } + + if tc.expectError { + t.Fatal("expected an error, but got none") + } + + if !reflect.DeepEqual(tc.expectedAttributes, parsedAttributes) { + t.Fatalf("incorrect attributes parsed.\n- got: %v\n- want: %v", parsedAttributes, tc.expectedAttributes) + } + }) + } +} diff --git a/mmv1/third_party/terraform/fwtransport/framework_utils.go b/mmv1/third_party/terraform/fwtransport/framework_utils.go index b297b475cb25..238670b1b581 100644 --- a/mmv1/third_party/terraform/fwtransport/framework_utils.go +++ b/mmv1/third_party/terraform/fwtransport/framework_utils.go @@ -1,17 +1,29 @@ package fwtransport import ( + "bytes" "context" + "encoding/json" "fmt" + "net/http" "os" + "reflect" + "regexp" "strings" + "time" "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" "github.com/hashicorp/terraform-plugin-framework/tfsdk" + "github.com/hashicorp/terraform-plugin-framework/types" "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/hashicorp/terraform-provider-google/google/fwmodels" + "github.com/hashicorp/terraform-provider-google/google/fwresource" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + "google.golang.org/api/googleapi" ) const uaEnvVar = "TF_APPEND_USER_AGENT" @@ -38,7 +50,7 @@ func GenerateFrameworkUserAgentString(metaData *fwmodels.ProviderMetaModel, curr return currUserAgent } -func HandleDatasourceNotFoundError(ctx context.Context, err error, state *tfsdk.State, resource string, diags *diag.Diagnostics) { +func HandleNotFoundError(ctx context.Context, err error, state *tfsdk.State, resource string, diags *diag.Diagnostics) { if transport_tpg.IsGoogleApiErrorWithCode(err, 404) { tflog.Warn(ctx, fmt.Sprintf("Removing %s because it's gone", resource)) // The resource doesn't exist anymore @@ -47,3 +59,310 @@ func HandleDatasourceNotFoundError(ctx context.Context, err error, state *tfsdk. diags.AddError(fmt.Sprintf("Error when reading or editing %s", resource), err.Error()) } + +var DefaultRequestTimeout = 5 * time.Minute + +type SendRequestOptions struct { + Config *transport_tpg.Config + Method string + Project string + RawURL string + UserAgent string + Body map[string]any + Timeout time.Duration + Headers http.Header + ErrorRetryPredicates []transport_tpg.RetryErrorPredicateFunc + ErrorAbortPredicates []transport_tpg.RetryErrorPredicateFunc +} + +func SendRequest(opt SendRequestOptions, diags *diag.Diagnostics) map[string]interface{} { + reqHeaders := opt.Headers + if reqHeaders == nil { + reqHeaders = make(http.Header) + } + reqHeaders.Set("User-Agent", opt.UserAgent) + reqHeaders.Set("Content-Type", "application/json") + + if opt.Config.UserProjectOverride && opt.Project != "" { + // When opt.Project is "NO_BILLING_PROJECT_OVERRIDE" in the function GetCurrentUserEmail, + // set the header X-Goog-User-Project to be empty string. 
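+		// Otherwise forward the override project in the header so quota and billing are attributed to that project.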
+ if opt.Project == "NO_BILLING_PROJECT_OVERRIDE" { + reqHeaders.Set("X-Goog-User-Project", "") + } else { + // Pass the project into this fn instead of parsing it from the URL because + // both project names and URLs can have colons in them. + reqHeaders.Set("X-Goog-User-Project", opt.Project) + } + } + + if opt.Timeout == 0 { + opt.Timeout = DefaultRequestTimeout + } + + var res *http.Response + err := transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + var buf bytes.Buffer + if opt.Body != nil { + err := json.NewEncoder(&buf).Encode(opt.Body) + if err != nil { + return err + } + } + + u, err := transport_tpg.AddQueryParams(opt.RawURL, map[string]string{"alt": "json"}) + if err != nil { + return err + } + req, err := http.NewRequest(opt.Method, u, &buf) + if err != nil { + return err + } + + req.Header = reqHeaders + res, err = opt.Config.Client.Do(req) + if err != nil { + return err + } + + if err := googleapi.CheckResponse(res); err != nil { + googleapi.CloseBody(res) + return err + } + + return nil + }, + Timeout: opt.Timeout, + ErrorRetryPredicates: opt.ErrorRetryPredicates, + ErrorAbortPredicates: opt.ErrorAbortPredicates, + }) + if err != nil { + diags.AddError("Error when sending HTTP request: ", err.Error()) + return nil + } + + if res == nil { + diags.AddError("Unable to parse server response. This is most likely a terraform problem, please file a bug at https://github.com/hashicorp/terraform-provider-google/issues.", "") + return nil + } + + // The defer call must be made outside of the retryFunc otherwise it's closed too soon. + defer googleapi.CloseBody(res) + + // 204 responses will have no body, so we're going to error with "EOF" if we + // try to parse it. Instead, we can just return nil. + if res.StatusCode == 204 { + return nil + } + result := make(map[string]interface{}) + if err := json.NewDecoder(res.Body).Decode(&result); err != nil { + diags.AddError("Error when sending HTTP request: ", err.Error()) + return nil + } + + return result +} + +type DefaultVars struct { + BillingProject types.String + Project types.String + Region types.String + Zone types.String +} + +func ReplaceVars(ctx context.Context, req interface{}, diags *diag.Diagnostics, data DefaultVars, config *transport_tpg.Config, linkTmpl string) string { + return ReplaceVarsRecursive(ctx, req, diags, data, config, linkTmpl, false, 0) +} + +// relaceVarsForId shortens variables by running them through GetResourceNameFromSelfLink +// this allows us to use long forms of variables from configs without needing +// custom id formats. For instance: +// accessPolicies/{{access_policy}}/accessLevels/{{access_level}} +// with values: +// access_policy: accessPolicies/foo +// access_level: accessPolicies/foo/accessLevels/bar +// becomes accessPolicies/foo/accessLevels/bar +func ReplaceVarsForId(ctx context.Context, req interface{}, diags *diag.Diagnostics, data DefaultVars, config *transport_tpg.Config, linkTmpl string) string { + return ReplaceVarsRecursive(ctx, req, diags, data, config, linkTmpl, true, 0) +} + +// ReplaceVars must be done recursively because there are baseUrls that can contain references to regions +// (eg cloudrun service) there aren't any cases known for 2+ recursion but we will track a run away +// substitution as 10+ calls to allow for future use cases. 
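+//
+// Illustrative example (the template below is hypothetical, not a real base path): a link template such as
+//   {{SomeServiceBasePath}}projects/{{project}}/locations/{{region}}/things/{{name}}
+// may expand {{SomeServiceBasePath}} into a URL that still contains {{region}}, which the next
+// recursive pass then resolves.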
+func ReplaceVarsRecursive(ctx context.Context, req interface{}, diags *diag.Diagnostics, data DefaultVars, config *transport_tpg.Config, linkTmpl string, shorten bool, depth int) string { + if depth > 10 { + diags.AddError("url building error", "Recursive substitution detected.") + } + + // https://github.com/google/re2/wiki/Syntax + re := regexp.MustCompile("{{([%[:word:]]+)}}") + f := BuildReplacementFunc(ctx, re, req, diags, data, config, linkTmpl, shorten) + if diags.HasError() { + return "" + } + final := re.ReplaceAllStringFunc(linkTmpl, f) + + if re.Match([]byte(final)) { + return ReplaceVarsRecursive(ctx, req, diags, data, config, final, shorten, depth+1) + } + + return final +} + +// This function replaces references to Terraform properties (in the form of {{var}}) with their value in Terraform +// It also replaces {{project}}, {{project_id_or_project}}, {{region}}, and {{zone}} with their appropriate values +// This function supports URL-encoding the result by prepending '%' to the field name e.g. {{%var}} +func BuildReplacementFunc(ctx context.Context, re *regexp.Regexp, req interface{}, diags *diag.Diagnostics, data DefaultVars, config *transport_tpg.Config, linkTmpl string, shorten bool) func(string) string { + var project, region, zone string + var projectID types.String + + if strings.Contains(linkTmpl, "{{project}}") { + project = fwresource.GetProjectFramework(data.Project, types.StringValue(config.Project), diags).ValueString() + if diags.HasError() { + return nil + } + if shorten { + project = strings.TrimPrefix(project, "projects/") + } + } + + if strings.Contains(linkTmpl, "{{project_id_or_project}}") { + var diagInfo diag.Diagnostics + switch req.(type) { + case resource.CreateRequest: + pReq := req.(resource.CreateRequest) + diagInfo = pReq.Plan.GetAttribute(ctx, path.Root("project_id"), &projectID) + case resource.UpdateRequest: + pReq := req.(resource.UpdateRequest) + diagInfo = pReq.Plan.GetAttribute(ctx, path.Root("project_id"), &projectID) + case resource.ReadRequest: + sReq := req.(resource.ReadRequest) + diagInfo = sReq.State.GetAttribute(ctx, path.Root("project_id"), &projectID) + case resource.DeleteRequest: + sReq := req.(resource.DeleteRequest) + diagInfo = sReq.State.GetAttribute(ctx, path.Root("project_id"), &projectID) + } + diags.Append(diagInfo...) 
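+		// If "project_id" could not be read from the request's plan or state, surface those diagnostics and stop.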
+		if diags.HasError() {
+			return nil
+		}
+		if projectID.ValueString() != "" {
+			project = fwresource.GetProjectFramework(data.Project, types.StringValue(config.Project), diags).ValueString()
+			if diags.HasError() {
+				return nil
+			}
+		}
+		if shorten {
+			project = strings.TrimPrefix(project, "projects/")
+			projectID = types.StringValue(strings.TrimPrefix(projectID.ValueString(), "projects/"))
+		}
+	}
+
+	if strings.Contains(linkTmpl, "{{region}}") {
+		region = fwresource.GetRegionFramework(data.Region, types.StringValue(config.Region), diags).ValueString()
+		if diags.HasError() {
+			return nil
+		}
+		if shorten {
+			region = strings.TrimPrefix(region, "regions/")
+		}
+	}
+
+	if strings.Contains(linkTmpl, "{{zone}}") {
+		zone = fwresource.GetRegionFramework(data.Zone, types.StringValue(config.Zone), diags).ValueString()
+		if diags.HasError() {
+			return nil
+		}
+		if shorten {
+			zone = strings.TrimPrefix(zone, "zones/")
+		}
+	}
+
+	f := func(s string) string {
+
+		m := re.FindStringSubmatch(s)[1]
+		if m == "project" {
+			return project
+		}
+		if m == "project_id_or_project" {
+			if projectID.ValueString() != "" {
+				return projectID.ValueString()
+			}
+			return project
+		}
+		if m == "region" {
+			return region
+		}
+		if m == "zone" {
+			return zone
+		}
+		if string(m[0]) == "%" {
+			var v types.String
+			var diagInfo diag.Diagnostics
+			switch req.(type) {
+			case resource.CreateRequest:
+				pReq := req.(resource.CreateRequest)
+				diagInfo = pReq.Plan.GetAttribute(ctx, path.Root(m[1:]), &v)
+			case resource.UpdateRequest:
+				pReq := req.(resource.UpdateRequest)
+				diagInfo = pReq.Plan.GetAttribute(ctx, path.Root(m[1:]), &v)
+			case resource.ReadRequest:
+				sReq := req.(resource.ReadRequest)
+				diagInfo = sReq.State.GetAttribute(ctx, path.Root(m[1:]), &v)
+			case resource.DeleteRequest:
+				sReq := req.(resource.DeleteRequest)
+				diagInfo = sReq.State.GetAttribute(ctx, path.Root(m[1:]), &v)
+			}
+			// An error here means the attribute was not found; do nothing in that case.
+			if !diagInfo.HasError() {
+				diags.Append(diagInfo...)
+				if v.ValueString() != "" {
+					if shorten {
+						return tpgresource.GetResourceNameFromSelfLink(fmt.Sprintf("%v", v.ValueString()))
+					} else {
+						return fmt.Sprintf("%v", v.ValueString())
+					}
+				}
+			}
+		} else {
+			var v types.String
+			var diagInfo diag.Diagnostics
+			switch req.(type) {
+			case resource.CreateRequest:
+				pReq := req.(resource.CreateRequest)
+				diagInfo = pReq.Plan.GetAttribute(ctx, path.Root(m), &v)
+			case resource.UpdateRequest:
+				pReq := req.(resource.UpdateRequest)
+				diagInfo = pReq.Plan.GetAttribute(ctx, path.Root(m), &v)
+			case resource.ReadRequest:
+				sReq := req.(resource.ReadRequest)
+				diagInfo = sReq.State.GetAttribute(ctx, path.Root(m), &v)
+			case resource.DeleteRequest:
+				sReq := req.(resource.DeleteRequest)
+				diagInfo = sReq.State.GetAttribute(ctx, path.Root(m), &v)
+			}
+			// An error here means the attribute was not found; do nothing in that case.
+			if !diagInfo.HasError() {
+				diags.Append(diagInfo...)
+				if v.ValueString() != "" {
+					if shorten {
+						return tpgresource.GetResourceNameFromSelfLink(fmt.Sprintf("%v", v.ValueString()))
+					} else {
+						return fmt.Sprintf("%v", v.ValueString())
+					}
+				}
+			}
+		}
+
+		// terraform-google-conversion doesn't provide a provider config in tests.
+		if config != nil {
+			// Attempt to draw values from the provider config if it's present.
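+			// FieldByName resolves template variables that name exported fields on transport_tpg.Config, e.g. {{ApigeeBasePath}}.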
+ if f := reflect.Indirect(reflect.ValueOf(config)).FieldByName(m); f.IsValid() { + return f.String() + } + } + return "" + } + + return f +} diff --git a/mmv1/third_party/terraform/fwvalidators/framework_validators.go b/mmv1/third_party/terraform/fwvalidators/framework_validators.go index b0da8417591e..eed3a32e98f9 100644 --- a/mmv1/third_party/terraform/fwvalidators/framework_validators.go +++ b/mmv1/third_party/terraform/fwvalidators/framework_validators.go @@ -9,6 +9,8 @@ import ( "strings" "time" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" googleoauth "golang.org/x/oauth2/google" @@ -262,3 +264,81 @@ func (v jwtValidator) ValidateString(ctx context.Context, request validator.Stri func JWTValidator() validator.String { return jwtValidator{} } + +// stringValuesInSetValidator validates that all string elements in a set +// are present in the configured list of valid strings. +type stringValuesInSetValidator struct { + ValidStrings []string +} + +func (v stringValuesInSetValidator) Description(_ context.Context) string { + return fmt.Sprintf("all elements must be one of: %q", v.ValidStrings) +} + +func (v stringValuesInSetValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} + +func (v stringValuesInSetValidator) ValidateSet(ctx context.Context, req validator.SetRequest, resp *validator.SetResponse) { + if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { + return + } + + valid := make(map[string]struct{}, len(v.ValidStrings)) + for _, s := range v.ValidStrings { + valid[s] = struct{}{} + } + + var elements []types.String + resp.Diagnostics.Append(req.ConfigValue.ElementsAs(ctx, &elements, false)...) + if resp.Diagnostics.HasError() { + return + } + + for _, el := range elements { + if _, ok := valid[el.ValueString()]; !ok { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Set Element", + fmt.Sprintf("Element %q is not a valid value. %s.", el.ValueString(), v.Description(ctx)), + ) + } + } +} + +func StringValuesInSet(validStrings ...string) validator.Set { + return stringValuesInSetValidator{ + ValidStrings: validStrings, + } +} + +type TopicPrefixValidator struct{} + +func (v TopicPrefixValidator) Description(ctx context.Context) string { + return "ensures the topic does not start with '//pubsub.googleapis.com/'" +} + +func (v TopicPrefixValidator) MarkdownDescription(ctx context.Context) string { + return "Ensures the topic does not start with `//pubsub.googleapis.com/`." 
+} + +func (v TopicPrefixValidator) ValidateString(ctx context.Context, req validator.StringRequest, resp *validator.StringResponse) { + if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { + return + } + + value := req.ConfigValue.ValueString() + forbiddenPrefix := "//pubsub.googleapis.com/" + + if strings.HasPrefix(value, forbiddenPrefix) { + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid Topic Format", + fmt.Sprintf("The topic must not start with '%s', please use the format projects/{project}/topics/{topic} instead.", forbiddenPrefix), + ) + } +} + +func NewTopicPrefixValidator() validator.String { + return TopicPrefixValidator{} +} diff --git a/mmv1/third_party/terraform/fwvalidators/framework_validators_test.go b/mmv1/third_party/terraform/fwvalidators/framework_validators_test.go index 07e2378e8bf3..8d8a285584fd 100644 --- a/mmv1/third_party/terraform/fwvalidators/framework_validators_test.go +++ b/mmv1/third_party/terraform/fwvalidators/framework_validators_test.go @@ -307,3 +307,141 @@ func TestBoundedDuration(t *testing.T) { }) } } + +func TestStringValuesInSetValidator(t *testing.T) { + t.Parallel() + + // Define the set of valid strings for the validator + validStrings := []string{"APPLE", "BANANA", "CHERRY"} + + stringSet := func(elems []string) types.Set { + if elems == nil { + return types.SetNull(types.StringType) + } + val, diags := types.SetValueFrom(context.Background(), types.StringType, elems) + if diags.HasError() { + t.Fatalf("Failed to create test set: %v", diags) + } + return val + } + + cases := map[string]struct { + ConfigValue types.Set + ExpectedErrorCount int + }{ + "valid set with one element": { + ConfigValue: stringSet([]string{"APPLE"}), + ExpectedErrorCount: 0, + }, + "valid set with multiple elements": { + ConfigValue: stringSet([]string{"BANANA", "CHERRY"}), + ExpectedErrorCount: 0, + }, + "valid empty set": { + ConfigValue: stringSet([]string{}), + ExpectedErrorCount: 0, + }, + "null set is valid": { + ConfigValue: stringSet(nil), + ExpectedErrorCount: 0, + }, + "unknown set is valid": { + ConfigValue: types.SetUnknown(types.StringType), + ExpectedErrorCount: 0, + }, + "invalid set with one element": { + ConfigValue: stringSet([]string{"DURIAN"}), + ExpectedErrorCount: 1, + }, + "invalid set with multiple elements": { + ConfigValue: stringSet([]string{"DURIAN", "ELDERBERRY"}), + ExpectedErrorCount: 2, + }, + "set with mixed valid and invalid elements": { + ConfigValue: stringSet([]string{"APPLE", "DURIAN", "CHERRY"}), + ExpectedErrorCount: 1, + }, + } + + for tn, tc := range cases { + tn, tc := tn, tc + t.Run(tn, func(t *testing.T) { + t.Parallel() + + req := validator.SetRequest{ + Path: path.Root("test_attribute"), + ConfigValue: tc.ConfigValue, + } + resp := &validator.SetResponse{ + Diagnostics: diag.Diagnostics{}, + } + v := fwvalidators.StringValuesInSet(validStrings...) + + v.ValidateSet(context.Background(), req, resp) + + if resp.Diagnostics.ErrorsCount() != tc.ExpectedErrorCount { + t.Errorf("Expected %d errors, but got %d. 
Errors: %v", tc.ExpectedErrorCount, resp.Diagnostics.ErrorsCount(), resp.Diagnostics.Errors()) + } + }) + } +} + +func TestTopicPrefixValidator(t *testing.T) { + t.Parallel() + + type testCase struct { + value types.String + expectError bool + errorContains string + } + + tests := map[string]testCase{ + "valid topic format": { + value: types.StringValue("projects/my-project/topics/my-topic"), + expectError: false, + }, + "invalid topic format - starts with pubsub prefix": { + value: types.StringValue("//pubsub.googleapis.com/projects/my-project/topics/my-topic"), + expectError: true, + errorContains: "The topic must not start with '//pubsub.googleapis.com/', please use the format projects/{project}/topics/{topic} instead.", + }, + } + + for name, test := range tests { + name, test := name, test + t.Run(name, func(t *testing.T) { + t.Parallel() + + request := validator.StringRequest{ + Path: path.Root("test_topic"), + PathExpression: path.MatchRoot("test_topic"), + ConfigValue: test.value, + } + response := validator.StringResponse{} + v := fwvalidators.NewTopicPrefixValidator() + + v.ValidateString(context.Background(), request, &response) + + if test.expectError && !response.Diagnostics.HasError() { + t.Errorf("expected error, got none for value: %q", test.value.ValueString()) + } + + if !test.expectError && response.Diagnostics.HasError() { + t.Errorf("got unexpected error for value: %q: %s", test.value.ValueString(), response.Diagnostics.Errors()) + } + + if test.errorContains != "" { + foundError := false + for _, err := range response.Diagnostics.Errors() { + if err.Detail() == test.errorContains { + foundError = true + break + } + } + if !foundError { + t.Errorf("expected error with detail %q, got none", test.errorContains) + } + } + }) + } +} diff --git a/mmv1/third_party/terraform/go.mod b/mmv1/third_party/terraform/go.mod index d6d72af47b3d..026850fd2bd3 100644 --- a/mmv1/third_party/terraform/go.mod +++ b/mmv1/third_party/terraform/go.mod @@ -20,6 +20,7 @@ require ( github.com/hashicorp/go-version v1.7.0 github.com/hashicorp/terraform-json v0.25.0 github.com/hashicorp/terraform-plugin-framework v1.15.0 + github.com/hashicorp/terraform-plugin-framework-timeouts v0.5.0 github.com/hashicorp/terraform-plugin-framework-validators v0.9.0 github.com/hashicorp/terraform-plugin-go v0.28.0 github.com/hashicorp/terraform-plugin-log v0.9.0 diff --git a/mmv1/third_party/terraform/go.sum b/mmv1/third_party/terraform/go.sum index 3889c6c81471..ed7608cc7167 100644 --- a/mmv1/third_party/terraform/go.sum +++ b/mmv1/third_party/terraform/go.sum @@ -162,12 +162,20 @@ github.com/hashicorp/hcl/v2 v2.23.0 h1:Fphj1/gCylPxHutVSEOf2fBOh1VE4AuLV7+kbJf3q github.com/hashicorp/hcl/v2 v2.23.0/go.mod h1:62ZYHrXgPoX8xBnzl8QzbWq4dyDsDtfCRgIq1rbJEvA= github.com/hashicorp/logutils v1.0.0 h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI65Y= github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= +github.com/hashicorp/terraform-exec v0.22.0 h1:G5+4Sz6jYZfRYUCg6eQgDsqTzkNXV+fP8l+uRmZHj64= +github.com/hashicorp/terraform-exec v0.22.0/go.mod h1:bjVbsncaeh8jVdhttWYZuBGj21FcYw6Ia/XfHcNO7lQ= github.com/hashicorp/terraform-exec v0.23.0 h1:MUiBM1s0CNlRFsCLJuM5wXZrzA3MnPYEsiXmzATMW/I= github.com/hashicorp/terraform-exec v0.23.0/go.mod h1:mA+qnx1R8eePycfwKkCRk3Wy65mwInvlpAeOwmA7vlY= +github.com/hashicorp/terraform-json v0.24.0 h1:rUiyF+x1kYawXeRth6fKFm/MdfBS6+lW4NbeATsYz8Q= +github.com/hashicorp/terraform-json v0.24.0/go.mod h1:Nfj5ubo9xbu9uiAoZVBsNOjvNKB66Oyrvtit74kC7ow= 
github.com/hashicorp/terraform-json v0.25.0 h1:rmNqc/CIfcWawGiwXmRuiXJKEiJu1ntGoxseG1hLhoQ= github.com/hashicorp/terraform-json v0.25.0/go.mod h1:sMKS8fiRDX4rVlR6EJUMudg1WcanxCMoWwTLkgZP/vc= +github.com/hashicorp/terraform-plugin-framework v1.13.0 h1:8OTG4+oZUfKgnfTdPTJwZ532Bh2BobF4H+yBiYJ/scw= +github.com/hashicorp/terraform-plugin-framework v1.13.0/go.mod h1:j64rwMGpgM3NYXTKuxrCnyubQb/4VKldEKlcG8cvmjU= github.com/hashicorp/terraform-plugin-framework v1.15.0 h1:LQ2rsOfmDLxcn5EeIwdXFtr03FVsNktbbBci8cOKdb4= github.com/hashicorp/terraform-plugin-framework v1.15.0/go.mod h1:hxrNI/GY32KPISpWqlCoTLM9JZsGH3CyYlir09bD/fI= +github.com/hashicorp/terraform-plugin-framework-timeouts v0.5.0 h1:I/N0g/eLZ1ZkLZXUQ0oRSXa8YG/EF0CEuQP1wXdrzKw= +github.com/hashicorp/terraform-plugin-framework-timeouts v0.5.0/go.mod h1:t339KhmxnaF4SzdpxmqW8HnQBHVGYazwtfxU0qCs4eE= github.com/hashicorp/terraform-plugin-framework-validators v0.9.0 h1:LYz4bXh3t7bTEydXOmPDPupRRnA480B/9+jV8yZvxBA= github.com/hashicorp/terraform-plugin-framework-validators v0.9.0/go.mod h1:+BVERsnfdlhYR2YkXMBtPnmn9UsL19U3qUtSZ+Y/5MY= github.com/hashicorp/terraform-plugin-go v0.28.0 h1:zJmu2UDwhVN0J+J20RE5huiF3XXlTYVIleaevHZgKPA= diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index 85012670afa3..b0e968f31749 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -356,7 +356,6 @@ var handwrittenResources = map[string]*schema.Resource{ "google_apigee_sharedflow_deployment": apigee.ResourceApigeeSharedFlowDeployment(), "google_apigee_flowhook": apigee.ResourceApigeeFlowhook(), "google_apigee_keystores_aliases_pkcs12": apigee.ResourceApigeeKeystoresAliasesPkcs12(), - "google_apigee_keystores_aliases_key_cert_file": apigee.ResourceApigeeKeystoresAliasesKeyCertFile(), "google_bigquery_table": bigquery.ResourceBigQueryTable(), "google_bigtable_gc_policy": bigtable.ResourceBigtableGCPolicy(), "google_bigtable_instance": bigtable.ResourceBigtableInstance(), @@ -447,7 +446,6 @@ var handwrittenResources = map[string]*schema.Resource{ "google_storage_bucket_object": storage.ResourceStorageBucketObject(), "google_storage_object_acl": storage.ResourceStorageObjectAcl(), "google_storage_default_object_acl": storage.ResourceStorageDefaultObjectAcl(), - "google_storage_notification": storage.ResourceStorageNotification(), "google_storage_transfer_job": storagetransfer.ResourceStorageTransferJob(), "google_tags_location_tag_binding": tags.ResourceTagsLocationTagBinding(), // ####### END handwritten resources ########### diff --git a/mmv1/third_party/terraform/services/apigee/fw_resource_apigee_keystores_aliases_key_cert_file.go b/mmv1/third_party/terraform/services/apigee/fw_resource_apigee_keystores_aliases_key_cert_file.go new file mode 100644 index 000000000000..787f84425f65 --- /dev/null +++ b/mmv1/third_party/terraform/services/apigee/fw_resource_apigee_keystores_aliases_key_cert_file.go @@ -0,0 +1,533 @@ +package apigee + +import ( + "bytes" + "context" + "fmt" + "mime/multipart" + "time" + + "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + 
"github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/tfsdk" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/hashicorp/terraform-provider-google/google/fwmodels" + "github.com/hashicorp/terraform-provider-google/google/fwresource" + "github.com/hashicorp/terraform-provider-google/google/fwtransport" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +var ( + _ resource.Resource = &ApigeeKeystoresAliasesKeyCertFileResource{} + _ resource.ResourceWithConfigure = &ApigeeKeystoresAliasesKeyCertFileResource{} + _ resource.ResourceWithImportState = &ApigeeKeystoresAliasesKeyCertFileResource{} +) + +func NewApigeeKeystoresAliasesKeyCertFileResource() resource.Resource { + return &ApigeeKeystoresAliasesKeyCertFileResource{} +} + +type ApigeeKeystoresAliasesKeyCertFileResource struct { + providerConfig *transport_tpg.Config +} + +type ApigeeKeystoresAliasesKeyCertFileResourceModel struct { + Id types.String `tfsdk:"id"` + OrgId types.String `tfsdk:"org_id"` + Environment types.String `tfsdk:"environment"` + Keystore types.String `tfsdk:"keystore"` + Alias types.String `tfsdk:"alias"` + Cert types.String `tfsdk:"cert"` + Key types.String `tfsdk:"key"` + Password types.String `tfsdk:"password"` + Type types.String `tfsdk:"type"` + CertsInfo types.List `tfsdk:"certs_info"` + Timeouts timeouts.Value `tfsdk:"timeouts"` +} + +type CertInfoDetailModel struct { + BasicConstraints types.String `tfsdk:"basic_constraints"` + ExpiryDate types.String `tfsdk:"expiry_date"` + IsValid types.String `tfsdk:"is_valid"` + Issuer types.String `tfsdk:"issuer"` + PublicKey types.String `tfsdk:"public_key"` + SerialNumber types.String `tfsdk:"serial_number"` + SigAlgName types.String `tfsdk:"sig_alg_name"` + Subject types.String `tfsdk:"subject"` + SubjectAlternativeNames types.List `tfsdk:"subject_alternative_names"` + ValidFrom types.String `tfsdk:"valid_from"` + Version types.Int64 `tfsdk:"version"` +} + +func (r *ApigeeKeystoresAliasesKeyCertFileResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_apigee_keystores_aliases_key_cert_file" +} + +func (r *ApigeeKeystoresAliasesKeyCertFileResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { + if req.ProviderData == nil { + return + } + p, ok := req.ProviderData.(*transport_tpg.Config) + if !ok { + resp.Diagnostics.AddError( + "Unexpected Resource Configure Type", + fmt.Sprintf("Expected *transport_tpg.Config, got: %T. 
Please report this issue to the provider developers.", req.ProviderData), + ) + return + } + r.providerConfig = p +} + +func (r *ApigeeKeystoresAliasesKeyCertFileResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = schema.Schema{ + Description: "An alias from a key/cert file.", + Attributes: map[string]schema.Attribute{ + "org_id": schema.StringAttribute{ + Description: "Organization ID associated with the alias.", + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, + "environment": schema.StringAttribute{ + Description: "Environment associated with the alias.", + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, + "keystore": schema.StringAttribute{ + Description: "Keystore Name.", + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, + "alias": schema.StringAttribute{ + Description: "Alias Name.", + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, + "cert": schema.StringAttribute{ + Description: "Cert content.", + Required: true, + }, + "key": schema.StringAttribute{ + Description: "Private Key content, omit if uploading to truststore.", + Optional: true, + Sensitive: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, + "password": schema.StringAttribute{ + Description: "Password for the Private Key if it's encrypted.", + Optional: true, + Sensitive: true, + }, + "type": schema.StringAttribute{ + Description: "Optional. Type of Alias.", + Computed: true, + }, + "id": schema.StringAttribute{ + Description: "Project identifier", + Computed: true, + }, + "certs_info": schema.ListAttribute{ + Description: "Chain of certificates under this alias.", + Computed: true, + ElementType: types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "basic_constraints": types.StringType, + "expiry_date": types.StringType, + "is_valid": types.StringType, + "issuer": types.StringType, + "public_key": types.StringType, + "serial_number": types.StringType, + "sig_alg_name": types.StringType, + "subject": types.StringType, + "subject_alternative_names": types.ListType{ElemType: types.StringType}, + "valid_from": types.StringType, + "version": types.Int64Type, + }, + }, + }, + }, + Blocks: map[string]schema.Block{ + "timeouts": timeouts.Block(ctx, timeouts.Opts{ + Create: true, + Read: true, + Update: true, + Delete: true, + }), + }, + } +} + +func (r *ApigeeKeystoresAliasesKeyCertFileResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + var plan ApigeeKeystoresAliasesKeyCertFileResourceModel + var metaData *fwmodels.ProviderMetaModel + + resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + + createTimeout, diags := plan.Timeouts.Create(ctx, 20*time.Minute) + resp.Diagnostics.Append(diags...) 
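+	// The request body below is assembled as a multipart form: optional keyFile and password fields plus the required certFile.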
+ if resp.Diagnostics.HasError() { + return + } + + buf := new(bytes.Buffer) + bw := multipart.NewWriter(buf) + if !plan.Key.IsNull() && !plan.Key.IsUnknown() { + keyFilePartWriter, _ := bw.CreateFormField("keyFile") + keyFilePartWriter.Write([]byte(plan.Key.ValueString())) + } + if !plan.Password.IsNull() && !plan.Password.IsUnknown() { + keyFilePartWriter, _ := bw.CreateFormField("password") + keyFilePartWriter.Write([]byte(plan.Password.ValueString())) + } + certFilePartWriter, _ := bw.CreateFormField("certFile") + certFilePartWriter.Write([]byte(plan.Cert.ValueString())) + bw.Close() + + billingProject := types.StringValue(r.providerConfig.BillingProject) + + var schemaDefaultVals fwtransport.DefaultVars + + userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.providerConfig.UserAgent) + url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, r.providerConfig, "{{ApigeeBasePath}}organizations/{{org_id}}/environments/{{environment}}/keystores/{{keystore}}/aliases?format=keycertfile&alias={{alias}}&ignoreExpiryValidation=true") + if resp.Diagnostics.HasError() { + return + } + + res, err := sendRequestRawBodyWithTimeout(r.providerConfig, "POST", billingProject.ValueString(), url, userAgent, buf, bw.FormDataContentType(), createTimeout) + if err != nil { + resp.Diagnostics.AddError("Error, failure to create key cert file", err.Error()) + return + } + + tflog.Trace(ctx, "Successfully created Apigee Keystore Alias", map[string]interface{}{"response": res}) + + id := fmt.Sprintf("organizations/%s/environments/%s/keystores/%s/aliases/%s", + plan.OrgId.ValueString(), + plan.Environment.ValueString(), + plan.Keystore.ValueString(), + plan.Alias.ValueString(), + ) + plan.Id = types.StringValue(id) + + r.refresh(ctx, req, &plan, &resp.State, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...) +} + +func (r *ApigeeKeystoresAliasesKeyCertFileResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + var state ApigeeKeystoresAliasesKeyCertFileResourceModel + + resp.Diagnostics.Append(req.State.Get(ctx, &state)...) + if resp.Diagnostics.HasError() { + return + } + + r.refresh(ctx, req, &state, &resp.State, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(resp.State.Set(ctx, &state)...) +} + +func (r *ApigeeKeystoresAliasesKeyCertFileResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + var plan ApigeeKeystoresAliasesKeyCertFileResourceModel + var state ApigeeKeystoresAliasesKeyCertFileResourceModel + var metaData *fwmodels.ProviderMetaModel + + resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) + resp.Diagnostics.Append(req.State.Get(ctx, &state)...) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + updateTimeout, diags := plan.Timeouts.Update(ctx, 20*time.Minute) + resp.Diagnostics.Append(diags...) 
+ if resp.Diagnostics.HasError() { + return + } + + buf := new(bytes.Buffer) + bw := multipart.NewWriter(buf) + certFilePartWriter, err := bw.CreateFormField("certFile") + if err != nil { + resp.Diagnostics.AddError("Unable to create form field for certificate", err.Error()) + return + } + certFilePartWriter.Write([]byte(plan.Cert.ValueString())) + bw.Close() + + billingProject := types.StringValue(r.providerConfig.BillingProject) + userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.providerConfig.UserAgent) + + var schemaDefaultVals fwtransport.DefaultVars + + url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, r.providerConfig, "{{ApigeeBasePath}}organizations/{{org_id}}/environments/{{environment}}/keystores/{{keystore}}/aliases/{{alias}}?ignoreExpiryValidation=true") + if resp.Diagnostics.HasError() { + return + } + + tflog.Trace(ctx, "Updating Apigee Keystore Alias", map[string]interface{}{"url": url}) + res, err := sendRequestRawBodyWithTimeout(r.providerConfig, "PUT", billingProject.ValueString(), url, userAgent, buf, bw.FormDataContentType(), updateTimeout) + + if err != nil { + resp.Diagnostics.AddError("Error, failure to update key cert file", err.Error()) + return + } + + tflog.Trace(ctx, "Successfully sent update request for Apigee Keystore Alias", map[string]interface{}{"response": res}) + + r.refresh(ctx, req, &plan, &resp.State, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...) +} + +func (r *ApigeeKeystoresAliasesKeyCertFileResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + var data ApigeeKeystoresAliasesKeyCertFileResourceModel + var metaData *fwmodels.ProviderMetaModel + + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + + deleteTimeout, diags := data.Timeouts.Delete(ctx, 20*time.Minute) + resp.Diagnostics.Append(diags...) 
+ if resp.Diagnostics.HasError() { + return + } + + userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.providerConfig.UserAgent) + + var schemaDefaultVals fwtransport.DefaultVars + url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, r.providerConfig, "{{ApigeeBasePath}}organizations/{{org_id}}/environments/{{environment}}/keystores/{{keystore}}/aliases/{{alias}}") + if resp.Diagnostics.HasError() { + return + } + + tflog.Trace(ctx, "Deleting Apigee Keystore Alias", map[string]interface{}{"url": url}) + + _ = fwtransport.SendRequest(fwtransport.SendRequestOptions{ + Config: r.providerConfig, + Method: "DELETE", + Project: data.OrgId.ValueString(), + RawURL: url, + UserAgent: userAgent, + Timeout: deleteTimeout, + }, &resp.Diagnostics) + + tflog.Trace(ctx, "Successfully deleted Apigee Keystore Alias.") +} + +func (r *ApigeeKeystoresAliasesKeyCertFileResource) refresh(ctx context.Context, req interface{}, data *ApigeeKeystoresAliasesKeyCertFileResourceModel, state *tfsdk.State, diags *diag.Diagnostics) { + var metaData *fwmodels.ProviderMetaModel + + userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.providerConfig.UserAgent) + + var schemaDefaultVals fwtransport.DefaultVars + url := fwtransport.ReplaceVars(ctx, req, diags, schemaDefaultVals, r.providerConfig, "{{ApigeeBasePath}}organizations/{{org_id}}/environments/{{environment}}/keystores/{{keystore}}/aliases/{{alias}}") + if diags.HasError() { + return + } + + readTimeout, timeoutDiags := data.Timeouts.Read(ctx, 20*time.Minute) + diags.Append(timeoutDiags...) + if diags.HasError() { + return + } + + tflog.Trace(ctx, "Refreshing Apigee Keystore Alias", map[string]interface{}{"url": url}) + + res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ + Config: r.providerConfig, + Method: "GET", + Project: data.OrgId.ValueString(), + RawURL: url, + UserAgent: userAgent, + Timeout: readTimeout, + }, diags) + + if diags.HasError() { + return + } + + tflog.Trace(ctx, "Successfully refreshed Apigee Keystore Alias", map[string]interface{}{"response": res}) + + id := fmt.Sprintf("organizations/%s/environments/%s/keystores/%s/aliases/%s", + data.OrgId.ValueString(), + data.Environment.ValueString(), + data.Keystore.ValueString(), + data.Alias.ValueString(), + ) + data.Id = types.StringValue(id) + + data.Type = types.StringValue(res["type"].(string)) + + flattenedCertsInfo, certDiags := flattenCertsInfo(res["certsInfo"]) + diags.Append(certDiags...) 
+ if diags.HasError() { + return + } + data.CertsInfo = flattenedCertsInfo +} + +var certInfoObjectType = types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "basic_constraints": types.StringType, + "expiry_date": types.StringType, + "is_valid": types.StringType, + "issuer": types.StringType, + "public_key": types.StringType, + "serial_number": types.StringType, + "sig_alg_name": types.StringType, + "subject": types.StringType, + "subject_alternative_names": types.ListType{ElemType: types.StringType}, + "valid_from": types.StringType, + "version": types.Int64Type, + }, +} + +func flattenCertsInfo(v interface{}) (types.List, diag.Diagnostics) { + if v == nil { + return types.ListNull(certInfoObjectType), nil + } + + var diags diag.Diagnostics + + certsInfoMap, ok := v.(map[string]interface{}) + if !ok { + diags.AddError("Invalid Type", "Cannot flatten certs_info: input is not a map.") + return types.ListNull(certInfoObjectType), diags + } + if len(certsInfoMap) == 0 { + return types.ListNull(certInfoObjectType), nil + } + + certInfoListRaw, ok := certsInfoMap["certInfo"].([]interface{}) + if !ok || len(certInfoListRaw) == 0 { + return types.ListNull(certInfoObjectType), nil + } + + var certInfoDetails []CertInfoDetailModel + for _, rawCertInfo := range certInfoListRaw { + certInfo, ok := rawCertInfo.(map[string]interface{}) + if !ok || len(certInfo) == 0 { + continue + } + getStringValue := func(key string) types.String { + if val, ok := certInfo[key].(string); ok { + return types.StringValue(val) + } + return types.StringNull() + } + var sansValue types.List + if sansRaw, ok := certInfo["subjectAlternativeNames"].([]interface{}); ok { + sans := make([]string, 0, len(sansRaw)) + for _, san := range sansRaw { + if s, ok := san.(string); ok { + sans = append(sans, s) + } + } + var listDiags diag.Diagnostics + sansValue, listDiags = types.ListValueFrom(context.Background(), types.StringType, sans) + diags.Append(listDiags...) + } else { + sansValue = types.ListNull(types.StringType) + } + var versionValue types.Int64 + if versionRaw, ok := certInfo["version"]; ok { + switch v := versionRaw.(type) { + case float64: + versionValue = types.Int64Value(int64(v)) + case string: + versionValue = types.Int64Null() + default: + versionValue = types.Int64Null() + } + } else { + versionValue = types.Int64Null() + } + detail := CertInfoDetailModel{ + BasicConstraints: getStringValue("basicConstraints"), + ExpiryDate: getStringValue("expiryDate"), + IsValid: getStringValue("isValid"), + Issuer: getStringValue("issuer"), + PublicKey: getStringValue("publicKey"), + SerialNumber: getStringValue("serialNumber"), + SigAlgName: getStringValue("sigAlgName"), + Subject: getStringValue("subject"), + ValidFrom: getStringValue("validFrom"), + SubjectAlternativeNames: sansValue, + Version: versionValue, + } + certInfoDetails = append(certInfoDetails, detail) + } + + if diags.HasError() { + return types.ListNull(certInfoObjectType), diags + } + + flattenedList, listDiags := types.ListValueFrom(context.Background(), certInfoObjectType, certInfoDetails) + diags.Append(listDiags...) 
+ + return flattenedList, diags +} + +func (r *ApigeeKeystoresAliasesKeyCertFileResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { + idRegexes := []string{ + "organizations/(?P[^/]+)/environments/(?P[^/]+)/keystores/(?P[^/]+)/aliases/(?P[^/]+)", + "(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)", + } + + var resourceSchemaResp resource.SchemaResponse + r.Schema(ctx, resource.SchemaRequest{}, &resourceSchemaResp) + if resourceSchemaResp.Diagnostics.HasError() { + resp.Diagnostics.Append(resourceSchemaResp.Diagnostics...) + return + } + + parsedAttributes, diags := fwresource.ParseImportId(ctx, req, resourceSchemaResp.Schema, r.providerConfig, idRegexes) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + for name, value := range parsedAttributes { + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root(name), value)...) + } +} diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_key_cert_file.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_key_cert_file.go deleted file mode 100644 index 03d842bfc305..000000000000 --- a/mmv1/third_party/terraform/services/apigee/resource_apigee_keystores_aliases_key_cert_file.go +++ /dev/null @@ -1,692 +0,0 @@ -package apigee - -import ( - "bytes" - "context" - "fmt" - "log" - "mime/multipart" - "reflect" - "time" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -func ResourceApigeeKeystoresAliasesKeyCertFile() *schema.Resource { - return &schema.Resource{ - Create: resourceApigeeKeystoresAliasesKeyCertFileCreate, - Read: resourceApigeeKeystoresAliasesKeyCertFileRead, - Update: resourceApigeeKeystoresAliasesKeyCertFileUpdate, - Delete: resourceApigeeKeystoresAliasesKeyCertFileDelete, - - Importer: &schema.ResourceImporter{ - State: resourceApigeeKeystoresAliasesKeyCertFileImport, - }, - - CustomizeDiff: customdiff.All( - /* - If cert is changed then an update is expected, so we tell Terraform core to expect update on certs_info - */ - - customdiff.ComputedIf("certs_info", func(_ context.Context, diff *schema.ResourceDiff, v interface{}) bool { - return diff.HasChange("cert") - }), - ), - - Timeouts: &schema.ResourceTimeout{ - Create: schema.DefaultTimeout(20 * time.Minute), - Read: schema.DefaultTimeout(20 * time.Minute), - Update: schema.DefaultTimeout(20 * time.Minute), - Delete: schema.DefaultTimeout(20 * time.Minute), - }, - - Schema: map[string]*schema.Schema{ - "alias": { - Type: schema.TypeString, - ForceNew: true, - Required: true, - Description: `Alias Name`, - }, - "cert": { - Type: schema.TypeString, - Required: true, - Description: `Cert content`, - }, - "environment": { - Type: schema.TypeString, - ForceNew: true, - Required: true, - Description: `Environment associated with the alias`, - }, - "keystore": { - Type: schema.TypeString, - ForceNew: true, - Required: true, - Description: `Keystore Name`, - }, - "org_id": { - Type: schema.TypeString, - ForceNew: true, - Required: true, - Description: `Organization ID associated with the alias`, - }, - "certs_info": { - Type: schema.TypeList, - Optional: true, - Computed: true, - Description: `Chain of certificates under this alias.`, - MaxItems: 1, - Elem: &schema.Resource{ - Schema: 
map[string]*schema.Schema{ - "cert_info": { - Type: schema.TypeList, - Optional: true, - Computed: true, - Description: `List of all properties in the object.`, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "basic_constraints": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `X.509 basic constraints extension.`, - }, - "expiry_date": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `X.509 notAfter validity period in milliseconds since epoch.`, - }, - "is_valid": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `Flag that specifies whether the certificate is valid. -Flag is set to Yes if the certificate is valid, No if expired, or Not yet if not yet valid.`, - }, - "issuer": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `X.509 issuer.`, - }, - "public_key": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `Public key component of the X.509 subject public key info.`, - }, - "serial_number": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `X.509 serial number.`, - }, - "sig_alg_name": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `X.509 signatureAlgorithm.`, - }, - "subject": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `X.509 subject.`, - }, - "subject_alternative_names": { - Type: schema.TypeList, - Optional: true, - Computed: true, - Description: `X.509 subject alternative names (SANs) extension.`, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - }, - "valid_from": { - Type: schema.TypeString, - Optional: true, - Computed: true, - Description: `X.509 notBefore validity period in milliseconds since epoch.`, - }, - "version": { - Type: schema.TypeInt, - Optional: true, - Computed: true, - Description: `X.509 version.`, - }, - }, - }, - }, - }, - }, - }, - "key": { - Type: schema.TypeString, - ForceNew: true, - Optional: true, - Sensitive: true, - Description: `Private Key content, omit if uploading to truststore`, - }, - "password": { - Type: schema.TypeString, - Optional: true, - Sensitive: true, - Description: `Password for the Private Key if it's encrypted`, - }, - "type": { - Type: schema.TypeString, - Computed: true, - Description: `Optional.Type of Alias`, - }, - }, - UseJSONNumber: true, - } -} - -func resourceApigeeKeystoresAliasesKeyCertFileCreate(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return err - } - - buf := new(bytes.Buffer) - bw := multipart.NewWriter(buf) - if key, ok := d.GetOkExists("key"); ok { - keyFilePartWriter, _ := bw.CreateFormField("keyFile") - keyFilePartWriter.Write([]byte(key.(string))) - } - if password, ok := d.GetOkExists("password"); ok { - keyFilePartWriter, _ := bw.CreateFormField("password") - keyFilePartWriter.Write([]byte(password.(string))) - } - certFilePartWriter, _ := bw.CreateFormField("certFile") - certFilePartWriter.Write([]byte(d.Get("cert").(string))) - bw.Close() - - url, err := tpgresource.ReplaceVars(d, config, "{{ApigeeBasePath}}organizations/{{org_id}}/environments/{{environment}}/keystores/{{keystore}}/aliases?format=keycertfile&alias={{alias}}&ignoreExpiryValidation=true") - if err != nil { - return err - } - - log.Printf("[DEBUG] Creating new KeystoresAliasesKeyCertFile") - billingProject := "" - - // err == 
nil indicates that the billing_project value was found - if bp, err := tpgresource.GetBillingProject(d, config); err == nil { - billingProject = bp - } - - res, err := sendRequestRawBodyWithTimeout(config, "POST", billingProject, url, userAgent, buf, "multipart/form-data; boundary="+bw.Boundary(), d.Timeout(schema.TimeoutCreate)) - if err != nil { - return fmt.Errorf("Error creating KeystoresAliasesKeyCertFile: %s", err) - } - - // Store the ID now - id, err := tpgresource.ReplaceVars(d, config, "organizations/{{org_id}}/environments/{{environment}}/keystores/{{keystore}}/aliases/{{alias}}") - if err != nil { - return fmt.Errorf("Error constructing id: %s", err) - } - d.SetId(id) - - log.Printf("[DEBUG] Finished creating KeystoresAliasesKeyCertFile %q: %#v", d.Id(), res) - - return resourceApigeeKeystoresAliasesKeyCertFileRead(d, meta) -} - -func resourceApigeeKeystoresAliasesKeyCertFileRead(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return err - } - - url, err := tpgresource.ReplaceVars(d, config, "{{ApigeeBasePath}}organizations/{{org_id}}/environments/{{environment}}/keystores/{{keystore}}/aliases/{{alias}}") - if err != nil { - return err - } - - billingProject := "" - - // err == nil indicates that the billing_project value was found - if bp, err := tpgresource.GetBillingProject(d, config); err == nil { - billingProject = bp - } - - res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - Project: billingProject, - RawURL: url, - UserAgent: userAgent, - }) - if err != nil { - return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("ApigeeKeystoresAliasesKeyCertFile %q", d.Id())) - } - - if err := d.Set("alias", flattenApigeeKeystoresAliasesKeyCertFileAlias(res["alias"], d, config)); err != nil { - return fmt.Errorf("Error reading KeystoresAliasesKeyCertFile: %s", err) - } - - if err := d.Set("certs_info", flattenApigeeKeystoresAliasesKeyCertFileCertsInfo(res["certsInfo"], d, config)); err != nil { - return fmt.Errorf("Error reading KeystoresAliasesKeyCertFile: %s", err) - } - if err := d.Set("type", flattenApigeeKeystoresAliasesKeyCertFileType(res["type"], d, config)); err != nil { - return fmt.Errorf("Error reading KeystoresAliasesKeyCertFile: %s", err) - } - - return nil -} - -func resourceApigeeKeystoresAliasesKeyCertFileUpdate(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return err - } - - billingProject := "" - - url, err := tpgresource.ReplaceVars(d, config, "{{ApigeeBasePath}}organizations/{{org_id}}/environments/{{environment}}/keystores/{{keystore}}/aliases/{{alias}}?ignoreExpiryValidation=true") - if err != nil { - return err - } - - log.Printf("[DEBUG] Updating KeystoresAliasesKeyCertFile %q", d.Id()) - - // err == nil indicates that the billing_project value was found - if bp, err := tpgresource.GetBillingProject(d, config); err == nil { - billingProject = bp - } - - buf := new(bytes.Buffer) - bw := multipart.NewWriter(buf) - certFilePartWriter, _ := bw.CreateFormField("certFile") - certFilePartWriter.Write([]byte(d.Get("cert").(string))) - bw.Close() - - res, err := sendRequestRawBodyWithTimeout(config, "PUT", billingProject, url, userAgent, buf, "multipart/form-data; boundary="+bw.Boundary(), d.Timeout(schema.TimeoutCreate)) - - if 
err != nil { - return fmt.Errorf("Error updating KeystoresAliasesKeyCertFile %q: %s", d.Id(), err) - } else { - log.Printf("[DEBUG] Finished updating KeystoresAliasesKeyCertFile %q: %#v", d.Id(), res) - } - - return resourceApigeeKeystoresAliasesKeyCertFileRead(d, meta) -} - -func resourceApigeeKeystoresAliasesKeyCertFileDelete(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return err - } - - billingProject := "" - - url, err := tpgresource.ReplaceVars(d, config, "{{ApigeeBasePath}}organizations/{{org_id}}/environments/{{environment}}/keystores/{{keystore}}/aliases/{{alias}}") - if err != nil { - return err - } - - var obj map[string]interface{} - log.Printf("[DEBUG] Deleting KeystoresAliasesKeyCertFile %q", d.Id()) - - // err == nil indicates that the billing_project value was found - if bp, err := tpgresource.GetBillingProject(d, config); err == nil { - billingProject = bp - } - - res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "DELETE", - Project: billingProject, - RawURL: url, - UserAgent: userAgent, - Body: obj, - Timeout: d.Timeout(schema.TimeoutDelete), - }) - if err != nil { - return transport_tpg.HandleNotFoundError(err, d, "KeystoresAliasesKeyCertFile") - } - - log.Printf("[DEBUG] Finished deleting KeystoresAliasesKeyCertFile %q: %#v", d.Id(), res) - return nil -} - -func resourceApigeeKeystoresAliasesKeyCertFileImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { - config := meta.(*transport_tpg.Config) - if err := tpgresource.ParseImportId([]string{ - "^organizations/(?P[^/]+)/environments/(?P[^/]+)/keystores/(?P[^/]+)/aliases/(?P[^/]+)$", - "^(?P[^/]+)/(?P[^/]+)/(?P[^/]+)/(?P[^/]+)$", - }, d, config); err != nil { - return nil, err - } - - // Replace import id for the resource id - id, err := tpgresource.ReplaceVars(d, config, "organizations/{{org_id}}/environments/{{environment}}/keystores/{{keystore}}/aliases/{{alias}}") - if err != nil { - return nil, fmt.Errorf("Error constructing id: %s", err) - } - d.SetId(id) - - return []*schema.ResourceData{d}, nil -} - -func flattenApigeeKeystoresAliasesKeyCertFileOrgId(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileEnvironment(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileKeystore(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileAlias(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileKey(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFilePassword(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileCert(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileCertsInfo(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - if v == nil { - return nil - } - original := v.(map[string]interface{}) - if len(original) == 0 { - return nil - } 
- transformed := make(map[string]interface{}) - transformed["cert_info"] = - flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfo(original["certInfo"], d, config) - return []interface{}{transformed} -} -func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfo(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - if v == nil { - return v - } - l := v.([]interface{}) - transformed := make([]interface{}, 0, len(l)) - for _, raw := range l { - original := raw.(map[string]interface{}) - if len(original) < 1 { - // Do not include empty json objects coming back from the api - continue - } - transformed = append(transformed, map[string]interface{}{ - "version": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoVersion(original["version"], d, config), - "subject": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSubject(original["subject"], d, config), - "issuer": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoIssuer(original["issuer"], d, config), - "expiry_date": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoExpiryDate(original["expiryDate"], d, config), - "valid_from": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoValidFrom(original["validFrom"], d, config), - "is_valid": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoIsValid(original["isValid"], d, config), - "subject_alternative_names": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSubjectAlternativeNames(original["subjectAlternativeNames"], d, config), - "sig_alg_name": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSigAlgName(original["sigAlgName"], d, config), - "public_key": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoPublicKey(original["publicKey"], d, config), - "basic_constraints": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoBasicConstraints(original["basicConstraints"], d, config), - "serial_number": flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSerialNumber(original["serialNumber"], d, config), - }) - } - return transformed -} -func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoVersion(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - // Handles the string fixed64 format - if strVal, ok := v.(string); ok { - if intVal, err := tpgresource.StringToFixed64(strVal); err == nil { - return intVal - } - } - - // number values are represented as float64 - if floatVal, ok := v.(float64); ok { - intVal := int(floatVal) - return intVal - } - - return v // let terraform core handle it otherwise -} - -func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSubject(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoIssuer(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoExpiryDate(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoValidFrom(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoIsValid(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSubjectAlternativeNames(v interface{}, d 
*schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSigAlgName(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoPublicKey(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoBasicConstraints(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSerialNumber(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func flattenApigeeKeystoresAliasesKeyCertFileType(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - return v -} - -func expandApigeeKeystoresAliasesKeyCertFileOrgId(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileEnvironment(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileKeystore(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileAlias(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileKey(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFilePassword(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileCert(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileCertsInfo(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - l := v.([]interface{}) - if len(l) == 0 || l[0] == nil { - return nil, nil - } - raw := l[0] - original := raw.(map[string]interface{}) - transformed := make(map[string]interface{}) - - transformedCertInfo, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfo(original["cert_info"], d, config) - if err != nil { - return nil, err - } else if val := reflect.ValueOf(transformedCertInfo); val.IsValid() && !tpgresource.IsEmptyValue(val) { - transformed["certInfo"] = transformedCertInfo - } - - return transformed, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfo(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - l := v.([]interface{}) - req := make([]interface{}, 0, len(l)) - for _, raw := range l { - if raw == nil { - continue - } - original := raw.(map[string]interface{}) - transformed := make(map[string]interface{}) - - transformedVersion, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoVersion(original["version"], d, config) - if err != nil { - return nil, err - } else if val := reflect.ValueOf(transformedVersion); val.IsValid() && !tpgresource.IsEmptyValue(val) { - transformed["version"] = 
transformedVersion - } - - transformedSubject, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSubject(original["subject"], d, config) - if err != nil { - return nil, err - } else if val := reflect.ValueOf(transformedSubject); val.IsValid() && !tpgresource.IsEmptyValue(val) { - transformed["subject"] = transformedSubject - } - - transformedIssuer, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoIssuer(original["issuer"], d, config) - if err != nil { - return nil, err - } else if val := reflect.ValueOf(transformedIssuer); val.IsValid() && !tpgresource.IsEmptyValue(val) { - transformed["issuer"] = transformedIssuer - } - - transformedExpiryDate, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoExpiryDate(original["expiry_date"], d, config) - if err != nil { - return nil, err - } else if val := reflect.ValueOf(transformedExpiryDate); val.IsValid() && !tpgresource.IsEmptyValue(val) { - transformed["expiryDate"] = transformedExpiryDate - } - - transformedValidFrom, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoValidFrom(original["valid_from"], d, config) - if err != nil { - return nil, err - } else if val := reflect.ValueOf(transformedValidFrom); val.IsValid() && !tpgresource.IsEmptyValue(val) { - transformed["validFrom"] = transformedValidFrom - } - - transformedIsValid, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoIsValid(original["is_valid"], d, config) - if err != nil { - return nil, err - } else if val := reflect.ValueOf(transformedIsValid); val.IsValid() && !tpgresource.IsEmptyValue(val) { - transformed["isValid"] = transformedIsValid - } - - transformedSubjectAlternativeNames, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSubjectAlternativeNames(original["subject_alternative_names"], d, config) - if err != nil { - return nil, err - } else if val := reflect.ValueOf(transformedSubjectAlternativeNames); val.IsValid() && !tpgresource.IsEmptyValue(val) { - transformed["subjectAlternativeNames"] = transformedSubjectAlternativeNames - } - - transformedSigAlgName, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSigAlgName(original["sig_alg_name"], d, config) - if err != nil { - return nil, err - } else if val := reflect.ValueOf(transformedSigAlgName); val.IsValid() && !tpgresource.IsEmptyValue(val) { - transformed["sigAlgName"] = transformedSigAlgName - } - - transformedPublicKey, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoPublicKey(original["public_key"], d, config) - if err != nil { - return nil, err - } else if val := reflect.ValueOf(transformedPublicKey); val.IsValid() && !tpgresource.IsEmptyValue(val) { - transformed["publicKey"] = transformedPublicKey - } - - transformedBasicConstraints, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoBasicConstraints(original["basic_constraints"], d, config) - if err != nil { - return nil, err - } else if val := reflect.ValueOf(transformedBasicConstraints); val.IsValid() && !tpgresource.IsEmptyValue(val) { - transformed["basicConstraints"] = transformedBasicConstraints - } - - transformedSerialNumber, err := expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSerialNumber(original["serial_number"], d, config) - if err != nil { - return nil, err - } else if val := reflect.ValueOf(transformedSerialNumber); val.IsValid() && !tpgresource.IsEmptyValue(val) { - transformed["serialNumber"] = transformedSerialNumber - } - - req = append(req, transformed) - } - return req, nil -} - -func 
expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoVersion(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSubject(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoIssuer(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoExpiryDate(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoValidFrom(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoIsValid(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSubjectAlternativeNames(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSigAlgName(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoPublicKey(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoBasicConstraints(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} - -func expandApigeeKeystoresAliasesKeyCertFileCertsInfoCertInfoSerialNumber(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { - return v, nil -} diff --git a/mmv1/third_party/terraform/services/compute/fw_data_source_google_compute_network.go.tmpl b/mmv1/third_party/terraform/services/compute/fw_data_source_google_compute_network.go.tmpl new file mode 100644 index 000000000000..b08deb9e3c65 --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/fw_data_source_google_compute_network.go.tmpl @@ -0,0 +1,187 @@ +package compute + +import ( + "context" + "fmt" + "strconv" + + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" + {{ if eq $.TargetVersionName `ga` }} + "google.golang.org/api/compute/v1" + {{- else }} + compute "google.golang.org/api/compute/v0.beta" + {{- end }} + + "github.com/hashicorp/terraform-provider-google/google/fwmodels" + "github.com/hashicorp/terraform-provider-google/google/fwresource" + "github.com/hashicorp/terraform-provider-google/google/fwtransport" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +// Ensure the implementation satisfies the expected interfaces. 
+var ( + _ datasource.DataSource = &ComputeNetworkFWDataSource{} + _ datasource.DataSourceWithConfigure = &ComputeNetworkFWDataSource{} +) + +// NewComputeNetworkFWDataSource is a helper function to simplify the provider implementation. +func NewComputeNetworkFWDataSource() datasource.DataSource { + return &ComputeNetworkFWDataSource{} +} + +// ComputeNetworkFWDataSource is the data source implementation. +type ComputeNetworkFWDataSource struct { + client *compute.Service + providerConfig *transport_tpg.Config +} + +type ComputeNetworkModel struct { + Id types.String `tfsdk:"id"` + Project types.String `tfsdk:"project"` + Name types.String `tfsdk:"name"` + Description types.String `tfsdk:"description"` + NetworkId types.Int64 `tfsdk:"network_id"` + NumericId types.String `tfsdk:"numeric_id"` + GatewayIpv4 types.String `tfsdk:"gateway_ipv4"` + InternalIpv6Range types.String `tfsdk:"internal_ipv6_range"` + SelfLink types.String `tfsdk:"self_link"` + // NetworkProfile types.String `tfsdk:"network_profile"` + // SubnetworksSelfLinks types.List `tfsdk:"subnetworks_self_links"` +} + +// Metadata returns the data source type name. +func (d *ComputeNetworkFWDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_fw_compute_network" +} + +func (d *ComputeNetworkFWDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { + // Prevent panic if the provider has not been configured. + if req.ProviderData == nil { + return + } + + p, ok := req.ProviderData.(*transport_tpg.Config) + if !ok { + resp.Diagnostics.AddError( + "Unexpected Data Source Configure Type", + fmt.Sprintf("Expected *transport_tpg.Config, got: %T. Please report this issue to the provider developers.", req.ProviderData), + ) + return + } + + d.client = p.NewComputeClient(p.UserAgent) + if resp.Diagnostics.HasError() { + return + } + d.providerConfig = p +} + +// Schema defines the schema for the data source. +func (d *ComputeNetworkFWDataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = schema.Schema{ + MarkdownDescription: "A data source to get network details.", + + Attributes: map[string]schema.Attribute{ + "project": schema.StringAttribute{ + Description: `The project name.`, + MarkdownDescription: `The project name.`, + Optional: true, + }, + "name": schema.StringAttribute{ + Description: `The name of the Compute network.`, + MarkdownDescription: `The name of the Compute network.`, + Required: true, + }, + "description": schema.StringAttribute{ + Description: `The description of the network.`, + MarkdownDescription: `The description of the network.`, + Computed: true, + }, + "network_id": schema.Int64Attribute{ + Description: `The network ID.`, + MarkdownDescription: `The network ID.`, + Computed: true, + }, + "numeric_id": schema.StringAttribute{ + Description: `The numeric ID of the network. Deprecated in favor of network_id.`, + MarkdownDescription: `The numeric ID of the network. Deprecated in favor of network_id.`, + Computed: true, + DeprecationMessage: "`numeric_id` is deprecated and will be removed in a future major release. 
Use `network_id` instead.", + }, + "gateway_ipv4": schema.StringAttribute{ + Description: `The gateway address for default routing out of the network.`, + MarkdownDescription: `The gateway address for default routing out of the network.`, + Computed: true, + }, + "internal_ipv6_range": schema.StringAttribute{ + Description: `The internal ipv6 address range of the network.`, + MarkdownDescription: `The internal ipv6 address range of the network.`, + Computed: true, + }, + "self_link": schema.StringAttribute{ + Description: `The network self link.`, + MarkdownDescription: `The network self link.`, + Computed: true, + }, + // This is included for backwards compatibility with the original, SDK-implemented data source. + "id": schema.StringAttribute{ + Description: "Project identifier", + MarkdownDescription: "Project identifier", + Computed: true, + }, + }, + } +} + +// Read refreshes the Terraform state with the latest data. +func (d *ComputeNetworkFWDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + var data ComputeNetworkModel + var metaData *fwmodels.ProviderMetaModel + + // Read Provider meta into the meta model + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + + // Read Terraform configuration data into the model + resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + // Use provider_meta to set User-Agent + d.client.UserAgent = fwtransport.GenerateFrameworkUserAgentString(metaData, d.client.UserAgent) + + project := fwresource.GetProjectFramework(data.Project, types.StringValue(d.providerConfig.Project), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + // GET Request + clientResp, err := d.client.Networks.Get(project.ValueString(), data.Name.ValueString()).Do() + if err != nil { + fwtransport.HandleNotFoundError(ctx, err, &resp.State, fmt.Sprintf("dataSourceComputeNetwork %q", data.Name.ValueString()), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + } + + tflog.Trace(ctx, "read compute network data source") + + // Put data in model + id := fmt.Sprintf("projects/%s/global/networks/%s", project.ValueString(), clientResp.Name) + data.Id = types.StringValue(id) + data.Description = types.StringValue(clientResp.Description) + data.NetworkId = types.Int64Value(int64(clientResp.Id)) + data.NumericId = types.StringValue(strconv.Itoa(int(clientResp.Id))) + data.GatewayIpv4 = types.StringValue(clientResp.GatewayIPv4) + data.InternalIpv6Range = types.StringValue(clientResp.InternalIpv6Range) + data.SelfLink = types.StringValue(clientResp.SelfLink) + + // Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) 
+} diff --git a/mmv1/third_party/terraform/services/compute/fw_data_source_google_compute_network_test.go b/mmv1/third_party/terraform/services/compute/fw_data_source_google_compute_network_test.go new file mode 100644 index 000000000000..ae82326c793d --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/fw_data_source_google_compute_network_test.go @@ -0,0 +1,86 @@ +package compute_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" +) + +func TestAccDataSourceGoogleFWNetwork(t *testing.T) { + t.Parallel() + + networkName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceGoogleNetworkFWConfig(networkName), + Check: resource.ComposeTestCheckFunc( + testAccDataSourceGoogleFWNetworkCheck("data.google_fw_compute_network.my_network", "google_compute_network.foobar"), + ), + }, + }, + }) +} + +func testAccDataSourceGoogleFWNetworkCheck(data_source_name string, resource_name string) resource.TestCheckFunc { + return func(s *terraform.State) error { + ds, ok := s.RootModule().Resources[data_source_name] + if !ok { + return fmt.Errorf("root module has no resource called %s", data_source_name) + } + + rs, ok := s.RootModule().Resources[resource_name] + if !ok { + return fmt.Errorf("can't find %s in state", resource_name) + } + + ds_attr := ds.Primary.Attributes + rs_attr := rs.Primary.Attributes + network_attrs_to_test := []string{ + "id", + "name", + "network_id", + "numeric_id", + "description", + "internal_ipv6_range", + } + + for _, attr_to_check := range network_attrs_to_test { + if ds_attr[attr_to_check] != rs_attr[attr_to_check] { + return fmt.Errorf( + "%s is %s; want %s", + attr_to_check, + ds_attr[attr_to_check], + rs_attr[attr_to_check], + ) + } + } + + if !tpgresource.CompareSelfLinkOrResourceName("", ds_attr["self_link"], rs_attr["self_link"], nil) && ds_attr["self_link"] != rs_attr["self_link"] { + return fmt.Errorf("self link does not match: %s vs %s", ds_attr["self_link"], rs_attr["self_link"]) + } + + return nil + } +} + +func testAccDataSourceGoogleNetworkFWConfig(name string) string { + return fmt.Sprintf(` +resource "google_compute_network" "foobar" { + name = "%s" + description = "my-description" + enable_ula_internal_ipv6 = true + auto_create_subnetworks = false +} + +data "google_fw_compute_network" "my_network" { + name = google_compute_network.foobar.name +} +`, name) +} diff --git a/mmv1/third_party/terraform/services/compute/image.go b/mmv1/third_party/terraform/services/compute/image.go index c51547f83f35..7b020823db98 100644 --- a/mmv1/third_party/terraform/services/compute/image.go +++ b/mmv1/third_party/terraform/services/compute/image.go @@ -108,7 +108,6 @@ func ResolveImage(c *transport_tpg.Config, project, name, userAgent string) (str break } } - switch { case resolveImageLink.MatchString(name): // https://www.googleapis.com/compute/v1/projects/xyz/global/images/xyz return name, nil diff --git a/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_test.go.tmpl b/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_test.go.tmpl index 
a0659cc90aed..8210ff4ae2ec 100644 --- a/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_test.go.tmpl +++ b/mmv1/third_party/terraform/services/dataflow/resource_dataflow_flex_template_job_test.go.tmpl @@ -2157,4 +2157,4 @@ resource "google_dataflow_flex_template_job" "flex_job" { `, context) } -{{- end }} +{{- end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/dns/resource_dns_managed_zone_test.go.tmpl b/mmv1/third_party/terraform/services/dns/resource_dns_managed_zone_test.go.tmpl index ad473a06dc1b..881f888f3188 100644 --- a/mmv1/third_party/terraform/services/dns/resource_dns_managed_zone_test.go.tmpl +++ b/mmv1/third_party/terraform/services/dns/resource_dns_managed_zone_test.go.tmpl @@ -828,4 +828,4 @@ resource "google_compute_network" "network" { } `, context) } -{{- end }} +{{- end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_android_app_config.go.tmpl b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_android_app_config.go.tmpl index 0755b2f3740a..6b735ae83e81 100644 --- a/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_android_app_config.go.tmpl +++ b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_android_app_config.go.tmpl @@ -139,7 +139,7 @@ func (d *GoogleFirebaseAndroidAppConfigDataSource) Read(ctx context.Context, req appName := fmt.Sprintf("projects/%s/androidApps/%s/config", data.Project.ValueString(), data.AppId.ValueString()) clientResp, err := service.GetConfig(appName).Do() if err != nil { - fwtransport.HandleDatasourceNotFoundError(ctx, err, &resp.State, fmt.Sprintf("dataSourceFirebaseAndroidAppConfig %q", data.AppId.ValueString()), &resp.Diagnostics) + fwtransport.HandleNotFoundError(ctx, err, &resp.State, fmt.Sprintf("dataSourceFirebaseAndroidAppConfig %q", data.AppId.ValueString()), &resp.Diagnostics) if resp.Diagnostics.HasError() { return } diff --git a/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_apple_app_config.go.tmpl b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_apple_app_config.go.tmpl index 8dfdf61f9dc2..e64e5949608f 100644 --- a/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_apple_app_config.go.tmpl +++ b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_apple_app_config.go.tmpl @@ -137,7 +137,7 @@ func (d *GoogleFirebaseAppleAppConfigDataSource) Read(ctx context.Context, req d appName := fmt.Sprintf("projects/%s/iosApps/%s/config", data.Project.ValueString(), data.AppId.ValueString()) clientResp, err := service.GetConfig(appName).Do() if err != nil { - fwtransport.HandleDatasourceNotFoundError(ctx, err, &resp.State, fmt.Sprintf("dataSourceFirebaseAppleAppConfig %q", data.AppId.ValueString()), &resp.Diagnostics) + fwtransport.HandleNotFoundError(ctx, err, &resp.State, fmt.Sprintf("dataSourceFirebaseAppleAppConfig %q", data.AppId.ValueString()), &resp.Diagnostics) if resp.Diagnostics.HasError() { return } diff --git a/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_web_app_config.go.tmpl b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_web_app_config.go.tmpl index 7626b3d5b902..29891e565551 100644 --- a/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_web_app_config.go.tmpl +++ 
b/mmv1/third_party/terraform/services/firebase/data_source_google_firebase_web_app_config.go.tmpl @@ -184,7 +184,7 @@ func (d *GoogleFirebaseWebAppConfigDataSource) Read(ctx context.Context, req dat appName := fmt.Sprintf("projects/%s/webApps/%s/config", data.Project.ValueString(), data.WebAppId.ValueString()) clientResp, err := service.GetConfig(appName).Do() if err != nil { - fwtransport.HandleDatasourceNotFoundError(ctx, err, &resp.State, fmt.Sprintf("dataSourceFirebaseWebAppConfig %q", data.WebAppId.ValueString()), &resp.Diagnostics) + fwtransport.HandleNotFoundError(ctx, err, &resp.State, fmt.Sprintf("dataSourceFirebaseWebAppConfig %q", data.WebAppId.ValueString()), &resp.Diagnostics) if resp.Diagnostics.HasError() { return } diff --git a/mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation.go b/mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation.go new file mode 100644 index 000000000000..685428eca2de --- /dev/null +++ b/mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation.go @@ -0,0 +1,383 @@ +package pubsublite + +import ( + "context" + "fmt" + "net/http" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" + + "github.com/hashicorp/terraform-provider-google/google/fwmodels" + "github.com/hashicorp/terraform-provider-google/google/fwresource" + "github.com/hashicorp/terraform-provider-google/google/fwtransport" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + "google.golang.org/api/pubsublite/v1" +) + +// Ensure the implementation satisfies the expected interfaces. +var ( + _ resource.Resource = &GooglePubsubLiteReservationFWResource{} + _ resource.ResourceWithConfigure = &GooglePubsubLiteReservationFWResource{} +) + +// NewGooglePubsubLiteReservationResource is a helper function to simplify the provider implementation. +func NewGooglePubsubLiteReservationFWResource() resource.Resource { + return &GooglePubsubLiteReservationFWResource{} +} + +// GooglePubsubLiteReservationResource is the resource implementation. +type GooglePubsubLiteReservationFWResource struct { + client *pubsublite.Service + providerConfig *transport_tpg.Config +} + +type GooglePubsubLiteReservationModel struct { + Id types.String `tfsdk:"id"` + Project types.String `tfsdk:"project"` + Region types.String `tfsdk:"region"` + Name types.String `tfsdk:"name"` + ThroughputCapacity types.Int64 `tfsdk:"throughput_capacity"` +} + +// Metadata returns the resource type name. +func (d *GooglePubsubLiteReservationFWResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_fwprovider_pubsub_lite_reservation" +} + +func (d *GooglePubsubLiteReservationFWResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { + // Prevent panic if the provider has not been configured. + if req.ProviderData == nil { + return + } + + p, ok := req.ProviderData.(*transport_tpg.Config) + if !ok { + resp.Diagnostics.AddError( + "Unexpected Resource Configure Type", + fmt.Sprintf("Expected *transport_tpg.Config, got: %T. Please report this issue to the provider developers.", req.ProviderData), + ) + return + } + + d.providerConfig = p +} + +// Schema defines the schema for the data source. 
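+// The attribute names match the SDK-based google_pubsub_lite_reservation resource (name, region, project, throughput_capacity).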
+func (d *GooglePubsubLiteReservationFWResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = schema.Schema{ + MarkdownDescription: "Pubsub Lite Reservation resource description", + + Attributes: map[string]schema.Attribute{ + "project": schema.StringAttribute{ + Description: "The project id of the Pubsub Lite Reservation.", + MarkdownDescription: "The project id of the Pubsub Lite Reservation.", + Required: true, + }, + "region": schema.StringAttribute{ + Description: "The region of the Pubsub Lite Reservation.", + MarkdownDescription: "The region of the Pubsub Lite Reservation.", + Required: true, + }, + "name": schema.StringAttribute{ + Description: `The display name of the project.`, + MarkdownDescription: `The display name of the project.`, + Required: true, + }, + "throughput_capacity": schema.Int64Attribute{ + Description: `The reserved throughput capacity. Every unit of throughput capacity is equivalent to 1 MiB/s of published messages or 2 MiB/s of subscribed messages.`, + MarkdownDescription: `The reserved throughput capacity. Every unit of throughput capacity is equivalent to 1 MiB/s of published messages or 2 MiB/s of subscribed messages.`, + Required: true, + }, + // This is included for backwards compatibility with the original, SDK-implemented data source. + "id": schema.StringAttribute{ + Description: "Project identifier", + MarkdownDescription: "Project identifier", + Computed: true, + }, + }, + } +} + +func (d *GooglePubsubLiteReservationFWResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + var data GooglePubsubLiteReservationModel + var metaData *fwmodels.ProviderMetaModel + + // Read Provider meta into the meta model + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + + // Read Terraform configuration data into the model + resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...) 
+ if resp.Diagnostics.HasError() { + return + } + + // Use provider_meta to set User-Agent + userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, d.providerConfig.UserAgent) + + obj := make(map[string]interface{}) + + obj["throughputCapacity"] = data.ThroughputCapacity.ValueInt64() + + data.Project = fwresource.GetProjectFramework(data.Project, types.StringValue(d.providerConfig.Project), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + data.Region = fwresource.GetRegionFramework(data.Region, types.StringValue(d.providerConfig.Region), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + billingProject := data.Project + + var schemaDefaultVals fwtransport.DefaultVars + schemaDefaultVals.Project = data.Project + schemaDefaultVals.Region = data.Region + + url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, d.providerConfig, "{{PubsubLiteBasePath}}projects/{{project}}/locations/{{region}}/reservations?reservationId={{name}}") + if resp.Diagnostics.HasError() { + return + } + tflog.Trace(ctx, fmt.Sprintf("[DEBUG] Creating new Reservation: %#v", obj)) + + headers := make(http.Header) + res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ + Config: d.providerConfig, + Method: "POST", + Project: billingProject.ValueString(), + RawURL: url, + UserAgent: userAgent, + Body: obj, + Headers: headers, + }, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + tflog.Trace(ctx, "create fwprovider google_pubsub_lite resource") + + // Put data in model + data.Id = types.StringValue(fmt.Sprintf("projects/%s/locations/%s/reservations/%s", data.Project.ValueString(), data.Region.ValueString(), data.Name.ValueString())) + data.ThroughputCapacity = types.Int64Value(res["throughputCapacity"].(int64)) + + // Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) +} + +// Read refreshes the Terraform state with the latest data. +func (d *GooglePubsubLiteReservationFWResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + var data GooglePubsubLiteReservationModel + var metaData *fwmodels.ProviderMetaModel + + // Read Provider meta into the meta model + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + + // Read Terraform configuration data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) 
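+    // The prior state supplies the project, region, and name used to build the GET request URL below.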
+ if resp.Diagnostics.HasError() { + return + } + + // Use provider_meta to set User-Agent + userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, d.providerConfig.UserAgent) + + data.Project = fwresource.GetProjectFramework(data.Project, types.StringValue(d.providerConfig.Project), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + data.Region = fwresource.GetRegionFramework(data.Region, types.StringValue(d.providerConfig.Region), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + billingProject := data.Project + + var schemaDefaultVals fwtransport.DefaultVars + schemaDefaultVals.Project = data.Project + schemaDefaultVals.Region = data.Region + + url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, d.providerConfig, "{{PubSubLiteBasePath}}projects/{{project}}/locations/{{region}}/instances/{{name}}") + + if resp.Diagnostics.HasError() { + return + } + + headers := make(http.Header) + res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ + Config: d.providerConfig, + Method: "GET", + Project: billingProject.ValueString(), + RawURL: url, + UserAgent: userAgent, + Headers: headers, + }, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + tflog.Trace(ctx, "read fwprovider google_pubsub_lite resource") + + // Put data in model + data.Id = types.StringValue(fmt.Sprintf("projects/%s/locations/%s/instances/%s", data.Project.ValueString(), data.Region.ValueString(), data.Name.ValueString())) + data.ThroughputCapacity = types.Int64Value(res["throughputCapacity"].(int64)) + + // Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) +} + +func (d *GooglePubsubLiteReservationFWResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + var plan, state GooglePubsubLiteReservationModel + var metaData *fwmodels.ProviderMetaModel + + // Read Provider meta into the meta model + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + + // Read Terraform configuration data into the model + resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) + if resp.Diagnostics.HasError() { + return + } + + // Read Terraform configuration data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &state)...) 
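+    // Prior state is read alongside the plan so the update mask can be limited to fields that actually changed.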
+ if resp.Diagnostics.HasError() { + return + } + + // Use provider_meta to set User-Agent + userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, d.providerConfig.UserAgent) + + obj := make(map[string]interface{}) + + obj["throughputCapacity"] = plan.ThroughputCapacity.ValueInt64() + + plan.Project = fwresource.GetProjectFramework(plan.Project, types.StringValue(d.providerConfig.Project), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + plan.Region = fwresource.GetRegionFramework(plan.Region, types.StringValue(d.providerConfig.Region), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + billingProject := plan.Project + + var schemaDefaultVals fwtransport.DefaultVars + schemaDefaultVals.Project = plan.Project + schemaDefaultVals.Region = plan.Region + + url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, d.providerConfig, "{{PubSubLiteBasePath}}projects/{{project}}/locations/{{region}}/instances/{{name}}") + + if resp.Diagnostics.HasError() { + return + } + tflog.Trace(ctx, fmt.Sprintf("[DEBUG] Updating Reservation: %#v", obj)) + + headers := make(http.Header) + + updateMask := []string{} + if !plan.ThroughputCapacity.Equal(state.ThroughputCapacity) { + updateMask = append(updateMask, "throughputCapacity") + } + + // updateMask is a URL parameter but not present in the schema, so ReplaceVars + // won't set it + var err error + url, err = transport_tpg.AddQueryParams(url, map[string]string{"updateMask": strings.Join(updateMask, ",")}) + if err != nil { + resp.Diagnostics.AddError("Error when sending HTTP request: ", err.Error()) + return + } + + res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ + Config: d.providerConfig, + Method: "PATCH", + Project: billingProject.ValueString(), + RawURL: url, + UserAgent: userAgent, + Body: obj, + Headers: headers, + }, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + tflog.Trace(ctx, "update fwprovider google_pubsub_lite resource") + + // Put data in model + plan.Id = types.StringValue(fmt.Sprintf("projects/%s/locations/%s/instances/%s", plan.Project.ValueString(), plan.Region.ValueString(), plan.Name.ValueString())) + plan.ThroughputCapacity = types.Int64Value(res["throughputCapacity"].(int64)) + + // Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...) +} +func (d *GooglePubsubLiteReservationFWResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + var data GooglePubsubLiteReservationModel + var metaData *fwmodels.ProviderMetaModel + + // Read Provider meta into the meta model + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + + // Read Terraform configuration data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) 
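+    // The prior state provides the project, region, and name that identify the reservation to delete.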
+ if resp.Diagnostics.HasError() { + return + } + // Use provider_meta to set User-Agent + userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, d.providerConfig.UserAgent) + + obj := make(map[string]interface{}) + + data.Project = fwresource.GetProjectFramework(data.Project, types.StringValue(d.providerConfig.Project), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + data.Region = fwresource.GetRegionFramework(data.Region, types.StringValue(d.providerConfig.Region), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + billingProject := data.Project + + var schemaDefaultVals fwtransport.DefaultVars + schemaDefaultVals.Project = data.Project + schemaDefaultVals.Region = data.Region + + url := fwtransport.ReplaceVars(ctx, req, &resp.Diagnostics, schemaDefaultVals, d.providerConfig, "{{PubSubLiteBasePath}}projects/{{project}}/locations/{{region}}/instances/{{name}}") + + if resp.Diagnostics.HasError() { + return + } + tflog.Trace(ctx, fmt.Sprintf("[DEBUG] Deleting Reservation: %#v", obj)) + + headers := make(http.Header) + res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ + Config: d.providerConfig, + Method: "DELETE", + Project: billingProject.ValueString(), + RawURL: url, + UserAgent: userAgent, + Body: obj, + Headers: headers, + }, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + tflog.Trace(ctx, fmt.Sprintf("[DEBUG] Deleted Reservation: %#v", res)) +} diff --git a/mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation_test.go b/mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation_test.go new file mode 100644 index 000000000000..e4507dfaec41 --- /dev/null +++ b/mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation_test.go @@ -0,0 +1,56 @@ +package pubsublite_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccResourceFWPubsubLiteReservation_basic(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccResourceFWPubsubLiteReservation_basic(context), + }, + { + Config: testAccResourceFWPubsubLiteReservation_upgrade(context), + }, + }, + }) +} + +func testAccResourceFWPubsubLiteReservation_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_fwprovider_pubsub_lite_reservation" "basic" { + name = "tf-test-example-reservation%{random_suffix}" + region = "us-central1" + project = data.google_project.project.number + throughput_capacity = 2 +} + +data "google_project" "project" { +} +`, context) +} + +func testAccResourceFWPubsubLiteReservation_upgrade(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_fwprovider_pubsub_lite_reservation" "basic" { + name = "tf-test-example-reservation%{random_suffix}" + region = "us-central1" + project = data.google_project.project.number + throughput_capacity = 3 +} + +data "google_project" "project" { +} +`, context) +} diff --git a/mmv1/third_party/terraform/services/sql/fw_resource_sql_user.go b/mmv1/third_party/terraform/services/sql/fw_resource_sql_user.go new file mode 100644 index 000000000000..5e536900be96 --- 
/dev/null +++ b/mmv1/third_party/terraform/services/sql/fw_resource_sql_user.go @@ -0,0 +1,507 @@ +package sql + +import ( + "context" + "fmt" + "strings" + "time" + + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/tfsdk" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" + + "github.com/hashicorp/terraform-plugin-framework-timeouts/resource/timeouts" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" + "github.com/hashicorp/terraform-provider-google/google/fwmodels" + "github.com/hashicorp/terraform-provider-google/google/fwresource" + "github.com/hashicorp/terraform-provider-google/google/fwtransport" + "github.com/hashicorp/terraform-provider-google/google/transport" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + sqladmin "google.golang.org/api/sqladmin/v1beta4" +) + +var ( + _ resource.Resource = &SQLUserFWResource{} + _ resource.ResourceWithConfigure = &SQLUserFWResource{} +) + +func NewSQLUserFWResource() resource.Resource { + return &SQLUserFWResource{} +} + +type SQLUserFWResource struct { + client *sqladmin.Service + providerConfig *transport_tpg.Config +} + +type SQLUserModel struct { + Id types.String `tfsdk:"id"` + Project types.String `tfsdk:"project"` + Name types.String `tfsdk:"name"` + Host types.String `tfsdk:"host"` + Instance types.String `tfsdk:"instance"` + Password types.String `tfsdk:"password"` + // PasswordWO types.String `tfsdk:"password_wo"` + // PasswordWOVersion types.String `tfsdk:"password_wo_version"` + Type types.String `tfsdk:"type"` + // SqlServerUserDetails types.List `tfsdk:"sql_server_user_details"` + // PasswordPolicy types.List `tfsdk:"password_policy"` + // DeletionPolicy types.String `tfsdk:"deletion_policy"` + Timeouts timeouts.Value `tfsdk:"timeouts"` +} + +// Metadata returns the resource type name. +func (d *SQLUserFWResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_fw_sql_user" +} + +func (r *SQLUserFWResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { + // Prevent panic if the provider has not been configured. + if req.ProviderData == nil { + return + } + + p, ok := req.ProviderData.(*transport_tpg.Config) + if !ok { + resp.Diagnostics.AddError( + "Unexpected Resource Configure Type", + fmt.Sprintf("Expected *transport_tpg.Config, got: %T. 
Please report this issue to the provider developers.", req.ProviderData), + ) + return + } + + r.client = p.NewSqlAdminClient(p.UserAgent) + if resp.Diagnostics.HasError() { + return + } + r.providerConfig = p +} + +func (d *SQLUserFWResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = schema.Schema{ + MarkdownDescription: "A resource to represent a SQL User object.", + + Attributes: map[string]schema.Attribute{ + "project": schema.StringAttribute{ + Optional: true, + Computed: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + stringplanmodifier.UseStateForUnknown(), + }, + }, + "host": schema.StringAttribute{ + Optional: true, + Computed: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + stringplanmodifier.UseStateForUnknown(), + }, + }, + "instance": schema.StringAttribute{ + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, + "name": schema.StringAttribute{ + Description: `The name of the user. Changing this forces a new resource to be created.`, + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + SQLUserNameIAMPlanModifier(), + }, + }, + "password": schema.StringAttribute{ + Optional: true, + Sensitive: true, + }, + "type": schema.StringAttribute{ + Optional: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + // TODO DiffSuppressFunc: tpgresource.EmptyOrDefaultStringSuppress("BUILT_IN"), + }, + }, + // This is included for backwards compatibility with the original, SDK-implemented resource. + "id": schema.StringAttribute{ + Description: "Project identifier", + MarkdownDescription: "Project identifier", + Computed: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.UseStateForUnknown(), + }, + }, + }, + Blocks: map[string]schema.Block{ + "timeouts": timeouts.Block(ctx, timeouts.Opts{ + Create: true, + }), + }, + } +} + +func (r *SQLUserFWResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + var data SQLUserModel + var metaData *fwmodels.ProviderMetaModel + + // Read Provider meta into the meta model + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + + // Read Terraform plan data into the model + resp.Diagnostics.Append(req.Plan.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + project := fwresource.GetProjectFramework(data.Project, types.StringValue(r.providerConfig.Project), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + nameData, diags := data.Name.ToStringValue(ctx) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + instanceData, diags := data.Instance.ToStringValue(ctx) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + hostData, diags := data.Host.ToStringValue(ctx) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + typeData, diags := data.Type.ToStringValue(ctx) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + passwordData, diags := data.Password.ToStringValue(ctx) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + createTimeout, diags := data.Timeouts.Create(ctx, 20*time.Minute) + resp.Diagnostics.Append(diags...) 
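+    // A 20 minute default applies when no create timeout is configured in the timeouts block.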
+ if resp.Diagnostics.HasError() { + return + } + + user := &sqladmin.User{ + Name: nameData.ValueString(), + Instance: instanceData.ValueString(), + Password: passwordData.ValueString(), + Host: hostData.ValueString(), + Type: typeData.ValueString(), + } + + transport_tpg.MutexStore.Lock(instanceMutexKey(project.ValueString(), instanceData.ValueString())) + defer transport_tpg.MutexStore.Unlock(instanceMutexKey(project.ValueString(), instanceData.ValueString())) + + r.client.UserAgent = fwtransport.GenerateFrameworkUserAgentString(metaData, r.client.UserAgent) + + // TODO host check logic + + var op *sqladmin.Operation + var err error + insertFunc := func() error { + op, err = r.client.Users.Insert(project.ValueString(), instanceData.ValueString(), + user).Do() + return err + } + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: insertFunc, + Timeout: createTimeout, + }) + + if err != nil { + resp.Diagnostics.AddError(fmt.Sprintf("Error, failed to insert "+ + "user %s into instance %s", nameData.ValueString(), instanceData.ValueString()), err.Error()) + return + } + + err = SqlAdminOperationWaitTime(r.providerConfig, op, project.ValueString(), "Insert User", r.client.UserAgent, createTimeout) + + if err != nil { + resp.Diagnostics.AddError(fmt.Sprintf("Error, failure waiting to insert "+ + "user %s into instance %s", nameData.ValueString(), instanceData.ValueString()), err.Error()) + return + } + + tflog.Trace(ctx, "created sql user resource") + + // This will include a double-slash (//) for postgres instances, + // for which user.Host is an empty string. That's okay. + data.Id = types.StringValue(fmt.Sprintf("%s/%s/%s", user.Name, user.Host, user.Instance)) + data.Project = project + + // read back sql user + r.SQLUserRefresh(ctx, &data, &resp.State, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + // Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) +} + +func (r *SQLUserFWResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + var data SQLUserModel + var metaData *fwmodels.ProviderMetaModel + + // Read Provider meta into the meta model + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + + // Read Terraform configuration data into the model + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + // Use provider_meta to set User-Agent + r.client.UserAgent = fwtransport.GenerateFrameworkUserAgentString(metaData, r.client.UserAgent) + + tflog.Trace(ctx, "read sql user resource") + + // read back sql user + r.SQLUserRefresh(ctx, &data, &resp.State, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + // Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) +} + +func (r *SQLUserFWResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + var old, new SQLUserModel + var metaData *fwmodels.ProviderMetaModel + + resp.Diagnostics.Append(req.State.Get(ctx, &old)...) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(req.Plan.Get(ctx, &new)...) 
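+    // `old` carries prior state and `new` carries planned values; only a password change triggers an API call below.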
+    if resp.Diagnostics.HasError() {
+        return
+    }
+
+    // Read Provider meta into the meta model (matches the other CRUD methods and keeps metaData populated below)
+    resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...)
+    if resp.Diagnostics.HasError() {
+        return
+    }
+
+    // Use provider_meta to set User-Agent
+    r.client.UserAgent = fwtransport.GenerateFrameworkUserAgentString(metaData, r.client.UserAgent)
+
+    if !old.Password.Equal(new.Password) {
+        project := new.Project.ValueString()
+        instance := new.Instance.ValueString()
+        name := new.Name.ValueString()
+        host := new.Host.ValueString()
+        password := new.Password.ValueString()
+
+        updateTimeout, diags := new.Timeouts.Update(ctx, 20*time.Minute)
+        resp.Diagnostics.Append(diags...)
+        if resp.Diagnostics.HasError() {
+            return
+        }
+
+        user := &sqladmin.User{
+            Name:     name,
+            Instance: instance,
+            Password: password,
+        }
+        transport_tpg.MutexStore.Lock(instanceMutexKey(project, instance))
+        defer transport_tpg.MutexStore.Unlock(instanceMutexKey(project, instance))
+        var op *sqladmin.Operation
+        var err error
+        updateFunc := func() error {
+            op, err = r.client.Users.Update(project, instance, user).Host(host).Name(name).Do()
+            return err
+        }
+        err = transport_tpg.Retry(transport_tpg.RetryOptions{
+            RetryFunc: updateFunc,
+            Timeout:   updateTimeout,
+        })
+
+        if err != nil {
+            resp.Diagnostics.AddError(fmt.Sprintf("failed to update "+
+                "user %s in instance %s", name, instance), err.Error())
+            return
+        }
+
+        err = SqlAdminOperationWaitTime(r.providerConfig, op, project, "Update User", r.client.UserAgent, updateTimeout)
+
+        if err != nil {
+            resp.Diagnostics.AddError(fmt.Sprintf("failure waiting for update of "+
+                "user %s in instance %s", name, instance), err.Error())
+            return
+        }
+
+        // read back sql user
+        r.SQLUserRefresh(ctx, &new, &resp.State, &resp.Diagnostics)
+        if resp.Diagnostics.HasError() {
+            return
+        }
+    }
+
+    // Save updated data into Terraform state
+    resp.Diagnostics.Append(resp.State.Set(ctx, &new)...)
+}
+
+func (r *SQLUserFWResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
+    var data SQLUserModel
+
+    // Read Terraform prior state data into the model
+    resp.Diagnostics.Append(req.State.Get(ctx, &data)...)
+    if resp.Diagnostics.HasError() {
+        return
+    }
+
+    project := data.Project.ValueString()
+    instance := data.Instance.ValueString()
+    name := data.Name.ValueString()
+    host := data.Host.ValueString()
+
+    deleteTimeout, diags := data.Timeouts.Delete(ctx, 20*time.Minute)
+    resp.Diagnostics.Append(diags...)
+    if resp.Diagnostics.HasError() {
+        return
+    }
+
+    transport_tpg.MutexStore.Lock(instanceMutexKey(project, instance))
+    defer transport_tpg.MutexStore.Unlock(instanceMutexKey(project, instance))
+    var op *sqladmin.Operation
+    var err error
+    deleteFunc := func() error {
+        op, err = r.client.Users.Delete(project, instance).Host(host).Name(name).Do()
+        return err
+    }
+    err = transport_tpg.Retry(transport_tpg.RetryOptions{
+        RetryFunc: deleteFunc,
+        Timeout:   deleteTimeout,
+    })
+
+    if err != nil {
+        resp.Diagnostics.AddError(fmt.Sprintf("failed to delete "+
+            "user %s in instance %s", name, instance), err.Error())
+        return
+    }
+
+    err = SqlAdminOperationWaitTime(r.providerConfig, op, project, "Delete User", r.client.UserAgent, deleteTimeout)
+
+    if err != nil {
+        resp.Diagnostics.AddError(fmt.Sprintf("Error, failure waiting to delete "+
+            "user %s", name), err.Error())
+        return
+    }
+}
+
+func (r *SQLUserFWResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) {
+    idParts := strings.Split(req.ID, "/")
+
+    // TODO recreate all import cases
+    if len(idParts) != 4 || idParts[0] == "" || idParts[1] == "" {
+        resp.Diagnostics.AddError(
+            "Unexpected Import Identifier",
+            fmt.Sprintf("Expected import identifier with format: project/instance/host/name. Got: %q", req.ID),
+        )
+        return
+    }
+
+    resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("project"), idParts[0])...)
+    resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("instance"), idParts[1])...)
+    resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("host"), idParts[2])...)
+    resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root("name"), idParts[3])...)
+}
+
+func (r *SQLUserFWResource) SQLUserRefresh(ctx context.Context, data *SQLUserModel, state *tfsdk.State, diag *diag.Diagnostics) {
+    userReadResp, err := r.client.Users.Get(data.Project.ValueString(), data.Instance.ValueString(), data.Name.ValueString()).Host(data.Host.ValueString()).Do()
+    if err != nil {
+        // Treat HTTP 404 Not Found status as a signal to recreate resource
+        // and return early
+        if transport.IsGoogleApiErrorWithCode(err, 404) {
+            tflog.Trace(ctx, "sql user resource not found, removing from state")
+            state.RemoveResource(ctx)
+            return
+        }
+        diag.AddError(fmt.Sprintf("Error, failure waiting to read "+
+            "user %s", data.Name.ValueString()), err.Error())
+        return
+    }
+
+    // Match the {name}/{host}/{instance} ID format set in Create.
+    id := fmt.Sprintf("%s/%s/%s", userReadResp.Name, userReadResp.Host, userReadResp.Instance)
+    data.Id = types.StringValue(id)
+    data.Project = types.StringValue(userReadResp.Project)
+    data.Instance = types.StringValue(userReadResp.Instance)
+    if userReadResp.Host != "" {
+        data.Host = types.StringValue(userReadResp.Host)
+    }
+    if userReadResp.Type != "" {
+        data.Type = types.StringValue(userReadResp.Type)
+    }
+}
+
+// Plan Modifiers
+func SQLUserNameIAMPlanModifier() planmodifier.String {
+    return &sqlUserNameIAMPlanModifier{}
+}
+
+type sqlUserNameIAMPlanModifier struct {
+}
+
+func (d *sqlUserNameIAMPlanModifier) Description(ctx context.Context) string {
+    return "Suppresses name diffs for IAM user types."
+} +func (d *sqlUserNameIAMPlanModifier) MarkdownDescription(ctx context.Context) string { + return d.Description(ctx) +} + +// Plan modifier to emulate the SDK diffSuppressIamUserName +func (d *sqlUserNameIAMPlanModifier) PlanModifyString(ctx context.Context, req planmodifier.StringRequest, resp *planmodifier.StringResponse) { + // Retrieve relevant fields + var oldName types.String + diags := req.State.GetAttribute(ctx, path.Root("name"), &oldName) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + var newName types.String + diags = req.Plan.GetAttribute(ctx, path.Root("name"), &newName) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + var userType types.String + diags = req.Plan.GetAttribute(ctx, path.Root("type"), &userType) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + // Old diff suppress logic + strippedNewName := strings.Split(newName.ValueString(), "@")[0] + + if oldName.ValueString() == strippedNewName && strings.Contains(userType.ValueString(), "IAM") { + // Suppress the diff by setting the planned value to the old value + resp.PlanValue = oldName + } +} diff --git a/mmv1/third_party/terraform/services/sql/fw_resource_sql_user_test.go b/mmv1/third_party/terraform/services/sql/fw_resource_sql_user_test.go new file mode 100644 index 000000000000..80e78c4a0316 --- /dev/null +++ b/mmv1/third_party/terraform/services/sql/fw_resource_sql_user_test.go @@ -0,0 +1,90 @@ +package sql_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccSqlUserFW_mysql(t *testing.T) { + // Multiple fine-grained resources + acctest.SkipIfVcr(t) + t.Parallel() + + instance := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccSqlUserDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testGoogleSqlUserFW_mysql(instance, "password"), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleSqlUserExists(t, "google_fw_sql_user.user1"), + testAccCheckGoogleSqlUserExists(t, "google_fw_sql_user.user2"), + ), + }, + { + // Update password + Config: testGoogleSqlUserFW_mysql(instance, "new_password"), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleSqlUserExists(t, "google_fw_sql_user.user1"), + testAccCheckGoogleSqlUserExists(t, "google_fw_sql_user.user2"), + testAccCheckGoogleSqlUserExists(t, "google_fw_sql_user.user3"), + ), + }, + { + ResourceName: "google_fw_sql_user.user2", + ImportStateId: fmt.Sprintf("%s/%s/gmail.com/admin", envvar.GetTestProjectFromEnv(), instance), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"password"}, + }, + { + ResourceName: "google_fw_sql_user.user3", + ImportStateId: fmt.Sprintf("%s/%s/10.0.0.0/24/admin", envvar.GetTestProjectFromEnv(), instance), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"password"}, + }, + }, + }) +} + +func testGoogleSqlUserFW_mysql(instance, password string) string { + return fmt.Sprintf(` +resource "google_sql_database_instance" "instance" { + name = "%s" + region = "us-central1" + database_version = "MYSQL_5_7" + deletion_protection = false + settings { + tier = 
"db-f1-micro" + } +} + +resource "google_fw_sql_user" "user1" { + name = "admin" + instance = google_sql_database_instance.instance.name + host = "google.com" + password = "%s" +} + +resource "google_fw_sql_user" "user2" { + name = "admin" + instance = google_sql_database_instance.instance.name + host = "gmail.com" + password = "hunter2" +} + +resource "google_fw_sql_user" "user3" { + name = "admin" + instance = google_sql_database_instance.instance.name + host = "10.0.0.0/24" + password = "hunter3" +} +`, instance, password) +} diff --git a/mmv1/third_party/terraform/services/storage/fw_resource_storage_notification.go b/mmv1/third_party/terraform/services/storage/fw_resource_storage_notification.go new file mode 100644 index 000000000000..e8f5fe15be0f --- /dev/null +++ b/mmv1/third_party/terraform/services/storage/fw_resource_storage_notification.go @@ -0,0 +1,325 @@ +package storage + +import ( + "context" + "fmt" + "strings" + + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/mapplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/setplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" + "google.golang.org/api/googleapi" + "google.golang.org/api/storage/v1" + + "github.com/hashicorp/terraform-provider-google/google/fwmodels" + "github.com/hashicorp/terraform-provider-google/google/fwresource" + "github.com/hashicorp/terraform-provider-google/google/fwtransport" + "github.com/hashicorp/terraform-provider-google/google/fwvalidators" + "github.com/hashicorp/terraform-provider-google/google/services/pubsub" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +var ( + _ resource.Resource = &storageNotificationResource{} + _ resource.ResourceWithConfigure = &storageNotificationResource{} + _ resource.ResourceWithImportState = &storageNotificationResource{} + _ resource.ResourceWithUpgradeState = &storageNotificationResource{} +) + +func NewStorageNotificationResource() resource.Resource { + return &storageNotificationResource{} +} + +type storageNotificationResource struct { + config *transport_tpg.Config +} + +type storageNotificationModel struct { + Bucket types.String `tfsdk:"bucket"` + PayloadFormat types.String `tfsdk:"payload_format"` + Topic types.String `tfsdk:"topic"` + CustomAttributes types.Map `tfsdk:"custom_attributes"` + EventTypes types.Set `tfsdk:"event_types"` + ObjectNamePrefix types.String `tfsdk:"object_name_prefix"` + NotificationID types.String `tfsdk:"notification_id"` + SelfLink types.String `tfsdk:"self_link"` + Id types.String `tfsdk:"id"` +} + +func (r *storageNotificationResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_storage_notification" +} + +func (r *storageNotificationResource) Configure(_ context.Context, req resource.ConfigureRequest, resp 
*resource.ConfigureResponse) { + if req.ProviderData == nil { + return + } + + config, ok := req.ProviderData.(*transport_tpg.Config) + if !ok { + resp.Diagnostics.AddError( + "Unexpected Resource Configure Type", + fmt.Sprintf("Expected *transport_tpg.Config, got: %T. Please report this issue to the provider developers.", req.ProviderData), + ) + return + } + r.config = config +} + +func (r *storageNotificationResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = schema.Schema{ + Description: "Creates a new notification configuration on a specified bucket, establishing a flow of event notifications from GCS to a Cloud Pub/Sub topic.", + Version: 1, + Attributes: map[string]schema.Attribute{ + "bucket": schema.StringAttribute{ + Required: true, + Description: "The name of the bucket.", + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, + "payload_format": schema.StringAttribute{ + Required: true, + Description: `The desired content of the Payload. One of "JSON_API_V1" or "NONE".`, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + Validators: []validator.String{ + stringvalidator.OneOf("JSON_API_V1", "NONE"), + }, + }, + "topic": schema.StringAttribute{ + Required: true, + Description: "The Cloud Pub/Sub topic to which this subscription publishes.", + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + Validators: []validator.String{ + fwvalidators.NewTopicPrefixValidator(), + }, + }, + "custom_attributes": schema.MapAttribute{ + ElementType: types.StringType, + Optional: true, + Description: "A set of key/value attribute pairs to attach to each Cloud Pub/Sub message published for this notification subscription.", + PlanModifiers: []planmodifier.Map{ + mapplanmodifier.RequiresReplace(), + }, + }, + "event_types": schema.SetAttribute{ + ElementType: types.StringType, + Optional: true, + Description: `List of event type filters for this notification config. If not specified, Cloud Storage will send notifications for all event types. The valid types are: "OBJECT_FINALIZE", "OBJECT_METADATA_UPDATE", "OBJECT_DELETE", "OBJECT_ARCHIVE"`, + PlanModifiers: []planmodifier.Set{ + setplanmodifier.RequiresReplace(), + }, + Validators: []validator.Set{ + fwvalidators.StringValuesInSet( + "OBJECT_FINALIZE", + "OBJECT_METADATA_UPDATE", + "OBJECT_DELETE", + "OBJECT_ARCHIVE", + ), + }, + }, + "object_name_prefix": schema.StringAttribute{ + Optional: true, + Description: "Specifies a prefix path filter for this notification config. Cloud Storage will only send notifications for objects in this bucket whose names begin with the specified prefix.", + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, + "notification_id": schema.StringAttribute{ + Computed: true, + Description: "The ID of the created notification.", + }, + "self_link": schema.StringAttribute{ + Computed: true, + Description: "The URI of the created resource.", + }, + "id": schema.StringAttribute{ + Computed: true, + }, + }, + } +} + +func (r *storageNotificationResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + var plan storageNotificationModel + var metaData *fwmodels.ProviderMetaModel + resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) 
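+	// Bail out early if the planned values or provider meta could not be decoded.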
+ if resp.Diagnostics.HasError() { + return + } + + computedTopicName := pubsub.GetComputedTopicName("", plan.Topic.ValueString()) + + var customAttrs map[string]string + if !plan.CustomAttributes.IsNull() && !plan.CustomAttributes.IsUnknown() { + resp.Diagnostics.Append(plan.CustomAttributes.ElementsAs(ctx, &customAttrs, false)...) + if resp.Diagnostics.HasError() { + return + } + } + + var eventTypes []string + if !plan.EventTypes.IsNull() && !plan.EventTypes.IsUnknown() { + resp.Diagnostics.Append(plan.EventTypes.ElementsAs(ctx, &eventTypes, false)...) + if resp.Diagnostics.HasError() { + return + } + } + + storageNotification := &storage.Notification{ + CustomAttributes: customAttrs, + EventTypes: eventTypes, + ObjectNamePrefix: plan.ObjectNamePrefix.ValueString(), + PayloadFormat: plan.PayloadFormat.ValueString(), + Topic: computedTopicName, + } + + userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.config.UserAgent) + bucket := plan.Bucket.ValueString() + + res, err := r.config.NewStorageClient(userAgent).Notifications.Insert(bucket, storageNotification).Do() + if err != nil { + resp.Diagnostics.AddError(fmt.Sprintf("Error creating notification config for bucket %s", bucket), err.Error()) + return + } + + plan.Id = types.StringValue(fmt.Sprintf("%s/notificationConfigs/%s", bucket, res.Id)) + tflog.Info(ctx, "Created Storage Notification", map[string]interface{}{"id": plan.Id.ValueString()}) + + found := r.refresh(ctx, &plan, metaData, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + if !found { + resp.Diagnostics.AddError("Newly created resource not found", "The Storage Notification was not found immediately after creation.") + return + } + + resp.Diagnostics.Append(resp.State.Set(ctx, &plan)...) +} + +func (r *storageNotificationResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + var state storageNotificationModel + var metaData *fwmodels.ProviderMetaModel + resp.Diagnostics.Append(req.State.Get(ctx, &state)...) + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + + found := r.refresh(ctx, &state, metaData, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + if !found { + tflog.Warn(ctx, "Storage Notification not found, removing from state.", map[string]interface{}{"id": state.Id.ValueString()}) + resp.State.RemoveResource(ctx) + return + } + + resp.Diagnostics.Append(resp.State.Set(ctx, &state)...) +} + +// Update is not supported for this resource. +func (r *storageNotificationResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + // This resource is immutable and all configurable attributes are marked with `RequiresReplace`. + // This function should not get called. +} + +func (r *storageNotificationResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + var state storageNotificationModel + var metaData *fwmodels.ProviderMetaModel + resp.Diagnostics.Append(req.State.Get(ctx, &state)...) + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) 
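+	// Bail out early if the prior state or provider meta could not be read.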
+ if resp.Diagnostics.HasError() { + return + } + + bucket, notificationID, err := ParseStorageNotificationID(state.Id.ValueString()) + if err != nil { + resp.Diagnostics.AddError("Invalid resource ID", err.Error()) + return + } + + userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.config.UserAgent) + + err = r.config.NewStorageClient(userAgent).Notifications.Delete(bucket, notificationID).Do() + if err != nil { + if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == 404 { + // Resource is gone. This is a successful deletion. + return + } + resp.Diagnostics.AddError(fmt.Sprintf("Error deleting notification configuration %s for bucket %s", notificationID, bucket), err.Error()) + return + } +} + +func (r *storageNotificationResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { + resource.ImportStatePassthroughID(ctx, path.Root("id"), req, resp) +} + +func (r *storageNotificationResource) refresh(ctx context.Context, model *storageNotificationModel, metaData *fwmodels.ProviderMetaModel, diags *diag.Diagnostics) bool { + bucket, notificationID, err := ParseStorageNotificationID(model.Id.ValueString()) + if err != nil { + diags.AddError("Invalid resource ID", err.Error()) + return false + } + + userAgent := fwtransport.GenerateFrameworkUserAgentString(metaData, r.config.UserAgent) + + res, err := r.config.NewStorageClient(userAgent).Notifications.Get(bucket, notificationID).Do() + if err != nil { + if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == 404 { + return false + } + diags.AddError("Error reading Storage Notification", err.Error()) + return false + } + + model.Bucket = types.StringValue(bucket) + model.NotificationID = types.StringValue(notificationID) + model.SelfLink = types.StringValue(res.SelfLink) + model.PayloadFormat = types.StringValue(res.PayloadFormat) + + configuredObjectNamePrefix := model.ObjectNamePrefix + apiObjectNamePrefix := res.ObjectNamePrefix + model.ObjectNamePrefix = fwresource.FlattenStringEmptyToNull(configuredObjectNamePrefix, apiObjectNamePrefix) + + // trim the fully qualified prefix + apiValue := res.Topic + model.Topic = types.StringValue(strings.TrimPrefix(apiValue, "//pubsub.googleapis.com/")) + + var eventTypesDiags diag.Diagnostics + model.EventTypes, eventTypesDiags = types.SetValueFrom(ctx, types.StringType, res.EventTypes) + diags.Append(eventTypesDiags...) + + var customAttrsDiags diag.Diagnostics + model.CustomAttributes, customAttrsDiags = types.MapValueFrom(ctx, types.StringType, res.CustomAttributes) + diags.Append(customAttrsDiags...) + + return !diags.HasError() +} + +// ParseStorageNotificationID replicates the logic from the SDKv2 helper. 
+func ParseStorageNotificationID(id string) (bucket string, notificationID string, err error) { + parts := strings.Split(id, "/") + if len(parts) != 3 || parts[1] != "notificationConfigs" { + return "", "", fmt.Errorf("invalid storage notification ID format, expected '{bucket}/notificationConfigs/{notification_id}', got '%s'", id) + } + return parts[0], parts[2], nil +} diff --git a/mmv1/third_party/terraform/services/storage/fw_storage_notification_state_upgraders.go b/mmv1/third_party/terraform/services/storage/fw_storage_notification_state_upgraders.go new file mode 100644 index 000000000000..d492237f7116 --- /dev/null +++ b/mmv1/third_party/terraform/services/storage/fw_storage_notification_state_upgraders.go @@ -0,0 +1,100 @@ +package storage + +import ( + "context" + "strings" + + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" +) + +// Represents the schema of the SDKv2 state +type storageNotificationModelV0 struct { + Bucket types.String `tfsdk:"bucket"` + PayloadFormat types.String `tfsdk:"payload_format"` + Topic types.String `tfsdk:"topic"` + CustomAttributes types.Map `tfsdk:"custom_attributes"` + EventTypes types.Set `tfsdk:"event_types"` + ObjectNamePrefix types.String `tfsdk:"object_name_prefix"` + NotificationID types.String `tfsdk:"notification_id"` + SelfLink types.String `tfsdk:"self_link"` + Id types.String `tfsdk:"id"` +} + +func (r *storageNotificationResource) UpgradeState(ctx context.Context) map[int64]resource.StateUpgrader { + return map[int64]resource.StateUpgrader{ + 0: { + PriorSchema: &schema.Schema{ + Attributes: map[string]schema.Attribute{ + "bucket": schema.StringAttribute{ + Required: true, + }, + "payload_format": schema.StringAttribute{ + Required: true, + }, + "topic": schema.StringAttribute{ + Required: true, + }, + "custom_attributes": schema.MapAttribute{ + ElementType: types.StringType, + Optional: true, + }, + "event_types": schema.SetAttribute{ + ElementType: types.StringType, + Optional: true, + }, + "object_name_prefix": schema.StringAttribute{ + Optional: true, + }, + "notification_id": schema.StringAttribute{ + Computed: true, + }, + "self_link": schema.StringAttribute{ + Computed: true, + }, + "id": schema.StringAttribute{ + Computed: true, + }, + }, + }, + StateUpgrader: func(ctx context.Context, req resource.UpgradeStateRequest, resp *resource.UpgradeStateResponse) { + var priorStateData storageNotificationModelV0 + + resp.Diagnostics.Append(req.State.Get(ctx, &priorStateData)...) 
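+				// Stop the upgrade early if the prior (SDKv2-shaped) state could not be decoded.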
+ if resp.Diagnostics.HasError() { + return + } + + upgradedStateData := storageNotificationModel{ + Bucket: priorStateData.Bucket, + PayloadFormat: priorStateData.PayloadFormat, + CustomAttributes: priorStateData.CustomAttributes, + EventTypes: priorStateData.EventTypes, + ObjectNamePrefix: priorStateData.ObjectNamePrefix, + NotificationID: priorStateData.NotificationID, + SelfLink: priorStateData.SelfLink, + Id: priorStateData.Id, + } + + // topic - trim the fully qualified prefix + if !priorStateData.Topic.IsNull() && !priorStateData.Topic.IsUnknown() { + apiTopic := priorStateData.Topic.ValueString() + transformedTopic := strings.TrimPrefix(apiTopic, "//pubsub.googleapis.com/") + upgradedStateData.Topic = types.StringValue(transformedTopic) + } else { + upgradedStateData.Topic = priorStateData.Topic + } + + // ObjectNamePrefix - normalize "" to Null + if !priorStateData.ObjectNamePrefix.IsNull() && !priorStateData.ObjectNamePrefix.IsUnknown() && priorStateData.ObjectNamePrefix.ValueString() == "" { + upgradedStateData.ObjectNamePrefix = types.StringNull() + } else { + upgradedStateData.ObjectNamePrefix = priorStateData.ObjectNamePrefix + } + + resp.Diagnostics.Append(resp.State.Set(ctx, upgradedStateData)...) + }, + }, + } +} diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_notification.go b/mmv1/third_party/terraform/services/storage/resource_storage_notification.go deleted file mode 100644 index 1bd4d46c84e5..000000000000 --- a/mmv1/third_party/terraform/services/storage/resource_storage_notification.go +++ /dev/null @@ -1,196 +0,0 @@ -package storage - -import ( - "fmt" - "strings" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" - - "github.com/hashicorp/terraform-provider-google/google/services/pubsub" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" - "google.golang.org/api/storage/v1" -) - -func ResourceStorageNotification() *schema.Resource { - return &schema.Resource{ - Create: resourceStorageNotificationCreate, - Read: resourceStorageNotificationRead, - Delete: resourceStorageNotificationDelete, - Importer: &schema.ResourceImporter{ - State: schema.ImportStatePassthrough, - }, - - Schema: map[string]*schema.Schema{ - "bucket": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - Description: `The name of the bucket.`, - }, - - "payload_format": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringInSlice([]string{"JSON_API_V1", "NONE"}, false), - Description: `The desired content of the Payload. One of "JSON_API_V1" or "NONE".`, - }, - - "topic": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, - Description: `The Cloud Pub/Sub topic to which this subscription publishes. Expects either the topic name, assumed to belong to the default GCP provider project, or the project-level name, i.e. projects/my-gcp-project/topics/my-topic or my-topic. 
If the project is not set in the provider, you will need to use the project-level name.`, - }, - - "custom_attributes": { - Type: schema.TypeMap, - Optional: true, - ForceNew: true, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - Description: ` A set of key/value attribute pairs to attach to each Cloud Pub/Sub message published for this notification subscription`, - }, - - "event_types": { - Type: schema.TypeSet, - Optional: true, - ForceNew: true, - Elem: &schema.Schema{ - Type: schema.TypeString, - ValidateFunc: validation.StringInSlice([]string{ - "OBJECT_FINALIZE", "OBJECT_METADATA_UPDATE", "OBJECT_DELETE", "OBJECT_ARCHIVE"}, - false), - }, - Description: `List of event type filters for this notification config. If not specified, Cloud Storage will send notifications for all event types. The valid types are: "OBJECT_FINALIZE", "OBJECT_METADATA_UPDATE", "OBJECT_DELETE", "OBJECT_ARCHIVE"`, - }, - - "object_name_prefix": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - Description: `Specifies a prefix path filter for this notification config. Cloud Storage will only send notifications for objects in this bucket whose names begin with the specified prefix.`, - }, - - "notification_id": { - Type: schema.TypeString, - Computed: true, - Description: `The ID of the created notification.`, - }, - - "self_link": { - Type: schema.TypeString, - Computed: true, - Description: `The URI of the created resource.`, - }, - }, - UseJSONNumber: true, - } -} - -func resourceStorageNotificationCreate(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return err - } - - bucket := d.Get("bucket").(string) - - topicName := d.Get("topic").(string) - computedTopicName := pubsub.GetComputedTopicName("", topicName) - if computedTopicName != topicName { - project, err := tpgresource.GetProject(d, config) - if err != nil { - return err - } - computedTopicName = pubsub.GetComputedTopicName(project, topicName) - } - - storageNotification := &storage.Notification{ - CustomAttributes: tpgresource.ExpandStringMap(d, "custom_attributes"), - EventTypes: tpgresource.ConvertStringSet(d.Get("event_types").(*schema.Set)), - ObjectNamePrefix: d.Get("object_name_prefix").(string), - PayloadFormat: d.Get("payload_format").(string), - Topic: computedTopicName, - } - - res, err := config.NewStorageClient(userAgent).Notifications.Insert(bucket, storageNotification).Do() - if err != nil { - return fmt.Errorf("Error creating notification config for bucket %s: %v", bucket, err) - } - - d.SetId(fmt.Sprintf("%s/notificationConfigs/%s", bucket, res.Id)) - - return resourceStorageNotificationRead(d, meta) -} - -func resourceStorageNotificationRead(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return err - } - - bucket, notificationID := ResourceStorageNotificationParseID(d.Id()) - - res, err := config.NewStorageClient(userAgent).Notifications.Get(bucket, notificationID).Do() - if err != nil { - return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("Notification configuration %s for bucket %s", notificationID, bucket)) - } - - if err := d.Set("bucket", bucket); err != nil { - return fmt.Errorf("Error setting bucket: %s", err) - } - if err := d.Set("payload_format", res.PayloadFormat); err != nil { - return fmt.Errorf("Error setting 
payload_format: %s", err) - } - if err := d.Set("topic", res.Topic); err != nil { - return fmt.Errorf("Error setting topic: %s", err) - } - if err := d.Set("object_name_prefix", res.ObjectNamePrefix); err != nil { - return fmt.Errorf("Error setting object_name_prefix: %s", err) - } - if err := d.Set("event_types", res.EventTypes); err != nil { - return fmt.Errorf("Error setting event_types: %s", err) - } - if err := d.Set("notification_id", notificationID); err != nil { - return fmt.Errorf("Error setting notification_id: %s", err) - } - if err := d.Set("self_link", res.SelfLink); err != nil { - return fmt.Errorf("Error setting self_link: %s", err) - } - if err := d.Set("custom_attributes", res.CustomAttributes); err != nil { - return fmt.Errorf("Error setting custom_attributes: %s", err) - } - - return nil -} - -func resourceStorageNotificationDelete(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) - if err != nil { - return err - } - - bucket, notificationID := ResourceStorageNotificationParseID(d.Id()) - - err = config.NewStorageClient(userAgent).Notifications.Delete(bucket, notificationID).Do() - if err != nil { - return fmt.Errorf("Error deleting notification configuration %s for bucket %s: %v", notificationID, bucket, err) - } - - return nil -} - -func ResourceStorageNotificationParseID(id string) (string, string) { - //bucket, NotificationID - parts := strings.Split(id, "/") - - return parts[0], parts[2] -} diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_notification_test.go b/mmv1/third_party/terraform/services/storage/resource_storage_notification_test.go index a3650a829875..1e8ffccc5b90 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_notification_test.go +++ b/mmv1/third_party/terraform/services/storage/resource_storage_notification_test.go @@ -27,7 +27,7 @@ func TestAccStorageNotification_basic(t *testing.T) { var notification storage.Notification bucketName := acctest.TestBucketName(t) topicName := fmt.Sprintf("tf-pstopic-test-%d", acctest.RandInt(t)) - topic := fmt.Sprintf("//pubsub.googleapis.com/projects/%s/topics/%s", os.Getenv("GOOGLE_PROJECT"), topicName) + topic := fmt.Sprintf("projects/%s/topics/%s", os.Getenv("GOOGLE_PROJECT"), topicName) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -71,7 +71,7 @@ func TestAccStorageNotification_withEventsAndAttributes(t *testing.T) { var notification storage.Notification bucketName := acctest.TestBucketName(t) topicName := fmt.Sprintf("tf-pstopic-test-%d", acctest.RandInt(t)) - topic := fmt.Sprintf("//pubsub.googleapis.com/projects/%s/topics/%s", os.Getenv("GOOGLE_PROJECT"), topicName) + topic := fmt.Sprintf("projects/%s/topics/%s", os.Getenv("GOOGLE_PROJECT"), topicName) eventType1 := "OBJECT_FINALIZE" eventType2 := "OBJECT_ARCHIVE" @@ -115,9 +115,12 @@ func testAccStorageNotificationDestroyProducer(t *testing.T) func(s *terraform.S continue } - bucket, notificationID := tpgstorage.ResourceStorageNotificationParseID(rs.Primary.ID) + bucket, notificationID, err := tpgstorage.ParseStorageNotificationID(rs.Primary.ID) + if err != nil { + return err + } - _, err := config.NewStorageClient(config.UserAgent).Notifications.Get(bucket, notificationID).Do() + _, err = config.NewStorageClient(config.UserAgent).Notifications.Get(bucket, notificationID).Do() if err == nil { return fmt.Errorf("Notification configuration still exists") 
} @@ -140,7 +143,10 @@ func testAccCheckStorageNotificationExists(t *testing.T, resource string, notifi config := acctest.GoogleProviderConfig(t) - bucket, notificationID := tpgstorage.ResourceStorageNotificationParseID(rs.Primary.ID) + bucket, notificationID, err := tpgstorage.ParseStorageNotificationID(rs.Primary.ID) + if err != nil { + return err + } found, err := config.NewStorageClient(config.UserAgent).Notifications.Get(bucket, notificationID).Do() if err != nil { diff --git a/mmv1/third_party/terraform/website/docs/r/apigee_keystores_aliases_key_cert_file.html.markdown b/mmv1/third_party/terraform/website/docs/r/apigee_keystores_aliases_key_cert_file.html.markdown index 2660aaf170d6..1424e2a1b351 100644 --- a/mmv1/third_party/terraform/website/docs/r/apigee_keystores_aliases_key_cert_file.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/apigee_keystores_aliases_key_cert_file.html.markdown @@ -66,7 +66,7 @@ In addition to the arguments listed above, the following computed attributes are Optional.Type of Alias -The `certs_info` block contains: +The `certs_info` list contains: * `cert_info` - (Output) From 983fceee42bd88a21bb3fc9604431b622f25d789 Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Mon, 25 Aug 2025 14:29:03 -0700 Subject: [PATCH 062/201] Fix 7.0.0 upgrade guide (#14959) --- .../guides/version_7_upgrade.html.markdown | 180 +++++++++++------- 1 file changed, 109 insertions(+), 71 deletions(-) diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index fdd64c56c4ca..16c85e5b33da 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -88,22 +88,12 @@ terraform { ## Provider -### Provider-level change example header - -Description of the change and how users should adjust their configuration (if needed). - ### Resource import formats have improved validation Throughout the provider there were many resources which erroneously gave false positives to poorly formatted import input if a subset of the provided input was valid to their configured import formats. All GCP resource IDs supplied to "terraform import" must match the documentation specified import formats exactly. ## Datasources -## Datasource: `google_product_datasource` - -### Datasource-level change example header - -Description of the change and how users should adjust their configuration (if needed). - ## Datasource: `google_service_account_key` ### `project` is now removed @@ -120,10 +110,15 @@ The field `deletion_protection` has been added with a default value of `true`. T Terraform from destroying or recreating the cluster during `terraform apply`. In 7.0.0, existing clusters will have `deletion_protection` set to `true` during the next refresh unless otherwise set in configuration. -## Resource: `google_beyondcorp_application` is now removed +## Resource: `google_apigee_keystores_aliases_key_cert_file` -`google_beyondcorp_application`, the associated IAM resources `google_beyondcorp_application_iam_binding`, `google_beyondcorp_application_iam_member`, and `google_beyondcorp_application_iam_policy`, and the `google_beyondcorp_application_iam_policy` datasource have been removed. -Use `google_beyondcorp_security_gateway_application` instead. 
+
+### `google_apigee_keystores_aliases_key_cert_file` Migrated to the Plugin Framework
+
+This resource has been migrated from SDKv2 to the more modern [plugin framework resource implementation](https://developer.hashicorp.com/terraform/plugin/framework). One potential breaking change is expected with this migration; please review the details below.
+
+### `certs_info` is now output-only
+
+Previously the `certs_info` field was set as an optional value, but the configured value was never used by the API. It is now correctly marked as output-only. If set in your configuration, simply remove it and the API value will continue to be used.
 
 ## Resource: `google_artifact_registry_repository`
 
@@ -131,11 +126,16 @@ Use `google_beyondcorp_security_gateway_application` instead.
 `public_repository` fields have had their default values removed. If your state has been reliant on them, they will need to be manually included into your configuration now.
 
+## Resource: `google_beyondcorp_application` is now removed
+
+`google_beyondcorp_application`, the associated IAM resources `google_beyondcorp_application_iam_binding`, `google_beyondcorp_application_iam_member`, and `google_beyondcorp_application_iam_policy`, and the `google_beyondcorp_application_iam_policy` datasource have been removed.
+Use `google_beyondcorp_security_gateway_application` instead.
+
 ## Resource: `google_bigquery_table`
 
 ### `view.use_legacy_sql` no longer has a default value of `True`
 
-The `view.use_legacy_sql` field no longer has a default value. Configurations that relied on the old default will show no diff in the plan, and there will be no change to existing views. For a new view, leaving this field unspecified in the configuration will result in the view being created with no `use_legacy_sql` value, which the API interprets as a `true` and assumes the legacy SQL dialect for its query. See the [API documentation](https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#ViewDefinition) for more details.
+The `view.use_legacy_sql` field no longer has a default value. Configurations that relied on the old default will show no diff in the plan, and there will be no change to existing views. For newly created views, leaving this field unspecified in the configuration will result in the view being created with no `use_legacy_sql` value, which the API interprets as a `true` and assumes the legacy SQL dialect for its query. See the [API documentation](https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#ViewDefinition) for more details.
 
 ## Resource: `google_bigtable_table_iam_binding`
 
@@ -161,43 +161,57 @@ The `view.use_legacy_sql` field no longer has a default value. Configurations th
 
 `budget_filter.credit types` and `budget_filter.subaccounts` are no longer O+C. These fields already did not export any API-default values, so no change to your configuration should be necessary.
 
-## Resource: `google_compute_packet_mirroring`
+## Resource: `google_cloudfunctions2_function`
 
-### `subnetworks` and `instances` fields have been converted to sets
+### `event_trigger.event_type` is now required
 
-`subnetworks` and `instances` fields have been converted to sets. If you need to access values in their nested objects, it will need to be accessed via `for_each` or locally converting the field to a list/array in your configuration.
+The `event_type` field is now required when `event_trigger` is configured. 
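+
+As an illustration only (a minimal sketch — the function, topic, and region names are placeholders, and the required `build_config`/`service_config` blocks are omitted for brevity), an `event_trigger` block that satisfies the new requirement could look like this:
+
+```hcl
+resource "google_cloudfunctions2_function" "example" {
+  name     = "example-function"
+  location = "us-central1"
+
+  # build_config and service_config omitted for brevity.
+
+  event_trigger {
+    trigger_region = "us-central1"
+    # event_type must now always be set when an event_trigger block is present.
+    event_type   = "google.cloud.pubsub.topic.v1.messagePublished"
+    pubsub_topic = google_pubsub_topic.example.id
+    retry_policy = "RETRY_POLICY_RETRY"
+  }
+}
+```
+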
-## Resource: `google_compute_subnetwork` +### `service_config.service` is now an output only field -### `enable_flow_logs`is now removed +Remove `service_config.service` from your configuration after upgrade. -`enable_flow_logs` has been removed in favor of `log_config`. +## Resource: `google_cloud_run_v2_worker_pool` + +### `template.containers.depends_on` is now removed + +Remove `template.containers.depends_on` from your configuration after upgrade. + +## Resource: `google_colab_runtime_template` + +### `post_startup_script_config` is now removed + +Remove `post_startup_script_config` from your configuration after upgrade. ## Resource: `google_compute_instance_template` -### The resource will no longer use hardcoded values +### `disk.type`, `disk.mode` and `disk.interface` will no longer use provider configured default values -`disk.type`, `disk.mode` and `disk.interface` will no longer use provider configured default values and instead will be set by the API. This shouldn't have any effect on the functionality of the resource. +`disk.type`, `disk.mode` and `disk.interface` will no longer use provider configured default values and instead will be set by the API. See the [API documentation](https://cloud.google.com/compute/docs/reference/rest/v1/instanceTemplates) for more details. -## Resource: `google_compute_region_instance_template` +## Resource: `google_compute_packet_mirroring` -### The resource will no longer use hardcoded values +### `subnetworks` and `instances` fields have been converted to sets -`disk.type`, `disk.mode` and `disk.interface` will no longer use provider configured default values and instead will be set by the API. This shouldn't have any effect on the functionality of the resource. +`subnetworks` and `instances` fields have been converted to sets. If you need to access values in their nested objects, it will need to be accessed via `for_each` or locally converting the field to a list/array in your configuration. -## Resource: `google_notebooks_location` is now removed +## Resource: `google_compute_region_instance_template` -This resource is not functional. +### `disk.type`, `disk.mode` and `disk.interface` will no longer use provider configured default values -## Resource: `google_storage_bucket` +`disk.type`, `disk.mode` and `disk.interface` will no longer use provider configured default values and instead will be set by the API. See the [API documentation](https://cloud.google.com/compute/docs/reference/rest/v1/regionInstanceTemplates) for more details. -### `retention_period` changed to `string` data type +## Resource: `google_compute_router` -`retention_period` was changed to the [`string` data type](https://developer.hashicorp.com/terraform/language/expressions/types#string) to handle higher values for the bucket's retention period. +### `advertised_ip_ranges` fields have been converted to sets -Terraform [Type Conversion](https://developer.hashicorp.com/terraform/language/expressions/types#type-conversion) will handle the change automatically for most configurations, and they will not need to be modified. +`advertised_ip_ranges` fields have been converted to sets. If you need to access values `advertised_ip_ranges`'s' nested object, it will need to be accessed via `for_each` or locally converting the field to a list/array in your configuration. -To reflect the new type explicitly, surround the current integer value in quotes, i.e. `retention_period = 10` -> `retention_period = "10"`. 
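+
+As a sketch (assuming a router configuration with a single `bgp` block; the resource and output names are illustrative), the nested values can be read either by converting the set to a list or by iterating over it:
+
+```hcl
+# Convert the set to a list before indexing into it.
+output "first_advertised_range" {
+  value = tolist(google_compute_router.example.bgp[0].advertised_ip_ranges)[0].range
+}
+
+# Or collect the nested values with a for expression.
+output "advertised_ranges" {
+  value = [for r in google_compute_router.example.bgp[0].advertised_ip_ranges : r.range]
+}
+```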
+## Resource: `google_compute_subnetwork` + +### `enable_flow_logs`is now removed + +`enable_flow_logs` has been removed in favor of `log_config`. ## Resource: `google_gke_hub_feature_membership` @@ -211,49 +225,37 @@ Remove `configmanagement.binauthz` from your configuration after upgrade. Remove `description` from your configuration after upgrade. -## Resource: `google_colab_runtime_template` - -### `post_startup_script_config` is now removed. +## Resource: `google_memorystore_instance` -Remove `post_startup_script_config` from your configuration after upgrade. + `allow_fewer_zones_deployment` has been removed because it isn't user-configurable. ## Resource: `google_monitoring_uptime_check_config` ### Exactly one of `http_check.auth_info.password` and `http_check.auth_info.password_wo` must be set -At least one must be set, and setting both would make it unclear which was being used. +Setting exactly one of `http_check.auth_info.password` and `http_check.auth_info.password_wo` is now enforced in order to avoid situations where it is unclear which was being used. ## Resource: `google_network_services_lb_traffic_extension` ### `load_balancing_scheme` is now required -`load_balancing_scheme` is now a required field. +`load_balancing_scheme` is now a required field. This field was already required for resource functionality so no change to your configuration should be necessary. -## Resource: `google_storage_transfer_job` - -### `transfer_spec.gcs_data_sink.path` Implemented validation to prevent strings from starting with a '/' character, while still permitting empty strings." - -### `transfer_spec.gcs_data_source.path` Implemented validation to prevent strings from starting with a '/' character, while still permitting empty strings." - -### `replication_spec.gcs_data_source.path` Implemented validation to prevent strings from starting with a '/' character, while still permitting empty strings." - -### `replication_spec.gcs_data_sink.path` Implemented validation to prevent strings from starting with a '/' character, while still permitting empty strings." - -## Resource: `google_cloudfunctions2_function` +## Resource: `google_notebooks_location` is now removed -### `event_trigger.event_type` is now required +This resource is not functional and can safely be removed from your configuration. -The `event_type` field is now required when `event_trigger` is configured. +## Resource: `google_project_service` -### `service_config.service` is changed from `Argument` to `Attribute` +### `disable_on_destroy` now defaults to `false` -Remove `service_config.service` from your configuration after upgrade. +The default value for `disable_on_destroy` has been changed to `false`. The previous default (`true`) created a risk of unintended service disruptions, as destroying a single `google_project_service` resource would disable the API for the entire project. -## Resource: `google_cloud_run_v2_worker_pool` +Now, destroying the resource will only remove it from Terraform's state and leave the service enabled. To disable a service when the resource is destroyed, you must now make an explicit decision by setting `disable_on_destroy = true`. -### `template.containers.depends_on` is removed as it is not supported. +## Resource: `google_redis_cluster` -Remove `template.containers.depends_on` from your configuration after upgrade. + `allow_fewer_zones_deployment` has been removed because it isn't user-configurable. 
## Resource: `google_secret_manager_secret_version` @@ -263,34 +265,70 @@ This standardizes the behavior of write-only fields across the provider and make ## Resource: `google_sql_user` -### `password_wo_version` is now required when `password_wo` is set +### `password_wo` and `password_wo_version` must be set together This standardizes the behavior of write-only fields across the provider and makes it easier to remember to update the fields together. -## Resource: `google_vertex_ai_endpoint` +## Resource: `google_secure_source_manager_instance` -### `enable_secure_private_service_connect` is removed as it is not available in the GA version of the API, only in the beta version. +### `deletion_policy` has had its default value changed to `PREVENT` -## Resource: `google_vertex_ai_index` +`deletion_policy` has had its default value changed to `PREVENT`. This field prevents +Terraform from destroying or recreating the cluster during `terraform apply`. In 7.0.0, existing resources will have +`deletion_policy` set to `true` during the next refresh unless otherwise set in configuration. -### `metadata`, and `metadata.config` are now required. Resource creation would fail without these attributes already, so no change is necessary to existing configurations. +## Resource: `google_secure_source_manager_repository` -## Resource: `google_tpu_node` is now removed +### `deletion_policy` has had its default value changed to `PREVENT` -`google_tpu_node` is removed in favor of `google_tpu_v2_vm`. For moving from TPU Node to TPU VM architecture, see https://cloud.google.com/tpu/docs/system-architecture-tpu-vm#from-tpu-node-to-tpu-vm. +`deletion_policy` has had its default value changed to `PREVENT`. This field prevents +Terraform from destroying or recreating the cluster during `terraform apply`. In 7.0.0, existing resources will have +`deletion_policy` set to `true` during the next refresh unless otherwise set in configuration. -## Resource: `google_project_service` +## Resource: `google_storage_transfer_job` -### `disable_on_destroy` now defaults to `false` +### Several `path` fields have improved validation -The default value for `disable_on_destroy` has been changed to `false`. The previous default (`true`) created a risk of unintended service disruptions, as destroying a single `google_project_service` resource would disable the API for the entire project. +`transfer_spec.gcs_data_sink.path`, `transfer_spec.gcs_data_source.path`, `replication_spec.gcs_data_source.path`, and `replication_spec.gcs_data_sink.path` are now required to not start with a '/' character. -Now, destroying the resource will only remove it from Terraform's state and leave the service enabled. To disable a service when the resource is destroyed, you must now make an explicit decision by setting `disable_on_destroy = true`. +## Resource: `google_storage_bucket` -## Resource: `google_memorystore_instance` +### `retention_period` changed to `string` data type - `allow_fewer_zones_deployment` has been removed because it isn't user-configurable. +`retention_period` was changed to the [`string` data type](https://developer.hashicorp.com/terraform/language/expressions/types#string) to handle higher values for the bucket's retention period. -## Resource: `google_redis_cluster` +Terraform [Type Conversion](https://developer.hashicorp.com/terraform/language/expressions/types#type-conversion) will handle the change automatically for most configurations, and they will not need to be modified. 
+ +To reflect the new type explicitly, surround the current integer value in quotes, i.e. `retention_period = 10` -> `retention_period = "10"`. + +## Resource: `google_storage_notification` + +### `google_storage_notification` Migrated to the Plugin Framework + +This resource has been migrated from SDKv2 to the more modern [plugin framework resource implementation](https://developer.hashicorp.com/terraform/plugin/framework). One associated breaking change is expected with this migration; please review the details below. + +### `topic` Field Format Change + +The `topic` field for `google_storage_notification` must now be provided in the format `projects/{{project}}/topics/{{topic}}`. + +The previous SDKv2 implementation accepted both `projects/{{project}}/topics/{{topic}}` and the fully qualified Google API format `//pubsub.googleapis.com/projects/{{project}}/topics/{{topic}}` in configuration. However, it consistently stored the latter (fully qualified) format in the Terraform state. + +With this migration, only the `projects/{{project}}/topics/{{topic}}` format is allowed in configuration, aligning with the `id` format of the `google_pubsub_topic` resource. + +A state upgrader will automatically migrate the `topic` field's format in your Terraform state when you upgrade to this provider version. However, you **must ensure your Terraform configuration files are updated** to use the `projects/{{project}}/topics/{{topic}}` format to avoid validation errors. + +## Resource: `google_tpu_node` is now removed + +`google_tpu_node` is removed in favor of `google_tpu_v2_vm`. For moving from TPU Node to TPU VM architecture, see https://cloud.google.com/tpu/docs/system-architecture-tpu-vm#from-tpu-node-to-tpu-vm. + +## Resource: `google_vertex_ai_endpoint` + +### `enable_secure_private_service_connect` is now removed from the GA provider + +`enable_secure_private_service_connect` has been removed from the GA provider it is not available in the GA version of the API. The field is still available when using the beta provider. + +## Resource: `google_vertex_ai_index` + +### `metadata`, and `metadata.config` are now required. - `allow_fewer_zones_deployment` has been removed because it isn't user-configurable. \ No newline at end of file +`metadata`, and `metadata.config` are now required. These fields were already required for resource functionality, so no change is necessary to existing configurations. From cc93af2d367b850f177f8842b00747ba8941a9f8 Mon Sep 17 00:00:00 2001 From: gurusai-voleti Date: Tue, 26 Aug 2025 23:53:25 +0530 Subject: [PATCH 063/201] note: removed deprecated status for field detect_md5hash (#14641) --- .../services/storage/resource_storage_bucket_object.go | 3 +-- .../website/docs/r/storage_bucket_object.html.markdown | 2 ++ 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object.go b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object.go index e65c8fc9b2bd..015fba07671a 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object.go +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_object.go @@ -151,8 +151,7 @@ func ResourceStorageBucketObject() *schema.Resource { // Detect changes to local file or changes made outside of Terraform to the file stored on the server. "detect_md5hash": { - Type: schema.TypeString, - Deprecated: "`detect_md5hash` is deprecated and will be removed in future release. 
Start using `source_md5hash` instead", + Type: schema.TypeString, // This field is not Computed because it needs to trigger a diff. Optional: true, // Makes the diff message nicer: diff --git a/mmv1/third_party/terraform/website/docs/r/storage_bucket_object.html.markdown b/mmv1/third_party/terraform/website/docs/r/storage_bucket_object.html.markdown index 86e7b39b008f..961983611298 100644 --- a/mmv1/third_party/terraform/website/docs/r/storage_bucket_object.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/storage_bucket_object.html.markdown @@ -81,6 +81,8 @@ One of the following is required: * `detect_md5hash` - (Optional) Detect changes to local file or changes made outside of Terraform to the file stored on the server. MD5 hash of the data, encoded using [base64](https://datatracker.ietf.org/doc/html/rfc4648#section-4). This field is not present for [composite objects](https://cloud.google.com/storage/docs/composite-objects). For more information about using the MD5 hash, see [Hashes and ETags: Best Practices](https://cloud.google.com/storage/docs/hashes-etags#json-api). + ~> **Warning:** For dynamically populated files or objects, `detect_md5hash` cannot track or detect changes and will not trigger updates to the objects in the bucket. Please use `source_md5hash` instead. + * `storage_class` - (Optional) The [StorageClass](https://cloud.google.com/storage/docs/storage-classes) of the new bucket object. Supported values include: `MULTI_REGIONAL`, `REGIONAL`, `NEARLINE`, `COLDLINE`, `ARCHIVE`. If not provided, this defaults to the bucket's default storage class or to a [standard](https://cloud.google.com/storage/docs/storage-classes#standard) class. From d102272bd219d5235c5b48526ecd0e3484dd4eb6 Mon Sep 17 00:00:00 2001 From: chenir0219 Date: Tue, 26 Aug 2025 18:30:28 +0000 Subject: [PATCH 064/201] Add provisioned_throughput to the public documentation for instance template (#14981) --- .../website/docs/r/compute_instance_template.html.markdown | 7 +++---- .../docs/r/compute_region_instance_template.html.markdown | 7 +++---- 2 files changed, 6 insertions(+), 8 deletions(-) diff --git a/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown index f8862f1c3379..f72b252840f8 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown @@ -444,10 +444,9 @@ The following arguments are supported: * `disk_name` - (Optional) Name of the disk. When not provided, this defaults to the name of the instance. -* `provisioned_iops` - (Optional) Indicates how many IOPS to provision for the disk. This - sets the number of I/O operations per second that the disk can handle. - Values must be between 10,000 and 120,000. For more details, see the - [Extreme persistent disk documentation](https://cloud.google.com/compute/docs/disks/extreme-persistent-disk). +* `provisioned_iops` - (Optional) Indicates how many IOPS to provision for the disk. This sets the number of I/O operations per second that the disk can handle. For more details, see the [Extreme persistent disk documentation](https://cloud.google.com/compute/docs/disks/extreme-persistent-disk) or the [Hyperdisk documentation](https://cloud.google.com/compute/docs/disks/hyperdisks) depending on the selected disk_type. 
+ +* `provisioned_throughput` - (Optional) Indicates how much throughput to provision for the disk, in MB/s. This sets the amount of data that can be read or written from the disk per second. Values must greater than or equal to 1. For more details, see the [Hyperdisk documentation](https://cloud.google.com/compute/docs/disks/hyperdisks). * `resource_manager_tags` - (Optional) A set of key/value resource manager tag pairs to bind to this disk. Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/456. diff --git a/mmv1/third_party/terraform/website/docs/r/compute_region_instance_template.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_region_instance_template.html.markdown index 1e94cd62e829..afd836162e1a 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_region_instance_template.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_region_instance_template.html.markdown @@ -409,10 +409,9 @@ The following arguments are supported: * `disk_name` - (Optional) Name of the disk. When not provided, this defaults to the name of the instance. -* `provisioned_iops` - (Optional) Indicates how many IOPS to provision for the disk. This - sets the number of I/O operations per second that the disk can handle. - Values must be between 10,000 and 120,000. For more details, see the - [Extreme persistent disk documentation](https://cloud.google.com/compute/docs/disks/extreme-persistent-disk). +* `provisioned_iops` - (Optional) Indicates how many IOPS to provision for the disk. This sets the number of I/O operations per second that the disk can handle. For more details, see the [Extreme persistent disk documentation](https://cloud.google.com/compute/docs/disks/extreme-persistent-disk) or the [Hyperdisk documentation](https://cloud.google.com/compute/docs/disks/hyperdisks) depending on the selected disk_type. + +* `provisioned_throughput` - (Optional) Indicates how much throughput to provision for the disk, in MB/s. This sets the amount of data that can be read or written from the disk per second. Values must greater than or equal to 1. For more details, see the [Hyperdisk documentation](https://cloud.google.com/compute/docs/disks/hyperdisks). * `resource_manager_tags` - (Optional) A set of key/value resource manager tag pairs to bind to this disk. Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/456. From 409394acf7d79588043d38a7096e4918cd979778 Mon Sep 17 00:00:00 2001 From: Rafael Tello-Cabrales <4848374+rafaeltello@users.noreply.github.com> Date: Tue, 26 Aug 2025 11:41:16 -0700 Subject: [PATCH 065/201] FEAT: Add support for Multi-Region Services in cloudrunv2 (#14592) --- mmv1/products/cloudrunv2/Service.yaml | 16 +++ ...resource_cloud_run_v2_service_test.go.tmpl | 97 +++++++++++++++++++ 2 files changed, 113 insertions(+) diff --git a/mmv1/products/cloudrunv2/Service.yaml b/mmv1/products/cloudrunv2/Service.yaml index 1806b23b986b..9482b22054af 100644 --- a/mmv1/products/cloudrunv2/Service.yaml +++ b/mmv1/products/cloudrunv2/Service.yaml @@ -1221,6 +1221,22 @@ properties: type: String description: |- Service account to be used for building the container. The format of this field is `projects/{projectId}/serviceAccounts/{serviceAccountEmail}`. + - name: 'multiRegionSettings' + type: NestedObject + description: |- + Settings for creating a Multi-Region Service. Make sure to use region = 'global' when using them. 
For more information, visit https://cloud.google.com/run/docs/multiple-regions#deploy + properties: + - name: 'regions' + type: Array + item_type: + type: String + description: |- + The list of regions to deploy the multi-region Service. + - name: 'multiRegionId' + type: String + description: |- + System-generated unique id for the multi-region Service. + output: true - name: 'reconciling' type: Boolean description: | diff --git a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl index 1336a242d070..fd20578a8d20 100644 --- a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl @@ -1345,6 +1345,103 @@ resource "google_cloud_run_v2_service" "default" { `, context) } +func TestAccCloudRunV2Service_cloudrunv2MultiRegionService(t *testing.T) { + t.Parallel() + context := map[string]interface{} { + "random_suffix" : acctest.RandString(t, 10), + } + acctest.VcrTest(t, resource.TestCase { + PreCheck: func() { acctest.AccTestPreCheck(t)}, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckCloudRunV2ServiceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccCloudRunV2Service_cloudrunv2ServiceWithMultiRegion(context), + }, + { + ResourceName: "google_cloud_run_v2_service.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", "annotations", "labels", "terraform_labels", "launch_stage", "deletion_protection"}, + }, + { + Config: testAccCloudRunV2Service_cloudrunv2ServiceWithMultiRegionUpdate(context), + }, + { + ResourceName: "google_cloud_run_v2_service.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", "annotations", "labels", "terraform_labels", "launch_stage", "deletion_protection"}, + }, + }, + }) +} + +func testAccCloudRunV2Service_cloudrunv2ServiceWithMultiRegion(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_cloud_run_v2_service" "default" { + name = "tf-test-cloudrun-service%{random_suffix}" + description = "Multi-Region Service" + location = "global" + deletion_protection = false + launch_stage = "GA" + annotations = { + generated-by = "magic-modules" + } + multi_region_settings { + regions = [ + "us-central1", + "us-east1", + "us-west1", + ] + } + ingress = "INGRESS_TRAFFIC_ALL" + labels = { + label-1 = "value-1" + } + client = "client-1" + client_version = "client-version-1" + template { + containers { + image = "us-docker.pkg.dev/cloudrun/container/hello" + } + } +} +`, context) +} + +func testAccCloudRunV2Service_cloudrunv2ServiceWithMultiRegionUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_cloud_run_v2_service" "default" { + name = "tf-test-cloudrun-service%{random_suffix}" + description = "Multi-Region Service" + location = "global" + deletion_protection = false + launch_stage = "GA" + annotations = { + generated-by = "magic-modules" + } + multi_region_settings { + regions = [ + "us-central1", + "us-east1", + ] + } + ingress = "INGRESS_TRAFFIC_ALL" + labels = { + label-1 = "value-1" + } + client = "client-1" + client_version = "client-version-1" + template { + containers { + image = "us-docker.pkg.dev/cloudrun/container/hello" + } + } +} +`, context) +} + func 
testAccCloudRunV2Service_cloudrunv2ServiceWithGpu(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_cloud_run_v2_service" "default" { From b63fca51feb0d1108a234a7cafce46753f622a5b Mon Sep 17 00:00:00 2001 From: Cameron Thornton Date: Tue, 26 Aug 2025 13:45:41 -0500 Subject: [PATCH 066/201] Remove 7.0.0 TC testing (#14991) --- .../FEATURE-BRANCH-major-release-7.0.0.kt | 105 ------------------ .../projects/google_beta_subproject.kt | 2 +- .../projects/google_ga_subproject.kt | 2 +- .../components/projects/root_project.kt | 4 - 4 files changed, 2 insertions(+), 111 deletions(-) delete mode 100644 mmv1/third_party/terraform/.teamcity/components/projects/feature_branches/FEATURE-BRANCH-major-release-7.0.0.kt diff --git a/mmv1/third_party/terraform/.teamcity/components/projects/feature_branches/FEATURE-BRANCH-major-release-7.0.0.kt b/mmv1/third_party/terraform/.teamcity/components/projects/feature_branches/FEATURE-BRANCH-major-release-7.0.0.kt deleted file mode 100644 index 5c463e28febc..000000000000 --- a/mmv1/third_party/terraform/.teamcity/components/projects/feature_branches/FEATURE-BRANCH-major-release-7.0.0.kt +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright (c) HashiCorp, Inc. - * SPDX-License-Identifier: MPL-2.0 - */ - -// This file is controlled by MMv1, any changes made here will be overwritten - -package projects.feature_branches - -import ProviderNameBeta -import ProviderNameGa -import builds.* -import jetbrains.buildServer.configs.kotlin.Project -import jetbrains.buildServer.configs.kotlin.vcs.GitVcsRoot -import projects.reused.nightlyTests -import replaceCharsId - -const val branchName = "FEATURE-BRANCH-major-release-7.0.0" - -// VCS Roots specifically for pulling code from the feature branches in the downstream repos - -object HashicorpVCSRootGa_featureBranchMajorRelease700: GitVcsRoot({ - name = "VCS root for the hashicorp/terraform-provider-${ProviderNameGa} repo @ refs/heads/${branchName}" - url = "https://github.com/hashicorp/terraform-provider-${ProviderNameGa}" - branch = "refs/heads/${branchName}" - branchSpec = """ - +:(refs/heads/*) - -:refs/pulls/* - """.trimIndent() -}) - -object HashicorpVCSRootBeta_featureBranchMajorRelease700: GitVcsRoot({ - name = "VCS root for the hashicorp/terraform-provider-${ProviderNameBeta} repo @ refs/heads/${branchName}" - url = "https://github.com/hashicorp/terraform-provider-${ProviderNameBeta}" - branch = "refs/heads/${branchName}" - branchSpec = """ - +:(refs/heads/*) - -:refs/pulls/* - """.trimIndent() -}) - -fun featureBranchMajorRelease700_Project(allConfig: AllContextParameters): Project { - - val projectId = replaceCharsId(branchName) - val gaProjectId = replaceCharsId(projectId + "_GA") - val betaProjectId= replaceCharsId(projectId + "_BETA") - - // Get config for using the GA and Beta identities - val gaConfig = getGaAcceptanceTestConfig(allConfig) - val betaConfig = getBetaAcceptanceTestConfig(allConfig) - - return Project{ - id(projectId) - name = "7.0.0 Major Release Testing" - description = "Subproject for testing feature branch $branchName" - - // Register feature branch-specific VCS roots in the project - vcsRoot(HashicorpVCSRootGa_featureBranchMajorRelease700) - vcsRoot(HashicorpVCSRootBeta_featureBranchMajorRelease700) - - // Nested Nightly Test project that uses hashicorp/terraform-provider-google - subProject( - Project{ - id(gaProjectId) - name = "Google" - subProject( - nightlyTests( - gaProjectId, - ProviderNameGa, - HashicorpVCSRootGa_featureBranchMajorRelease700, - gaConfig, - 
NightlyTriggerConfiguration( - branch = "refs/heads/${branchName}", // Make triggered builds use the feature branch - daysOfWeek = "4" // Wednesday for GA, TeamCity numbers days Sun=1...Sat=7 - ), - ) - ) - } - ) - - // Nested Nightly Test project that uses hashicorp/terraform-provider-google-beta - subProject( - Project { - id(betaProjectId) - name = "Google Beta" - subProject( - nightlyTests( - betaProjectId, - ProviderNameBeta, - HashicorpVCSRootBeta_featureBranchMajorRelease700, - betaConfig, - NightlyTriggerConfiguration( - branch = "refs/heads/${branchName}", // Make triggered builds use the feature branch - daysOfWeek="4" // Wednesday for Beta, TeamCity numbers days Sun=1...Sat=7 - ), - ) - ) - } - ) - - params { - readOnlySettings() - } - } -} \ No newline at end of file diff --git a/mmv1/third_party/terraform/.teamcity/components/projects/google_beta_subproject.kt b/mmv1/third_party/terraform/.teamcity/components/projects/google_beta_subproject.kt index f3a04d9df13f..75eabec8a70c 100644 --- a/mmv1/third_party/terraform/.teamcity/components/projects/google_beta_subproject.kt +++ b/mmv1/third_party/terraform/.teamcity/components/projects/google_beta_subproject.kt @@ -33,7 +33,7 @@ fun googleSubProjectBeta(allConfig: AllContextParameters): Project { description = "Subproject containing builds for testing the Beta version of the Google provider" // Nightly Test project that uses hashicorp/terraform-provider-google-beta - subProject(nightlyTests(betaId, ProviderNameBeta, HashiCorpVCSRootBeta, betaConfig, NightlyTriggerConfiguration(daysOfWeek="1-3,5-7"))) // All nights except Wednesday (4) for Beta; feature branch testing happens on Wednesdays and TeamCity numbers days Sun=1...Sat=7 + subProject(nightlyTests(betaId, ProviderNameBeta, HashiCorpVCSRootBeta, betaConfig, NightlyTriggerConfiguration())) // MM Upstream project that uses modular-magician/terraform-provider-google-beta subProject(mmUpstream(betaId, ProviderNameBeta, ModularMagicianVCSRootBeta, HashiCorpVCSRootBeta, vcrConfig, NightlyTriggerConfiguration())) diff --git a/mmv1/third_party/terraform/.teamcity/components/projects/google_ga_subproject.kt b/mmv1/third_party/terraform/.teamcity/components/projects/google_ga_subproject.kt index fbf3685fbd94..cd45d7d754c8 100644 --- a/mmv1/third_party/terraform/.teamcity/components/projects/google_ga_subproject.kt +++ b/mmv1/third_party/terraform/.teamcity/components/projects/google_ga_subproject.kt @@ -31,7 +31,7 @@ fun googleSubProjectGa(allConfig: AllContextParameters): Project { description = "Subproject containing builds for testing the GA version of the Google provider" // Nightly Test project that uses hashicorp/terraform-provider-google - subProject(nightlyTests(gaId, ProviderNameGa, HashiCorpVCSRootGa, gaConfig, NightlyTriggerConfiguration(daysOfWeek="1-3,5-7"))) // All nights except Wednesday (4) for GA; feature branch testing happens on Wednesday and TeamCity numbers days Sun=1...Sat=7 + subProject(nightlyTests(gaId, ProviderNameGa, HashiCorpVCSRootGa, gaConfig, NightlyTriggerConfiguration())) // MM Upstream project that uses modular-magician/terraform-provider-google subProject(mmUpstream(gaId, ProviderNameGa, ModularMagicianVCSRootGa, HashiCorpVCSRootGa, vcrConfig, NightlyTriggerConfiguration())) diff --git a/mmv1/third_party/terraform/.teamcity/components/projects/root_project.kt b/mmv1/third_party/terraform/.teamcity/components/projects/root_project.kt index c810b73b9605..3c96dea4f099 100644 --- a/mmv1/third_party/terraform/.teamcity/components/projects/root_project.kt 
+++ b/mmv1/third_party/terraform/.teamcity/components/projects/root_project.kt @@ -19,7 +19,6 @@ import generated.ServicesListGa import jetbrains.buildServer.configs.kotlin.Project import jetbrains.buildServer.configs.kotlin.sharedResource import projects.feature_branches.featureBranchResourceIdentitySubProject -import projects.feature_branches.featureBranchMajorRelease700_Project // googleCloudRootProject returns a root project that contains a subprojects for the GA and Beta version of the // Google provider. There are also resources to help manage the test projects used for acceptance tests. @@ -67,9 +66,6 @@ fun googleCloudRootProject(allConfig: AllContextParameters): Project { // Feature branch-testing projects - these will be added and removed as needed - // Feature branch testing - subProject(featureBranchMajorRelease700_Project(allConfig)) // FEATURE-BRANCH-major-release-7.0.0 - params { readOnlySettings() } From c6747a9949ebb8eadcd5837833230ffe7447fd62 Mon Sep 17 00:00:00 2001 From: victorsantos-cit Date: Tue, 26 Aug 2025 15:49:55 -0300 Subject: [PATCH 067/201] SWP: Add support for value "EXPLICIT_ROUTING_MODE" on field "routing_mode" on resource "google_network_services_gateway" (#14767) --- mmv1/products/networkservices/Gateway.yaml | 2 ++ .../resource_network_services_gateway_test.go | 1 + .../terraform/tpgresource/common_diff_suppress.go | 13 +++++++++++++ 3 files changed, 16 insertions(+) diff --git a/mmv1/products/networkservices/Gateway.yaml b/mmv1/products/networkservices/Gateway.yaml index 39d4048e3c73..6404cdf9f2b4 100644 --- a/mmv1/products/networkservices/Gateway.yaml +++ b/mmv1/products/networkservices/Gateway.yaml @@ -227,7 +227,9 @@ properties: - 'DEBUG_HEADERS' - name: 'routingMode' type: Enum + diff_suppress_func: 'tpgresource.SuppressRoutingModeDefault' description: | The routing mode of the Gateway. This field is configurable only for gateways of type SECURE_WEB_GATEWAY. This field is required for gateways of type SECURE_WEB_GATEWAY. enum_values: - 'NEXT_HOP_ROUTING_MODE' + - 'EXPLICIT_ROUTING_MODE' diff --git a/mmv1/third_party/terraform/services/networkservices/resource_network_services_gateway_test.go b/mmv1/third_party/terraform/services/networkservices/resource_network_services_gateway_test.go index c2e15bffad71..08275ca4125e 100644 --- a/mmv1/third_party/terraform/services/networkservices/resource_network_services_gateway_test.go +++ b/mmv1/third_party/terraform/services/networkservices/resource_network_services_gateway_test.go @@ -943,6 +943,7 @@ resource "google_network_services_gateway" "foobar" { location = "us-central1" addresses = ["10.128.0.99"] type = "SECURE_WEB_GATEWAY" + routing_mode = "EXPLICIT_ROUTING_MODE" ports = [443] description = "my description" gateway_security_policy = google_network_security_gateway_security_policy.default.id diff --git a/mmv1/third_party/terraform/tpgresource/common_diff_suppress.go b/mmv1/third_party/terraform/tpgresource/common_diff_suppress.go index cbc7e442a25e..a0ba73cd1a4e 100644 --- a/mmv1/third_party/terraform/tpgresource/common_diff_suppress.go +++ b/mmv1/third_party/terraform/tpgresource/common_diff_suppress.go @@ -101,6 +101,19 @@ func ProjectNumberDiffSuppress(_, old, new string, _ *schema.ResourceData) bool return a2 == b2 } +// Suppresses diffs where `routing_mode` is unset (empty string) vs. explicitly set +// to "EXPLICIT_ROUTING_MODE". Since null/empty is treated as the default +// EXPLICIT_ROUTING_MODE, both values collapse into the same state. 
This ensures +// Terraform does not show unnecessary differences unless the value is explicitly +// changed to "NEXT_HOP_ROUTING_MODE". +func SuppressRoutingModeDefault(_, old, new string, _ *schema.ResourceData) bool { + if old == new { + return true + } + return (old == "" && new == "EXPLICIT_ROUTING_MODE") || + (old == "EXPLICIT_ROUTING_MODE" && new == "") +} + // Suppress diffs when the value read from api // has the project ID instead of the project number func ProjectIDDiffSuppress(_, old, new string, _ *schema.ResourceData) bool { From 0011d3718a0cb857602b04279729c136e4b0e193 Mon Sep 17 00:00:00 2001 From: gurusai-voleti Date: Wed, 27 Aug 2025 00:22:18 +0530 Subject: [PATCH 068/201] feat: (storage) added field credentials_secret in storage transfer job (#14819) --- .../resource_storage_transfer_job.go | 20 +++++++++++++++---- .../resource_storage_transfer_job_meta.yaml | 1 + .../docs/r/storage_transfer_job.html.markdown | 2 ++ 3 files changed, 19 insertions(+), 4 deletions(-) diff --git a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go index 38fbf29048af..2eff1acfb8d6 100644 --- a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go +++ b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go @@ -99,6 +99,7 @@ var ( awsS3AuthKeys = []string{ "transfer_spec.0.aws_s3_data_source.0.aws_access_key", "transfer_spec.0.aws_s3_data_source.0.role_arn", + "transfer_spec.0.aws_s3_data_source.0.credentials_secret", } azureOptionCredentials = []string{ "transfer_spec.0.azure_blob_storage_data_source.0.azure_credentials", @@ -766,6 +767,12 @@ func awsS3DataSchema() *schema.Resource { Optional: true, Description: `The CloudFront distribution domain name pointing to this bucket, to use when fetching. See [Transfer from S3 via CloudFront](https://cloud.google.com/storage-transfer/docs/s3-cloudfront) for more information. Format: https://{id}.cloudfront.net or any valid custom domain. Must begin with https://.`, }, + "credentials_secret": { + Type: schema.TypeString, + Optional: true, + ExactlyOneOf: awsS3AuthKeys, + Description: `The Resource name of a secret in Secret Manager. AWS credentials must be stored in Secret Manager in JSON format. If credentials_secret is specified, do not specify role_arn or aws_access_key. 
Format: projects/{projectNumber}/secrets/{secret_name}.`, + }, }, } } @@ -1378,10 +1385,11 @@ func expandAwsS3Data(awsS3Datas []interface{}) *storagetransfer.AwsS3Data { awsS3Data := awsS3Datas[0].(map[string]interface{}) result := &storagetransfer.AwsS3Data{ - BucketName: awsS3Data["bucket_name"].(string), - AwsAccessKey: expandAwsAccessKeys(awsS3Data["aws_access_key"].([]interface{})), - RoleArn: awsS3Data["role_arn"].(string), - Path: awsS3Data["path"].(string), + BucketName: awsS3Data["bucket_name"].(string), + AwsAccessKey: expandAwsAccessKeys(awsS3Data["aws_access_key"].([]interface{})), + RoleArn: awsS3Data["role_arn"].(string), + CredentialsSecret: awsS3Data["credentials_secret"].(string), + Path: awsS3Data["path"].(string), } if v, ok := awsS3Data["managed_private_network"]; ok { @@ -1413,6 +1421,10 @@ func flattenAwsS3Data(awsS3Data *storagetransfer.AwsS3Data, d *schema.ResourceDa data["cloudfront_domain"] = awsS3Data.CloudfrontDomain } + if awsS3Data.CredentialsSecret != "" { + data["credentials_secret"] = awsS3Data.CredentialsSecret + } + return []map[string]interface{}{data} } diff --git a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_meta.yaml b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_meta.yaml index 1992d537368f..8d14a6504710 100644 --- a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_meta.yaml +++ b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_meta.yaml @@ -49,6 +49,7 @@ fields: - field: 'transfer_spec.aws_s3_data_source.managed_private_network' - field: 'transfer_spec.aws_s3_data_source.path' - field: 'transfer_spec.aws_s3_data_source.role_arn' + - field: 'transfer_spec.aws_s3_data_source.credentials_secret' - field: 'transfer_spec.azure_blob_storage_data_source.azure_credentials.sas_token' - field: 'transfer_spec.azure_blob_storage_data_source.container' - field: 'transfer_spec.azure_blob_storage_data_source.credentials_secret' diff --git a/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown b/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown index b83ea6ebe37a..2c27448bd9c7 100644 --- a/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown @@ -264,6 +264,8 @@ A duration in seconds with up to nine fractional digits, terminated by 's'. Exam * `cloudfront_domain` - (Optional) The CloudFront distribution domain name pointing to this bucket, to use when fetching. See [Transfer from S3 via CloudFront](https://cloud.google.com/storage-transfer/docs/s3-cloudfront) for more information. Format: `https://{id}.cloudfront.net` or any valid custom domain. Must begin with `https://`. +* `credentials_secret` - (Optional) The Resource name of a secret in Secret Manager. AWS credentials must be stored in Secret Manager in JSON format. If credentials_secret is specified, do not specify role_arn or aws_access_key. Format: `projects/{projectNumber}/secrets/{secret_name}`. + The `aws_access_key` block supports: * `access_key_id` - (Required) AWS Key ID. 
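Editor's note on the Storage Transfer change above (#14819): the new `credentials_secret` field participates in the same `ExactlyOneOf` group as `aws_access_key` and `role_arn`, and its value must be a Secret Manager resource name. The sketch below restates that constraint as plain Go so it can be read in isolation; the `validateAwsAuth` helper and its regexp are illustrative only and are not part of the provider — the provider enforces the mutual exclusion through the SDK schema shown in the diff.

```go
package main

import (
	"fmt"
	"regexp"
)

// secretNameRE matches the documented Secret Manager format:
// projects/{projectNumber}/secrets/{secret_name}.
var secretNameRE = regexp.MustCompile(`^projects/\d+/secrets/[A-Za-z0-9_-]+$`)

// validateAwsAuth mirrors the ExactlyOneOf constraint in plain Go: exactly one
// of aws_access_key, role_arn or credentials_secret may be set, and
// credentials_secret must look like a Secret Manager resource name.
func validateAwsAuth(hasAccessKey bool, roleArn, credentialsSecret string) error {
	set := 0
	if hasAccessKey {
		set++
	}
	if roleArn != "" {
		set++
	}
	if credentialsSecret != "" {
		set++
		if !secretNameRE.MatchString(credentialsSecret) {
			return fmt.Errorf("credentials_secret %q is not in the form projects/{projectNumber}/secrets/{secret_name}", credentialsSecret)
		}
	}
	if set != 1 {
		return fmt.Errorf("exactly one of aws_access_key, role_arn or credentials_secret must be set, got %d", set)
	}
	return nil
}

func main() {
	fmt.Println(validateAwsAuth(false, "", "projects/123456789/secrets/aws-creds")) // <nil>
	fmt.Println(validateAwsAuth(true, "", "projects/123456789/secrets/aws-creds"))  // error: two auth methods set
}
```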
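Looking back one change, the `SuppressRoutingModeDefault` helper added for `google_network_services_gateway` (#14767) is easiest to read as a truth table. The standalone sketch below drops the unused `*schema.ResourceData` parameter so it runs without the SDK; the logic is copied from the function body in that diff, everything else (the demo inputs) is made up.

```go
package main

import "fmt"

// suppress mirrors SuppressRoutingModeDefault: an unset routing_mode and an
// explicit "EXPLICIT_ROUTING_MODE" collapse into the same value, while any
// change involving "NEXT_HOP_ROUTING_MODE" is still reported as a diff.
func suppress(old, new string) bool {
	if old == new {
		return true
	}
	return (old == "" && new == "EXPLICIT_ROUTING_MODE") ||
		(old == "EXPLICIT_ROUTING_MODE" && new == "")
}

func main() {
	fmt.Println(suppress("", "EXPLICIT_ROUTING_MODE"))                      // true  – default collapses
	fmt.Println(suppress("EXPLICIT_ROUTING_MODE", ""))                      // true  – default collapses
	fmt.Println(suppress("EXPLICIT_ROUTING_MODE", "NEXT_HOP_ROUTING_MODE")) // false – real change
	fmt.Println(suppress("", "NEXT_HOP_ROUTING_MODE"))                      // false – real change
}
```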
From 86efccaaf7cbdd5ccea62541685e0c7d71c8b83a Mon Sep 17 00:00:00 2001 From: Mauricio Alvarez Leon <65101411+BBBmau@users.noreply.github.com> Date: Tue, 26 Aug 2025 12:23:00 -0700 Subject: [PATCH 069/201] `teamcity`: use ModularMagician repo for testing purposes in `weeklyDiffTest` project (#14989) --- .../.teamcity/components/projects/google_beta_subproject.kt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/.teamcity/components/projects/google_beta_subproject.kt b/mmv1/third_party/terraform/.teamcity/components/projects/google_beta_subproject.kt index 75eabec8a70c..c67a50303ee5 100644 --- a/mmv1/third_party/terraform/.teamcity/components/projects/google_beta_subproject.kt +++ b/mmv1/third_party/terraform/.teamcity/components/projects/google_beta_subproject.kt @@ -43,7 +43,7 @@ fun googleSubProjectBeta(allConfig: AllContextParameters): Project { subProject(vcrRecording(betaId, ProviderNameBeta, HashiCorpVCSRootBeta, ModularMagicianVCSRootBeta, vcrConfig)) // Beta Diff Test project that uses hashicorp/terraform-provider-google-beta-diff-test - subProject(weeklyDiffTests(betaId + "_DIFF_TEST", ProviderNameBeta, HashiCorpVCSRootBeta, betaConfig, NightlyTriggerConfiguration(daysOfWeek = "SAT", nightlyTestsEnabled = false))) + subProject(weeklyDiffTests(betaId + "_DIFF_TEST", ProviderNameBeta, ModularMagicianVCSRootBeta, betaConfig, NightlyTriggerConfiguration(daysOfWeek = "SAT", nightlyTestsEnabled = false))) params { readOnlySettings() From 34e26e73069a2282f7cb4196241d81bfed4f9a96 Mon Sep 17 00:00:00 2001 From: Lakshman Swaminathan Date: Tue, 26 Aug 2025 12:23:35 -0700 Subject: [PATCH 070/201] altered datasource url usage (#14880) --- mmv1/api/resource.go | 4 +- mmv1/products/cloudrun/Service.yaml | 2 + mmv1/templates/terraform/datasource.go.tmpl | 7 ++-- .../provider/provider_mmv1_resources.go.tmpl | 2 +- .../cloudrun/data_source_cloud_run_service.go | 41 ------------------- 5 files changed, 9 insertions(+), 47 deletions(-) delete mode 100644 mmv1/third_party/terraform/services/cloudrun/data_source_cloud_run_service.go diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index 386487ac696f..a8b03959f0c9 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -2033,7 +2033,7 @@ func (r Resource) ShouldDatasourceSetAnnotations() bool { // that should be marked as "Required". func (r Resource) DatasourceRequiredFields() []string { requiredFields := []string{} - uriParts := strings.Split(r.SelfLink, "/") + uriParts := strings.Split(r.IdFormat, "/") for _, part := range uriParts { if strings.HasPrefix(part, "{{") && strings.HasSuffix(part, "}}") { @@ -2050,7 +2050,7 @@ func (r Resource) DatasourceRequiredFields() []string { // that should be marked as "Optional". 
func (r Resource) DatasourceOptionalFields() []string { optionalFields := []string{} - uriParts := strings.Split(r.SelfLink, "/") + uriParts := strings.Split(r.IdFormat, "/") for _, part := range uriParts { if strings.HasPrefix(part, "{{") && strings.HasSuffix(part, "}}") { diff --git a/mmv1/products/cloudrun/Service.yaml b/mmv1/products/cloudrun/Service.yaml index 6f2ef4e9f2b3..294276192445 100644 --- a/mmv1/products/cloudrun/Service.yaml +++ b/mmv1/products/cloudrun/Service.yaml @@ -29,6 +29,8 @@ base_url: 'apis/serving.knative.dev/v1/namespaces/{{project}}/services' cai_base_url: 'projects/{{project}}/locations/{{location}}/services' import_format: - 'locations/{{location}}/namespaces/{{project}}/services/{{name}}' +datasource: + generate: true timeouts: insert_minutes: 20 update_minutes: 20 diff --git a/mmv1/templates/terraform/datasource.go.tmpl b/mmv1/templates/terraform/datasource.go.tmpl index f7bc979c589c..edd29d3c1dd5 100644 --- a/mmv1/templates/terraform/datasource.go.tmpl +++ b/mmv1/templates/terraform/datasource.go.tmpl @@ -75,10 +75,11 @@ func DataSource{{ .ResourceName -}}() *schema.Resource { func dataSource{{ $.ResourceName -}}Read(d *schema.ResourceData, meta interface{}) error { config := meta.(*transport_tpg.Config) - id, err := tpgresource.ReplaceVars{{if $.LegacyLongFormProject -}}ForId{{ end -}}(d, config, "{{$.SelfLinkUri}}{{$.ReadQueryParams}}") - if err != nil { + id, err := tpgresource.ReplaceVars{{if $.LegacyLongFormProject -}}ForId{{ end -}}(d, config, "{{ $.IdFormat -}}") + if err != nil { return err - } + } + d.SetId(id) err = resource{{ $.ResourceName -}}Read(d, meta) diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index b0e968f31749..06f8767048a9 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -73,7 +73,7 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_cloud_quotas_quota_info": cloudquotas.DataSourceGoogleCloudQuotasQuotaInfo(), "google_cloud_quotas_quota_infos": cloudquotas.DataSourceGoogleCloudQuotasQuotaInfos(), "google_cloud_run_locations": cloudrun.DataSourceGoogleCloudRunLocations(), - "google_cloud_run_service": cloudrun.DataSourceGoogleCloudRunService(), + "google_cloud_run_service": cloudrun.DataSourceCloudRunService(), "google_cloud_run_v2_job": cloudrunv2.DataSourceGoogleCloudRunV2Job(), "google_cloud_run_v2_service": cloudrunv2.DataSourceGoogleCloudRunV2Service(), "google_cloud_run_v2_worker_pool": cloudrunv2.DataSourceGoogleCloudRunV2WorkerPool(), diff --git a/mmv1/third_party/terraform/services/cloudrun/data_source_cloud_run_service.go b/mmv1/third_party/terraform/services/cloudrun/data_source_cloud_run_service.go deleted file mode 100644 index c674b097723d..000000000000 --- a/mmv1/third_party/terraform/services/cloudrun/data_source_cloud_run_service.go +++ /dev/null @@ -1,41 +0,0 @@ -package cloudrun - -import ( - "fmt" - - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-provider-google/google/tpgresource" - transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" -) - -func DataSourceGoogleCloudRunService() *schema.Resource { - - dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceCloudRunService().Schema) - tpgresource.AddRequiredFieldsToSchema(dsSchema, "name", "location") - tpgresource.AddOptionalFieldsToSchema(dsSchema, 
"project") - - return &schema.Resource{ - Read: dataSourceGoogleCloudRunServiceRead, - Schema: dsSchema, - } -} - -func dataSourceGoogleCloudRunServiceRead(d *schema.ResourceData, meta interface{}) error { - config := meta.(*transport_tpg.Config) - - id, err := tpgresource.ReplaceVars(d, config, "locations/{{location}}/namespaces/{{project}}/services/{{name}}") - if err != nil { - return fmt.Errorf("Error constructing id: %s", err) - } - d.SetId(id) - err = resourceCloudRunServiceRead(d, meta) - if err != nil { - return err - } - - if d.Id() == "" { - return fmt.Errorf("%s not found", id) - } - - return nil -} From a161067e3e5c9ee9fc0e4ed566b14890fad8caba Mon Sep 17 00:00:00 2001 From: victorsantos-cit Date: Tue, 26 Aug 2025 16:43:47 -0300 Subject: [PATCH 071/201] compute: canonicalize backend.group self-links to avoid spurious diffs between v1/beta and variants (#14939) --- .../compute/RegionBackendService.yaml | 2 +- ...ompute_region_backend_service_test.go.tmpl | 280 ++++++++++++++++++ .../tpgresource/self_link_helpers.go | 34 +++ 3 files changed, 315 insertions(+), 1 deletion(-) diff --git a/mmv1/products/compute/RegionBackendService.yaml b/mmv1/products/compute/RegionBackendService.yaml index ed56d1af4162..312a970d5c9d 100644 --- a/mmv1/products/compute/RegionBackendService.yaml +++ b/mmv1/products/compute/RegionBackendService.yaml @@ -251,7 +251,7 @@ properties: Group resource using the fully-qualified URL, rather than a partial URL. required: true - diff_suppress_func: 'tpgresource.CompareSelfLinkRelativePaths' + diff_suppress_func: 'tpgresource.CompareSelfLinkCanonicalPaths' custom_flatten: 'templates/terraform/custom_flatten/guard_self_link.go.tmpl' - name: 'maxConnections' type: Integer diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_test.go.tmpl index a8a46c54f711..c4e16475dc75 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_test.go.tmpl @@ -433,6 +433,286 @@ func TestAccComputeRegionBackendService_withLogConfig(t *testing.T) { }) } +func TestAccComputeRegionBackendService_zonalILB(t *testing.T) { + t.Parallel() + + serviceName := fmt.Sprintf("tf-test-ilb-bs-%s", acctest.RandString(t, 10)) + checkName := fmt.Sprintf("tf-test-ilb-hc-%s", acctest.RandString(t, 10)) + checkName2 := fmt.Sprintf("tf-test-ilb-hc2-%s", acctest.RandString(t, 10)) + negName := fmt.Sprintf("tf-test-ilb-neg-%s", acctest.RandString(t, 10)) + negName2 := fmt.Sprintf("tf-test-ilb-neg2-%s", acctest.RandString(t, 10)) + instanceName := fmt.Sprintf("tf-test-ilb-vm-%s", acctest.RandString(t, 10)) + instanceName2 := fmt.Sprintf("tf-test-ilb-vm2-%s", acctest.RandString(t, 10)) + + // subnetwork with random suffix + subnetName := fmt.Sprintf("tf-test-subnet-%s", acctest.RandString(t, 8)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeRegionBackendServiceDestroyProducer(t), + Steps: []resource.TestStep{ + // STEP 1: base (self-link v1) + { + Config: testAccComputeRegionBackendService_zonalILB_withGroup( + testAccComputeRegionBackendService_common(checkName, negName, instanceName, subnetName), + serviceName, + "google_compute_network_endpoint_group.neg.id", + ), + }, + { + 
ResourceName: "google_compute_region_backend_service.default", + ImportState: true, + ImportStateVerify: true, + }, + + // STEP 2: same NEG with /compute/beta/ (apply OK) + { + Config: fmt.Sprintf(` +%s + +locals { + neg_beta = replace(google_compute_network_endpoint_group.neg.id, "/compute/v1/", "/compute/beta/") +} + +%s +`, testAccComputeRegionBackendService_common(checkName, negName, instanceName, subnetName), + testAccComputeRegionBackendService_zonalILB_withGroup("", serviceName, "local.neg_beta"), + ), + }, + { + ResourceName: "google_compute_region_backend_service.default", + ImportState: true, + ImportStateVerify: true, + }, + + // STEP 3: Invalid variation for API (UPPERCASE + "/") — tested only in PLAN + { + PlanOnly: true, // does not call the API; only exercises diff/canonicalization + Config: fmt.Sprintf(` +%s + +locals { + neg_slash_upper = "${google_compute_network_endpoint_group.neg.id}" +} + +%s +`, testAccComputeRegionBackendService_common(checkName, negName, instanceName, subnetName), + testAccComputeRegionBackendService_zonalILB_withGroup("", serviceName, "local.neg_slash_upper"), + ), + }, + + // STEP 4: Modified scenario (changes NEG/HC/VM) — continues validating real updates + { + Config: testAccComputeRegionBackendService_zonalILBModified(serviceName, checkName, negName, instanceName, checkName2, negName2, instanceName2, subnetName), + }, + { + ResourceName: "google_compute_region_backend_service.default", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccComputeRegionBackendService_common(checkName, negName, instanceName, subnetworkName string) string { + return fmt.Sprintf(` +resource "google_compute_network" "default" { + name = "tf-test-net" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + name = "%s" + ip_cidr_range = "10.10.0.0/16" + region = "us-central1" + network = google_compute_network.default.id +} + +resource "google_compute_region_health_check" "hc1" { + name = "%s" + region = "us-central1" + http_health_check { + port = 8080 + request_path = "/status" + } +} + +resource "google_compute_instance" "default" { + name = "%s" + zone = "us-central1-a" + machine_type = "e2-micro" + + boot_disk { + initialize_params { + image = "debian-cloud/debian-11" + } + } + + network_interface { + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + access_config {} + } +} + +resource "google_compute_network_endpoint_group" "neg" { + name = "%s" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + zone = "us-central1-a" + network_endpoint_type = "GCE_VM_IP_PORT" +} + +resource "google_compute_network_endpoint" "endpoint" { + network_endpoint_group = google_compute_network_endpoint_group.neg.name + zone = "us-central1-a" + instance = google_compute_instance.default.name + ip_address = google_compute_instance.default.network_interface[0].network_ip + port = 8080 +} +`, subnetworkName, checkName, instanceName, negName) +} + +func testAccComputeRegionBackendService_zonalILB_withGroup(commonHCL string, serviceName string, groupExpr string) string { + header := commonHCL + return fmt.Sprintf(` +%s +resource "google_compute_region_backend_service" "default" { + name = "%s" + region = "us-central1" + protocol = "HTTP" + load_balancing_scheme = "INTERNAL_MANAGED" + health_checks = [google_compute_region_health_check.hc1.id] + + backend { + group = %s + balancing_mode = "RATE" + max_rate_per_endpoint = 100 + 
capacity_scaler = 1.0 + } + + session_affinity = "CLIENT_IP" + locality_lb_policy = "ROUND_ROBIN" +} +`, header, serviceName, groupExpr) +} + +func testAccComputeRegionBackendService_zonalILBModified(serviceName, checkName, negName, instanceName, checkName2, negName2, instanceName2, subnetworkName string) string { + return fmt.Sprintf(` +resource "google_compute_network" "default" { + name = "tf-test-net" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + name = "%s" + ip_cidr_range = "10.10.0.0/16" + region = "us-central1" + network = google_compute_network.default.id +} + +resource "google_compute_region_health_check" "hc1" { + name = "%s" + region = "us-central1" + http_health_check { + port = 8080 + request_path = "/status" + } +} + +resource "google_compute_instance" "default" { + name = "%s" + zone = "us-central1-a" + machine_type = "e2-micro" + + boot_disk { + initialize_params { + image = "debian-cloud/debian-11" + } + } + + network_interface { + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + access_config {} + } +} + +resource "google_compute_network_endpoint_group" "neg" { + name = "%s" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + zone = "us-central1-a" + network_endpoint_type = "GCE_VM_IP_PORT" +} + +resource "google_compute_network_endpoint" "endpoint" { + network_endpoint_group = google_compute_network_endpoint_group.neg.name + zone = "us-central1-a" + instance = google_compute_instance.default.name + ip_address = google_compute_instance.default.network_interface[0].network_ip + port = 8080 +} + +resource "google_compute_instance" "instance2" { + name = "%s" + zone = "us-central1-a" + machine_type = "e2-micro" + + boot_disk { + initialize_params { + image = "debian-cloud/debian-11" + } + } + + network_interface { + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + access_config {} + } +} + +resource "google_compute_region_health_check" "hc2" { + name = "%s" + region = "us-central1" + http_health_check { + port = 80 + } +} + +resource "google_compute_network_endpoint_group" "neg2" { + name = "%s" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + zone = "us-central1-a" + network_endpoint_type = "GCE_VM_IP_PORT" +} + +resource "google_compute_network_endpoint" "endpoint2" { + network_endpoint_group = google_compute_network_endpoint_group.neg2.name + zone = "us-central1-a" + instance = google_compute_instance.instance2.name + ip_address = google_compute_instance.instance2.network_interface[0].network_ip + port = 8080 +} + +resource "google_compute_region_backend_service" "default" { + name = "%s" + region = "us-central1" + load_balancing_scheme = "INTERNAL_MANAGED" + health_checks = [google_compute_region_health_check.hc2.id] + + backend { + group = google_compute_network_endpoint_group.neg2.id + balancing_mode = "RATE" + max_rate_per_endpoint = 200 + capacity_scaler = 0.5 + } +} +`, subnetworkName, checkName, instanceName, negName, instanceName2, checkName2, negName2, serviceName) +} + func TestAccComputeRegionBackendService_withDynamicBackendCount(t *testing.T) { t.Parallel() diff --git a/mmv1/third_party/terraform/tpgresource/self_link_helpers.go b/mmv1/third_party/terraform/tpgresource/self_link_helpers.go index 93982ff5c0d1..61a8243cba8d 100644 --- a/mmv1/third_party/terraform/tpgresource/self_link_helpers.go +++ 
b/mmv1/third_party/terraform/tpgresource/self_link_helpers.go @@ -73,6 +73,40 @@ func CompareSelfLinkOrResourceName(_, old, new string, _ *schema.ResourceData) b return CompareSelfLinkRelativePaths("", old, new, nil) } +// canonicalizeSelfLink normalizes Compute API self-links by removing the version prefix (v1/beta), +// ensuring a leading "/", collapsing duplicate slashes, trimming any trailing "/", +// and lowercasing the result so logically identical links compare equal. +func CompareSelfLinkCanonicalPaths(_, old, new string, _ *schema.ResourceData) bool { + return canonicalizeSelfLink(old) == canonicalizeSelfLink(new) +} + +var ( + rePrefix = regexp.MustCompile(`(?i)^https?://[a-z0-9.-]*/compute/(v1|beta)/`) + reDuplicateSlashes = regexp.MustCompile(`/+`) +) + +func canonicalizeSelfLink(link string) string { + if link == "" { + return "" + } + + // Remove "https://…/compute/v1/" or "https://…/compute/beta/" + path := rePrefix.ReplaceAllString(link, "/") + + // Ensure leading "/" + if !strings.HasPrefix(path, "/") { + path = "/" + path + } + + // Collapse "//" + path = reDuplicateSlashes.ReplaceAllString(path, "/") + + // Remove trailing "/" + path = strings.TrimSuffix(path, "/") + + return strings.ToLower(path) +} + // Hash the relative path of a self link. func SelfLinkRelativePathHash(selfLink interface{}) int { path, _ := GetRelativePath(selfLink.(string)) From b363509c8de779cc510956819ff5c3f88c396414 Mon Sep 17 00:00:00 2001 From: cardinalli_andre_cint Date: Tue, 26 Aug 2025 17:09:53 -0300 Subject: [PATCH 072/201] Add Terraform support for GCS in Global Internal Load Balancers (#14679) Co-authored-by: samir-cit Co-authored-by: Nick Elliot --- mmv1/products/compute/BackendBucket.yaml | 6 +++++- .../terraform/examples/backend_bucket_global_ilb.tf.tmpl | 6 +++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/mmv1/products/compute/BackendBucket.yaml b/mmv1/products/compute/BackendBucket.yaml index 21ad6c671212..b8c3b6e7aeb8 100644 --- a/mmv1/products/compute/BackendBucket.yaml +++ b/mmv1/products/compute/BackendBucket.yaml @@ -115,6 +115,7 @@ examples: backend_bucket_name: 'global-ilb-backend-bucket' bucket_name: 'global-ilb-bucket' exclude_docs: true + skip_vcr: true parameters: properties: - name: 'bucketName' @@ -273,7 +274,9 @@ properties: client when the resource is created. - name: 'enableCdn' type: Boolean - description: 'If true, enable Cloud CDN for this BackendBucket.' + description: | + If true, enable Cloud CDN for this BackendBucket. + Note: This cannot be set to true when loadBalancingScheme is set to INTERNAL_MANAGED. include_empty_value_in_cai: true - name: 'name' type: String @@ -295,6 +298,7 @@ properties: description: | The value can only be INTERNAL_MANAGED for cross-region internal layer 7 load balancer. If loadBalancingScheme is not specified, the backend bucket can be used by classic global external load balancers, or global application external load balancers, or both. + Important: CDN cannot be enabled (enableCdn cannot be set to true) when loadBalancingScheme is set to INTERNAL_MANAGED. 
enum_values: - 'INTERNAL_MANAGED' send_empty_value: true diff --git a/mmv1/templates/terraform/examples/backend_bucket_global_ilb.tf.tmpl b/mmv1/templates/terraform/examples/backend_bucket_global_ilb.tf.tmpl index 9497c721747b..01d3d96fc795 100644 --- a/mmv1/templates/terraform/examples/backend_bucket_global_ilb.tf.tmpl +++ b/mmv1/templates/terraform/examples/backend_bucket_global_ilb.tf.tmpl @@ -1,3 +1,7 @@ +# Note: This example must be run in a project without Cloud Armor tier configured, +# as it may cause conflicts with the INTERNAL_MANAGED load balancing scheme. +# This test is skipped in VCR mode due to non-determinism in project creation and resource management. + resource "google_project" "unarmored" { project_id = "tf-test%{random_suffix}" name = "tf-test%{random_suffix}" @@ -14,7 +18,7 @@ resource "google_project_service" "project" { resource "google_compute_backend_bucket" "{{$.PrimaryResourceId}}" { name = "{{index $.Vars "backend_bucket_name"}}" - project = google_project.unarmored.name + project = google_project.unarmored.number bucket_name = google_storage_bucket.{{$.PrimaryResourceId}}.name load_balancing_scheme = "INTERNAL_MANAGED" From 3427eb26396eb27f4cf51996428bbc1269d26d99 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Tue, 26 Aug 2025 13:19:23 -0700 Subject: [PATCH 073/201] add vacation for zhenhua (#14992) --- .ci/magician/github/membership_data.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.ci/magician/github/membership_data.go b/.ci/magician/github/membership_data.go index 488dd5c70d74..1dedcf7dcc19 100644 --- a/.ci/magician/github/membership_data.go +++ b/.ci/magician/github/membership_data.go @@ -145,8 +145,8 @@ var ( "zli82016": { vacations: []Vacation{ { - startDate: newDate(2025, 1, 15), - endDate: newDate(2025, 2, 9), + startDate: newDate(2025, 8, 27), + endDate: newDate(2025, 9, 2), }, }, }, From 5375627aadd57ee1ae59eb024cf0be14c0a3f05f Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Tue, 26 Aug 2025 13:21:24 -0700 Subject: [PATCH 074/201] tgc-revival: add cloudasset feed resources (#14934) --- mmv1/api/resource.go | 14 ++++++++++++-- mmv1/products/cloudasset/FolderFeed.yaml | 5 +++++ mmv1/products/cloudasset/OrganizationFeed.yaml | 5 +++++ mmv1/products/cloudasset/ProjectFeed.yaml | 4 ++++ .../tgc_next/cai2hcl/resource_converter.go.tmpl | 4 ++-- .../decoders/backup_dr_backup_plan.go.tmpl | 2 +- .../certificatemanager_certificate.go.tmpl | 2 +- .../tgc_next/decoders/cloud_asset_feed.go.tmpl | 4 ++++ .../decoders/compute_backend_service.go.tmpl | 2 +- .../tgc_next/decoders/compute_subnetwork.go.tmpl | 2 +- .../pkg/cai2hcl/converters/convert_resource.go | 12 +++++++++++- 11 files changed, 47 insertions(+), 9 deletions(-) create mode 100644 mmv1/templates/tgc_next/decoders/cloud_asset_feed.go.tmpl diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index a8b03959f0c9..f14b98e20b35 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -385,6 +385,12 @@ type TGCResource struct { // If true, the Terraform custom encoder is not applied during tfplan2cai TGCIgnoreTerraformEncoder bool `yaml:"tgc_ignore_terraform_encoder,omitempty"` + + // [Optional] The parameter that uniquely identifies the resource. + // Generally, it's safe to leave empty, in which case it defaults to `name`. + // Other values are normally useful in cases where an object has a parent + // and is identified by some non-name value, such as an ip+port pair. 
+ CaiIdentity string `yaml:"cai_identity,omitempty"` } func (r *Resource) UnmarshalYAML(unmarshal func(any) error) error { @@ -1893,14 +1899,18 @@ func (r Resource) DefineAssetTypeForResourceInProduct() bool { // For example: //monitoring.googleapis.com/v3/projects/{{project}}/services/{{service_id}} func (r Resource) rawCaiAssetNameTemplate(productBackendName string) string { caiBaseUrl := "" + caiId := "name" + if r.CaiIdentity != "" { + caiId = r.CaiIdentity + } if r.CaiBaseUrl != "" { - caiBaseUrl = fmt.Sprintf("%s/{{name}}", r.CaiBaseUrl) + caiBaseUrl = fmt.Sprintf("%s/{{%s}}", r.CaiBaseUrl, caiId) } if caiBaseUrl == "" { caiBaseUrl = r.SelfLink } if caiBaseUrl == "" { - caiBaseUrl = fmt.Sprintf("%s/{{name}}", r.BaseUrl) + caiBaseUrl = fmt.Sprintf("%s/{{%s}}", r.BaseUrl, caiId) } return fmt.Sprintf("//%s.googleapis.com/%s", productBackendName, caiBaseUrl) } diff --git a/mmv1/products/cloudasset/FolderFeed.yaml b/mmv1/products/cloudasset/FolderFeed.yaml index f1c41eaba949..36a8de774956 100644 --- a/mmv1/products/cloudasset/FolderFeed.yaml +++ b/mmv1/products/cloudasset/FolderFeed.yaml @@ -38,7 +38,12 @@ custom_code: pre_create: 'templates/terraform/pre_create/cloud_asset_feed.go.tmpl' post_create: 'templates/terraform/post_create/cloud_asset_feed.go.tmpl' custom_import: 'templates/terraform/custom_import/cloud_asset_feed.go.tmpl' + tgc_decoder: 'templates/tgc_next/decoders/cloud_asset_feed.go.tmpl' supports_indirect_user_project_override: true +include_in_tgc_next_DO_NOT_USE: true +cai_base_url: 'folders/{{folder}}/feeds' +cai_identity: 'feed_id' +tgc_ignore_terraform_encoder: true examples: - name: 'cloud_asset_folder_feed' primary_resource_id: 'folder_feed' diff --git a/mmv1/products/cloudasset/OrganizationFeed.yaml b/mmv1/products/cloudasset/OrganizationFeed.yaml index f43276faa1b0..0c2082471491 100644 --- a/mmv1/products/cloudasset/OrganizationFeed.yaml +++ b/mmv1/products/cloudasset/OrganizationFeed.yaml @@ -38,7 +38,12 @@ custom_code: pre_create: 'templates/terraform/pre_create/cloud_asset_feed.go.tmpl' post_create: 'templates/terraform/post_create/cloud_asset_feed.go.tmpl' custom_import: 'templates/terraform/custom_import/cloud_asset_feed.go.tmpl' + tgc_decoder: 'templates/tgc_next/decoders/cloud_asset_feed.go.tmpl' supports_indirect_user_project_override: true +include_in_tgc_next_DO_NOT_USE: true +cai_base_url: 'organizations/{{org_id}}/feeds' +cai_identity: 'feed_id' +tgc_ignore_terraform_encoder: true examples: - name: 'cloud_asset_organization_feed' primary_resource_id: 'organization_feed' diff --git a/mmv1/products/cloudasset/ProjectFeed.yaml b/mmv1/products/cloudasset/ProjectFeed.yaml index 525bbca61a56..4132e44ff685 100644 --- a/mmv1/products/cloudasset/ProjectFeed.yaml +++ b/mmv1/products/cloudasset/ProjectFeed.yaml @@ -38,6 +38,10 @@ custom_code: pre_create: 'templates/terraform/pre_create/cloud_asset_feed.go.tmpl' post_create: 'templates/terraform/post_create/cloud_asset_feed.go.tmpl' custom_import: 'templates/terraform/custom_import/cloud_asset_feed.go.tmpl' +include_in_tgc_next_DO_NOT_USE: true +cai_base_url: 'projects/{{project}}/feeds' +cai_identity: 'feed_id' +tgc_ignore_terraform_encoder: true examples: - name: 'cloud_asset_project_feed' primary_resource_id: 'project_feed' diff --git a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl index f9efdc3e73fc..15a71bd749e5 100644 --- a/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl +++ 
b/mmv1/templates/tgc_next/cai2hcl/resource_converter.go.tmpl @@ -92,7 +92,7 @@ func (c *{{ $.ResourceName -}}Cai2hclConverter) convertResourceData(asset caiass hclData := make(map[string]interface{}) {{ if $.CustomCode.TgcDecoder -}} - res, err = resource{{ $.ResourceName -}}TgcDecoder(d, config, res) + res, hclData, err = resource{{ $.ResourceName -}}TgcDecoder(d, config, res, hclData) if err != nil { return nil, err } @@ -140,7 +140,7 @@ func (c *{{ $.ResourceName -}}Cai2hclConverter) convertResourceData(asset caiass {{- end }} {{- if $.CustomCode.TgcDecoder }} -func resource{{ $.ResourceName -}}TgcDecoder(d *schema.ResourceData, meta interface{}, res map[string]interface{}) (map[string]interface{}, error) { +func resource{{ $.ResourceName -}}TgcDecoder(d *schema.ResourceData, meta interface{}, res map[string]interface{}, hclData map[string]interface{}) (map[string]interface{}, map[string]interface{}, error) { {{ $.CustomTemplate $.CustomCode.TgcDecoder false -}} } {{- end }} diff --git a/mmv1/templates/tgc_next/decoders/backup_dr_backup_plan.go.tmpl b/mmv1/templates/tgc_next/decoders/backup_dr_backup_plan.go.tmpl index 996b7f1a1cbd..2ce5d4f233e0 100644 --- a/mmv1/templates/tgc_next/decoders/backup_dr_backup_plan.go.tmpl +++ b/mmv1/templates/tgc_next/decoders/backup_dr_backup_plan.go.tmpl @@ -17,4 +17,4 @@ if rules, ok := res["backupRules"].([]interface{}); ok { } } -return res, nil \ No newline at end of file +return res, hclData, nil \ No newline at end of file diff --git a/mmv1/templates/tgc_next/decoders/certificatemanager_certificate.go.tmpl b/mmv1/templates/tgc_next/decoders/certificatemanager_certificate.go.tmpl index f7e84128a130..2596316b67fc 100644 --- a/mmv1/templates/tgc_next/decoders/certificatemanager_certificate.go.tmpl +++ b/mmv1/templates/tgc_next/decoders/certificatemanager_certificate.go.tmpl @@ -6,4 +6,4 @@ if vStr, ok := res["scope"].(string); ok && vStr == "DEFAULT" { // Omit the default value. 
delete(res, "scope") } -return res, nil +return res, hclData, nil diff --git a/mmv1/templates/tgc_next/decoders/cloud_asset_feed.go.tmpl b/mmv1/templates/tgc_next/decoders/cloud_asset_feed.go.tmpl new file mode 100644 index 000000000000..69605bf73cd3 --- /dev/null +++ b/mmv1/templates/tgc_next/decoders/cloud_asset_feed.go.tmpl @@ -0,0 +1,4 @@ +// billing_project is the required url_param_only property, but is not in CAI asset name or data +// TODO: handle it in a generic way +hclData["billing_project"] = "null" +return res, hclData, nil \ No newline at end of file diff --git a/mmv1/templates/tgc_next/decoders/compute_backend_service.go.tmpl b/mmv1/templates/tgc_next/decoders/compute_backend_service.go.tmpl index de931a039dea..74d3e6ab7ea5 100644 --- a/mmv1/templates/tgc_next/decoders/compute_backend_service.go.tmpl +++ b/mmv1/templates/tgc_next/decoders/compute_backend_service.go.tmpl @@ -28,4 +28,4 @@ if v, ok := res["backends"]; ok { } } -return res, nil \ No newline at end of file +return res, hclData, nil \ No newline at end of file diff --git a/mmv1/templates/tgc_next/decoders/compute_subnetwork.go.tmpl b/mmv1/templates/tgc_next/decoders/compute_subnetwork.go.tmpl index 748efa32f2f4..b1d31f1ad3f9 100644 --- a/mmv1/templates/tgc_next/decoders/compute_subnetwork.go.tmpl +++ b/mmv1/templates/tgc_next/decoders/compute_subnetwork.go.tmpl @@ -20,4 +20,4 @@ if raw, ok := res["stackType"]; ok { } } -return res, nil \ No newline at end of file +return res, hclData, nil \ No newline at end of file diff --git a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/convert_resource.go b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/convert_resource.go index daabb215054f..18fbc46b56a4 100644 --- a/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/convert_resource.go +++ b/mmv1/third_party/tgc_next/pkg/cai2hcl/converters/convert_resource.go @@ -21,7 +21,7 @@ func ConvertResource(asset caiasset.Asset) ([]*models.TerraformResourceBlock, er } } - // Edge cases + // Handle the tdge case that multiple Terraform resources share the same CAI asset type if asset.Type == "compute.googleapis.com/Autoscaler" { if strings.Contains(asset.Name, "/zones/") { converter = ConverterMap[asset.Type]["ComputeAutoscaler"] @@ -29,5 +29,15 @@ func ConvertResource(asset caiasset.Asset) ([]*models.TerraformResourceBlock, er converter = ConverterMap[asset.Type]["ComputeRegionAutoscaler"] } } + + if asset.Type == "cloudasset.googleapis.com/Feed" { + if strings.Contains(asset.Name, "/organizations/") { + converter = ConverterMap[asset.Type]["CloudAssetOrganizationFeed"] + } else if strings.Contains(asset.Name, "/folders/") { + converter = ConverterMap[asset.Type]["CloudAssetFolderFeed"] + } else { + converter = ConverterMap[asset.Type]["CloudAssetProjectFeed"] + } + } return converter.Convert(asset) } From 23e0f84ce2ae0353f08b002a606078f3a7d444e5 Mon Sep 17 00:00:00 2001 From: translucens Date: Wed, 27 Aug 2025 05:26:00 +0900 Subject: [PATCH 075/201] add GKE default disk_type notes (#14930) --- .../terraform/website/docs/r/container_cluster.html.markdown | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown index 4bd90c16e424..623a11241f9b 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown @@ -890,7 +890,7 @@ The 
`master_authorized_networks_config.cidr_blocks` block supports: Prefer configuring `boot_disk`. * `disk_type` - (Optional) Type of the disk attached to each node - (e.g. 'pd-standard', 'pd-balanced' or 'pd-ssd'). If unspecified, the default disk type is 'pd-balanced' This is being migrated to `boot_disk.disk_type`, and must match if specified in both places. Prefer configuring `boot_disk`. + (e.g. 'pd-standard', 'pd-balanced', 'pd-ssd', or 'hyperdisk-balanced'). Defaults to `hyperdisk-balanced` if `hyperdisk-balanced` is supported and `pd-balanced` is not supported for the machine type; otherwise defaults to `pd-balanced`. This is being migrated to `boot_disk.disk_type`, and must match if specified in both places. Prefer configuring `boot_disk`. * `enable_confidential_storage` - (Optional) Enabling Confidential Storage will create boot disk with confidential mode. It is disabled by default. @@ -1085,7 +1085,7 @@ sole_tenant_config { in GB. The smallest allowed disk size is 10GB. Defaults to 100GB. This is being migrated from `node_config.disk_size_gb`, and must match if specified in both places. Prefer using this field. * `disk_type` - (Optional) Type of the disk attached to each node - (e.g. 'pd-standard', 'pd-balanced', 'pd-ssd', 'hyperdisk-balanced'). If unspecified, the default disk type is 'pd-balanced' This is being migrated from `node_config.disk_type`, and must match if specified in both places. Prefer using this field. + (e.g. 'pd-standard', 'pd-balanced', 'pd-ssd', or 'hyperdisk-balanced'). Defaults to `hyperdisk-balanced` if `hyperdisk-balanced` is supported and `pd-balanced` is not supported for the machine type; otherwise defaults to `pd-balanced`. This is being migrated from `node_config.disk_type`, and must match if specified in both places. Prefer using this field. * `provisioned_iops` - (Optional) Configure disk IOPs. This is only valid if the `disk_type` is 'hyperdisk-balanced'. See [performance limit documention](https://cloud.google.com/compute/docs/disks/hyperdisk-perf-limits) for more information about valid values. 
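Editor's note on the `disk_type` documentation change above (#14930): the default-selection rule is easier to scan as code. The defaulting is performed server-side by GKE, not by the provider; the helper below merely restates the documented rule, and the capability flags passed to it are hypothetical inputs rather than anything the provider computes.

```go
package main

import "fmt"

// defaultDiskType restates the documented GKE default: hyperdisk-balanced is
// chosen only when the machine type supports it and does not support
// pd-balanced; in every other case the default is pd-balanced.
func defaultDiskType(supportsHyperdiskBalanced, supportsPDBalanced bool) string {
	if supportsHyperdiskBalanced && !supportsPDBalanced {
		return "hyperdisk-balanced"
	}
	return "pd-balanced"
}

func main() {
	// A machine series that only takes Hyperdisk.
	fmt.Println(defaultDiskType(true, false)) // hyperdisk-balanced
	// A machine type that still supports Persistent Disk.
	fmt.Println(defaultDiskType(true, true))  // pd-balanced
	fmt.Println(defaultDiskType(false, true)) // pd-balanced
}
```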
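A short aside on an earlier change in this series: #14939 canonicalizes `backend.group` self-links so that v1 and beta URLs of the same network endpoint group no longer produce spurious diffs. The sketch below reproduces that normalization outside the provider so the effect is visible at a glance; the logic follows the `canonicalizeSelfLink` helper in that patch, and the NEG URLs in `main` are made up.

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

var (
	prefixRE         = regexp.MustCompile(`(?i)^https?://[a-z0-9.-]*/compute/(v1|beta)/`)
	duplicateSlashRE = regexp.MustCompile(`/+`)
)

// canonicalize strips the versioned Compute API prefix, normalizes slashes and
// case, so v1 and beta self-links of the same resource compare equal.
func canonicalize(link string) string {
	if link == "" {
		return ""
	}
	path := prefixRE.ReplaceAllString(link, "/")
	if !strings.HasPrefix(path, "/") {
		path = "/" + path
	}
	path = duplicateSlashRE.ReplaceAllString(path, "/")
	path = strings.TrimSuffix(path, "/")
	return strings.ToLower(path)
}

func main() {
	v1 := "https://www.googleapis.com/compute/v1/projects/p/zones/us-central1-a/networkEndpointGroups/neg"
	beta := "https://www.googleapis.com/compute/beta/projects/p/zones/us-central1-a/networkEndpointGroups/neg"
	fmt.Println(canonicalize(v1) == canonicalize(beta)) // true
}
```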
From ed24432ec850860d15e4c4ee4d0283e9eb163b77 Mon Sep 17 00:00:00 2001 From: Naheed <89418276+naheedtayab@users.noreply.github.com> Date: Tue, 26 Aug 2025 21:27:20 +0100 Subject: [PATCH 076/201] Fix typo on word 'Certificate' for map description (#14859) --- mmv1/products/certificatemanager/CertificateMapEntry.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/products/certificatemanager/CertificateMapEntry.yaml b/mmv1/products/certificatemanager/CertificateMapEntry.yaml index a8b23152a365..c7b58e3b37ba 100644 --- a/mmv1/products/certificatemanager/CertificateMapEntry.yaml +++ b/mmv1/products/certificatemanager/CertificateMapEntry.yaml @@ -59,7 +59,7 @@ parameters: - name: 'map' type: ResourceRef description: | - A map entry that is inputted into the cetrificate map + A map entry that is inputted into the certificate map url_param_only: true required: true immutable: true From 214ebd8df12aa3040fac3d200cf2310dfce9252a Mon Sep 17 00:00:00 2001 From: Raj Anand <88097156+raazanand@users.noreply.github.com> Date: Wed, 27 Aug 2025 02:24:04 +0530 Subject: [PATCH 077/201] added qos support (#14929) --- mmv1/products/netapp/StoragePool.yaml | 15 +++ mmv1/products/netapp/Volume.yaml | 5 + .../resource_netapp_volume_test.go.tmpl | 101 +++++++++++++++++- 3 files changed, 120 insertions(+), 1 deletion(-) diff --git a/mmv1/products/netapp/StoragePool.yaml b/mmv1/products/netapp/StoragePool.yaml index cc2ae1b0352e..6921fdfd36f5 100644 --- a/mmv1/products/netapp/StoragePool.yaml +++ b/mmv1/products/netapp/StoragePool.yaml @@ -189,6 +189,7 @@ properties: type: String description: | Optional. Custom Performance Total Throughput of the pool (in MiB/s). + default_from_api: true - name: 'totalIops' type: String description: | @@ -207,3 +208,17 @@ properties: Flag indicating that the hot-tier threshold will be auto-increased by 10% of the hot-tier when it hits 100%. Default is true. The increment will kick in only if the new size after increment is still less than or equal to storage pool size. min_version: 'beta' + - name: 'qosType' + type: Enum + description: | + QoS (Quality of Service) type of the storage pool. + Possible values are: AUTO, MANUAL. + enum_values: + - 'QOS_TYPE_UNSPECIFIED' + - 'AUTO' + - 'MANUAL' + - name: 'availableThroughputMibps' + type: Double + description: | + Available throughput of the storage pool (in MiB/s). + output: true diff --git a/mmv1/products/netapp/Volume.yaml b/mmv1/products/netapp/Volume.yaml index 072e43425f6a..9bd31101c5d6 100644 --- a/mmv1/products/netapp/Volume.yaml +++ b/mmv1/products/netapp/Volume.yaml @@ -567,3 +567,8 @@ properties: description: | Optional. Labels to be added to the replication as the key value pairs. An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }. + - name: 'throughputMibps' + type: Double + description: | + Optional. Custom Performance Total Throughput of the pool (in MiB/s). 
+ default_from_api: true diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go.tmpl b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go.tmpl index 88ea097e6554..a8d8f2b9b5d9 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go.tmpl +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go.tmpl @@ -760,7 +760,6 @@ data "google_compute_network" "default" { `, context) } - {{ if ne $.TargetVersionName `ga` -}} func TestAccNetappVolume_flexAutoTierNetappVolume_update(t *testing.T) { context := map[string]interface{}{ @@ -871,4 +870,104 @@ data "google_compute_network" "default" { } `, context) } + +func TestAccNetappStoragePool_ManualQos(t *testing.T) { + context := map[string]interface{}{ + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckNetappVolumeDestroyProducer(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccNetappVolume_ManualQosAuto(context), + }, + { + ResourceName: "google_netapp_volume.test_volume", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"restore_parameters", "location", "name", "deletion_policy", "labels", "terraform_labels"}, + }, + { + Config: testAccNetappVolume_ManualQosManual(context), + }, + { + ResourceName: "google_netapp_volume.test_volume", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"restore_parameters", "location", "name", "deletion_policy", "labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccNetappVolume_ManualQosAuto(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_netapp_storage_pool" "test_pool" { + name = "tf-test-pool%{random_suffix}" + location = "us-east4" + service_level = "EXTREME" + capacity_gib = "2048" + network = data.google_compute_network.default.id + qos_type = "AUTO" +} + +resource "time_sleep" "wait_3_minutes" { + depends_on = [google_netapp_storage_pool.test_pool] + create_duration = "3m" +} + +resource "google_netapp_volume" "test_volume" { + location = "us-east4" + name = "tf-test-test-volume%{random_suffix}" + capacity_gib = "100" + share_name = "tf-test-test-volume%{random_suffix}" + storage_pool = google_netapp_storage_pool.test_pool.name + protocols = ["NFSV3"] +} + +data "google_compute_network" "default" { + name = "%{network_name}" +} +`, context) +} + +func testAccNetappVolume_ManualQosManual(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_netapp_storage_pool" "test_pool" { + name = "tf-test-pool%{random_suffix}" + location = "us-east4" + service_level = "EXTREME" + capacity_gib = "2048" + network = data.google_compute_network.default.id + qos_type = "MANUAL" +} + +resource "time_sleep" "wait_3_minutes" { + depends_on = [google_netapp_storage_pool.test_pool] + create_duration = "3m" +} + +resource "google_netapp_volume" "test_volume" { + location = "us-east4" + name = "tf-test-test-volume%{random_suffix}" + capacity_gib = "100" + description = "This is a test description for manual qos volume" + share_name = 
"tf-test-test-volume%{random_suffix}" + storage_pool = google_netapp_storage_pool.test_pool.name + protocols = ["NFSV3"] + throughput_mibps = 12.5 +} + +data "google_compute_network" "default" { + name = "%{network_name}" +} +`, context) +} {{ end }} From bdbcf3a3c90e028c6732668cebfc45d3a7340792 Mon Sep 17 00:00:00 2001 From: HansiMou Date: Tue, 26 Aug 2025 15:08:39 -0700 Subject: [PATCH 078/201] Add unique index support in Firestore (#14682) --- mmv1/products/firestore/Index.yaml | 14 +++++++- .../examples/firestore_index_unique.tf.tmpl | 32 +++++++++++++++++++ 2 files changed, 45 insertions(+), 1 deletion(-) create mode 100644 mmv1/templates/terraform/examples/firestore_index_unique.tf.tmpl diff --git a/mmv1/products/firestore/Index.yaml b/mmv1/products/firestore/Index.yaml index 571e67499892..c3e29a56845a 100644 --- a/mmv1/products/firestore/Index.yaml +++ b/mmv1/products/firestore/Index.yaml @@ -92,6 +92,12 @@ examples: database_id: 'database-id-sparse-any' test_env_vars: project_id: 'PROJECT_NAME' + - name: 'firestore_index_unique' + primary_resource_id: 'my-index' + vars: + database_id: 'database-id-unique' + test_env_vars: + project_id: 'PROJECT_NAME' parameters: properties: - name: 'name' @@ -152,6 +158,12 @@ properties: definition reach or traverse an array, except via an explicit array index. Violations will result in errors. Note this field only applies to indexes with MONGODB_COMPATIBLE_API ApiScope. + - name: 'unique' + type: Boolean + default_from_api: true + description: + Whether it is an unique index. Unique index ensures all values for the + indexed field(s) are unique across documents. - name: 'fields' type: Array description: | @@ -206,7 +218,7 @@ properties: send_empty_value: true allow_empty_object: true properties: - # Meant to be an empty object with no properties. + # Meant to be an empty object with no properties. [] # Most composite indexes require at least two fields, but it is possible # for a user to require a single field index such as `__name__ DESC`. 
diff --git a/mmv1/templates/terraform/examples/firestore_index_unique.tf.tmpl b/mmv1/templates/terraform/examples/firestore_index_unique.tf.tmpl new file mode 100644 index 000000000000..4797628e90cb --- /dev/null +++ b/mmv1/templates/terraform/examples/firestore_index_unique.tf.tmpl @@ -0,0 +1,32 @@ +resource "google_firestore_database" "database" { + project = "{{index $.TestEnvVars "project_id"}}" + name = "{{index $.Vars "database_id"}}" + location_id = "nam5" + type = "FIRESTORE_NATIVE" + database_edition = "ENTERPRISE" + + delete_protection_state = "DELETE_PROTECTION_DISABLED" + deletion_policy = "DELETE" +} + +resource "google_firestore_index" "{{$.PrimaryResourceId}}" { + project = "{{index $.TestEnvVars "project_id"}}" + database = google_firestore_database.database.name + collection = "atestcollection" + + api_scope = "MONGODB_COMPATIBLE_API" + query_scope = "COLLECTION_GROUP" + multikey = true + density = "DENSE" + unique = true + + fields { + field_path = "name" + order = "ASCENDING" + } + + fields { + field_path = "description" + order = "DESCENDING" + } +} From ff3aee6739aaa87c30f819fc1df1ce929f9e402a Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Wed, 27 Aug 2025 00:09:48 +0200 Subject: [PATCH 079/201] artifactregistry: added `registry_uri` as attribute to `google_artifact_registry_repository` (#14776) --- mmv1/products/artifactregistry/Repository.yaml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/mmv1/products/artifactregistry/Repository.yaml b/mmv1/products/artifactregistry/Repository.yaml index 1241e4cac2e8..bb007704faeb 100644 --- a/mmv1/products/artifactregistry/Repository.yaml +++ b/mmv1/products/artifactregistry/Repository.yaml @@ -270,6 +270,11 @@ properties: longer than 63 characters. Label keys must begin with a lowercase letter and may only contain lowercase letters, numeric characters, underscores, and dashes. + - name: 'registryUri' + type: String + output: true + description: | + The repository endpoint, for example: us-docker.pkg.dev/my-proj/my-repo. 
- name: 'kmsKeyName' type: String description: |- From c588b8a965e2606b4ab293458632975bfd7eb859 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Wed, 27 Aug 2025 07:38:15 -0700 Subject: [PATCH 080/201] Upgrade DCL 1.83.0 (#14935) --- mmv1/third_party/terraform/go.mod | 2 +- mmv1/third_party/terraform/go.sum | 2 ++ .../resource_cloudbuild_worker_pool_test.go.tmpl | 3 ++- .../website/docs/r/cloudbuild_worker_pool.html.markdown | 6 +++++- tpgtools/go.mod | 2 +- tpgtools/go.sum | 8 ++------ 6 files changed, 13 insertions(+), 10 deletions(-) diff --git a/mmv1/third_party/terraform/go.mod b/mmv1/third_party/terraform/go.mod index 026850fd2bd3..3641e8f50c8b 100644 --- a/mmv1/third_party/terraform/go.mod +++ b/mmv1/third_party/terraform/go.mod @@ -6,7 +6,7 @@ require ( cloud.google.com/go/auth v0.16.4 cloud.google.com/go/auth/oauth2adapt v0.2.8 cloud.google.com/go/bigtable v1.37.0 - github.com/GoogleCloudPlatform/declarative-resource-client-library v1.82.0 + github.com/GoogleCloudPlatform/declarative-resource-client-library v1.83.0 github.com/apparentlymart/go-cidr v1.1.0 github.com/davecgh/go-spew v1.1.1 github.com/dnaeon/go-vcr v1.0.1 diff --git a/mmv1/third_party/terraform/go.sum b/mmv1/third_party/terraform/go.sum index ed7608cc7167..bd42f7af505b 100644 --- a/mmv1/third_party/terraform/go.sum +++ b/mmv1/third_party/terraform/go.sum @@ -431,3 +431,5 @@ honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWh honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= rsc.io/binaryregexp v0.2.0 h1:HfqmD5MEmC0zvwBuF187nq9mdnXjXsSivRiXN7SmRkE= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.83.0 h1:pvSYcI7HKOtqHTr4E9cRqVbgnh0+qnJZCrnmozltFVg= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.83.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= diff --git a/mmv1/third_party/terraform/services/cloudbuild/resource_cloudbuild_worker_pool_test.go.tmpl b/mmv1/third_party/terraform/services/cloudbuild/resource_cloudbuild_worker_pool_test.go.tmpl index 9451c9c5c2ce..aa6399bb3de0 100644 --- a/mmv1/third_party/terraform/services/cloudbuild/resource_cloudbuild_worker_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudbuild/resource_cloudbuild_worker_pool_test.go.tmpl @@ -132,8 +132,9 @@ resource "google_cloudbuild_worker_pool" "pool" { location = "europe-west1" worker_config { disk_size_gb = 101 - machine_type = "e2-standard-4" + machine_type = "c3-standard-4" no_external_ip = false + enable_nested_virtualization = true } annotations = { diff --git a/mmv1/third_party/terraform/website/docs/r/cloudbuild_worker_pool.html.markdown b/mmv1/third_party/terraform/website/docs/r/cloudbuild_worker_pool.html.markdown index bd9e48c1904b..50005f52291f 100644 --- a/mmv1/third_party/terraform/website/docs/r/cloudbuild_worker_pool.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/cloudbuild_worker_pool.html.markdown @@ -106,7 +106,11 @@ The following arguments are supported: Immutable. Subnet IP range within the peered network. This is specified in CIDR notation with a slash and the subnet prefix size. You can optionally specify an IP address before the subnet prefix value. e.g. `192.168.0.0/29` would specify an IP range starting at 192.168.0.0 with a prefix size of 29 bits. `/16` would specify a prefix size of 16 bits, with an automatically determined IP within the peered VPC. 
If unspecified, a value of `/24` will be used. The `worker_config` block supports: - + +* `enable_nested_virtualization` - + (Optional) + Enable nested virtualization on the worker, if supported by the machine type. See [Worker pool config file](https://cloud.google.com/build/docs/private-pools/worker-pool-config-file-schema). If left blank, Cloud Build will set this to false. + * `disk_size_gb` - (Optional) Size of the disk attached to the worker, in GB. See [diskSizeGb](https://cloud.google.com/build/docs/private-pools/private-pool-config-file-schema#disksizegb). Specify a value of up to 1000. If `0` is specified, Cloud Build will use a standard disk size. diff --git a/tpgtools/go.mod b/tpgtools/go.mod index d7a556fbec12..f80b39ae5daa 100644 --- a/tpgtools/go.mod +++ b/tpgtools/go.mod @@ -4,7 +4,7 @@ go 1.23 require ( bitbucket.org/creachadair/stringset v0.0.11 - github.com/GoogleCloudPlatform/declarative-resource-client-library v1.82.0 + github.com/GoogleCloudPlatform/declarative-resource-client-library v1.83.0 github.com/golang/glog v1.1.2 github.com/hashicorp/hcl v1.0.0 github.com/kylelemons/godebug v1.1.0 diff --git a/tpgtools/go.sum b/tpgtools/go.sum index 4c05f97caca1..058e41114a23 100644 --- a/tpgtools/go.sum +++ b/tpgtools/go.sum @@ -6,12 +6,8 @@ cloud.google.com/go/compute v1.23.0/go.mod h1:4tCnrn48xsqlwSAiLf1HXMQk8CONslYbdi cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.80.0 h1:ZpQrm5i+ppVxTQjp6lU2APyAejavB/d7G2gZNu2RxsU= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.80.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.81.0 h1:zTRBYNu7nk3TMbiRfkBcRNzw4cOeym0z1GduDYNyRyE= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.81.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.82.0 h1:58Vw+qpPWX4JGAB/DfuDwEg6dGp0+q6raXqjs52qRik= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.82.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.83.0 h1:pvSYcI7HKOtqHTr4E9cRqVbgnh0+qnJZCrnmozltFVg= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.83.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4= github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= From 43cd12a93609a49d1a53ed672dc1f0402d7f7458 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Wed, 27 Aug 2025 16:41:52 +0200 Subject: [PATCH 081/201] logging: update documentation for `include_children` in `google_logging_organization_sink` (#14990) --- .../services/logging/resource_logging_organization_sink.go | 2 +- .../website/docs/r/logging_organization_sink.html.markdown | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/logging/resource_logging_organization_sink.go 
b/mmv1/third_party/terraform/services/logging/resource_logging_organization_sink.go index fcf750b93c86..708f370a5c4a 100644 --- a/mmv1/third_party/terraform/services/logging/resource_logging_organization_sink.go +++ b/mmv1/third_party/terraform/services/logging/resource_logging_organization_sink.go @@ -33,7 +33,7 @@ func ResourceLoggingOrganizationSink() *schema.Resource { Type: schema.TypeBool, Optional: true, Default: false, - Description: `Whether or not to include children organizations in the sink export. If true, logs associated with child projects are also exported; otherwise only logs relating to the provided organization are included.`, + Description: `Whether or not to include child folders or projects in the sink export. If true, logs associated with child projects are also exported; otherwise only logs relating to the provided organization are included.`, } schm.Schema["intercept_children"] = &schema.Schema{ Type: schema.TypeBool, diff --git a/mmv1/third_party/terraform/website/docs/r/logging_organization_sink.html.markdown b/mmv1/third_party/terraform/website/docs/r/logging_organization_sink.html.markdown index a0a1fc869b0c..8dfb360b8a7c 100644 --- a/mmv1/third_party/terraform/website/docs/r/logging_organization_sink.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/logging_organization_sink.html.markdown @@ -64,7 +64,7 @@ The following arguments are supported: * `disabled` - (Optional) If set to True, then this sink is disabled and it does not export any log entries. -* `include_children` - (Optional) Whether or not to include children organizations in the sink export. If true, logs +* `include_children` - (Optional) Whether or not to include child folders or projects in the sink export. If true, logs associated with child projects are also exported; otherwise only logs relating to the provided organization are included. * `intercept_children` - (Optional) Whether or not to intercept logs from child projects. If true, matching logs will not From 3edb536293445807a5a4c65f4075744fd53d8af3 Mon Sep 17 00:00:00 2001 From: bryan0515 Date: Wed, 27 Aug 2025 08:40:24 -0700 Subject: [PATCH 082/201] Add IAP for global forwarding rule (#14947) --- mmv1/products/iap/ForwardingRuleService.yaml | 56 +++++++++++++++++++ .../forwarding_rule_service_basic.tf.tmpl | 38 +++++++++++++ 2 files changed, 94 insertions(+) create mode 100644 mmv1/products/iap/ForwardingRuleService.yaml create mode 100644 mmv1/templates/terraform/examples/forwarding_rule_service_basic.tf.tmpl diff --git a/mmv1/products/iap/ForwardingRuleService.yaml b/mmv1/products/iap/ForwardingRuleService.yaml new file mode 100644 index 000000000000..c65f35deb029 --- /dev/null +++ b/mmv1/products/iap/ForwardingRuleService.yaml @@ -0,0 +1,56 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'WebForwardingRuleService' +description: | + Only used to generate IAM resources +# This resource is only used to generate IAM resources. 
They do not correspond to real +# GCP resources, and should not be used to generate anything other than IAM support. +exclude_resource: true +docs: +id_format: 'projects/{{project}}/iap_web/forwarding_rule/services/{{name}}' +base_url: 'projects/{{project}}/iap_web/forwarding_rule/services/{{name}}' +self_link: 'projects/{{project}}/iap_web/forwarding_rule/services/{{name}}' +import_format: + - 'projects/{{project}}/iap_web/forwarding_rule/services/{{name}}' +timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 +iam_policy: + method_name_separator: ':' + parent_resource_type: 'google_compute_global_forwarding_rule' + fetch_iam_policy_verb: 'POST' + allowed_iam_role: 'roles/iap.httpsResourceAccessor' + parent_resource_attribute: 'forwarding_rule_service_name' + iam_conditions_request_type: 'REQUEST_BODY' + example_config_body: 'templates/terraform/iam/iam_attributes.go.tmpl' +custom_code: +exclude_tgc: true +examples: + - name: 'forwarding_rule_service_basic' + primary_resource_id: 'default' + primary_resource_name: 'fmt.Sprintf("tf-test-forwarding-rule-service%s", context["random_suffix"])' + vars: + forwarding_rule_service_name: 'forwarding-rule-service' + target_http_proxy_name: 'target-http-proxy-name' + url_map_name: 'url-map-name' + backend_service_name: 'backend-service-name' + health_check_name: 'health-check-name' +parameters: +properties: + - name: 'name' + type: String + description: Name or self link of a forwarding rule service. + required: true diff --git a/mmv1/templates/terraform/examples/forwarding_rule_service_basic.tf.tmpl b/mmv1/templates/terraform/examples/forwarding_rule_service_basic.tf.tmpl new file mode 100644 index 000000000000..7e5122104b69 --- /dev/null +++ b/mmv1/templates/terraform/examples/forwarding_rule_service_basic.tf.tmpl @@ -0,0 +1,38 @@ +resource "google_compute_health_check" "default" { + name = "{{index $.Vars "health_check_name"}}" + http_health_check { + port = 80 + request_path = "/" + } +} + + +resource "google_compute_backend_service" "default" { + name = "{{index $.Vars "backend_service_name"}}" + protocol = "HTTP" + port_name = "http" + timeout_sec = 10 + health_checks = [google_compute_health_check.default.id] + load_balancing_scheme = "EXTERNAL_MANAGED" +} + + +resource "google_compute_url_map" "default" { + name = "{{index $.Vars "url_map_name"}}" + default_service = google_compute_backend_service.default.id +} + + +resource "google_compute_target_http_proxy" "default" { + name = "{{index $.Vars "target_http_proxy_name"}}" + url_map = google_compute_url_map.default.id +} + + +resource "google_compute_global_forwarding_rule" "{{$.PrimaryResourceId}}" { + name = "{{index $.Vars "forwarding_rule_service_name"}}" + target = google_compute_target_http_proxy.default.id + port_range = "80" + load_balancing_scheme = "EXTERNAL_MANAGED" +} + From 7a17e403d6beb18f7929bc6e14d36d22299a4aa3 Mon Sep 17 00:00:00 2001 From: Richard Belleville Date: Wed, 27 Aug 2025 08:40:42 -0700 Subject: [PATCH 083/201] GKE Multi-Subnet additional_ip_ranges_config: Change test subnet and network name from main to msc_main (#14839) --- .../resource_container_cluster_test.go.tmpl | 150 ++++++++++-------- 1 file changed, 88 insertions(+), 62 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index 77e8246a06e8..9eef6abaf583 100644 --- 
a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -14323,9 +14323,77 @@ resource "google_container_cluster" "primary" { `, name, networkName, subnetworkName, config) } +type subnetRangeInfo struct { + SubnetName string + RangeNames []string +} + +func bootstrapAdditionalIpRangesNetworkConfig(t *testing.T, name string, additionalSubnetCount int, secondaryRangeCount int) (string, []subnetRangeInfo) { + sri := []subnetRangeInfo{} + + // We create our network to ensure no range collisions. + networkName := acctest.BootstrapSharedTestNetwork(t, fmt.Sprintf("%s-network", name)) + mainSubnet := acctest.BootstrapSubnetWithOverrides(t, fmt.Sprintf("%s-subnet-main", name), networkName, map[string]interface{}{ + "ipCidrRange": "10.2.0.0/24", + "secondaryIpRanges": []map[string]interface{}{ + { + "rangeName": "pods", + "ipCidrRange": "10.3.0.0/16", + }, + { + "rangeName": "services", + "ipCidrRange": "10.4.0.0/16", + }, + }, + }) + + si := subnetRangeInfo{ + SubnetName: mainSubnet, + RangeNames: []string{"pods"}, + } + sri = append(sri, si) + + cumulativeRangeIndex := 0 + for subnetIndex := 0; subnetIndex < additionalSubnetCount; subnetIndex++ { + ranges := []map[string]interface{}{} + rangeNames := []string{} + for rangeIndex := 0; rangeIndex < secondaryRangeCount; rangeIndex++ { + rangeName := fmt.Sprintf("range-%d", cumulativeRangeIndex) + r := map[string]interface{}{ + "rangeName": rangeName, + "ipCidrRange": fmt.Sprintf("10.0.%d.0/24", cumulativeRangeIndex), + } + rangeNames = append(rangeNames, rangeName) + ranges = append(ranges, r) + cumulativeRangeIndex++ + } + + subnetOverrides := map[string]interface{}{ + "ipCidrRange": fmt.Sprintf("10.1.%d.0/24", subnetIndex), + "secondaryIpRanges": ranges, + } + + subnetName := fmt.Sprintf("%s-subnet-add-%d", name, subnetIndex) + acctest.BootstrapSubnetWithOverrides(t, subnetName, networkName, subnetOverrides) + + si := subnetRangeInfo{ + SubnetName: subnetName, + RangeNames: rangeNames, + } + + sri = append(sri, si) + } + + return networkName, sri +} + func TestAccContainerCluster_additional_ip_ranges_config_on_create(t *testing.T) { t.Parallel() + testName := "gke-msc" + network, sri := bootstrapAdditionalIpRangesNetworkConfig(t, testName, 2, 2) + + clusterName := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -14333,7 +14401,7 @@ func TestAccContainerCluster_additional_ip_ranges_config_on_create(t *testing.T) CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, 2, 2), + Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, network, sri), }, { ResourceName: "google_container_cluster.primary", @@ -14349,6 +14417,9 @@ func TestAccContainerCluster_additional_ip_ranges_config_on_create(t *testing.T) func TestAccContainerCluster_additional_ip_ranges_config_on_update(t *testing.T) { t.Parallel() + testName := "gke-msc-update" + network, sri := bootstrapAdditionalIpRangesNetworkConfig(t, testName, 2, 2) + clusterName := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -14356,7 +14427,7 @@ func TestAccContainerCluster_additional_ip_ranges_config_on_update(t *testing.T) CheckDestroy: 
testAccCheckContainerClusterDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, 0, 0), + Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, network, sri), }, { ResourceName: "google_container_cluster.primary", @@ -14366,7 +14437,7 @@ func TestAccContainerCluster_additional_ip_ranges_config_on_update(t *testing.T) Check: resource.TestCheckResourceAttrSet("google_container_cluster.primary", "node_pool.0.network_config.subnetwork"), }, { - Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, 1, 1), + Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, network, sri[:len(sri)-1]), }, { ResourceName: "google_container_cluster.primary", @@ -14375,7 +14446,7 @@ func TestAccContainerCluster_additional_ip_ranges_config_on_update(t *testing.T) ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { - Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, 0, 0), + Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, network, sri[:1]), }, { ResourceName: "google_container_cluster.primary", @@ -14384,7 +14455,7 @@ func TestAccContainerCluster_additional_ip_ranges_config_on_update(t *testing.T) ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { - Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, 2, 2), + Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, network, sri), }, { ResourceName: "google_container_cluster.primary", @@ -14393,7 +14464,7 @@ func TestAccContainerCluster_additional_ip_ranges_config_on_update(t *testing.T) ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { - Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, 0, 0), + Config: testAccContainerCluster_additional_ip_ranges_config(clusterName, network, sri[:1]), }, { ResourceName: "google_container_cluster.primary", @@ -14445,76 +14516,31 @@ func TestAccContainerCluster_withAnonymousAuthenticationConfig(t *testing.T) { }) } -func testAccContainerCluster_additional_ip_ranges_config(name string, additionalSubnetCount int, secondaryRangeCount int) string { - var subnetStr string +func testAccContainerCluster_additional_ip_ranges_config(clusterName string, networkName string, sri []subnetRangeInfo) string { var additionalIpRangesStr string - cumulativeRangeIndex := 0 - for subnetIndex := 0; subnetIndex < additionalSubnetCount; subnetIndex++ { - var secondaryRangeStr string + + for _, si := range sri[1:] { var podIpv4RangeStr string - for rangeIndex := 0; rangeIndex < secondaryRangeCount; rangeIndex++ { - secondaryRangeStr += fmt.Sprintf(` - secondary_ip_range { - range_name = "range-%d" - ip_cidr_range = "10.0.%d.0/24" - } - `, cumulativeRangeIndex, cumulativeRangeIndex) - - podIpv4RangeStr += fmt.Sprintf("google_compute_subnetwork.extra_%d.secondary_ip_range[%d].range_name", subnetIndex, rangeIndex) - if rangeIndex != secondaryRangeCount - 1 { + for i, rn := range si.RangeNames { + podIpv4RangeStr += fmt.Sprintf("\"%s\"", rn) + if i != len(si.RangeNames) - 1 { podIpv4RangeStr += ", " } - cumulativeRangeIndex++ } - - subnetStr += fmt.Sprintf(` - resource "google_compute_subnetwork" "extra_%d" { - ip_cidr_range = "10.1.%d.0/24" - name = "tf-test-subnet-%d" - network = google_compute_network.main.self_link - region = "us-central1" - %s - } - `, subnetIndex, subnetIndex, subnetIndex, secondaryRangeStr) - additionalIpRangesStr += fmt.Sprintf(` additional_ip_ranges_config { - 
subnetwork = google_compute_subnetwork.extra_%d.id + subnetwork = "%s" pod_ipv4_range_names = [%s] } - `, subnetIndex, podIpv4RangeStr) + `, si.SubnetName, podIpv4RangeStr) } return fmt.Sprintf(` - resource "google_compute_network" "main" { - name = "%s" - auto_create_subnetworks = false - } - - resource "google_compute_subnetwork" "main" { - ip_cidr_range = "10.2.0.0/24" - name = "%s" - network = google_compute_network.main.self_link - region = "us-central1" - - secondary_ip_range { - range_name = "services" - ip_cidr_range = "10.3.0.0/16" - } - - secondary_ip_range { - range_name = "pods" - ip_cidr_range = "10.4.0.0/16" - } - } - - %s - resource "google_container_cluster" "primary" { name = "%s" location = "us-central1-a" - network = google_compute_network.main.name - subnetwork = google_compute_subnetwork.main.name + network = "%s" + subnetwork = "%s" initial_node_count = 1 ip_allocation_policy { @@ -14525,7 +14551,7 @@ func testAccContainerCluster_additional_ip_ranges_config(name string, additional deletion_protection = false } - `, name, name, subnetStr, name, additionalIpRangesStr) + `, clusterName, networkName, sri[0].SubnetName, additionalIpRangesStr) } func testAccContainerCluster_withAnonymousAuthenticationConfig(name, networkName, subnetworkName string, mode string) string { From 68a864cb37a3d75fde5bcc743b28eb5aa21659ef Mon Sep 17 00:00:00 2001 From: Shrishty Chandra <3104562+shrishty@users.noreply.github.com> Date: Wed, 27 Aug 2025 21:10:52 +0530 Subject: [PATCH 084/201] Promote update_strategy field in google_compute_network_peering to v1 (#14820) Co-authored-by: Shrishty Chandra --- .../resource_compute_network_peering.go.tmpl | 19 ------------------- ... resource_compute_network_peering_test.go} | 2 -- .../r/compute_network_peering.html.markdown | 2 +- 3 files changed, 1 insertion(+), 22 deletions(-) rename mmv1/third_party/terraform/services/compute/{resource_compute_network_peering_test.go.tmpl => resource_compute_network_peering_test.go} (99%) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_network_peering.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_network_peering.go.tmpl index 1afbaac3e4f0..0b24e3fc61fc 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_network_peering.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_network_peering.go.tmpl @@ -114,7 +114,6 @@ func ResourceComputeNetworkPeering() *schema.Resource { Default: "IPV4_ONLY", }, - {{ if ne $.TargetVersionName `ga` }} "update_strategy": { Type: schema.TypeString, Optional: true, @@ -122,7 +121,6 @@ func ResourceComputeNetworkPeering() *schema.Resource { Description: `The update strategy determines the semantics for updates and deletes to the peering connection configuration. The default value is INDEPENDENT. 
Possible values: ["INDEPENDENT", "CONSENSUS"]`, Default: "INDEPENDENT", }, - {{- end }} }, UseJSONNumber: true, } @@ -223,11 +221,9 @@ func resourceComputeNetworkPeeringRead(d *schema.ResourceData, meta interface{}) return fmt.Errorf("Error setting stack_type: %s", err) } - {{ if ne $.TargetVersionName `ga` }} if err := d.Set("update_strategy", flattenNetworkPeeringUpdateStrategy(peering.UpdateStrategy, d, config)); err != nil { return fmt.Errorf("Error setting update_strategy: %s", err) } - {{- end }} return nil } @@ -328,19 +324,6 @@ func findPeeringFromNetwork(network *compute.Network, peeringName string) *compu return nil } func expandNetworkPeering(d *schema.ResourceData) *compute.NetworkPeering { - {{ if eq $.TargetVersionName `ga` }} - return &compute.NetworkPeering{ - ExchangeSubnetRoutes: true, - Name: d.Get("name").(string), - Network: d.Get("peer_network").(string), - ExportCustomRoutes: d.Get("export_custom_routes").(bool), - ImportCustomRoutes: d.Get("import_custom_routes").(bool), - ExportSubnetRoutesWithPublicIp: d.Get("export_subnet_routes_with_public_ip").(bool), - ImportSubnetRoutesWithPublicIp: d.Get("import_subnet_routes_with_public_ip").(bool), - StackType: d.Get("stack_type").(string), - ForceSendFields: []string{"ExportSubnetRoutesWithPublicIp", "ImportCustomRoutes", "ExportCustomRoutes"}, - } - {{- else }} return &compute.NetworkPeering{ ExchangeSubnetRoutes: true, Name: d.Get("name").(string), @@ -353,8 +336,6 @@ func expandNetworkPeering(d *schema.ResourceData) *compute.NetworkPeering { UpdateStrategy: d.Get("update_strategy").(string), ForceSendFields: []string{"ExportSubnetRoutesWithPublicIp", "ImportCustomRoutes", "ExportCustomRoutes"}, } - {{- end }} - } func flattenNetworkPeeringStackType(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_network_peering_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_network_peering_test.go similarity index 99% rename from mmv1/third_party/terraform/services/compute/resource_compute_network_peering_test.go.tmpl rename to mmv1/third_party/terraform/services/compute/resource_compute_network_peering_test.go index 17d2a20537d9..f97e7d71a5d0 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_network_peering_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_network_peering_test.go @@ -141,7 +141,6 @@ func TestAccComputeNetworkPeering_stackType(t *testing.T) { } -{{ if ne $.TargetVersionName `ga` }} func TestAccComputeNetworkPeering_updateStrategy(t *testing.T) { t.Parallel() @@ -177,7 +176,6 @@ func TestAccComputeNetworkPeering_updateStrategy(t *testing.T) { }) } -{{- end }} func testAccComputeNetworkPeeringDestroyProducer(t *testing.T) func(s *terraform.State) error { return func(s *terraform.State) error { diff --git a/mmv1/third_party/terraform/website/docs/r/compute_network_peering.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_network_peering.html.markdown index 410448fb8766..03aabfac59f1 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_network_peering.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_network_peering.html.markdown @@ -68,7 +68,7 @@ Whether subnet routes with public IP range are imported. The default value is fa * `stack_type` - (Optional) Which IP version(s) of traffic and routes are allowed to be imported or exported between peer networks. The default value is IPV4_ONLY. 
Possible values: ["IPV4_ONLY", "IPV4_IPV6"]. -* `update_strategy` - (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html)) +* `update_strategy` - (Optional) The update strategy determines the semantics for updates and deletes to the peering connection configuration. The default value is INDEPENDENT. Possible values: ["INDEPENDENT", "CONSENSUS"] ## Attributes Reference From 2746d987cd0e7c885c3a239204100ba21948f373 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Wed, 27 Aug 2025 08:45:17 -0700 Subject: [PATCH 085/201] Ignore parallelstore Instance update_time changes in importstateverify (#14987) --- mmv1/products/parallelstore/Instance.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/mmv1/products/parallelstore/Instance.yaml b/mmv1/products/parallelstore/Instance.yaml index 59ae8a9cf935..d073d8aa9f04 100644 --- a/mmv1/products/parallelstore/Instance.yaml +++ b/mmv1/products/parallelstore/Instance.yaml @@ -48,12 +48,16 @@ examples: name: 'instance' network_name: 'network' address_name: 'address' + ignore_read_extra: + - "update_time" - name: 'parallelstore_instance_basic' primary_resource_id: 'instance' vars: name: 'instance' network_name: 'network' address_name: 'address' + ignore_read_extra: + - "update_time" parameters: - name: 'location' type: String From cacf08143ad605715a366690ee5e0e80b6507c5b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wojtek=20Olesi=C5=84ski?= <64974153+wojtekolesinski@users.noreply.github.com> Date: Wed, 27 Aug 2025 17:48:36 +0200 Subject: [PATCH 086/201] Fix non-idempotent behavior in google_compute_region_network_endpoint_group when network is omitted (#14948) Signed-off-by: wojtekolesinski --- mmv1/products/compute/RegionNetworkEndpointGroup.yaml | 1 + .../region_network_endpoint_group_psc_service_attachment.tf.tmpl | 1 - 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/products/compute/RegionNetworkEndpointGroup.yaml b/mmv1/products/compute/RegionNetworkEndpointGroup.yaml index 454f66a0244a..5cba46b0978e 100644 --- a/mmv1/products/compute/RegionNetworkEndpointGroup.yaml +++ b/mmv1/products/compute/RegionNetworkEndpointGroup.yaml @@ -163,6 +163,7 @@ properties: custom_expand: 'templates/terraform/custom_expand/resourceref_with_validation.go.tmpl' resource: 'Network' imports: 'selfLink' + default_from_api: true - name: 'subnetwork' type: ResourceRef description: | diff --git a/mmv1/templates/terraform/examples/region_network_endpoint_group_psc_service_attachment.tf.tmpl b/mmv1/templates/terraform/examples/region_network_endpoint_group_psc_service_attachment.tf.tmpl index 0bf3fbeec1c5..51a0d376f9b8 100644 --- a/mmv1/templates/terraform/examples/region_network_endpoint_group_psc_service_attachment.tf.tmpl +++ b/mmv1/templates/terraform/examples/region_network_endpoint_group_psc_service_attachment.tf.tmpl @@ -64,6 +64,5 @@ resource "google_compute_region_network_endpoint_group" "{{$.PrimaryResourceId}} psc_data { producer_port = "88" } - network = google_compute_network.default.self_link subnetwork = google_compute_subnetwork.default.self_link } From a419b913b7fa000edffc44c14b3d7f935f2f31ae Mon Sep 17 00:00:00 2001 From: aditikumarii-google Date: Wed, 27 Aug 2025 21:31:07 +0530 Subject: [PATCH 087/201] Add create_time field in backups (#14951) --- .../backupdr/data_source_backup_dr_backup.go.tmpl | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_backup.go.tmpl 
b/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_backup.go.tmpl index 944e547352d3..2933f3c1ea3f 100644 --- a/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_backup.go.tmpl +++ b/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_backup.go.tmpl @@ -45,6 +45,11 @@ func DataSourceGoogleCloudBackupDRBackup() *schema.Resource { Computed: true, Description: `Name of the Data Source associated with Backup.`, }, + "create_time": { + Type: schema.TypeString, + Computed: true, + Description: `The time when the backup was created.`, + }, }, }, }, @@ -64,6 +69,11 @@ func DataSourceGoogleCloudBackupDRBackup() *schema.Resource { Type: schema.TypeString, Required: true, }, + "create_time": { + Type: schema.TypeString, + Computed: true, + Description: `The time when the backup was created.`, + }, } return &schema.Resource{ @@ -146,6 +156,7 @@ func flattenDataSourceBackupDRBackups(v interface{}, d *schema.ResourceData, con "backup_id": flattenDataSourceBackupDRBackupsBackupId(original["backupId"], d, config), "backup_vault_id": flattenDataSourceBackupDRBackupsBackupVaultId(original["backupVaultId"], d, config), "data_source_id": flattenDataSourceBackupDRBackupsDataSourceId(original["dataSourceId"], d, config), + "create_time": flattenDataSourceBackupDRBackupsCreateTime(original["createTime"], d, config), }) } return transformed @@ -170,3 +181,7 @@ func flattenDataSourceBackupDRBackupsBackupVaultId(v interface{}, d *schema.Reso func flattenDataSourceBackupDRBackupsDataSourceId(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { return v } + +func flattenDataSourceBackupDRBackupsCreateTime(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} \ No newline at end of file From aea7d7cfb5675ff5b8deaded06f17fd9623a6776 Mon Sep 17 00:00:00 2001 From: nimish-khurana Date: Wed, 27 Aug 2025 21:44:25 +0530 Subject: [PATCH 088/201] feat: enable default_from_api flag for ODB Network related fields in Oracledatabase AutonomousDatabase resource (#14898) --- mmv1/products/oracledatabase/AutonomousDatabase.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/mmv1/products/oracledatabase/AutonomousDatabase.yaml b/mmv1/products/oracledatabase/AutonomousDatabase.yaml index 3fb1691b9ddc..67e6d993c696 100644 --- a/mmv1/products/oracledatabase/AutonomousDatabase.yaml +++ b/mmv1/products/oracledatabase/AutonomousDatabase.yaml @@ -666,12 +666,14 @@ properties: projects/{project}/locations/{location}/odbNetworks/{odb_network} It is optional but if specified, this should match the parent ODBNetwork of the odb_subnet and backup_odb_subnet. + default_from_api: true - name: odbSubnet type: String description: |- The name of the OdbSubnet associated with the Autonomous Database for IP allocation. Format: projects/{project}/locations/{location}/odbNetworks/{odb_network}/odbSubnets/{odb_subnet} + default_from_api: true - name: 'createTime' type: String description: 'The date and time that the Autonomous Database was created. 
' From c30a42817317f9d3ffa6a009de3ce0cc898c48f4 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Wed, 27 Aug 2025 09:46:42 -0700 Subject: [PATCH 089/201] Added missing update_time importstateverify ignore for notebooks instance (#14988) --- .../services/notebooks/resource_notebooks_instance_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go index bb7da4791158..84ac3f4f66d1 100644 --- a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go +++ b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_instance_test.go @@ -25,7 +25,7 @@ func TestAccNotebooksInstance_create_vm_image(t *testing.T) { ResourceName: "google_notebooks_instance.test", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"vm_image", "metadata"}, + ImportStateVerifyIgnore: []string{"vm_image", "metadata", "update_time"}, }, }, }) From e94584a8902d4c06ca2447787d2677b0b975c2ec Mon Sep 17 00:00:00 2001 From: Cameron Thornton Date: Wed, 27 Aug 2025 12:12:50 -0500 Subject: [PATCH 090/201] fix storage bucket retention_period migration crash (#15000) --- .../resource_storage_bucket_600_migration.go | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go index bf4a561b50ea..22553d7bbbf3 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_600_migration.go @@ -2,9 +2,10 @@ package storage import ( "context" + "encoding/json" + "fmt" "log" "math" - "strconv" "strings" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" @@ -1571,8 +1572,14 @@ func ResourceStorageBucketStateUpgradeV3(_ context.Context, rawState map[string] retentionPolicies := rawState["retention_policy"].([]interface{}) if len(retentionPolicies) > 0 { retentionPolicy := retentionPolicies[0].(map[string]interface{}) - if v, ok := retentionPolicy["retention_period"]; ok { - retentionPolicy["retention_period"] = strconv.Itoa(v.(int)) + // nil check + if v, ok := retentionPolicy["retention_period"]; ok && v != nil { + // number conversion check to error rather than crash + if num, ok := v.(json.Number); ok { + retentionPolicy["retention_period"] = num.String() + } else { + return rawState, fmt.Errorf("retention_period in state has unexpected type %T", v) + } } } } From 3765e5663f318034832ae5ac605208304deb4aad Mon Sep 17 00:00:00 2001 From: Andrew Ferg Date: Wed, 27 Aug 2025 14:23:26 -0400 Subject: [PATCH 091/201] Fix HA Policy update test flakiness (#14960) --- ...ice_ha_policy_manual_leader_update_test.go | 74 +++++++++++-------- 1 file changed, 44 insertions(+), 30 deletions(-) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_ha_policy_manual_leader_update_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_ha_policy_manual_leader_update_test.go index 085f5cc19f8b..952893dde6bb 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_ha_policy_manual_leader_update_test.go +++ 
b/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_ha_policy_manual_leader_update_test.go @@ -128,21 +128,28 @@ resource "google_compute_region_backend_service" "default" { protocol = "UDP" load_balancing_scheme = "EXTERNAL" network = google_compute_network.default.id - backend { - group = google_compute_network_endpoint_group.neg.self_link - balancing_mode = "CONNECTION" - } - ha_policy { - fast_ip_move = "GARP_RA" - leader { - backend_group = google_compute_network_endpoint_group.neg.self_link - network_endpoint { - instance = google_compute_instance.endpoint-instance1.name - } - } - } - // Must explicitly disable connection draining to override default value. - connection_draining_timeout_sec = 0 + backend { + group = google_compute_network_endpoint_group.neg.self_link + balancing_mode = "CONNECTION" + } + ha_policy { + fast_ip_move = "GARP_RA" + leader { + backend_group = google_compute_network_endpoint_group.neg.self_link + network_endpoint { + instance = google_compute_instance.endpoint-instance1.name + } + } + } + // Must explicitly disable connection draining to override default value. + connection_draining_timeout_sec = 0 + // Explicitly depend on the endpoints to prevent test flakes due to creating + // the BackendService before the endpoints have been added to the NEG. + depends_on = [ + google_compute_network_endpoint_group.neg, + google_compute_network_endpoint.endpoint1, + google_compute_network_endpoint.endpoint2 + ] } `, context) } @@ -228,21 +235,28 @@ resource "google_compute_region_backend_service" "default" { protocol = "UDP" load_balancing_scheme = "EXTERNAL" network = google_compute_network.default.id - backend { - group = google_compute_network_endpoint_group.neg.self_link - balancing_mode = "CONNECTION" - } - ha_policy { - fast_ip_move = "GARP_RA" - leader { - backend_group = google_compute_network_endpoint_group.neg.self_link - network_endpoint { - instance = google_compute_instance.endpoint-instance2.name - } - } - } - // Must explicitly disable connection draining to override default value. - connection_draining_timeout_sec = 0 + backend { + group = google_compute_network_endpoint_group.neg.self_link + balancing_mode = "CONNECTION" + } + ha_policy { + fast_ip_move = "GARP_RA" + leader { + backend_group = google_compute_network_endpoint_group.neg.self_link + network_endpoint { + instance = google_compute_instance.endpoint-instance2.name + } + } + } + // Must explicitly disable connection draining to override default value. + connection_draining_timeout_sec = 0 + // Explicitly depend on the endpoints to prevent test flakes due to creating + // the BackendService before the endpoints have been added to the NEG. 
+ depends_on = [ + google_compute_network_endpoint_group.neg, + google_compute_network_endpoint.endpoint1, + google_compute_network_endpoint.endpoint2 + ] } `, context) } From b49a4ed564c89184dec0d6a6926c490ed9451d63 Mon Sep 17 00:00:00 2001 From: Gitika Yadav <123439083+gitika-yadav@users.noreply.github.com> Date: Thu, 28 Aug 2025 02:03:18 +0530 Subject: [PATCH 092/201] Update BackupPlan.yaml to move resource type field to GA (#14984) --- mmv1/products/backupdr/BackupPlan.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/mmv1/products/backupdr/BackupPlan.yaml b/mmv1/products/backupdr/BackupPlan.yaml index c60029f8850a..42ff4e86a235 100644 --- a/mmv1/products/backupdr/BackupPlan.yaml +++ b/mmv1/products/backupdr/BackupPlan.yaml @@ -98,7 +98,6 @@ properties: type: Array description: | The list of all resource types to which the `BackupPlan` can be applied. - min_version: beta item_type: type: String output: true From ff818d353100afb1563bc984a60f6080080ee9df Mon Sep 17 00:00:00 2001 From: aditikumarii-google Date: Thu, 28 Aug 2025 02:03:56 +0530 Subject: [PATCH 093/201] =?UTF-8?q?Changes=20to=20sql=20testAccSqlDatabase?= =?UTF-8?q?Instance=5FupdateFromBackupDR=20function=E2=80=A6=20(#14999)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../sql/resource_sql_database_instance_test.go.tmpl | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl index 87a8ab904fa4..5bf0eeeeace0 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl @@ -6664,10 +6664,10 @@ resource "google_sql_database_instance" "instance" { region = "us-central1" settings { - tier = "db-g1-small" + tier = "db-g1-small" backup_configuration { - enabled = true - } + enabled = "false" + } } lifecycle { ignore_changes = [ From 546c24cfd0cd7da54f40409be50d2d0844efda5f Mon Sep 17 00:00:00 2001 From: Axel Kossek Date: Wed, 27 Aug 2025 22:52:08 +0200 Subject: [PATCH 094/201] Add resource_manager_tags support to Region Backend Service api (#14837) Co-authored-by: Thomas Rodgers --- .../compute/RegionBackendService.yaml | 16 ++++++ ...ompute_region_backend_service_test.go.tmpl | 55 +++++++++++++++++++ 2 files changed, 71 insertions(+) diff --git a/mmv1/products/compute/RegionBackendService.yaml b/mmv1/products/compute/RegionBackendService.yaml index 312a970d5c9d..d022966f3458 100644 --- a/mmv1/products/compute/RegionBackendService.yaml +++ b/mmv1/products/compute/RegionBackendService.yaml @@ -1544,3 +1544,19 @@ properties: description: | The name of the VM instance of the leader network endpoint. The instance must already be attached to the NEG specified in the haPolicy.leader.backendGroup. + - name: 'params' + type: NestedObject + ignore_read: true + immutable: true + description: | + Additional params passed with the request, but not persisted as part of resource payload + properties: + - name: 'resourceManagerTags' + type: KeyValuePairs + description: | + Resource manager tags to be bound to the region backend service. Tag keys and values have the + same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, + and values are in the format tagValues/456. 
+ api_name: resourceManagerTags + ignore_read: true + immutable: true diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_test.go.tmpl index c4e16475dc75..591114bb6691 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_backend_service_test.go.tmpl @@ -7,6 +7,7 @@ import ( "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" ) func TestAccComputeRegionBackendService_basic(t *testing.T) { @@ -756,6 +757,35 @@ func TestAccComputeRegionBackendService_withDynamicBackendCount(t *testing.T) { }) } +func TestAccComputeRegionBackendService_withTags(t *testing.T) { + t.Parallel() + + org := envvar.GetTestOrgFromEnv(t) + + serviceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + checkName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + tagKeyResult := acctest.BootstrapSharedTestTagKeyDetails(t, "crm-rbs-tagkey", "organizations/"+org, make(map[string]interface{})) + sharedTagkey,_ := tagKeyResult["shared_tag_key"] + tagValueResult := acctest.BootstrapSharedTestTagValueDetails(t, "crm-rbs-tagvalue", sharedTagkey, org) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeRegionBackendServiceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeRegionBackendService_withTags(serviceName, checkName, tagKeyResult["name"], tagValueResult["name"]), + }, + { + ResourceName: "google_compute_region_backend_service.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"params"}, + }, + }, + }) +} + func testAccComputeRegionBackendService_withDynamicBackendCount(serviceName, netName, hcName, igName string) string { return fmt.Sprintf(` locals { @@ -1770,3 +1800,28 @@ resource "google_compute_region_health_check" "health_check" { } `, serviceName, checkName) } + +func testAccComputeRegionBackendService_withTags(serviceName, checkName string, tagKey string, tagValue string) string { + return fmt.Sprintf(` +resource "google_compute_region_backend_service" "foobar" { + name = "%s" + health_checks = [google_compute_health_check.zero.self_link] + region = "us-central1" + params { + resource_manager_tags = { + "%s" = "%s" + } + } +} + +resource "google_compute_health_check" "zero" { + name = "%s" + check_interval_sec = 1 + timeout_sec = 1 + + tcp_health_check { + port = "80" + } +} +`, serviceName, tagKey, tagValue, checkName) +} \ No newline at end of file From 3128bac0e3811085bcca7e9333ad3ea095f3f589 Mon Sep 17 00:00:00 2001 From: Ryan Oaks Date: Wed, 27 Aug 2025 16:53:41 -0400 Subject: [PATCH 095/201] Modify test file generation to use resource ImportPath (#15002) --- mmv1/provider/template_data.go | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/mmv1/provider/template_data.go b/mmv1/provider/template_data.go index 6964d5de5f9b..32df384cbc8a 100644 --- a/mmv1/provider/template_data.go +++ b/mmv1/provider/template_data.go @@ -144,7 +144,7 @@ func (td *TemplateData) GenerateTestFile(filePath string, resource api.Resource) } tmplInput := TestInput{ Res: 
resource, - ImportPath: td.ImportPath(), + ImportPath: resource.ImportPath, PROJECT_NAME: "my-project-name", CREDENTIALS: "my/credentials/filename.json", REGION: "us-west1", @@ -281,15 +281,6 @@ func (td *TemplateData) GenerateFile(filePath, templatePath string, input any, g } } -func (td *TemplateData) ImportPath() string { - if td.VersionName == GA_VERSION { - return "github.com/hashicorp/terraform-provider-google/google" - } else if td.VersionName == ALPHA_VERSION || td.VersionName == PRIVATE_VERSION { - return "internal/terraform-next/google-private" - } - return "github.com/hashicorp/terraform-provider-google-beta/google-beta" -} - func FixImports(outputPath string, dumpDiffs bool) { log.Printf("Fixing go import paths") From 921e62cd68912e2f05e854136f181dea69773db9 Mon Sep 17 00:00:00 2001 From: Mauricio Alvarez Leon <65101411+BBBmau@users.noreply.github.com> Date: Wed, 27 Aug 2025 14:34:20 -0700 Subject: [PATCH 096/201] `teamcity`: set `RELEASE_DIFF` as environment variable instead of parameter (#15003) --- .../terraform/.teamcity/components/builds/build_parameters.kt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/.teamcity/components/builds/build_parameters.kt b/mmv1/third_party/terraform/.teamcity/components/builds/build_parameters.kt index 4a2af9b93e15..393493f525df 100644 --- a/mmv1/third_party/terraform/.teamcity/components/builds/build_parameters.kt +++ b/mmv1/third_party/terraform/.teamcity/components/builds/build_parameters.kt @@ -210,7 +210,7 @@ fun ParametrizedWithType.acceptanceTestBuildParams(parallelism: Int, prefix: Str text("PARALLELISM", "%d".format(parallelism)) text("TEST_PREFIX", prefix) text("TIMEOUT", timeout) - text("RELEASE_DIFF", "true") + text("env.RELEASE_DIFF", releaseDiffTest) } // ParametrizedWithType.sweeperParameters sets build parameters that affect how sweepers are run From 4f2a6799b6235b3fb982aa322814d4ef67598f30 Mon Sep 17 00:00:00 2001 From: kautikdk <144651627+kautikdk@users.noreply.github.com> Date: Wed, 27 Aug 2025 22:49:25 +0000 Subject: [PATCH 097/201] feat(storagetransfer): add service_account to google_storage_transfer_job (#14961) --- .../resource_storage_transfer_job.go | 16 + .../resource_storage_transfer_job_test.go | 349 ++++++++++++++++-- .../docs/r/storage_transfer_job.html.markdown | 6 +- 3 files changed, 347 insertions(+), 24 deletions(-) diff --git a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go index 2eff1acfb8d6..41bd024723ce 100644 --- a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go +++ b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go @@ -142,6 +142,11 @@ func ResourceStorageTransferJob() *schema.Resource { ForceNew: true, Description: `The project in which the resource belongs. If it is not provided, the provider project is used.`, }, + "service_account": { + Type: schema.TypeString, + Optional: true, + Description: `The user-managed service account to run the job. If this field is specified, the given service account is granted the necessary permissions to all applicable resources (e.g. 
GCS buckets) required by the job.`, + }, "event_stream": { Type: schema.TypeList, Optional: true, @@ -909,6 +914,7 @@ func resourceStorageTransferJobCreate(d *schema.ResourceData, meta interface{}) ReplicationSpec: expandReplicationSpecs(d.Get("replication_spec").([]interface{})), LoggingConfig: expandTransferJobLoggingConfig(d.Get("logging_config").([]interface{})), NotificationConfig: expandTransferJobNotificationConfig(d.Get("notification_config").([]interface{})), + ServiceAccount: d.Get("service_account").(string), } var res *storagetransfer.TransferJob @@ -976,6 +982,9 @@ func resourceStorageTransferJobRead(d *schema.ResourceData, meta interface{}) er if err := d.Set("deletion_time", res.DeletionTime); err != nil { return fmt.Errorf("Error setting deletion_time: %s", err) } + if err := d.Set("service_account", res.ServiceAccount); err != nil { + return fmt.Errorf("Error setting service_account: %s", err) + } err = d.Set("schedule", flattenTransferSchedule(res.Schedule)) if err != nil { @@ -1085,6 +1094,13 @@ func resourceStorageTransferJobUpdate(d *schema.ResourceData, meta interface{}) } } + if d.HasChange("service_account") { + fieldMask = append(fieldMask, "service_account") + if v, ok := d.GetOk("service_account"); ok { + transferJob.ServiceAccount = v.(string) + } + } + if len(fieldMask) == 0 { return nil } diff --git a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_test.go b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_test.go index 09647d6ff895..3dc2058b0e31 100644 --- a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_test.go +++ b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_test.go @@ -573,6 +573,57 @@ func TestAccStorageTransferJob_hdfsSource(t *testing.T) { }) } +func TestAccStorageTransferJob_withServiceAccount(t *testing.T) { + t.Parallel() + + testTransferJobDescription := acctest.RandString(t, 10) + testSourceBucketName := fmt.Sprintf("tf-acc-source-%s", acctest.RandString(t, 10)) + testSinkBucketName := fmt.Sprintf("tf-acc-sink-%s", acctest.RandString(t, 10)) + testServiceAccountId := fmt.Sprintf("tf-acc-sa1-%s", acctest.RandString(t, 10)) + testUpdatedServiceAccountId := fmt.Sprintf("tf-acc-sa2-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageTransferJobDestroyProducer(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccStorageTransferJob_withServiceAccount(testTransferJobDescription, testSourceBucketName, testSinkBucketName, testServiceAccountId, envvar.GetTestProjectFromEnv()), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("google_storage_transfer_job.with_sa", "service_account", fmt.Sprintf("%s@%s.iam.gserviceaccount.com", testServiceAccountId, envvar.GetTestProjectFromEnv())), + ), + }, + { + ResourceName: "google_storage_transfer_job.with_sa", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccStorageTransferJob_withServiceAccount_updated(testTransferJobDescription, testSourceBucketName, testSinkBucketName, testServiceAccountId, testUpdatedServiceAccountId, envvar.GetTestProjectFromEnv()), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("google_storage_transfer_job.with_sa", 
"service_account", fmt.Sprintf("%s@%s.iam.gserviceaccount.com", testUpdatedServiceAccountId, envvar.GetTestProjectFromEnv())), + ), + }, + { + ResourceName: "google_storage_transfer_job.with_sa", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccStorageTransferJob_withServiceAccount_removed(testTransferJobDescription, testSourceBucketName, testSinkBucketName, envvar.GetTestProjectFromEnv()), + }, + { + ResourceName: "google_storage_transfer_job.with_sa", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + func TestAccStorageTransferJob_transferUpdateToEmptyString(t *testing.T) { t.Parallel() @@ -937,7 +988,7 @@ resource "google_storage_transfer_job" "transfer_job" { } repeat_interval = "604800s" } - + logging_config { log_actions = [ "COPY", @@ -1112,7 +1163,7 @@ resource "google_storage_transfer_job" "transfer_job" { } repeat_interval = "604800s" } - + logging_config { log_actions = [ "COPY", @@ -1138,7 +1189,7 @@ func testAccStorageTransferJob_transferJobName(project string, dataSourceBucketN data "google_storage_transfer_project_service_account" "default" { project = "%s" } - + resource "google_storage_bucket" "data_source" { name = "%s" project = "%s" @@ -1146,13 +1197,13 @@ func testAccStorageTransferJob_transferJobName(project string, dataSourceBucketN force_destroy = true uniform_bucket_level_access = true } - + resource "google_storage_bucket_iam_member" "data_source" { bucket = google_storage_bucket.data_source.name role = "roles/storage.admin" member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" } - + resource "google_storage_bucket" "data_sink" { name = "%s" project = "%s" @@ -1160,18 +1211,18 @@ func testAccStorageTransferJob_transferJobName(project string, dataSourceBucketN force_destroy = true uniform_bucket_level_access = true } - + resource "google_storage_bucket_iam_member" "data_sink" { bucket = google_storage_bucket.data_sink.name role = "roles/storage.admin" member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" } - + resource "google_storage_transfer_job" "transfer_job" { name = "transferJobs/%s" description = "%s" project = "%s" - + transfer_spec { gcs_data_source { bucket_name = google_storage_bucket.data_source.name @@ -1182,7 +1233,7 @@ func testAccStorageTransferJob_transferJobName(project string, dataSourceBucketN path = "foo/bar/" } } - + schedule { schedule_start_date { year = 2018 @@ -1202,7 +1253,7 @@ func testAccStorageTransferJob_transferJobName(project string, dataSourceBucketN } repeat_interval = "604800s" } - + depends_on = [ google_storage_bucket_iam_member.data_source, google_storage_bucket_iam_member.data_sink, @@ -1387,7 +1438,7 @@ resource "google_storage_transfer_job" "transfer_job" { path = "foo/bar/" } } - + logging_config { enable_on_prem_gcs_transfer_logs = true } @@ -1522,7 +1573,7 @@ resource "google_storage_transfer_job" "transfer_job" { bucket_name = google_storage_bucket.data_source.name } } - + logging_config { enable_on_prem_gcs_transfer_logs = false } @@ -2155,7 +2206,7 @@ resource "google_storage_transfer_job" "transfer_job" { last_modified_since = "2020-01-01T00:00:00Z" last_modified_before = "2020-01-01T00:00:00Z" exclude_prefixes = [ - "a/b/c", + "a/b/c", ] include_prefixes = [ "a/b" @@ -2241,7 +2292,7 @@ resource "google_storage_transfer_job" "transfer_job" { last_modified_since = "2020-01-01T00:00:00Z" last_modified_before = "2020-01-01T00:00:00Z" exclude_prefixes = [ - "a/b/c", + "a/b/c", ] include_prefixes 
= [ "a/b" @@ -2431,7 +2482,7 @@ func testAccStorageTransferJob_transferJobGcsPath(project string, dataSourceBuck data "google_storage_transfer_project_service_account" "default" { project = "%s" } - + resource "google_storage_bucket" "data_source" { name = "%s" project = "%s" @@ -2439,13 +2490,13 @@ func testAccStorageTransferJob_transferJobGcsPath(project string, dataSourceBuck force_destroy = true uniform_bucket_level_access = true } - + resource "google_storage_bucket_iam_member" "data_source" { bucket = google_storage_bucket.data_source.name role = "roles/storage.admin" member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" } - + resource "google_storage_bucket" "data_sink" { name = "%s" project = "%s" @@ -2453,18 +2504,18 @@ func testAccStorageTransferJob_transferJobGcsPath(project string, dataSourceBuck force_destroy = true uniform_bucket_level_access = true } - + resource "google_storage_bucket_iam_member" "data_sink" { bucket = google_storage_bucket.data_sink.name role = "roles/storage.admin" member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" } - + resource "google_storage_transfer_job" "transfer_job" { name = "transferJobs/%s" description = "%s" project = "%s" - + transfer_spec { gcs_data_source { bucket_name = google_storage_bucket.data_source.name @@ -2475,7 +2526,7 @@ func testAccStorageTransferJob_transferJobGcsPath(project string, dataSourceBuck path = "%s" } } - + schedule { schedule_start_date { year = 2018 @@ -2495,7 +2546,7 @@ func testAccStorageTransferJob_transferJobGcsPath(project string, dataSourceBuck } repeat_interval = "604800s" } - + depends_on = [ google_storage_bucket_iam_member.data_source, google_storage_bucket_iam_member.data_sink, @@ -2503,3 +2554,257 @@ func testAccStorageTransferJob_transferJobGcsPath(project string, dataSourceBuck } `, project, dataSourceBucketName, project, dataSinkBucketName, project, testTransferJobName, transferJobDescription, project, gcsPath) } + +func testAccStorageTransferJob_withServiceAccount(description, dataSourceBucketName, dataSinkBucketName, serviceAccountId, project string) string { + return fmt.Sprintf(` +resource "google_service_account" "test_account" { + project = "%s" + account_id = "%s" + display_name = "Test Service Account" +} + +resource "google_storage_bucket" "source" { + project = "%s" + name = "%s" + location = "US" + force_destroy = true +} + +resource "google_storage_bucket" "sink" { + project = "%s" + name = "%s" + location = "US" + force_destroy = true +} + +resource "google_storage_bucket_iam_member" "source_iam" { + bucket = google_storage_bucket.source.name + role = "roles/storage.admin" + member = "serviceAccount:${google_service_account.test_account.email}" +} + +resource "google_storage_bucket_iam_member" "sink_iam" { + bucket = google_storage_bucket.sink.name + role = "roles/storage.admin" + member = "serviceAccount:${google_service_account.test_account.email}" +} + +data "google_storage_transfer_project_service_account" "transfer_sa" { +} + +resource "google_service_account_iam_member" "token_creator" { + service_account_id = google_service_account.test_account.name + role = "roles/iam.serviceAccountTokenCreator" + member = "serviceAccount:${data.google_storage_transfer_project_service_account.transfer_sa.email}" +} + +resource "time_sleep" "wait_120_seconds" { + depends_on = [ + google_service_account_iam_member.token_creator, + google_storage_bucket_iam_member.source_iam, + 
google_storage_bucket_iam_member.sink_iam, + ] + create_duration = "120s" +} + +resource "google_storage_transfer_job" "with_sa" { + description = "%s" + project = "%s" + service_account = google_service_account.test_account.email + + transfer_spec { + gcs_data_source { + bucket_name = google_storage_bucket.source.name + } + gcs_data_sink { + bucket_name = google_storage_bucket.sink.name + } + } + + schedule { + schedule_start_date { + year = 2023 + month = 1 + day = 15 + } + schedule_end_date { + year = 2023 + month = 1 + day = 15 + } + } + + depends_on = [ + time_sleep.wait_120_seconds, + ] +} +`, project, serviceAccountId, project, dataSourceBucketName, project, dataSinkBucketName, description, project) +} + +func testAccStorageTransferJob_withServiceAccount_updated(description, dataSourceBucketName, dataSinkBucketName, serviceAccountId, updatedServiceAccountId, project string) string { + return fmt.Sprintf(` +resource "google_service_account" "test_account" { + project = "%s" + account_id = "%s" + display_name = "Test Service Account" +} + +resource "google_service_account" "test_account_2" { + project = "%s" + account_id = "%s" + display_name = "Test Service Account 2" +} + +resource "google_storage_bucket" "source" { + project = "%s" + name = "%s" + location = "US" + force_destroy = true +} + +resource "google_storage_bucket" "sink" { + project = "%s" + name = "%s" + location = "US" + force_destroy = true +} + +resource "google_storage_bucket_iam_member" "source_iam" { + bucket = google_storage_bucket.source.name + role = "roles/storage.admin" + member = "serviceAccount:${google_service_account.test_account_2.email}" +} + +resource "google_storage_bucket_iam_member" "sink_iam" { + bucket = google_storage_bucket.sink.name + role = "roles/storage.admin" + member = "serviceAccount:${google_service_account.test_account_2.email}" +} + +data "google_storage_transfer_project_service_account" "transfer_sa" { +} + +resource "google_service_account_iam_member" "token_creator" { + service_account_id = google_service_account.test_account_2.name + role = "roles/iam.serviceAccountTokenCreator" + member = "serviceAccount:${data.google_storage_transfer_project_service_account.transfer_sa.email}" +} + +resource "time_sleep" "wait_120_seconds_2" { + depends_on = [ + google_service_account_iam_member.token_creator, + google_storage_bucket_iam_member.source_iam, + google_storage_bucket_iam_member.sink_iam, + ] + create_duration = "120s" +} + +resource "google_storage_transfer_job" "with_sa" { + description = "%s" + project = "%s" + service_account = google_service_account.test_account_2.email + + transfer_spec { + gcs_data_source { + bucket_name = google_storage_bucket.source.name + } + gcs_data_sink { + bucket_name = google_storage_bucket.sink.name + } + } + + schedule { + schedule_start_date { + year = 2023 + month = 1 + day = 15 + } + schedule_end_date { + year = 2023 + month = 1 + day = 15 + } + } + + depends_on = [ + time_sleep.wait_120_seconds_2, + ] +} +`, project, serviceAccountId, project, updatedServiceAccountId, project, dataSourceBucketName, project, dataSinkBucketName, description, project) +} + +func testAccStorageTransferJob_withServiceAccount_removed(description, dataSourceBucketName, dataSinkBucketName, project string) string { + return fmt.Sprintf(` + +resource "google_storage_bucket" "source" { + project = "%s" + name = "%s" + location = "US" + force_destroy = true +} + +resource "google_storage_bucket" "sink" { + project = "%s" + name = "%s" + location = "US" + force_destroy = true 
+}
+
+
+data "google_storage_transfer_project_service_account" "default" {
+  project = "%s"
+}
+
+resource "google_storage_bucket_iam_member" "source_iam" {
+  bucket = google_storage_bucket.source.name
+  role   = "roles/storage.admin"
+  member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}"
+}
+
+resource "google_storage_bucket_iam_member" "sink_iam" {
+  bucket = google_storage_bucket.sink.name
+  role   = "roles/storage.admin"
+  member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}"
+}
+
+resource "time_sleep" "wait_120_seconds_3" {
+  depends_on = [
+    google_storage_bucket_iam_member.source_iam,
+    google_storage_bucket_iam_member.sink_iam,
+  ]
+  create_duration = "120s"
+}
+
+resource "google_storage_transfer_job" "with_sa" {
+  description = "%s"
+  project     = "%s"
+
+  transfer_spec {
+    gcs_data_source {
+      bucket_name = google_storage_bucket.source.name
+    }
+    gcs_data_sink {
+      bucket_name = google_storage_bucket.sink.name
+    }
+  }
+
+  schedule {
+    schedule_start_date {
+      year  = 2023
+      month = 1
+      day   = 15
+    }
+    schedule_end_date {
+      year  = 2023
+      month = 1
+      day   = 15
+    }
+  }
+
+  depends_on = [
+    time_sleep.wait_120_seconds_3,
+  ]
+
+}
+`, project, dataSourceBucketName, project, dataSinkBucketName, project, description, project)
+}
diff --git a/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown b/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown
index 2c27448bd9c7..65bd53d0a055 100644
--- a/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown
+++ b/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown
@@ -140,6 +140,8 @@ The following arguments are supported:
 
 * `project` - (Optional) The project in which the resource belongs. If it is not provided, the provider project is used.
 
+* `service_account` - (Optional) The user-managed service account to run the job. If this field is specified, the given service account is granted the necessary permissions to all applicable resources (e.g. GCS buckets) required by the job.
+
 * `status` - (Optional) Status of the job. Default: `ENABLED`. **NOTE: The effect of the new job status takes place during a subsequent job run. For example, if you change the job status from ENABLED to DISABLED, and an operation spawned by the transfer is running, the status change would not affect the current operation.**
 
 * `notification_config` - (Optional) Notification configuration. This is not supported for transfers involving PosixFilesystem. Structure [documented below](#nested_notification_config).
@@ -328,13 +330,13 @@ The `azure_credentials` block supports:
 
 The `loggin_config` block supports:
 
-* `log_actions` - (Optional) A list of actions to be logged. If empty, no logs are generated. Not supported for transfers with PosixFilesystem data sources; use enableOnpremGcsTransferLogs instead.
+* `log_actions` - (Optional) A list of actions to be logged. If empty, no logs are generated. Not supported for transfers with PosixFilesystem data sources; use enableOnpremGcsTransferLogs instead. Each action may be one of `FIND`, `DELETE`, and `COPY`.
 
 * `log_action_states` - (Optional) A list of loggable action states. If empty, no logs are generated. Not supported for transfers with PosixFilesystem data sources; use enableOnpremGcsTransferLogs instead. Each action state may be one of `SUCCEEDED`, and `FAILED`.
 
-* `enable_on_prem_gcs_transfer` - (Optional) For transfers with a PosixFilesystem source, this option enables the Cloud Storage transfer logs for this transfer.
+* `enable_on_prem_gcs_transfer` - (Optional) For transfers with a PosixFilesystem source, this option enables the Cloud Storage transfer logs for this transfer. Defaults to false.
 
 ## Attributes Reference
 
From 8819c95d944b6fb8fe4cb906f9fa4c6aa6ee19f7 Mon Sep 17 00:00:00 2001
From: "Stephen Lewis (Burrows)"
Date: Wed, 27 Aug 2025 16:53:14 -0700
Subject: [PATCH 098/201] Fixed TestAccApigeeSecurityAction_apigeeSecurityActionFull test setup (#14986)

---
 .../resource_apigee_security_action_test.go  | 42 ++++++++++++++++++-
 1 file changed, 40 insertions(+), 2 deletions(-)

diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_security_action_test.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_security_action_test.go
index c49f19b29c06..cd4dd1e382b4 100644
--- a/mmv1/third_party/terraform/services/apigee/resource_apigee_security_action_test.go
+++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_security_action_test.go
@@ -9,6 +9,7 @@ import (
 	"github.com/hashicorp/terraform-plugin-testing/terraform"
 
 	"github.com/hashicorp/terraform-provider-google/google/acctest"
+	"github.com/hashicorp/terraform-provider-google/google/envvar"
 	"github.com/hashicorp/terraform-provider-google/google/tpgresource"
 	transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport"
 )
@@ -57,7 +58,9 @@ func TestAccApigeeSecurityAction_apigeeSecurityActionFull(t *testing.T) {
 	t.Parallel()
 
 	context := map[string]interface{}{
-		"random_suffix": acctest.RandString(t, 10),
+		"billing_account": envvar.GetTestBillingAccountFromEnv(t),
+		"org_id":          envvar.GetTestOrgFromEnv(t),
+		"random_suffix":   acctest.RandString(t, 10),
 	}
 
 	acctest.VcrTest(t, resource.TestCase{
@@ -177,10 +180,45 @@ func TestAccApigeeSecurityAction_apigeeSecurityActionFull(t *testing.T) {
 
 func testAccApigeeSecurityAction_apigeeBase(context map[string]interface{}) string {
 	return acctest.Nprintf(`
-data "google_client_config" "current" {}
+resource "google_project" "project" {
+  project_id      = "tf-test-%{random_suffix}"
+  name            = "tf-test-%{random_suffix}"
+  org_id          = "%{org_id}"
+  billing_account = "%{billing_account}"
+  deletion_policy = "DELETE"
+}
+
+resource "time_sleep" "wait_60_seconds" {
+  create_duration = "60s"
+  depends_on      = [google_project.project]
+}
+
+resource "google_project_service" "apigee" {
+  project    = google_project.project.project_id
+  service    = "apigee.googleapis.com"
+  depends_on = [time_sleep.wait_60_seconds]
+}
+
+resource "google_project_service" "compute" {
+  project    = google_project.project.project_id
+  service    = "compute.googleapis.com"
+  depends_on = [google_project_service.apigee]
+}
+
+resource "google_project_service" "servicenetworking" {
+  project    = google_project.project.project_id
+  service    = "servicenetworking.googleapis.com"
+  depends_on = [google_project_service.compute]
+}
+
+resource "time_sleep" "wait_120_seconds" {
+  create_duration = "120s"
+  depends_on      = [google_project_service.servicenetworking]
+}
 
 resource "google_compute_network" "apigee_network" {
   name = "tf-test-network-%{random_suffix}"
+  depends_on = [time_sleep.wait_120_seconds]
 }
 
 resource "google_compute_global_address" "apigee_range" {
From 0005201431a4daf44dca43e207acb1a2d846d6be Mon Sep 17 00:00:00 2001
From: Jon Buckley
Date: Thu, 28 Aug 2025 11:55:08 -0400
Subject: [PATCH 099/201] sql: Add `consumer_network_status`, `ip_address`, and
`status` fields to `google_sql_database_instance` (#14643) --- .../sql/resource_sql_database_instance.go.tmpl | 18 ++++++++++++++++++ .../resource_sql_database_instance_meta.yaml | 3 +++ .../docs/r/sql_database_instance.html.markdown | 6 ++++++ 3 files changed, 27 insertions(+) diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl index bdae1754c38c..6abe0b4d3d4e 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl @@ -561,6 +561,21 @@ API (for read pools, effective_availability_type may differ from availability_ty Required: true, Description: `The consumer network of this consumer endpoint. This must be a resource path that includes both the host project and the network name. The consumer host project of this network might be different from the consumer service project.`, }, + "consumer_network_status": { + Type: schema.TypeString, + Computed: true, + Description: `The connection policy status of the consumer network.`, + }, + "ip_address": { + Type: schema.TypeString, + Computed: true, + Description: `The IP address of the consumer endpoint.`, + }, + "status": { + Type: schema.TypeString, + Computed: true, + Description: `The connection status of the consumer endpoint.`, + }, }, }, Description: `A comma-separated list of networks or a comma-separated list of network-project pairs. Each project in this list is represented by a project number (numeric) or by a project ID (alphanumeric). This allows Private Service Connect connections to be created automatically for the specified networks.`, @@ -2701,7 +2716,10 @@ func flattenPscAutoConnections(pscAutoConnections []*sqladmin.PscAutoConnectionC for _, flag := range pscAutoConnections { data := map[string]interface{}{ "consumer_network": flag.ConsumerNetwork, + "consumer_network_status": flag.ConsumerNetworkStatus, "consumer_service_project_id": flag.ConsumerProject, + "ip_address": flag.IpAddress, + "status": flag.Status, } flags = append(flags, data) diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_meta.yaml b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_meta.yaml index df9d0644270c..18549ce5cb8b 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_meta.yaml +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_meta.yaml @@ -100,7 +100,10 @@ fields: - field: 'settings.ip_configuration.private_network' - field: 'settings.ip_configuration.psc_config.allowed_consumer_projects' - field: 'settings.ip_configuration.psc_config.psc_auto_connections.consumer_network' + - field: 'settings.ip_configuration.psc_config.psc_auto_connections.consumer_network_status' - field: 'settings.ip_configuration.psc_config.psc_auto_connections.consumer_service_project_id' + - field: 'settings.ip_configuration.psc_config.psc_auto_connections.ip_address' + - field: 'settings.ip_configuration.psc_config.psc_auto_connections.status' - field: 'settings.ip_configuration.psc_config.psc_enabled' - field: 'settings.ip_configuration.server_ca_mode' - field: 'settings.ip_configuration.server_ca_pool' diff --git a/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown b/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown index b0675085eaf1..f3c424607478 100644 --- 
a/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown @@ -698,6 +698,12 @@ performing filtering in a Terraform config. ~> **NOTE:** Users can upgrade a read replica instance to a stand-alone Cloud SQL instance with the help of `instance_type`. To promote, users have to set the `instance_type` property as `CLOUD_SQL_INSTANCE` and remove/unset `master_instance_name` and `replica_configuration` from instance configuration. This operation might cause your instance to restart. +* `settings.ip_configuration.psc_config.psc_auto_connections.consumer_network_status` - (Output) The connection policy status of the consumer network. + +* `settings.ip_configuration.psc_config.psc_auto_connections.ip_address` - (Output) The IP address of the consumer endpoint. + +* `settings.ip_configuration.psc_config.psc_auto_connections.status` - (Output) The connection status of the consumer endpoint. + * `settings.version` - Used to make sure changes to the `settings` block are atomic. From f84cf65f8518d932ddadfafd002bb49cb8d16fd1 Mon Sep 17 00:00:00 2001 From: malhotrasagar2212 Date: Thu, 28 Aug 2025 12:12:29 -0400 Subject: [PATCH 100/201] Compute public delagated sub prefix list support (#14264) --- .../compute/PublicDelegatedPrefix.yaml | 55 +++++++++++++ ...e_compute_public_advertised_prefix_test.go | 79 +++++++++++++++++++ 2 files changed, 134 insertions(+) diff --git a/mmv1/products/compute/PublicDelegatedPrefix.yaml b/mmv1/products/compute/PublicDelegatedPrefix.yaml index c84125c96061..21a74f13da40 100644 --- a/mmv1/products/compute/PublicDelegatedPrefix.yaml +++ b/mmv1/products/compute/PublicDelegatedPrefix.yaml @@ -122,3 +122,58 @@ properties: The IP address range, in CIDR format, represented by this public delegated prefix. required: true + - name: 'publicDelegatedSubPrefixs' + type: Array + output: true + description: | + List of sub public delegated fixes for BYO IP functionality. + Each item in this array represents a sub prefix that can be + used to create addresses or further allocations. + item_type: + type: NestedObject + properties: + - name: 'name' + type: String + description: | + The name of the sub public delegated prefix. + - name: 'description' + type: String + description: | + An optional description of this sub public delegated prefix. + - name: 'region' + type: String + description: | + Output-only. The region of the sub public delegated prefix if it is regional. If absent, the sub prefix is global. + - name: 'status' + type: Enum + description: | + The status of the sub public delegated prefix. + enum_values: + - 'INITIALIZING' + - 'READY_TO_ANNOUNCE' + - 'ANNOUNCED' + - 'DELETING' + - name: 'ipCidrRange' + type: String + description: | + The IP address range in the CIDR format represented by this sub prefix. + - name: 'isAddress' + type: Boolean + description: | + Whether the sub prefix is delegated for address creation. + - name: 'mode' + type: Enum + description: | + The PublicDelegatedSubPrefix mode for IPv6 only. + enum_values: + - 'DELEGATION' + - 'EXTERNAL_IPV6_FORWARDING_RULE_CREATION' + - 'EXTERNAL_IPV6_SUBNETWORK_CREATION' + - name: 'allocatablePrefixLength' + type: Integer + description: | + The allocatable prefix length supported by this PublicDelegatedSubPrefix. + - name: 'delegatee_project' + type: String + description: | + Name of the project scoping this PublicDelegatedSubPrefix. 
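
The `publicDelegatedSubPrefixs` block added above is output-only, so it surfaces on the generated `google_compute_public_delegated_prefix` resource as a read-only nested list. A minimal sketch of how it might be consumed from configuration, assuming the generated Terraform attribute is named `public_delegated_sub_prefixs` (the snake_case form of the API field) and that a parent prefix resource named `parent` is declared elsewhere:

```hcl
# Sketch only: `public_delegated_sub_prefixs` is assumed to be the generated
# attribute name, and google_compute_public_delegated_prefix.parent is assumed
# to be declared elsewhere in the configuration.
output "sub_prefix_ip_ranges" {
  description = "CIDR ranges of the sub prefixes reported on the parent prefix."
  value       = google_compute_public_delegated_prefix.parent.public_delegated_sub_prefixs[*].ip_cidr_range
}
```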
diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_public_advertised_prefix_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_public_advertised_prefix_test.go index 9c4ca876645e..856a6801d044 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_public_advertised_prefix_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_public_advertised_prefix_test.go @@ -21,6 +21,7 @@ func TestAccComputePublicPrefixes(t *testing.T) { "public_delegated_prefixes_ipv6": testAccComputePublicDelegatedPrefix_publicDelegatedPrefixesIpv6Test, "public_advertised_prefixes_pdp_scope": testAccComputePublicAdvertisedPrefix_publicAdvertisedPrefixesPdpScopeTest, "public_delegated_prefix_ipv6_subnet_mode": testAccComputePublicDelegatedPrefix_publicDelegatedPrefixIpv6SubnetModeTest, + "public_delgated_prefix_with_sub_prefix": TestAccComputePublicDelegatedPrefix_computePublicDelegatedPrefixWithSubPrefixExample, } for name, tc := range testCases { @@ -35,6 +36,84 @@ func TestAccComputePublicPrefixes(t *testing.T) { } } +func TestAccComputePublicDelegatedPrefix_computePublicDelegatedPrefixWithSubPrefixExample(t *testing.T) { + t.Parallel() + subPrefixResourceName := "google_compute_public_delegated_prefix.subprefix" + parentProject := "tf-static-byoip" + parentRegion := "us-central1" + parentName := "tf-test-delegation-mode-sub-pdp" + + context := map[string]interface{}{ + "parent_pdp_id": "projects/tf-static-byoip/regions/us-central1/publicDelegatedPrefixes/tf-test-delegation-mode-sub-pdp", + "project": "tf-static-byoip", + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputePublicDelegatedPrefixDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputePublicDelegatedPrefix_computePublicDelegatedPrefixWithSubPrefixExample(context), + Check: resource.ComposeTestCheckFunc( + // First, a basic check that the sub-prefix was created + resource.TestCheckResourceAttrSet(subPrefixResourceName, "id"), + + // Now, the custom check function + testAccCheckParentHasSubPrefix(t, parentProject, parentRegion, parentName, subPrefixResourceName), + ), + }, + { + ResourceName: "google_compute_public_delegated_prefix.subprefix", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"region"}, + }, + }, + }) +} + +func testAccComputePublicDelegatedPrefix_computePublicDelegatedPrefixWithSubPrefixExample(context map[string]interface{}) string { + return acctest.Nprintf(` + +resource "google_compute_public_delegated_prefix" "subprefix" { + name = "tf-test-sub-prefix-1%{random_suffix}" + description = "A nested address" + region = "us-central1" + ip_cidr_range = "2600:1901:4500:2::/64" + parent_prefix = "%{parent_pdp_id}" + mode = "DELEGATION" +} +`, context) +} + +func testAccCheckParentHasSubPrefix(t *testing.T, project, region, parentName, subPrefixResourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[subPrefixResourceName] + if !ok { + return fmt.Errorf("Not found: %s", subPrefixResourceName) + } + newSubPrefixName := rs.Primary.Attributes["name"] + + config := acctest.GoogleProviderConfig(t) + computeService := config.NewComputeClient(config.UserAgent) + + parent, err := computeService.PublicDelegatedPrefixes.Get(project, region, parentName).Do() + if 
err != nil { + return err + } + + for _, sub := range parent.PublicDelegatedSubPrefixs { + if sub.Name == newSubPrefixName { + return nil + } + } + + return fmt.Errorf("Sub-Prefix %q not found in parent %q's sub-prefix list", newSubPrefixName, parentName) + } +} + func testAccComputePublicAdvertisedPrefix_publicAdvertisedPrefixesPdpScopeTest(t *testing.T) { context := map[string]interface{}{ "description": envvar.GetTestPublicAdvertisedPrefixDescriptionFromEnv(t), From 3e1b5423789ad74edb61871d164ff254a0d21e7a Mon Sep 17 00:00:00 2001 From: shumiao Date: Thu, 28 Aug 2025 10:08:06 -0700 Subject: [PATCH 101/201] Update unit test - AutomaticManagement is no longer supported on the gkehub api server for the ConfigManagement feature (#14995) --- .../resource_gke_hub_feature_test.go.tmpl | 62 ------------------- 1 file changed, 62 deletions(-) diff --git a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl index 37e510333765..518347c9d53f 100644 --- a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl +++ b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl @@ -489,14 +489,6 @@ func TestAccGKEHubFeature_FleetDefaultMemberConfigConfigManagement(t *testing.T) ImportState: true, ImportStateVerify: true, }, - { - Config: testAccGKEHubFeature_FleetDefaultMemberConfigConfigManagementEnableAutomaticManagementUpdate(context), - }, - { - ResourceName: "google_gke_hub_feature.feature", - ImportState: true, - ImportStateVerify: true, - }, { Config: testAccGKEHubFeature_FleetDefaultMemberConfigConfigManagementRemovalUpdate(context), }, @@ -505,37 +497,10 @@ func TestAccGKEHubFeature_FleetDefaultMemberConfigConfigManagement(t *testing.T) ImportState: true, ImportStateVerify: true, }, - { - Config: testAccGKEHubFeature_FleetDefaultMemberConfigConfigManagementAutomaticManagement(context), - }, - { - ResourceName: "google_gke_hub_feature.feature", - ImportState: true, - ImportStateVerify: true, - }, }, }) } -func testAccGKEHubFeature_FleetDefaultMemberConfigConfigManagementAutomaticManagement(context map[string]interface{}) string { - return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` -resource "google_gke_hub_feature" "feature" { - name = "configmanagement" - location = "global" - fleet_default_member_config { - configmanagement { - management = "MANAGEMENT_AUTOMATIC" - config_sync { - enabled = true - } - } - } - depends_on = [google_project_service.anthos, google_project_service.gkehub, google_project_service.acm] - project = google_project.project.project_id -} -`, context) -} - func testAccGKEHubFeature_FleetDefaultMemberConfigConfigManagement(context map[string]interface{}) string { return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` resource "google_gke_hub_feature" "feature" { @@ -593,33 +558,6 @@ resource "google_gke_hub_feature" "feature" { `, context) } -func testAccGKEHubFeature_FleetDefaultMemberConfigConfigManagementEnableAutomaticManagementUpdate(context map[string]interface{}) string { - return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` -resource "google_gke_hub_feature" "feature" { - name = "configmanagement" - location = "global" - fleet_default_member_config { - configmanagement { - management = "MANAGEMENT_AUTOMATIC" - config_sync { - prevent_drift = true - source_format = "unstructured" - oci { - sync_repo = 
"us-central1-docker.pkg.dev/corp-gke-build-artifacts/acm/configs:latest" - policy_dir = "/acm/nonprod-root/" - secret_type = "gcpserviceaccount" - sync_wait_secs = "15" - gcp_service_account_email = "gke-cluster@gke-foo-nonprod.iam.gserviceaccount.com" - } - } - } - } - depends_on = [google_project_service.anthos, google_project_service.gkehub, google_project_service.acm] - project = google_project.project.project_id -} -`, context) -} - func testAccGKEHubFeature_FleetDefaultMemberConfigConfigManagementRemovalUpdate(context map[string]interface{}) string { return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` resource "google_gke_hub_feature" "feature" { From 6bb6bdc68646031303f8c47784536ecbc0b949ac Mon Sep 17 00:00:00 2001 From: Jiongxin Ye <48576162+JessieYee@users.noreply.github.com> Date: Thu, 28 Aug 2025 10:38:00 -0700 Subject: [PATCH 102/201] update go.mod and go.sum (#14926) --- mmv1/third_party/terraform/go.mod | 6 +++--- mmv1/third_party/terraform/go.sum | 12 ++++++------ 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/mmv1/third_party/terraform/go.mod b/mmv1/third_party/terraform/go.mod index 3641e8f50c8b..ab0546334c5d 100644 --- a/mmv1/third_party/terraform/go.mod +++ b/mmv1/third_party/terraform/go.mod @@ -3,7 +3,7 @@ module github.com/hashicorp/terraform-provider-google go 1.23.0 require ( - cloud.google.com/go/auth v0.16.4 + cloud.google.com/go/auth v0.16.5 cloud.google.com/go/auth/oauth2adapt v0.2.8 cloud.google.com/go/bigtable v1.37.0 github.com/GoogleCloudPlatform/declarative-resource-client-library v1.83.0 @@ -35,8 +35,8 @@ require ( golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 golang.org/x/net v0.43.0 golang.org/x/oauth2 v0.30.0 - google.golang.org/api v0.247.0 - google.golang.org/genproto/googleapis/rpc v0.0.0-20250804133106-a7a43d27e69b + google.golang.org/api v0.248.0 + google.golang.org/genproto/googleapis/rpc v0.0.0-20250818200422-3122310a409c google.golang.org/grpc v1.74.2 google.golang.org/protobuf v1.36.7 gopkg.in/yaml.v2 v2.4.0 diff --git a/mmv1/third_party/terraform/go.sum b/mmv1/third_party/terraform/go.sum index bd42f7af505b..b4176222aeaf 100644 --- a/mmv1/third_party/terraform/go.sum +++ b/mmv1/third_party/terraform/go.sum @@ -5,8 +5,8 @@ cel.dev/expr v0.24.0/go.mod h1:hLPLo1W4QUmuYdA72RBX06QTs6MXw941piREPl3Yfiw= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.121.0 h1:pgfwva8nGw7vivjZiRfrmglGWiCJBP+0OmDpenG/Fwg= cloud.google.com/go v0.121.0/go.mod h1:rS7Kytwheu/y9buoDmu5EIpMMCI4Mb8ND4aeN4Vwj7Q= -cloud.google.com/go/auth v0.16.4 h1:fXOAIQmkApVvcIn7Pc2+5J8QTMVbUGLscnSVNl11su8= -cloud.google.com/go/auth v0.16.4/go.mod h1:j10ncYwjX/g3cdX7GpEzsdM+d+ZNsXAbb6qXA7p1Y5M= +cloud.google.com/go/auth v0.16.5 h1:mFWNQ2FEVWAliEQWpAdH80omXFokmrnbDhUS9cBywsI= +cloud.google.com/go/auth v0.16.5/go.mod h1:utzRfHMP+Vv0mpOkTRQoWD2q3BatTOoWbA7gCc2dUhQ= cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc= cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c= cloud.google.com/go/bigtable v1.37.0 h1:Q+x7y04lQ0B+WXp03wc1/FLhFt4CwcQdkwWT0M4Jp3w= @@ -390,8 +390,8 @@ golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors 
v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.247.0 h1:tSd/e0QrUlLsrwMKmkbQhYVa109qIintOls2Wh6bngc= -google.golang.org/api v0.247.0/go.mod h1:r1qZOPmxXffXg6xS5uhx16Fa/UFY8QU/K4bfKrnvovM= +google.golang.org/api v0.248.0 h1:hUotakSkcwGdYUqzCRc5yGYsg4wXxpkKlW5ryVqvC1Y= +google.golang.org/api v0.248.0/go.mod h1:yAFUAF56Li7IuIQbTFoLwXTCI6XCFKueOlS7S9e4F9k= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= @@ -403,8 +403,8 @@ google.golang.org/genproto v0.0.0-20250603155806-513f23925822 h1:rHWScKit0gvAPuO google.golang.org/genproto v0.0.0-20250603155806-513f23925822/go.mod h1:HubltRL7rMh0LfnQPkMH4NPDFEWp0jw3vixw7jEM53s= google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 h1:oWVWY3NzT7KJppx2UKhKmzPq4SRe0LdCijVRwvGeikY= google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822/go.mod h1:h3c4v36UTKzUiuaOKQ6gr3S+0hovBtUrXzTG/i3+XEc= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250804133106-a7a43d27e69b h1:zPKJod4w6F1+nRGDI9ubnXYhU9NSWoFAijkHkUXeTK8= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250804133106-a7a43d27e69b/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250818200422-3122310a409c h1:qXWI/sQtv5UKboZ/zUk7h+mrf/lXORyI+n9DKDAusdg= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250818200422-3122310a409c/go.mod h1:gw1tLEfykwDz2ET4a12jcXt4couGAm7IwsVaTy0Sflo= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= From d6166b5d2152df9109755164a45229fe321b4dfa Mon Sep 17 00:00:00 2001 From: Lakshman Swaminathan Date: Thu, 28 Aug 2025 11:44:52 -0700 Subject: [PATCH 103/201] autogeneration of tests for singular datasources (#14872) Co-authored-by: Stephen Lewis (Burrows) --- mmv1/api/resource.go | 42 +++++--- mmv1/api/resource/datasource.go | 2 + mmv1/products/cloudrun/Service.yaml | 1 + .../FolderIntelligenceConfig.yaml | 1 + .../OrganizationIntelligenceConfig.yaml | 1 + .../ProjectIntelligenceConfig.yaml | 1 + mmv1/provider/template_data.go | 29 ++++++ mmv1/provider/terraform.go | 16 ++++ .../base_configs/datasource_test_file.go.tmpl | 96 +++++++++++++++++++ .../iap/data_source_iap_client_test.go | 70 -------------- 10 files changed, 177 insertions(+), 82 deletions(-) create mode 100644 mmv1/templates/terraform/examples/base_configs/datasource_test_file.go.tmpl delete mode 100644 mmv1/third_party/terraform/services/iap/data_source_iap_client_test.go diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index f14b98e20b35..f29a0328bfad 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -1203,31 +1203,42 @@ func ImportIdFormats(importFormat, identity []string, baseUrl string) []string { return uniq } -func (r Resource) IgnoreReadPropertiesToString(e resource.Examples) string { +// IgnoreReadProperties returns a sorted slice of property names (snake_case) that should be ignored when reading. +// This is useful for downstream code that needs to iterate over these properties. 
+func (r Resource) IgnoreReadProperties(e resource.Examples) []string { var props []string for _, tp := range r.AllUserProperties() { if tp.UrlParamOnly || tp.IsA("ResourceRef") { - props = append(props, fmt.Sprintf("\"%s\"", google.Underscore(tp.Name))) + props = append(props, google.Underscore(tp.Name)) } } - for _, tp := range e.IgnoreReadExtra { - props = append(props, fmt.Sprintf("\"%s\"", tp)) - } - for _, tp := range r.IgnoreReadLabelsFields(r.PropertiesWithExcluded()) { - props = append(props, fmt.Sprintf("\"%s\"", tp)) - } - for _, tp := range ignoreReadFields(r.AllUserProperties()) { - props = append(props, fmt.Sprintf("\"%s\"", tp)) - } + props = append(props, e.IgnoreReadExtra...) + props = append(props, r.IgnoreReadLabelsFields(r.PropertiesWithExcluded())...) + props = append(props, ignoreReadFields(r.AllUserProperties())...) slices.Sort(props) + return props +} +// IgnoreReadPropertiesToString returns the ignore read properties as a Go-syntax string slice. +// This is a wrapper around IgnoreReadProperties for backwards compatibility. +func (r Resource) IgnoreReadPropertiesToString(e resource.Examples) string { + props := r.IgnoreReadProperties(e) if len(props) > 0 { - return fmt.Sprintf("[]string{%s}", strings.Join(props, ", ")) + return fmt.Sprintf("[]string{%s}", strings.Join(quoteStrings(props), ", ")) } return "" } +// quoteStrings returns a new slice with each string quoted. +func quoteStrings(strs []string) []string { + quoted := make([]string, len(strs)) + for i, s := range strs { + quoted[i] = fmt.Sprintf("\"%s\"", s) + } + return quoted +} + func ignoreReadFields(props []*Type) []string { var fields []string for _, tp := range props { @@ -2021,6 +2032,13 @@ func (r *Resource) ShouldGenerateSingularDataSource() bool { return r.Datasource.Generate } +func (r *Resource) ShouldGenerateSingularDataSourceTests() bool { + if r.Datasource == nil { + return false + } + return !r.Datasource.ExcludeTest +} + func (r Resource) ShouldDatasourceSetLabels() bool { for _, p := range r.Properties { if p.Name == "labels" && p.Type == "KeyValueLabels" { diff --git a/mmv1/api/resource/datasource.go b/mmv1/api/resource/datasource.go index ee87c1501f87..2fe4f4755d6f 100644 --- a/mmv1/api/resource/datasource.go +++ b/mmv1/api/resource/datasource.go @@ -16,4 +16,6 @@ package resource type Datasource struct { // boolean to determine whether the datasource file should be generated Generate bool `yaml:"generate"` + // boolean to determine whether tests should be generated for a datasource + ExcludeTest bool `yaml:"exclude_test"` } diff --git a/mmv1/products/cloudrun/Service.yaml b/mmv1/products/cloudrun/Service.yaml index 294276192445..6e9415daed92 100644 --- a/mmv1/products/cloudrun/Service.yaml +++ b/mmv1/products/cloudrun/Service.yaml @@ -31,6 +31,7 @@ import_format: - 'locations/{{location}}/namespaces/{{project}}/services/{{name}}' datasource: generate: true + exclude_test: true timeouts: insert_minutes: 20 update_minutes: 20 diff --git a/mmv1/products/storagecontrol/FolderIntelligenceConfig.yaml b/mmv1/products/storagecontrol/FolderIntelligenceConfig.yaml index 83e0f7a1b098..ee9cb9e043a3 100644 --- a/mmv1/products/storagecontrol/FolderIntelligenceConfig.yaml +++ b/mmv1/products/storagecontrol/FolderIntelligenceConfig.yaml @@ -51,6 +51,7 @@ autogen_async: false datasource: generate: true + exclude_test: true examples: - name: storage_control_folder_intelligence_config_basic diff --git a/mmv1/products/storagecontrol/OrganizationIntelligenceConfig.yaml 
b/mmv1/products/storagecontrol/OrganizationIntelligenceConfig.yaml index edf604dac363..547f726d0a54 100644 --- a/mmv1/products/storagecontrol/OrganizationIntelligenceConfig.yaml +++ b/mmv1/products/storagecontrol/OrganizationIntelligenceConfig.yaml @@ -51,6 +51,7 @@ autogen_async: false datasource: generate: true + exclude_test: true examples: - name: storage_control_organization_intelligence_config_basic diff --git a/mmv1/products/storagecontrol/ProjectIntelligenceConfig.yaml b/mmv1/products/storagecontrol/ProjectIntelligenceConfig.yaml index d2c654e08b13..60962fef6c4d 100644 --- a/mmv1/products/storagecontrol/ProjectIntelligenceConfig.yaml +++ b/mmv1/products/storagecontrol/ProjectIntelligenceConfig.yaml @@ -52,6 +52,7 @@ autogen_async: false datasource: generate: true + exclude_test: true examples: - name: storage_control_project_intelligence_config_basic diff --git a/mmv1/provider/template_data.go b/mmv1/provider/template_data.go index 32df384cbc8a..831d7fecafe1 100644 --- a/mmv1/provider/template_data.go +++ b/mmv1/provider/template_data.go @@ -165,6 +165,35 @@ func (td *TemplateData) GenerateTestFile(filePath string, resource api.Resource) td.GenerateFile(filePath, templatePath, tmplInput, true, templates...) } +func (td *TemplateData) GenerateDataSourceTestFile(filePath string, resource api.Resource) { + templatePath := "templates/terraform/examples/base_configs/datasource_test_file.go.tmpl" + templates := []string{ + "templates/terraform/env_var_context.go.tmpl", + templatePath, + } + tmplInput := TestInput{ + Res: resource, + ImportPath: resource.ImportPath, + PROJECT_NAME: "my-project-name", + CREDENTIALS: "my/credentials/filename.json", + REGION: "us-west1", + ORG_ID: "123456789", + ORG_DOMAIN: "example.com", + ORG_TARGET: "123456789", + PROJECT_NUMBER: "1111111111111", + BILLING_ACCT: "000000-0000000-0000000-000000", + MASTER_BILLING_ACCT: "000000-0000000-0000000-000000", + SERVICE_ACCT: "my@service-account.com", + CUST_ID: "A01b123xz", + IDENTITY_USER: "cloud_identity_user", + PAP_DESCRIPTION: "description", + CHRONICLE_ID: "00000000-0000-0000-0000-000000000000", + VMWAREENGINE_PROJECT: "my-vmwareengine-project", + } + + td.GenerateFile(filePath, templatePath, tmplInput, true, templates...) 
+} + func (td *TemplateData) GenerateIamPolicyFile(filePath string, resource api.Resource) { templatePath := "templates/terraform/iam_policy.go.tmpl" templates := []string{ diff --git a/mmv1/provider/terraform.go b/mmv1/provider/terraform.go index 3e96ce7f58ea..88f14e0c853a 100644 --- a/mmv1/provider/terraform.go +++ b/mmv1/provider/terraform.go @@ -108,6 +108,7 @@ func (t *Terraform) GenerateObject(object api.Resource, outputFolder, productPat t.GenerateResourceTests(object, *templateData, outputFolder) t.GenerateResourceSweeper(object, *templateData, outputFolder) t.GenerateSingularDataSource(object, *templateData, outputFolder) + t.GenerateSingularDataSourceTests(object, *templateData, outputFolder) // log.Printf("Generating %s metadata", object.Name) t.GenerateResourceMetadata(object, *templateData, outputFolder) } @@ -208,6 +209,21 @@ func (t *Terraform) GenerateSingularDataSource(object api.Resource, templateData templateData.GenerateDataSourceFile(targetFilePath, object) } +func (t *Terraform) GenerateSingularDataSourceTests(object api.Resource, templateData TemplateData, outputFolder string) { + if !object.ShouldGenerateSingularDataSourceTests() { + return + } + + productName := t.Product.ApiName + targetFolder := path.Join(outputFolder, t.FolderName(), "services", productName) + if err := os.MkdirAll(targetFolder, os.ModePerm); err != nil { + log.Println(fmt.Errorf("error creating parent directory %v: %v", targetFolder, err)) + } + targetFilePath := path.Join(targetFolder, fmt.Sprintf("data_source_%s_test.go", t.ResourceGoFilename(object))) + templateData.GenerateDataSourceTestFile(targetFilePath, object) + +} + // GenerateProduct creates the product.go file for a given service directory. // This will be used to seed the directory and add a package-level comment // specific to the product. diff --git a/mmv1/templates/terraform/examples/base_configs/datasource_test_file.go.tmpl b/mmv1/templates/terraform/examples/base_configs/datasource_test_file.go.tmpl new file mode 100644 index 000000000000..84421bf40166 --- /dev/null +++ b/mmv1/templates/terraform/examples/base_configs/datasource_test_file.go.tmpl @@ -0,0 +1,96 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 + +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** Type: MMv1 *** +// +// ---------------------------------------------------------------------------- +// +// This file is automatically generated by Magic Modules and manual +// changes will be clobbered when the file is regenerated. +// +// Please read more about how to change this file in +// .github/CONTRIBUTING.md. 
+// +// ---------------------------------------------------------------------------- + +package {{ $.Res.PackageName }}_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + + "{{ $.ImportPath }}/acctest" + "{{ $.ImportPath }}/envvar" + "{{ $.ImportPath }}/tpgresource" +) + +{{ if $.Res.TestExamples }} +{{ $e := index $.Res.TestExamples 0 }} +func TestAccDataSource{{ $.Res.ResourceName }}_basic(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + {{- template "EnvVarContext" dict "TestEnvVars" $e.TestEnvVars "HasNewLine" false}} + {{- range $varKey, $varVal := $e.TestVarsOverrides }} + "{{$varKey}}": {{$varVal}}, + {{- end }} + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + {{- if $.Res.VersionedProvider $e.MinVersion }} + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + {{- else }} + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + {{- end }} + {{- if $e.ExternalProviders }} + ExternalProviders: map[string]resource.ExternalProvider{ + {{- range $provider := $e.ExternalProviders }} + "{{$provider}}": {}, + {{- end }} + }, + {{- end }} + {{- if not $.Res.ExcludeDelete }} + CheckDestroy: testAccCheck{{ $.Res.ResourceName }}DestroyProducer(t), +{{- end }} + Steps: []resource.TestStep{ + { + Config: testAcc{{ $e.TestSlug $.Res.ProductMetadata.Name $.Res.Name }}DataSource(context), + Check: resource.ComposeTestCheckFunc( + {{- if gt (len ($.Res.IgnoreReadProperties $e)) 0 }} + acctest.CheckDataSourceStateMatchesResourceStateWithIgnores( + "data.{{ $e.ResourceType $.Res.TerraformName }}.default", + "{{ $e.ResourceType $.Res.TerraformName }}.{{ $e.PrimaryResourceId }}", + map[string]struct{}{ + {{- range $prop := $.Res.IgnoreReadProperties $e }} + "{{ $prop }}": {}, + {{- end }} + }, + ), + {{- else }} + acctest.CheckDataSourceStateMatchesResourceState("data.{{ $e.ResourceType $.Res.TerraformName }}.default", "{{ $e.ResourceType $.Res.TerraformName }}.{{ $e.PrimaryResourceId }}"), + {{- end }} + ), + }, + }, + }) +} + +func testAcc{{ $e.TestSlug $.Res.ProductMetadata.Name $.Res.Name }}DataSource(context map[string]interface{}) string { + return acctest.Nprintf(` +{{ $e.TestHCLText }} + +data "{{ $.Res.TerraformName }}" "default" { +{{- range $fieldName := $.Res.DatasourceRequiredFields }} + {{ $fieldName }} = {{ $e.ResourceType $.Res.TerraformName }}.{{ $e.PrimaryResourceId }}.{{ $fieldName }} +{{- end }} +}`, + context, + ) +} +{{ end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/iap/data_source_iap_client_test.go b/mmv1/third_party/terraform/services/iap/data_source_iap_client_test.go deleted file mode 100644 index 0442f018d845..000000000000 --- a/mmv1/third_party/terraform/services/iap/data_source_iap_client_test.go +++ /dev/null @@ -1,70 +0,0 @@ -package iap_test - -import ( - "testing" - - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" -) - -func TestAccIapClient_Datasource_basic(t *testing.T) { - t.Parallel() - - context := map[string]interface{}{ - "org_id": envvar.GetTestOrgFromEnv(t), - "org_domain": envvar.GetTestOrgDomainFromEnv(t), - "random_suffix": acctest.RandString(t, 10), - } - - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: 
acctest.ProtoV5ProviderFactories(t), - Steps: []resource.TestStep{ - { - Config: testAccIapClientDatasourceConfig(context), - Check: resource.ComposeTestCheckFunc( - acctest.CheckDataSourceStateMatchesResourceStateWithIgnores( - "data.google_iap_client.project_client", - "google_iap_client.project_client", - map[string]struct{}{ - "brand": {}, - }, - ), - ), - }, - }, - }) -} - -func testAccIapClientDatasourceConfig(context map[string]interface{}) string { - return acctest.Nprintf(` -resource "google_project" "project" { - project_id = "tf-test%{random_suffix}" - name = "tf-test%{random_suffix}" - org_id = "%{org_id}" - deletion_policy = "DELETE" -} - -resource "google_project_service" "project_service" { - project = google_project.project.project_id - service = "iap.googleapis.com" -} - -resource "google_iap_brand" "project_brand" { - support_email = "support@%{org_domain}" - application_title = "Cloud IAP protected Application" - project = google_project_service.project_service.project -} - -resource "google_iap_client" "project_client" { - display_name = "Test Client" - brand = google_iap_brand.project_brand.name -} - -data "google_iap_client" "project_client" { - brand = google_iap_client.project_client.brand - client_id = google_iap_client.project_client.client_id -} -`, context) -} From a941c02deef4cbe80d086539a7f4856bea4e1318 Mon Sep 17 00:00:00 2001 From: Mehul3217 <44620455+Mehul3217@users.noreply.github.com> Date: Fri, 29 Aug 2025 00:44:12 +0530 Subject: [PATCH 104/201] fixing custom performance pool, since totalIops will be returned by sever (#15005) --- mmv1/products/netapp/StoragePool.yaml | 1 + .../resource_netapp_storage_pool_test.go.tmpl | 72 +++++++++++++++++++ 2 files changed, 73 insertions(+) diff --git a/mmv1/products/netapp/StoragePool.yaml b/mmv1/products/netapp/StoragePool.yaml index 6921fdfd36f5..084d89c9f67f 100644 --- a/mmv1/products/netapp/StoragePool.yaml +++ b/mmv1/products/netapp/StoragePool.yaml @@ -194,6 +194,7 @@ properties: type: String description: | Optional. 
Custom Performance Total IOPS of the pool If not provided, it will be calculated based on the totalThroughputMibps + default_from_api: true - name: 'hotTierSizeGib' type: String description: | diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl b/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl index 7f283395f5b4..e5b5063c461f 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_storage_pool_test.go.tmpl @@ -567,3 +567,75 @@ data "google_compute_network" "default" { } `, context) } + +func TestAccNetappStoragePool_customPerformanceEnabledStoragePoolCreateExample_update(t *testing.T) { + context := map[string]interface{}{ + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckNetappStoragePoolDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccNetappStoragePool_customPerformanceEnabledStoragePoolCreateExample_full(context), + }, + { + ResourceName: "google_netapp_storage_pool.test_pool", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "name", "labels", "terraform_labels"}, + }, + { + Config: testAccNetappStoragePool_customPerformanceEnabledStoragePoolCreateExample_update(context), + }, + { + ResourceName: "google_netapp_storage_pool.test_pool", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "name", "labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccNetappStoragePool_customPerformanceEnabledStoragePoolCreateExample_full(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_netapp_storage_pool" "test_pool" { + name = "tf-test-pool%{random_suffix}" + location = "us-east4-a" + service_level = "FLEX" + capacity_gib = "2048" + network = data.google_compute_network.default.id + description = "this is a test description" + custom_performance_enabled = true + total_throughput_mibps = "200" +} + +data "google_compute_network" "default" { + name = "%{network_name}" +} +`, context) +} + +func testAccNetappStoragePool_customPerformanceEnabledStoragePoolCreateExample_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_netapp_storage_pool" "test_pool" { + name = "tf-test-pool%{random_suffix}" + location = "us-east4-a" + service_level = "FLEX" + capacity_gib = "2048" + network = data.google_compute_network.default.id + description = "this is updated test description" + custom_performance_enabled = true + total_throughput_mibps = "200" + total_iops = "3500" +} + +data "google_compute_network" "default" { + name = "%{network_name}" +} +`, context) +} \ No newline at end of file From b9257d5cc0a572d917a382b606277c4de3ac3741 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Thu, 28 Aug 2025 14:24:21 -0700 Subject: [PATCH 105/201] Removed changes that were left out of v7 release (#15013) --- .../docs/guides/version_7_upgrade.html.markdown | 12 ------------ 1 file changed, 12 deletions(-) diff --git 
a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index 16c85e5b33da..e19675a49741 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -229,12 +229,6 @@ Remove `description` from your configuration after upgrade. `allow_fewer_zones_deployment` has been removed because it isn't user-configurable. -## Resource: `google_monitoring_uptime_check_config` - -### Exactly one of `http_check.auth_info.password` and `http_check.auth_info.password_wo` must be set - -Setting exactly one of `http_check.auth_info.password` and `http_check.auth_info.password_wo` is now enforced in order to avoid situations where it is unclear which was being used. - ## Resource: `google_network_services_lb_traffic_extension` ### `load_balancing_scheme` is now required @@ -257,12 +251,6 @@ Now, destroying the resource will only remove it from Terraform's state and leav `allow_fewer_zones_deployment` has been removed because it isn't user-configurable. -## Resource: `google_secret_manager_secret_version` - -### `secret_data_wo` and `secret_data_wo_version` must be set together - -This standardizes the behavior of write-only fields across the provider and makes it easier to remember to update the fields together. - ## Resource: `google_sql_user` ### `password_wo` and `password_wo_version` must be set together From 085498d35d4b37017b3bec466cae2bbb96e3f1c0 Mon Sep 17 00:00:00 2001 From: Ryan Oaks Date: Thu, 28 Aug 2025 17:41:32 -0400 Subject: [PATCH 106/201] Remove unused TerraformResourceDirectory and TerraformProviderModule fields in generator (#15001) --- mmv1/provider/template_data.go | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/mmv1/provider/template_data.go b/mmv1/provider/template_data.go index 831d7fecafe1..8946f9238e31 100644 --- a/mmv1/provider/template_data.go +++ b/mmv1/provider/template_data.go @@ -35,9 +35,6 @@ type TemplateData struct { OutputFolder string VersionName string - TerraformResourceDirectory string - TerraformProviderModule string - // TODO rewrite: is this needed? 
// # Information about the local environment // # (which formatters are enabled, start-time) @@ -53,18 +50,6 @@ var goimportFiles sync.Map func NewTemplateData(outputFolder string, versionName string) *TemplateData { td := TemplateData{OutputFolder: outputFolder, VersionName: versionName} - - if versionName == GA_VERSION { - td.TerraformResourceDirectory = "google" - td.TerraformProviderModule = "github.com/hashicorp/terraform-provider-google" - } else if versionName == ALPHA_VERSION || versionName == PRIVATE_VERSION { - td.TerraformResourceDirectory = "google-private" - td.TerraformProviderModule = "internal/terraform-next" - } else { - td.TerraformResourceDirectory = "google-beta" - td.TerraformProviderModule = "github.com/hashicorp/terraform-provider-google-beta" - } - return &td } From 69880d9d5555d581cbe9d7bfce2197321ad80a33 Mon Sep 17 00:00:00 2001 From: Ryan Oaks Date: Thu, 28 Aug 2025 18:05:45 -0400 Subject: [PATCH 107/201] Update provider generation to use generic naming for imports (#15008) --- mmv1/provider/provider.go | 6 ++---- mmv1/provider/terraform.go | 11 ++++------- 2 files changed, 6 insertions(+), 11 deletions(-) diff --git a/mmv1/provider/provider.go b/mmv1/provider/provider.go index 7da908c8aa5e..1f32008fff85 100644 --- a/mmv1/provider/provider.go +++ b/mmv1/provider/provider.go @@ -18,10 +18,8 @@ type Provider interface { const TERRAFORM_PROVIDER_GA = "github.com/hashicorp/terraform-provider-google" const TERRAFORM_PROVIDER_BETA = "github.com/hashicorp/terraform-provider-google-beta" const TGC_PROVIDER = "github.com/GoogleCloudPlatform/terraform-google-conversion/v6" -const TERRAFORM_PROVIDER_PRIVATE = "internal/terraform-next" const RESOURCE_DIRECTORY_GA = "google" const RESOURCE_DIRECTORY_BETA = "google-beta" -const RESOURCE_DIRECTORY_PRIVATE = "google-private" const RESOURCE_DIRECTORY_TGC = "pkg" // # TODO: Review all object interfaces and move to private methods @@ -40,8 +38,8 @@ func ImportPathFromVersion(v string) string { tpg = TERRAFORM_PROVIDER_BETA dir = RESOURCE_DIRECTORY_BETA default: - tpg = TERRAFORM_PROVIDER_PRIVATE - dir = RESOURCE_DIRECTORY_PRIVATE + tpg = "github.com/hashicorp/terraform-provider-google-" + v + dir = "google-" + v } return fmt.Sprintf("%s/%s", tpg, dir) } diff --git a/mmv1/provider/terraform.go b/mmv1/provider/terraform.go index 88f14e0c853a..cecb60ec909d 100644 --- a/mmv1/provider/terraform.go +++ b/mmv1/provider/terraform.go @@ -302,10 +302,8 @@ func (t *Terraform) GenerateIamDocumentation(object api.Resource, templateData T func (t *Terraform) FolderName() string { if t.TargetVersionName == "ga" { return "google" - } else if t.TargetVersionName == "beta" { - return "google-beta" } - return "google-private" + return "google-" + t.TargetVersionName } // Similar to FullResourceName, but override-aware to prevent things like ending in _test. 
@@ -732,9 +730,8 @@ func (t Terraform) replaceImportPath(outputFolder, target string) { tpg = TERRAFORM_PROVIDER_BETA dir = RESOURCE_DIRECTORY_BETA default: - tpg = TERRAFORM_PROVIDER_PRIVATE - dir = RESOURCE_DIRECTORY_PRIVATE - + tpg = "github.com/hashicorp/terraform-provider-google-" + t.TargetVersionName + dir = "google-" + t.TargetVersionName } sourceByte = bytes.Replace(sourceByte, []byte(gaImportPath), []byte(tpg+"/"+dir), -1) @@ -764,7 +761,7 @@ func (t Terraform) ProviderFromVersion() string { case "beta": dir = RESOURCE_DIRECTORY_BETA default: - dir = RESOURCE_DIRECTORY_PRIVATE + dir = "google-" + t.TargetVersionName } return dir } From a964c384ceb1a35769c99eb8c7b2b371af9050de Mon Sep 17 00:00:00 2001 From: Brad Hoekstra Date: Thu, 28 Aug 2025 19:20:48 -0400 Subject: [PATCH 108/201] Document deprecated GKE Enterprise fields. (#14932) Signed-off-by: Brad Hoekstra --- .../services/container/resource_container_cluster.go.tmpl | 3 +++ .../terraform/website/docs/r/container_cluster.html.markdown | 4 ++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl index 4c053deb9a46..16456ccafaf0 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl @@ -2523,12 +2523,14 @@ func ResourceContainerCluster() *schema.Resource { MaxItems: 1, Computed: true, Description: `Defines the config needed to enable/disable GKE Enterprise`, + Deprecated: `GKE Enterprise features are now available without an Enterprise tier. This field is deprecated and will be removed in a future major release`, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "cluster_tier": { Type: schema.TypeString, Computed: true, Description: `Indicates the effective cluster tier. Available options include STANDARD and ENTERPRISE.`, + Deprecated: `GKE Enterprise features are now available without an Enterprise tier. This field is deprecated and will be removed in a future major release`, }, "desired_tier": { Type: schema.TypeString, @@ -2536,6 +2538,7 @@ func ResourceContainerCluster() *schema.Resource { Computed: true, ValidateFunc: validation.StringInSlice([]string{"STANDARD", "ENTERPRISE"}, false), Description: `Indicates the desired cluster tier. Available options include STANDARD and ENTERPRISE.`, + Deprecated: `GKE Enterprise features are now available without an Enterprise tier. This field is deprecated and will be removed in a future major release`, DiffSuppressFunc: tpgresource.EmptyOrDefaultStringSuppress("CLUSTER_TIER_UNSPECIFIED"), }, }, diff --git a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown index 623a11241f9b..bd0d63961dac 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown @@ -421,7 +421,7 @@ Fleet configuration for the cluster. Structure is [documented below](#nested_fle Configuration for [direct-path (via ALTS) with workload identity.](https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1beta1/projects.locations.clusters#workloadaltsconfig). Structure is [documented below](#nested_workload_alts_config). 
* `enterprise_config` - (Optional)
-  Configuration for [Enterprise edition].(https://cloud.google.com/kubernetes-engine/enterprise/docs/concepts/gke-editions). Structure is [documented below](#nested_enterprise_config).
+  (DEPRECATED) Configuration for [Enterprise edition](https://cloud.google.com/kubernetes-engine/enterprise/docs/concepts/gke-editions). Structure is [documented below](#nested_enterprise_config). Deprecated as GKE Enterprise features are now available without an Enterprise tier. See https://cloud.google.com/blog/products/containers-kubernetes/gke-gets-new-pricing-and-capabilities-on-10th-birthday for the announcement of this change.

 * `anonymous_authentication_config` - (Optional)
   Configuration for [anonymous authentication restrictions](https://cloud.google.com/kubernetes-engine/docs/how-to/hardening-your-cluster#restrict-anon-access). Structure is [documented below](#anonymous_authentication_config).
@@ -1643,7 +1643,7 @@ linux_node_config {

 The `enterprise_config` block supports:

-* `desired_tier` - (Optional) Sets the tier of the cluster. Available options include `STANDARD` and `ENTERPRISE`.
+* `desired_tier` - (Optional) (DEPRECATED) Sets the tier of the cluster. Available options include `STANDARD` and `ENTERPRISE`. Deprecated as GKE Enterprise features are now available without an Enterprise tier. See https://cloud.google.com/blog/products/containers-kubernetes/gke-gets-new-pricing-and-capabilities-on-10th-birthday for the announcement of this change.

 The `anonymous_authentication_config` block supports:

From aa4edf7f8ab7818f008abac66eeeaa57c8102b52 Mon Sep 17 00:00:00 2001
From: Mauricio Alvarez Leon <65101411+BBBmau@users.noreply.github.com>
Date: Thu, 28 Aug 2025 16:50:26 -0700
Subject: [PATCH 109/201] `teamcity`: `RELEASE_DIFF` == "true" (#15016)

---
 mmv1/third_party/terraform/acctest/diff_utils.go | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mmv1/third_party/terraform/acctest/diff_utils.go b/mmv1/third_party/terraform/acctest/diff_utils.go
index d23f226180f4..0a91f3ebed29 100644
--- a/mmv1/third_party/terraform/acctest/diff_utils.go
+++ b/mmv1/third_party/terraform/acctest/diff_utils.go
@@ -20,7 +20,7 @@ const diffTag = "[Diff]"

 func isReleaseDiffEnabled() bool {
 	releaseDiff := os.Getenv("RELEASE_DIFF")
-	return releaseDiff != ""
+	return releaseDiff == "true"
 }

 func initializeReleaseDiffTest(c resource.TestCase, testName string, tempOutputFile *os.File) resource.TestCase {
From e0ff702260d00103d005d230263de7a3e6e5fa21 Mon Sep 17 00:00:00 2001
From: NA2047 <12290725+NA2047@users.noreply.github.com>
Date: Thu, 28 Aug 2025 19:29:27 -0500
Subject: [PATCH 110/201] Fix for desired_auto_created_endpoints incorrect update (#14777)

Co-authored-by: Riley Karson
Co-authored-by: Stephen Lewis (Burrows)
---
 mmv1/products/memorystore/Instance.yaml       |   4 +-
 .../decoders/memorystore_instance.go.tmpl     |  24 +-
 .../resource_memorystore_instance_test.go     | 233 ++++++++++++++++++
 3 files changed, 256 insertions(+), 5 deletions(-)

diff --git a/mmv1/products/memorystore/Instance.yaml b/mmv1/products/memorystore/Instance.yaml
index ec40528f2814..9d937470dc09 100644
--- a/mmv1/products/memorystore/Instance.yaml
+++ b/mmv1/products/memorystore/Instance.yaml
@@ -108,8 +108,8 @@ examples:
       'secondary_instance_prevent_destroy': 'false'
 virtual_fields:
   - name: 'desired_psc_auto_connections'
-    description: "`desired_psc_auto_connections` is deprecated Use `desired_auto_created_endpoints` instead."
-    deprecation_message: '`desired_psc_auto_connections` is deprecated Use `desired_auto_created_endpoints` instead.'
+    description: "`desired_psc_auto_connections` is deprecated. Use `desired_auto_created_endpoints` instead. `terraform import` will only work with `desired_auto_created_endpoints`."
+    deprecation_message: '`desired_psc_auto_connections` is deprecated. Use `desired_auto_created_endpoints` instead. `terraform import` will only work with `desired_auto_created_endpoints`.'
     type: Array
     immutable: true
     conflicts:
diff --git a/mmv1/templates/terraform/decoders/memorystore_instance.go.tmpl b/mmv1/templates/terraform/decoders/memorystore_instance.go.tmpl
index 68ce254147ec..3918e6238c31 100644
--- a/mmv1/templates/terraform/decoders/memorystore_instance.go.tmpl
+++ b/mmv1/templates/terraform/decoders/memorystore_instance.go.tmpl
@@ -85,10 +85,28 @@
       }
     }
   }
+  // We want to make these fields detect API-side drift, so if the API returns a value for them and they're set in config, we set them in state.
+  // On import, we only set `desired_auto_created_endpoints` because that's the non-deprecated field.
   if len(transformed) > 0 {
-    d.Set("desired_auto_created_endpoints", transformed)
-    log.Printf("[DEBUG] Setting desired_auto_created_endpoints in decoder for %#v", transformed)
-
+    _, okEndpoint := d.GetOk("desired_auto_created_endpoints")
+    _, okPsc := d.GetOk("desired_psc_auto_connections")
+    if okEndpoint {
+      d.Set("desired_auto_created_endpoints", transformed)
+      log.Printf("[DEBUG] Setting desired_auto_created_endpoints in decoder within endpoints for %#v", transformed)
+    } else if okPsc {
+      d.Set("desired_auto_created_endpoints", []interface{}{})
+    }
+    if okPsc {
+      d.Set("desired_psc_auto_connections", transformed)
+      log.Printf("[DEBUG] Setting desired_psc_auto_connections in decoder within endpoints for %#v", transformed)
+    } else if okEndpoint {
+      d.Set("desired_psc_auto_connections", []interface{}{})
+    }
+    // Set preferred field on import
+    if !okPsc && !okEndpoint {
+      d.Set("desired_auto_created_endpoints", transformed)
+      log.Printf("[DEBUG] Setting desired_auto_created_endpoints in decoder within endpoints for %#v", transformed)
+    }
   }
 }
diff --git a/mmv1/third_party/terraform/services/memorystore/resource_memorystore_instance_test.go b/mmv1/third_party/terraform/services/memorystore/resource_memorystore_instance_test.go
index 12b182cfdea1..7636dfc388e6 100644
--- a/mmv1/third_party/terraform/services/memorystore/resource_memorystore_instance_test.go
+++ b/mmv1/third_party/terraform/services/memorystore/resource_memorystore_instance_test.go
@@ -1537,3 +1537,236 @@ data "google_project" "project" {
 }
 `, context)
 }
+
+func TestAccMemorystoreInstance_memorystorePscAutoInstanceClusterDisabled(t *testing.T) {
+	t.Parallel()
+
+	context := map[string]interface{}{
+		"random_suffix": acctest.RandString(t, 10),
+		"location":      "us-central1",
+	}
+
+	acctest.VcrTest(t, resource.TestCase{
+		PreCheck:                 func() { acctest.AccTestPreCheck(t) },
+		ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t),
+		CheckDestroy:             testAccCheckMemorystoreInstanceDestroyProducer(t),
+		Steps: []resource.TestStep{
+			{
+				Config: testAccMemorystoreInstance_memorystorePscAutoInstanceClusterDisabled_bothConnections(context),
+			},
+			{
+				ResourceName:            "google_memorystore_instance.instance-cluster-disabled",
+				ImportState:             true,
+				ImportStateVerify:       true,
+				ImportStateVerifyIgnore: []string{"desired_auto_created_endpoints.#", "desired_auto_created_endpoints.0.%", "desired_auto_created_endpoints.0.project_id",
"desired_auto_created_endpoints.0.network", "desired_psc_auto_connections.#", "desired_psc_auto_connections.0.%", "desired_psc_auto_connections.0.network", "desired_psc_auto_connections.0.project_id"}, + }, + { + Config: testAccMemorystoreInstance_memorystorePscAutoInstanceClusterDisabledPscAutoConnections(context), + }, + { + ResourceName: "google_memorystore_instance.instance-cluster-disabled", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"desired_auto_created_endpoints.#", "desired_auto_created_endpoints.0.%", "desired_auto_created_endpoints.0.project_id", "desired_auto_created_endpoints.0.network", "desired_psc_auto_connections.#", "desired_psc_auto_connections.0.%", "desired_psc_auto_connections.0.network", "desired_psc_auto_connections.0.project_id"}, + }, + { + Config: testAccMemorystoreInstance_memorystorePscAutoInstanceClusterDisabled_onlyAutoCreatedEndpoints(context), + }, + { + ResourceName: "google_memorystore_instance.instance-cluster-disabled", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"desired_auto_created_endpoints.#", "desired_auto_created_endpoints.0.%", "desired_auto_created_endpoints.0.project_id", "desired_auto_created_endpoints.0.network", "desired_psc_auto_connections.#", "desired_psc_auto_connections.0.%", "desired_psc_auto_connections.0.network", "desired_psc_auto_connections.0.project_id"}, + }, + { + Config: testAccMemorystoreInstance_memorystorePscAutoInstanceClusterDisabled_neitherConnection(context), + }, + { + ResourceName: "google_memorystore_instance.instance-cluster-disabled", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"desired_auto_created_endpoints.#", "desired_auto_created_endpoints.0.%", "desired_auto_created_endpoints.0.project_id", "desired_auto_created_endpoints.0.network", "desired_psc_auto_connections.#", "desired_psc_auto_connections.0.%", "desired_psc_auto_connections.0.network", "desired_psc_auto_connections.0.project_id"}, + }, + }, + }) +} + +func testAccMemorystoreInstance_memorystorePscAutoInstanceClusterDisabledPscAutoConnections(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_memorystore_instance" "instance-cluster-disabled" { + instance_id = "tf-test-instance-psc%{random_suffix}" + shard_count = 1 + desired_psc_auto_connections { + network = google_compute_network.producer_net.id + project_id = data.google_project.project.project_id + } + location = "%{location}" + deletion_protection_enabled = false + mode = "CLUSTER_DISABLED" + depends_on = [ + google_network_connectivity_service_connection_policy.default + ] +} + +resource "google_network_connectivity_service_connection_policy" "default" { + name = "tf-test-my-policy%{random_suffix}" + location = "%{location}" + service_class = "gcp-memorystore" + description = "my basic service connection policy" + network = google_compute_network.producer_net.id + psc_config { + subnetworks = [google_compute_subnetwork.producer_subnet.id] + } +} + +resource "google_compute_subnetwork" "producer_subnet" { + name = "tf-test-my-subnet%{random_suffix}" + ip_cidr_range = "10.0.0.248/29" + region = "%{location}" + network = google_compute_network.producer_net.id +} + +resource "google_compute_network" "producer_net" { + name = "tf-test-my-network%{random_suffix}" + auto_create_subnetworks = false +} + +data "google_project" "project" { +} +`, context) +} + +func 
testAccMemorystoreInstance_memorystorePscAutoInstanceClusterDisabled_bothConnections(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_memorystore_instance" "instance-cluster-disabled" { + instance_id = "tf-test-instance-psc%{random_suffix}" + shard_count = 1 + desired_psc_auto_connections { + network = google_compute_network.producer_net.id + project_id = data.google_project.project.project_id + } + desired_auto_created_endpoints { + network = google_compute_network.producer_net.id + project_id = data.google_project.project.project_id + } + location = "%{location}" + deletion_protection_enabled = false + mode = "CLUSTER_DISABLED" + depends_on = [ + google_network_connectivity_service_connection_policy.default + ] +} + +resource "google_network_connectivity_service_connection_policy" "default" { + name = "tf-test-my-policy%{random_suffix}" + location = "%{location}" + service_class = "gcp-memorystore" + description = "my basic service connection policy" + network = google_compute_network.producer_net.id + psc_config { + subnetworks = [google_compute_subnetwork.producer_subnet.id] + } +} + +resource "google_compute_subnetwork" "producer_subnet" { + name = "tf-test-my-subnet%{random_suffix}" + ip_cidr_range = "10.0.0.248/29" + region = "%{location}" + network = google_compute_network.producer_net.id +} + +resource "google_compute_network" "producer_net" { + name = "tf-test-my-network%{random_suffix}" + auto_create_subnetworks = false +} + +data "google_project" "project" { +} +`, context) +} + +func testAccMemorystoreInstance_memorystorePscAutoInstanceClusterDisabled_onlyAutoCreatedEndpoints(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_memorystore_instance" "instance-cluster-disabled" { + instance_id = "tf-test-instance-psc%{random_suffix}" + shard_count = 1 + desired_auto_created_endpoints { + network = google_compute_network.producer_net.id + project_id = data.google_project.project.project_id + } + location = "%{location}" + deletion_protection_enabled = false + mode = "CLUSTER_DISABLED" + depends_on = [ + google_network_connectivity_service_connection_policy.default + ] +} + +resource "google_network_connectivity_service_connection_policy" "default" { + name = "tf-test-my-policy%{random_suffix}" + location = "%{location}" + service_class = "gcp-memorystore" + description = "my basic service connection policy" + network = google_compute_network.producer_net.id + psc_config { + subnetworks = [google_compute_subnetwork.producer_subnet.id] + } +} + +resource "google_compute_subnetwork" "producer_subnet" { + name = "tf-test-my-subnet%{random_suffix}" + ip_cidr_range = "10.0.0.248/29" + region = "%{location}" + network = google_compute_network.producer_net.id +} + +resource "google_compute_network" "producer_net" { + name = "tf-test-my-network%{random_suffix}" + auto_create_subnetworks = false +} + +data "google_project" "project" { +} +`, context) +} + +func testAccMemorystoreInstance_memorystorePscAutoInstanceClusterDisabled_neitherConnection(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_memorystore_instance" "instance-cluster-disabled" { + instance_id = "tf-test-instance-psc%{random_suffix}" + shard_count = 1 + location = "%{location}" + deletion_protection_enabled = false + mode = "CLUSTER_DISABLED" + depends_on = [ + google_network_connectivity_service_connection_policy.default + ] +} + +resource "google_network_connectivity_service_connection_policy" "default" { + name = 
"tf-test-my-policy%{random_suffix}" + location = "%{location}" + service_class = "gcp-memorystore" + description = "my basic service connection policy" + network = google_compute_network.producer_net.id + psc_config { + subnetworks = [google_compute_subnetwork.producer_subnet.id] + } +} + +resource "google_compute_subnetwork" "producer_subnet" { + name = "tf-test-my-subnet%{random_suffix}" + ip_cidr_range = "10.0.0.248/29" + region = "%{location}" + network = google_compute_network.producer_net.id +} + +resource "google_compute_network" "producer_net" { + name = "tf-test-my-network%{random_suffix}" + auto_create_subnetworks = false +} + +data "google_project" "project" { +} +`, context) +} From bee5162c1f6e1aec228b024227dbaea2d87c0cad Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Fri, 29 Aug 2025 19:04:35 +0200 Subject: [PATCH 111/201] MMv1 field: changed `write_only` to `write_only_legacy` and marked field as deprecated (#15022) --- docs/content/reference/field.md | 6 ++++-- mmv1/api/resource.go | 2 +- mmv1/api/type.go | 14 ++++++++------ mmv1/products/bigquerydatatransfer/Config.yaml | 2 +- mmv1/products/monitoring/UptimeCheckConfig.yaml | 2 +- mmv1/products/secretmanager/SecretVersion.yaml | 2 +- .../custom_flatten/dataplex_entry_aspects.go.tmpl | 2 +- .../terraform/flatten_property_method.go.tmpl | 6 +++--- ...rty_write_only_documentation.html.markdown.tmpl | 2 +- .../property_documentation.html.markdown.tmpl | 2 +- .../terraform/resource.html.markdown.tmpl | 6 +++--- mmv1/templates/terraform/schema_property.go.tmpl | 2 +- 12 files changed, 26 insertions(+), 22 deletions(-) diff --git a/docs/content/reference/field.md b/docs/content/reference/field.md index 1f24ec8abc06..ae40c0b3bd4d 100644 --- a/docs/content/reference/field.md +++ b/docs/content/reference/field.md @@ -107,7 +107,7 @@ Example: sensitive: true ``` -### `write_only` +### `write_only_legacy` (deprecated) If true, the field is considered "write-only", which means that its value will be obscured in Terraform output as well as not be stored in state. This field is meant to replace `sensitive` as it doesn't store the value in state. See [Ephemerality in Resources - Use Write-only arguments](https://developer.hashicorp.com/terraform/language/resources/ephemeral/write-only) @@ -121,9 +121,11 @@ This field cannot be used in conjuction with `immutable` or `sensitive`. Example: ```yaml -write_only: true +write_only_legacy: true ``` +**Deprecated**: This field is deprecated and will be removed in a future release. + ### `ignore_read` If true, the provider sets the field's value in the resource state based only on the user's configuration. 
If false or unset, the provider sets the field's diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index f29a0328bfad..5468905cddae 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -617,7 +617,7 @@ func (r Resource) SensitiveProps() []*Type { func (r Resource) WriteOnlyProps() []*Type { props := r.AllNestedProperties(r.RootProperties()) return google.Select(props, func(p *Type) bool { - return p.WriteOnly + return p.WriteOnlyLegacy }) } diff --git a/mmv1/api/type.go b/mmv1/api/type.go index ca664c774a8c..341b31d44ee7 100644 --- a/mmv1/api/type.go +++ b/mmv1/api/type.go @@ -171,7 +171,9 @@ type Type struct { Sensitive bool `yaml:"sensitive,omitempty"` // Adds `Sensitive: true` to the schema - WriteOnly bool `yaml:"write_only,omitempty"` // Adds `WriteOnly: true` to the schema + // TODO: remove this field after all references are migrated + // see: https://github.com/GoogleCloudPlatform/magic-modules/pull/14933#pullrequestreview-3166578379 + WriteOnlyLegacy bool `yaml:"write_only_legacy,omitempty"` // Adds `WriteOnlyLegacy: true` to the schema // Does not set this value to the returned API value. Useful for fields // like secrets where the returned API value is not helpful. @@ -395,11 +397,11 @@ func (t *Type) Validate(rName string) { log.Fatalf("'default_value' and 'default_from_api' cannot be both set in resource %s", rName) } - if t.WriteOnly && (t.DefaultFromApi || t.Output) { + if t.WriteOnlyLegacy && (t.DefaultFromApi || t.Output) { log.Fatalf("Property %s cannot be write_only and default_from_api or output at the same time in resource %s", t.Name, rName) } - if t.WriteOnly && t.Sensitive { + if t.WriteOnlyLegacy && t.Sensitive { log.Fatalf("Property %s cannot be write_only and sensitive at the same time in resource %s", t.Name, rName) } @@ -750,7 +752,7 @@ func (t Type) WriteOnlyProperties() []*Type { } case t.IsA("NestedObject"): props = google.Select(t.UserProperties(), func(p *Type) bool { - return p.WriteOnly + return p.WriteOnlyLegacy }) case t.IsA("Map"): props = google.Reject(t.ValueType.WriteOnlyProperties(), func(p *Type) bool { @@ -1224,8 +1226,8 @@ func (t *Type) IsForceNew() bool { return t.Immutable } - // WriteOnly fields are never immutable - if t.WriteOnly { + // WriteOnlyLegacy fields are never immutable + if t.WriteOnlyLegacy { return false } diff --git a/mmv1/products/bigquerydatatransfer/Config.yaml b/mmv1/products/bigquerydatatransfer/Config.yaml index df88222fc7b7..873f4987a0a6 100644 --- a/mmv1/products/bigquerydatatransfer/Config.yaml +++ b/mmv1/products/bigquerydatatransfer/Config.yaml @@ -243,7 +243,7 @@ properties: type: String description: | The Secret Access Key of the AWS account transferring data from. 
- write_only: true + write_only_legacy: true at_least_one_of: - 'sensitive_params.0.secretAccessKeyWo' - 'sensitive_params.0.secretAccessKey' diff --git a/mmv1/products/monitoring/UptimeCheckConfig.yaml b/mmv1/products/monitoring/UptimeCheckConfig.yaml index 67f7fdd0e791..4d6ca99b5581 100644 --- a/mmv1/products/monitoring/UptimeCheckConfig.yaml +++ b/mmv1/products/monitoring/UptimeCheckConfig.yaml @@ -259,7 +259,7 @@ properties: - 'password' required_with: - 'http_check.0.auth_info.0.password_wo_version' - write_only: true + write_only_legacy: true - name: 'passwordWoVersion' type: String immutable: true diff --git a/mmv1/products/secretmanager/SecretVersion.yaml b/mmv1/products/secretmanager/SecretVersion.yaml index d3e0335ee2bd..41cd710ee90c 100644 --- a/mmv1/products/secretmanager/SecretVersion.yaml +++ b/mmv1/products/secretmanager/SecretVersion.yaml @@ -172,7 +172,7 @@ properties: - 'SecretDataWoVersion' conflicts: - 'payload.0.secretData' - write_only: true + write_only_legacy: true - name: 'SecretDataWoVersion' type: Integer default_value: 0 diff --git a/mmv1/templates/terraform/custom_flatten/dataplex_entry_aspects.go.tmpl b/mmv1/templates/terraform/custom_flatten/dataplex_entry_aspects.go.tmpl index de8dd922f1f9..fcd51234760c 100644 --- a/mmv1/templates/terraform/custom_flatten/dataplex_entry_aspects.go.tmpl +++ b/mmv1/templates/terraform/custom_flatten/dataplex_entry_aspects.go.tmpl @@ -15,7 +15,7 @@ func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.Reso transformed = append(transformed, map[string]interface{}{ {{- range $prop := $.ItemType.UserProperties }} - {{- if not (or $prop.IgnoreRead $prop.WriteOnly) }} + {{- if not (or $prop.IgnoreRead $prop.WriteOnlyLegacy) }} "{{ underscore $prop.Name }}": flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}{{$prop.TitlelizeProperty}}(original["{{ $prop.ApiName }}"], d, config), {{- end }} {{- end }} diff --git a/mmv1/templates/terraform/flatten_property_method.go.tmpl b/mmv1/templates/terraform/flatten_property_method.go.tmpl index 7c24debc4096..e36259baebb5 100644 --- a/mmv1/templates/terraform/flatten_property_method.go.tmpl +++ b/mmv1/templates/terraform/flatten_property_method.go.tmpl @@ -13,7 +13,7 @@ See the License for the specific language governing permissions and limitations under the License. 
*/ -}} {{- define "flattenPropertyMethod" }} -{{- if $.WriteOnly }} +{{- if $.WriteOnlyLegacy }} {{- else if and $.CustomFlatten (not $.ShouldIgnoreCustomFlatten) }} {{- $.CustomTemplate $.CustomFlatten false -}} {{- else -}} @@ -34,7 +34,7 @@ func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.Reso {{- end }} transformed := make(map[string]interface{}) {{- range $prop := $.UserProperties }} - {{- if $prop.WriteOnly }} + {{- if $prop.WriteOnlyLegacy }} {{- else if $prop.FlattenObject }} if {{ $prop.ApiName }} := flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}{{$prop.TitlelizeProperty}}(original["{{ $prop.ApiName }}"], d, config); {{ $prop.ApiName }} != nil { obj := {{ $prop.ApiName }}.([]interface{})[0] @@ -80,7 +80,7 @@ func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.Reso {{- end }} {{- range $prop := $.ItemType.UserProperties }} - {{- if not (or $prop.IgnoreRead $prop.WriteOnly) }} + {{- if not (or $prop.IgnoreRead $prop.WriteOnlyLegacy) }} "{{ underscore $prop.Name }}": flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}{{$prop.TitlelizeProperty}}(original["{{ $prop.ApiName }}"], d, config), {{- end }} {{- end }} diff --git a/mmv1/templates/terraform/nested_property_write_only_documentation.html.markdown.tmpl b/mmv1/templates/terraform/nested_property_write_only_documentation.html.markdown.tmpl index 4c4aa286648b..92e426750b56 100644 --- a/mmv1/templates/terraform/nested_property_write_only_documentation.html.markdown.tmpl +++ b/mmv1/templates/terraform/nested_property_write_only_documentation.html.markdown.tmpl @@ -3,7 +3,7 @@ {{ "" }} {{- if $.NestedProperties }} {{- range $np := $.NestedProperties }} - {{- if $np.WriteOnly }} + {{- if $np.WriteOnlyLegacy }} {{- trimTemplate "property_documentation.html.markdown.tmpl" $np -}} {{- end -}} {{- end -}} diff --git a/mmv1/templates/terraform/property_documentation.html.markdown.tmpl b/mmv1/templates/terraform/property_documentation.html.markdown.tmpl index a6f89d8bb946..ee1399b228ef 100644 --- a/mmv1/templates/terraform/property_documentation.html.markdown.tmpl +++ b/mmv1/templates/terraform/property_documentation.html.markdown.tmpl @@ -36,7 +36,7 @@ {{- if $.Sensitive }} **Note**: This property is sensitive and will not be displayed in the plan. {{- end }} - {{- if $.WriteOnly }} + {{- if $.WriteOnlyLegacy }} **Note**: This property is write-only and will not be read from the API. 
{{- end }} {{- if and (not $.FlattenObject) $.NestedProperties }} diff --git a/mmv1/templates/terraform/resource.html.markdown.tmpl b/mmv1/templates/terraform/resource.html.markdown.tmpl index ae1026a4b46f..b848ff6b1680 100644 --- a/mmv1/templates/terraform/resource.html.markdown.tmpl +++ b/mmv1/templates/terraform/resource.html.markdown.tmpl @@ -88,13 +88,13 @@ The following arguments are supported: {{ "" }} {{ "" }} {{- range $p := $.RootProperties }} - {{- if and $p.Required (not $p.WriteOnly) }} + {{- if and $p.Required (not $p.WriteOnlyLegacy) }} {{- trimTemplate "property_documentation.html.markdown.tmpl" $p -}} {{- end }} {{- end }} {{ "" }} {{- range $p := $.RootProperties }} - {{- if and (not $p.Required) (not $p.Output) (not $p.WriteOnly) }} + {{- if and (not $p.Required) (not $p.Output) (not $p.WriteOnlyLegacy) }} {{- trimTemplate "property_documentation.html.markdown.tmpl" $p -}} {{- end }} {{- end }} @@ -129,7 +129,7 @@ The following arguments are supported: The following write-only attributes are supported: {{ range $p := $.RootProperties }} - {{- if $p.WriteOnly }} + {{- if $p.WriteOnlyLegacy }} {{- trimTemplate "property_documentation.html.markdown.tmpl" $p }} {{- end}} {{- end }} diff --git a/mmv1/templates/terraform/schema_property.go.tmpl b/mmv1/templates/terraform/schema_property.go.tmpl index 180e49cf6312..b046c585f13c 100644 --- a/mmv1/templates/terraform/schema_property.go.tmpl +++ b/mmv1/templates/terraform/schema_property.go.tmpl @@ -161,7 +161,7 @@ Default value: {{ .ItemType.DefaultValue -}} {{ if .Sensitive -}} Sensitive: true, {{ end -}} -{{ if .WriteOnly -}} +{{ if .WriteOnlyLegacy -}} WriteOnly: true, {{ end -}} {{ if not (eq .DefaultValue nil ) -}} From fceafaffb872b7b57d00ffe3485440054142d93c Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Fri, 29 Aug 2025 17:23:25 -0700 Subject: [PATCH 112/201] Bumped version for GKE Hub Feature test to fix test failures (#15032) --- .../services/gkehub2/resource_gke_hub_feature_test.go.tmpl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl index 518347c9d53f..c1bb69f57339 100644 --- a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl +++ b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_feature_test.go.tmpl @@ -508,7 +508,7 @@ resource "google_gke_hub_feature" "feature" { location = "global" fleet_default_member_config { configmanagement { - version = "1.19.1" + version = "1.21.2" config_sync { source_format = "hierarchy" git { @@ -535,7 +535,7 @@ resource "google_gke_hub_feature" "feature" { location = "global" fleet_default_member_config { configmanagement { - version = "1.19.2" + version = "1.21.3" management = "MANAGEMENT_MANUAL" config_sync { enabled = true From b6cb99b6361868d2885c75566314192ebff32fd6 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Fri, 29 Aug 2025 17:26:24 -0700 Subject: [PATCH 113/201] Fixed typo in TestAccApigeeSecurityAction_apigeeSecurityActionFull config (#15030) --- .../services/apigee/resource_apigee_security_action_test.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_security_action_test.go b/mmv1/third_party/terraform/services/apigee/resource_apigee_security_action_test.go index cd4dd1e382b4..e18f05fa5a66 100644 --- 
a/mmv1/third_party/terraform/services/apigee/resource_apigee_security_action_test.go +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_security_action_test.go @@ -195,7 +195,7 @@ resource "time_sleep" "wait_60_seconds" { resource "google_project_service" "apigee" { project = google_project.project.project_id - service = "apigee.googleapis.com"" + service = "apigee.googleapis.com" depends_on = [time_sleep.wait_60_seconds] } @@ -237,7 +237,7 @@ resource "google_service_networking_connection" "apigee_vpc_connection" { resource "google_apigee_organization" "apigee_org" { analytics_region = "us-central1" - project_id = data.google_client_config.current.project + project_id = google_project.project.project_id authorized_network = google_compute_network.apigee_network.id depends_on = [google_service_networking_connection.apigee_vpc_connection] } From c02ebf90235f2f5b1a2d80b06ac83445eff24901 Mon Sep 17 00:00:00 2001 From: g-dreva Date: Tue, 2 Sep 2025 15:52:29 +0000 Subject: [PATCH 114/201] Adds UnitKind provider for SaasRuntime (#14910) --- mmv1/products/saasservicemgmt/UnitKind.yaml | 207 ++++++++++++++++++ .../saas_runtime_unit_kind_basic.tf.tmpl | 28 +++ ...source_saas_runtime_unit_kind_test.go.tmpl | 164 ++++++++++++++ 3 files changed, 399 insertions(+) create mode 100644 mmv1/products/saasservicemgmt/UnitKind.yaml create mode 100644 mmv1/templates/terraform/examples/saas_runtime_unit_kind_basic.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/saasruntime/resource_saas_runtime_unit_kind_test.go.tmpl diff --git a/mmv1/products/saasservicemgmt/UnitKind.yaml b/mmv1/products/saasservicemgmt/UnitKind.yaml new file mode 100644 index 000000000000..f89a69c4038c --- /dev/null +++ b/mmv1/products/saasservicemgmt/UnitKind.yaml @@ -0,0 +1,207 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: UnitKind +description: A UnitKind serves as a template or type definition for a group of Units. Units that belong to the same UnitKind are managed together, follow the same release model, and are typically updated together through rollouts. 
+base_url: projects/{{project}}/locations/{{location}}/unitKinds +update_mask: true +self_link: projects/{{project}}/locations/{{location}}/unitKinds/{{unit_kind_id}} +create_url: projects/{{project}}/locations/{{location}}/unitKinds?unitKindId={{unit_kind_id}} +update_verb: PATCH +id_format: projects/{{project}}/locations/{{location}}/unitKinds/{{unit_kind_id}} +import_format: + - projects/{{project}}/locations/{{location}}/unitKinds/{{unit_kind_id}} +min_version: beta +examples: + - name: saas_runtime_unit_kind_basic + primary_resource_id: "example" + min_version: 'beta' + vars: + saas_name: example-saas + cluster_unitkind_name: cluster-unitkind + app_unitkind_name: app-unitkind + test_env_vars: + project: 'PROJECT_NAME' + bootstrap_iam: + - member: "serviceAccount:service-{project_number}@gcp-sa-saasservicemgmt.iam.gserviceaccount.com" + role: "roles/saasservicemgmt.serviceAgent" +autogen_async: false +autogen_status: VW5pdEtpbmQ= +parameters: + - name: location + type: String + description: Resource ID segment making up resource `name`. It identifies the resource within its parent collection as described in https://google.aip.dev/122. + immutable: true + url_param_only: true + required: true + - name: unitKindId + type: String + description: The ID value for the new unit kind. + immutable: true + url_param_only: true + required: true +properties: + - name: annotations + type: KeyValueAnnotations + description: |- + Annotations is an unstructured key-value map stored with a resource that + may be set by external tools to store and retrieve arbitrary metadata. + They are not queryable and should be preserved when modifying objects. + + More info: https://kubernetes.io/docs/user-guide/annotations + - name: createTime + type: String + description: The timestamp when the resource was created. + output: true + - name: dependencies + type: Array + description: |- + List of other unit kinds that this release will depend on. Dependencies + will be automatically provisioned if not found. Maximum 10. + immutable: true + item_type: + type: NestedObject + properties: + - name: alias + type: String + description: An alias for the dependency. Used for input variable mapping. + required: true + - name: unitKind + type: String + description: The unit kind of the dependency. + immutable: true + required: true + - name: etag + type: String + description: |- + An opaque value that uniquely identifies a version or + generation of a resource. It can be used to confirm that the client + and server agree on the ordering of a resource being written. + output: true + - name: inputVariableMappings + type: Array + description: |- + List of inputVariables for this release that will either be retrieved from + a dependency’s outputVariables, or will be passed on to a dependency’s + inputVariables. Maximum 100. 
+ item_type: + type: NestedObject + properties: + - name: from + type: NestedObject + description: Output variables whose values will be passed on to dependencies + properties: + - name: dependency + type: String + description: Alias of the dependency that the outputVariable will pass its value to + required: true + - name: outputVariable + type: String + description: Name of the outputVariable on the dependency + required: true + - name: to + type: NestedObject + description: Input variables whose values will be passed on to dependencies + properties: + - name: dependency + type: String + description: Alias of the dependency that the inputVariable will pass its value to + required: true + - name: ignoreForLookup + type: Boolean + description: Tells SaaS Runtime if this mapping should be used during lookup or not + - name: inputVariable + type: String + description: Name of the inputVariable on the dependency + required: true + - name: variable + type: String + description: name of the variable + required: true + - name: labels + type: KeyValueLabels + description: |- + The labels on the resource, which can be used for categorization. + similar to Kubernetes resource labels. + - name: name + type: String + description: |- + Identifier. The resource name (full URI of the resource) following the standard naming + scheme: + + "projects/{project}/locations/{location}/unitKinds/{unitKind}" + output: true + - name: outputVariableMappings + type: Array + description: |- + List of outputVariables for this unit kind will be passed to this unit's + outputVariables. Maximum 100. + item_type: + type: NestedObject + properties: + - name: from + type: NestedObject + description: Output variables whose values will be passed on to dependencies + properties: + - name: dependency + type: String + description: Alias of the dependency that the outputVariable will pass its value to + required: true + - name: outputVariable + type: String + description: Name of the outputVariable on the dependency + required: true + - name: to + type: NestedObject + description: Input variables whose values will be passed on to dependencies + properties: + - name: dependency + type: String + description: Alias of the dependency that the inputVariable will pass its value to + required: true + - name: ignoreForLookup + type: Boolean + description: Tells SaaS Runtime if this mapping should be used during lookup or not + - name: inputVariable + type: String + description: Name of the inputVariable on the dependency + required: true + - name: variable + type: String + description: name of the variable + required: true + - name: saas + type: String + description: |- + A reference to the Saas that defines the product (managed service) that + the producer wants to manage with SaaS Runtime. Part of the SaaS Runtime + common data model. Immutable once set. + immutable: true + required: true + - name: uid + type: String + description: |- + The unique identifier of the resource. UID is unique in the time + and space for this resource within the scope of the service. It is + typically generated by the server on successful creation of a resource + and must not be changed. UID is used to uniquely identify resources + with resource name reuses. This should be a UUID4. + output: true + - name: updateTime + type: String + description: |- + The timestamp when the resource was last updated. Any + change to the resource made by users must refresh this value. + Changes to a resource made by the service should refresh this value. 
+ output: true diff --git a/mmv1/templates/terraform/examples/saas_runtime_unit_kind_basic.tf.tmpl b/mmv1/templates/terraform/examples/saas_runtime_unit_kind_basic.tf.tmpl new file mode 100644 index 000000000000..169b54e29d46 --- /dev/null +++ b/mmv1/templates/terraform/examples/saas_runtime_unit_kind_basic.tf.tmpl @@ -0,0 +1,28 @@ +resource "google_saas_runtime_saas" "example_saas" { + provider = google-beta + saas_id = "{{index $.Vars "saas_name"}}" + location = "global" + + locations { + name = "us-central1" + } +} + +resource "google_saas_runtime_unit_kind" "cluster_unit_kind" { + provider = google-beta + location = "global" + unit_kind_id = "{{index $.Vars "cluster_unitkind_name"}}" + saas = google_saas_runtime_saas.example_saas.id +} + +resource "google_saas_runtime_unit_kind" "{{$.PrimaryResourceId}}" { + provider = google-beta + location = "global" + unit_kind_id = "{{index $.Vars "app_unitkind_name"}}" + saas = google_saas_runtime_saas.example_saas.id + + dependencies { + unit_kind = google_saas_runtime_unit_kind.cluster_unit_kind.id + alias = "cluster" + } +} diff --git a/mmv1/third_party/terraform/services/saasruntime/resource_saas_runtime_unit_kind_test.go.tmpl b/mmv1/third_party/terraform/services/saasruntime/resource_saas_runtime_unit_kind_test.go.tmpl new file mode 100644 index 000000000000..5fab9a00d07e --- /dev/null +++ b/mmv1/third_party/terraform/services/saasruntime/resource_saas_runtime_unit_kind_test.go.tmpl @@ -0,0 +1,164 @@ +package saasruntime_test + +{{ if ne $.TargetVersionName `ga` -}} + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccSaasRuntimeUnitKind_update(t *testing.T) { + t.Parallel() + acctest.BootstrapIamMembers(t, []acctest.IamMember{ + { + Member: "serviceAccount:service-{project_number}@gcp-sa-saasservicemgmt.iam.gserviceaccount.com", + Role: "roles/saasservicemgmt.serviceAgent", + }, + }) + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccSaasRuntimeUnitKind_basic(context), + }, + { + ResourceName: "google_saas_runtime_unit_kind.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "terraform_labels", "unit_kind_id"}, + }, + { + Config: testAccSaasRuntimeUnitKind_update(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_saas_runtime_unit_kind.example", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_saas_runtime_unit_kind.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "terraform_labels", "unit_kind_id"}, + }, + }, + }) +} + +func testAccSaasRuntimeUnitKind_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_saas_runtime_saas" "example_saas" { + provider = google-beta + saas_id = "tf-test-example-saas%{random_suffix}" + location = "global" + + locations { + name = "us-central1" + } +} + +resource "google_saas_runtime_unit_kind" "cluster_unit_kind" { + provider = google-beta + location = "global" + unit_kind_id = 
"tf-test-cluster-unitkind%{random_suffix}" + saas = google_saas_runtime_saas.example_saas.id +} + +resource "google_saas_runtime_unit_kind" "example" { + provider = google-beta + location = "global" + unit_kind_id = "tf-test-app-unitkind%{random_suffix}" + saas = google_saas_runtime_saas.example_saas.id + + dependencies { + unit_kind = google_saas_runtime_unit_kind.cluster_unit_kind.id + alias = "cluster" + } +} +`, context) +} + +func testAccSaasRuntimeUnitKind_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_saas_runtime_saas" "example_saas" { + provider = google-beta + saas_id = "tf-test-example-saas%{random_suffix}" + location = "global" + + locations { + name = "us-central1" + } +} + +resource "google_saas_runtime_unit_kind" "cluster_unit_kind" { + provider = google-beta + location = "global" + unit_kind_id = "tf-test-cluster-unitkind%{random_suffix}" + saas = google_saas_runtime_saas.example_saas.id +} + +resource "google_saas_runtime_unit_kind" "example" { + provider = google-beta + location = "global" + unit_kind_id = "tf-test-app-unitkind%{random_suffix}" + saas = google_saas_runtime_saas.example_saas.id + + dependencies { + unit_kind = google_saas_runtime_unit_kind.cluster_unit_kind.id + alias = "cluster" + } + + input_variable_mappings { + variable = "cluster_endpoint" + from { + dependency = "cluster" + output_variable = "endpoint" + } + } + + input_variable_mappings { + variable = "tenant_project_number" + to { + dependency = "cluster" + input_variable = "tenant_project_number" + ignore_for_lookup = true + } + } + + input_variable_mappings { + variable = "tenant_project_id" + to { + dependency = "cluster" + input_variable = "tenant_project_id" + } + } + + output_variable_mappings { + variable = "app_cluster_endpoint" + from { + dependency = "cluster" + output_variable = "endpoint" + } + } + + labels = { + "label-one": "foo" + } + + annotations = { + "annotation-one": "bar" + } +} +`, context) +} +{{- end }} From 8e982f5c9ea4bd8f37a6e4bfd829d0c6f13a8169 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Tue, 2 Sep 2025 09:11:19 -0700 Subject: [PATCH 115/201] Marked osconfigv2_policy_orchestrator example as beta-only (#15033) --- .../osconfigv2/PolicyOrchestratorForFolder.yaml | 1 + ...2_policy_orchestrator_for_folder_basic.tf.tmpl | 15 +++++++++++---- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/mmv1/products/osconfigv2/PolicyOrchestratorForFolder.yaml b/mmv1/products/osconfigv2/PolicyOrchestratorForFolder.yaml index f695f02b70b4..974ad2fccfd5 100644 --- a/mmv1/products/osconfigv2/PolicyOrchestratorForFolder.yaml +++ b/mmv1/products/osconfigv2/PolicyOrchestratorForFolder.yaml @@ -41,6 +41,7 @@ examples: zone: 'ZONE' external_providers: - "time" + min_version: beta autogen_async: true async: operation: diff --git a/mmv1/templates/terraform/examples/osconfigv2_policy_orchestrator_for_folder_basic.tf.tmpl b/mmv1/templates/terraform/examples/osconfigv2_policy_orchestrator_for_folder_basic.tf.tmpl index 0b39d6c19d12..54b416d89479 100644 --- a/mmv1/templates/terraform/examples/osconfigv2_policy_orchestrator_for_folder_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/osconfigv2_policy_orchestrator_for_folder_basic.tf.tmpl @@ -1,17 +1,20 @@ resource "google_folder" "my_folder" { + provider = google-beta display_name = "{{index $.Vars "folder"}}" parent = "organizations/{{index $.TestEnvVars "org_id"}}" deletion_protection = false } resource "google_folder_service_identity" "osconfig_sa" { - folder = 
google_folder.my_folder.folder_id - service = "osconfig.googleapis.com" + provider = google-beta + folder = google_folder.my_folder.folder_id + service = "osconfig.googleapis.com" } resource "google_folder_service_identity" "ripple_sa" { - folder = google_folder.my_folder.folder_id - service = "progressiverollout.googleapis.com" + provider = google-beta + folder = google_folder.my_folder.folder_id + service = "progressiverollout.googleapis.com" } resource "time_sleep" "wait_30_sec" { @@ -23,6 +26,7 @@ resource "time_sleep" "wait_30_sec" { } resource "google_folder_iam_member" "iam_osconfig_service_agent" { + provider = google-beta depends_on = [time_sleep.wait_30_sec] folder = google_folder.my_folder.folder_id role = "roles/osconfig.serviceAgent" @@ -30,6 +34,7 @@ resource "google_folder_iam_member" "iam_osconfig_service_agent" { } resource "google_folder_iam_member" "iam_osconfig_rollout_service_agent" { + provider = google-beta depends_on = [google_folder_iam_member.iam_osconfig_service_agent] folder = google_folder.my_folder.folder_id role = "roles/osconfig.rolloutServiceAgent" @@ -37,6 +42,7 @@ resource "google_folder_iam_member" "iam_osconfig_rollout_service_agent" { } resource "google_folder_iam_member" "iam_progressiverollout_service_agent" { + provider = google-beta depends_on = [google_folder_iam_member.iam_osconfig_rollout_service_agent] folder = google_folder.my_folder.folder_id role = "roles/progressiverollout.serviceAgent" @@ -49,6 +55,7 @@ resource "time_sleep" "wait_3_min" { } resource "google_os_config_v2_policy_orchestrator_for_folder" "{{$.PrimaryResourceId}}" { + provider = google-beta depends_on = [time_sleep.wait_3_min] policy_orchestrator_id = "{{index $.Vars "policy_orchestrator_id"}}" From 196839f06b3024f7e44ddbbdfb8546dda75a2e16 Mon Sep 17 00:00:00 2001 From: Cameron Thornton Date: Tue, 2 Sep 2025 11:51:39 -0500 Subject: [PATCH 116/201] Skip openidconnect call for user email if universe_domain detected (#15009) --- mmv1/third_party/terraform/transport/config.go.tmpl | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/transport/config.go.tmpl b/mmv1/third_party/terraform/transport/config.go.tmpl index 15deef0d53fb..cb568ca4d642 100644 --- a/mmv1/third_party/terraform/transport/config.go.tmpl +++ b/mmv1/third_party/terraform/transport/config.go.tmpl @@ -1520,6 +1520,12 @@ func ConfigureBasePaths(c *Config) { } func GetCurrentUserEmail(config *Config, userAgent string) (string, error) { + ud := config.UniverseDomain + if ud != "" && ud != "googleapis.com" { + log.Printf("[INFO] Configured universe domain detected. Skipping user email retrieval.") + return "", nil + } + // When environment variables UserProjectOverride and BillingProject are set for the provider, // the header X-Goog-User-Project is set for the API requests. // But it causes an error when calling GetCurrentUserEmail. Set the project to be "NO_BILLING_PROJECT_OVERRIDE". 
@@ -1528,9 +1534,10 @@ func GetCurrentUserEmail(config *Config, userAgent string) (string, error) { // See https://github.com/golang/oauth2/issues/306 for a recommendation to do this from a Go maintainer // URL retrieved from https://accounts.google.com/.well-known/openid-configuration res, err := SendRequest(SendRequestOptions{ - Config: config, - Method: "GET", + Config: config, + Method: "GET", Project: "NO_BILLING_PROJECT_OVERRIDE", + // URL does not need to be universe domain-aware since we return early for non-GDU universes RawURL: "https://openidconnect.googleapis.com/v1/userinfo", UserAgent: userAgent, }) From 6b83964f2528554b0e60f526bf308ca50085b5f6 Mon Sep 17 00:00:00 2001 From: VeraQin <31418633+VeraQin@users.noreply.github.com> Date: Tue, 2 Sep 2025 10:07:07 -0700 Subject: [PATCH 117/201] Fix google_container_cluster test failing at transparent_hugepage (#15007) --- .../container/resource_container_cluster_test.go.tmpl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index 9eef6abaf583..be38defe183f 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -8546,8 +8546,8 @@ resource "google_container_cluster" "with_node_config_kubelet_config_settings_in min_node_cpus = 1 } linux_node_config { - transparent_hugepage_defrag = %s - transparent_hugepage_enabled = %s + transparent_hugepage_defrag = "%s" + transparent_hugepage_enabled = "%s" } } } From faa711857232f70c9b2c2d012047656fe3ce5095 Mon Sep 17 00:00:00 2001 From: gurusai-voleti Date: Tue, 2 Sep 2025 22:42:15 +0530 Subject: [PATCH 118/201] feat: (storage) added field aws_s3_compatible_data_source for google_storage_transfer_job resource (#14835) --- .../resource_storage_transfer_job.go | 142 +++++++++++ .../resource_storage_transfer_job_meta.yaml | 8 + .../resource_storage_transfer_job_test.go | 228 +++++++++++------- .../docs/r/storage_transfer_job.html.markdown | 25 ++ 4 files changed, 321 insertions(+), 82 deletions(-) diff --git a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go index 41bd024723ce..080ea04e3f01 100644 --- a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go +++ b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job.go @@ -83,6 +83,7 @@ var ( "transfer_spec.0.azure_blob_storage_data_source", "transfer_spec.0.posix_data_source", "transfer_spec.0.hdfs_data_source", + "transfer_spec.0.aws_s3_compatible_data_source", } transferSpecDataSinkKeys = []string{ "transfer_spec.0.gcs_data_sink", @@ -293,6 +294,14 @@ func ResourceStorageTransferJob() *schema.Resource { ExactlyOneOf: transferSpecDataSourceKeys, Description: `An HDFS Storage data source.`, }, + "aws_s3_compatible_data_source": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: awsS3CompatibleDataSchema(), + ExactlyOneOf: transferSpecDataSourceKeys, + Description: `An AWS S3 Compatible data source.`, + }, }, }, Description: `Transfer specification.`, @@ -887,6 +896,69 @@ func azureBlobStorageDataSchema() *schema.Resource { } } +func awsS3CompatibleDataSchema() *schema.Resource { + return &schema.Resource{ + Schema: 
map[string]*schema.Schema{ + "bucket_name": { + Type: schema.TypeString, + Required: true, + Description: `Name of the bucket.`, + }, + "path": { + Type: schema.TypeString, + Optional: true, + Description: `Specifies the path to transfer objects.`, + }, + "endpoint": { + Type: schema.TypeString, + Required: true, + Description: `Endpoint of the storage service.`, + }, + "region": { + Type: schema.TypeString, + Optional: true, + Description: `Specifies the region to sign requests with. This can be left blank if requests should be signed with an empty region.`, + }, + "s3_metadata": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: `S3 compatible metadata.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "auth_method": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: verify.ValidateEnum([]string{"AUTH_METHOD_UNSPECIFIED", "AUTH_METHOD_AWS_SIGNATURE_V4", "AUTH_METHOD_AWS_SIGNATURE_V2"}), + Description: `Authentication and authorization method used by the storage service. When not specified, Transfer Service will attempt to determine right auth method to use.`, + }, + "request_model": { + Type: schema.TypeString, + Optional: true, + Default: "REQUEST_MODEL_VIRTUAL_HOSTED_STYLE", + ValidateFunc: verify.ValidateEnum([]string{"REQUEST_MODEL_VIRTUAL_HOSTED_STYLE", "REQUEST_MODEL_UNSPECIFIED", "REQUEST_MODEL_PATH_STYLE"}), + Description: `API request model used to call the storage service. When not specified, the default value of RequestModel REQUEST_MODEL_VIRTUAL_HOSTED_STYLE is used.`, + }, + "protocol": { + Type: schema.TypeString, + Optional: true, + Default: "NETWORK_PROTOCOL_HTTPS", + ValidateFunc: verify.ValidateEnum([]string{"NETWORK_PROTOCOL_UNSPECIFIED", "NETWORK_PROTOCOL_HTTPS", "NETWORK_PROTOCOL_HTTP"}), + Description: `The network protocol of the agent. When not specified, the default value of NetworkProtocol NETWORK_PROTOCOL_HTTPS is used.`, + }, + "list_api": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: verify.ValidateEnum([]string{"LIST_API_UNSPECIFIED", "LIST_OBJECTS_V2", "LIST_OBJECTS"}), + Description: `The Listing API to use for discovering objects. 
When not specified, Transfer Service will attempt to determine the right API to use.`, + }, + }, + }, + }, + }, + } +} + func diffSuppressEmptyStartTimeOfDay(k, old, new string, d *schema.ResourceData) bool { return k == "schedule.0.start_time_of_day.#" && old == "1" && new == "0" } @@ -1501,6 +1573,73 @@ func flattenHdfsData(hdfsData *storagetransfer.HdfsData) []map[string]interface{ return []map[string]interface{}{data} } +func expandAwsS3CompatibleData(awsS3CompatibleDataSchema []interface{}) *storagetransfer.AwsS3CompatibleData { + if len(awsS3CompatibleDataSchema) == 0 || awsS3CompatibleDataSchema[0] == nil { + return nil + } + + awsS3CompatibleData := awsS3CompatibleDataSchema[0].(map[string]interface{}) + result := &storagetransfer.AwsS3CompatibleData{ + BucketName: awsS3CompatibleData["bucket_name"].(string), + Path: awsS3CompatibleData["path"].(string), + Endpoint: awsS3CompatibleData["endpoint"].(string), + Region: awsS3CompatibleData["region"].(string), + } + + if v, ok := awsS3CompatibleData["s3_metadata"].([]interface{}); ok { + result.S3Metadata = expandS3Metadata(v) + } + return result +} + +func expandS3Metadata(s3Metadata []interface{}) *storagetransfer.S3CompatibleMetadata { + if len(s3Metadata) == 0 || s3Metadata[0] == nil { + return nil + } + metadata := s3Metadata[0].(map[string]interface{}) + data := &storagetransfer.S3CompatibleMetadata{ + AuthMethod: metadata["auth_method"].(string), + ListApi: metadata["list_api"].(string), + RequestModel: metadata["request_model"].(string), + Protocol: metadata["protocol"].(string), + } + return data +} + +func flattenAwsS3CompatibleData(awsS3CompatibleData *storagetransfer.AwsS3CompatibleData, d *schema.ResourceData) []map[string]interface{} { + data := map[string]interface{}{ + "bucket_name": awsS3CompatibleData.BucketName, + "path": awsS3CompatibleData.Path, + "endpoint": awsS3CompatibleData.Endpoint, + "region": awsS3CompatibleData.Region, + } + if awsS3CompatibleData.S3Metadata != nil { + data["s3_metadata"] = flattenS3MetaData(awsS3CompatibleData.S3Metadata, d) + } + + return []map[string]interface{}{data} +} + +func flattenS3MetaData(s3MetaData *storagetransfer.S3CompatibleMetadata, d *schema.ResourceData) []map[string]interface{} { + s3Metadata := map[string]interface{}{ + "protocol": s3MetaData.Protocol, + "request_model": s3MetaData.RequestModel, + } + if d.Get("transfer_spec.0.aws_s3_compatible_data_source.0.s3_metadata.0.auth_method") == "AUTH_METHOD_UNSPECIFIED" { + s3Metadata["auth_method"] = d.Get("transfer_spec.0.aws_s3_compatible_data_source.0.s3_metadata.0.auth_method") + } else { + s3Metadata["auth_method"] = s3MetaData.AuthMethod + } + + if d.Get("transfer_spec.0.aws_s3_compatible_data_source.0.s3_metadata.0.list_api") == "LIST_API_UNSPECIFIED" { + s3Metadata["list_api"] = d.Get("transfer_spec.0.aws_s3_compatible_data_source.0.s3_metadata.0.list_api") + } else { + s3Metadata["list_api"] = s3MetaData.ListApi + } + + return []map[string]interface{}{s3Metadata} +} + func expandAzureCredentials(azureCredentials []interface{}) *storagetransfer.AzureCredentials { if len(azureCredentials) == 0 || azureCredentials[0] == nil { return nil @@ -1665,6 +1804,7 @@ func expandTransferSpecs(transferSpecs []interface{}) *storagetransfer.TransferS AzureBlobStorageDataSource: expandAzureBlobStorageData(transferSpec["azure_blob_storage_data_source"].([]interface{})), PosixDataSource: expandPosixData(transferSpec["posix_data_source"].([]interface{})), HdfsDataSource: expandHdfsData(transferSpec["hdfs_data_source"].([]interface{})), 
+ AwsS3CompatibleDataSource: expandAwsS3CompatibleData(transferSpec["aws_s3_compatible_data_source"].([]interface{})), } } @@ -1705,6 +1845,8 @@ func flattenTransferSpec(transferSpec *storagetransfer.TransferSpec, d *schema.R data["posix_data_source"] = flattenPosixData(transferSpec.PosixDataSource) } else if transferSpec.HdfsDataSource != nil { data["hdfs_data_source"] = flattenHdfsData(transferSpec.HdfsDataSource) + } else if transferSpec.AwsS3CompatibleDataSource != nil { + data["aws_s3_compatible_data_source"] = flattenAwsS3CompatibleData(transferSpec.AwsS3CompatibleDataSource, d) } return []map[string]interface{}{data} diff --git a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_meta.yaml b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_meta.yaml index 8d14a6504710..b8ebde1201b0 100644 --- a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_meta.yaml +++ b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_meta.yaml @@ -57,6 +57,14 @@ fields: - field: 'transfer_spec.azure_blob_storage_data_source.federated_identity_config.tenant_id' - field: 'transfer_spec.azure_blob_storage_data_source.path' - field: 'transfer_spec.azure_blob_storage_data_source.storage_account' + - field: 'transfer_spec.aws_s3_compatible_data_source.bucket_name' + - field: 'transfer_spec.aws_s3_compatible_data_source.path' + - field: 'transfer_spec.aws_s3_compatible_data_source.region' + - field: 'transfer_spec.aws_s3_compatible_data_source.endpoint' + - field: 'transfer_spec.aws_s3_compatible_data_source.s3_metadata.request_model' + - field: 'transfer_spec.aws_s3_compatible_data_source.s3_metadata.protocol' + - field: 'transfer_spec.aws_s3_compatible_data_source.s3_metadata.list_api' + - field: 'transfer_spec.aws_s3_compatible_data_source.s3_metadata.auth_method' - field: 'transfer_spec.gcs_data_sink.bucket_name' - field: 'transfer_spec.gcs_data_sink.path' - field: 'transfer_spec.gcs_data_source.bucket_name' diff --git a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_test.go b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_test.go index 3dc2058b0e31..8a3c2a7091b0 100644 --- a/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_test.go +++ b/mmv1/third_party/terraform/services/storagetransfer/resource_storage_transfer_job_test.go @@ -624,13 +624,11 @@ func TestAccStorageTransferJob_withServiceAccount(t *testing.T) { }) } -func TestAccStorageTransferJob_transferUpdateToEmptyString(t *testing.T) { +func TestAccStorageTransferJob_awsS3CompatibleDataSource(t *testing.T) { t.Parallel() testDataSourceBucketName := acctest.RandString(t, 10) testDataSinkName := acctest.RandString(t, 10) - testTransferJobDescription := acctest.RandString(t, 10) - testTransferJobName := fmt.Sprintf("tf-test-transfer-job-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, @@ -638,13 +636,20 @@ func TestAccStorageTransferJob_transferUpdateToEmptyString(t *testing.T) { CheckDestroy: testAccStorageTransferJobDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccStorageTransferJob_transferJobGcsPath(envvar.GetTestProjectFromEnv(), testDataSourceBucketName, testDataSinkName, testTransferJobDescription, testTransferJobName, "bar/"), + Config: testAccStorageTransferJob_awsS3CompatibleDataSource(envvar.GetTestProjectFromEnv(), 
testDataSourceBucketName, testDataSinkName), }, { - Config: testAccStorageTransferJob_transferJobGcsPath(envvar.GetTestProjectFromEnv(), testDataSourceBucketName, testDataSinkName, testTransferJobDescription, testTransferJobName, ""), + ResourceName: "google_storage_transfer_job.transfer_job", + ImportState: true, + ImportStateVerify: true, }, { - Config: testAccStorageTransferJob_transferJobGcsPath(envvar.GetTestProjectFromEnv(), testDataSourceBucketName, testDataSinkName, testTransferJobDescription, testTransferJobName, "bar/"), + Config: testAccStorageTransferJob_updateAwsS3CompatibleDataSource(envvar.GetTestProjectFromEnv(), testDataSourceBucketName, testDataSinkName), + }, + { + ResourceName: "google_storage_transfer_job.transfer_job", + ImportState: true, + ImportStateVerify: true, }, }, }) @@ -988,7 +993,7 @@ resource "google_storage_transfer_job" "transfer_job" { } repeat_interval = "604800s" } - + logging_config { log_actions = [ "COPY", @@ -1163,7 +1168,7 @@ resource "google_storage_transfer_job" "transfer_job" { } repeat_interval = "604800s" } - + logging_config { log_actions = [ "COPY", @@ -1189,7 +1194,7 @@ func testAccStorageTransferJob_transferJobName(project string, dataSourceBucketN data "google_storage_transfer_project_service_account" "default" { project = "%s" } - + resource "google_storage_bucket" "data_source" { name = "%s" project = "%s" @@ -1197,13 +1202,13 @@ func testAccStorageTransferJob_transferJobName(project string, dataSourceBucketN force_destroy = true uniform_bucket_level_access = true } - + resource "google_storage_bucket_iam_member" "data_source" { bucket = google_storage_bucket.data_source.name role = "roles/storage.admin" member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" } - + resource "google_storage_bucket" "data_sink" { name = "%s" project = "%s" @@ -1211,18 +1216,18 @@ func testAccStorageTransferJob_transferJobName(project string, dataSourceBucketN force_destroy = true uniform_bucket_level_access = true } - + resource "google_storage_bucket_iam_member" "data_sink" { bucket = google_storage_bucket.data_sink.name role = "roles/storage.admin" member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" } - + resource "google_storage_transfer_job" "transfer_job" { name = "transferJobs/%s" description = "%s" project = "%s" - + transfer_spec { gcs_data_source { bucket_name = google_storage_bucket.data_source.name @@ -1233,7 +1238,7 @@ func testAccStorageTransferJob_transferJobName(project string, dataSourceBucketN path = "foo/bar/" } } - + schedule { schedule_start_date { year = 2018 @@ -1253,7 +1258,7 @@ func testAccStorageTransferJob_transferJobName(project string, dataSourceBucketN } repeat_interval = "604800s" } - + depends_on = [ google_storage_bucket_iam_member.data_source, google_storage_bucket_iam_member.data_sink, @@ -1438,7 +1443,7 @@ resource "google_storage_transfer_job" "transfer_job" { path = "foo/bar/" } } - + logging_config { enable_on_prem_gcs_transfer_logs = true } @@ -1573,7 +1578,7 @@ resource "google_storage_transfer_job" "transfer_job" { bucket_name = google_storage_bucket.data_source.name } } - + logging_config { enable_on_prem_gcs_transfer_logs = false } @@ -2206,7 +2211,7 @@ resource "google_storage_transfer_job" "transfer_job" { last_modified_since = "2020-01-01T00:00:00Z" last_modified_before = "2020-01-01T00:00:00Z" exclude_prefixes = [ - "a/b/c", + "a/b/c", ] include_prefixes = [ "a/b" @@ -2292,7 +2297,7 @@ resource "google_storage_transfer_job" 
"transfer_job" { last_modified_since = "2020-01-01T00:00:00Z" last_modified_before = "2020-01-01T00:00:00Z" exclude_prefixes = [ - "a/b/c", + "a/b/c", ] include_prefixes = [ "a/b" @@ -2477,82 +2482,141 @@ resource "google_storage_transfer_job" "transfer_job" { `, project, dataSourceBucketName, project, dataSinkBucketName, project, transferJobDescription, project) } -func testAccStorageTransferJob_transferJobGcsPath(project string, dataSourceBucketName string, dataSinkBucketName string, transferJobDescription string, testTransferJobName string, gcsPath string) string { +func testAccStorageTransferJob_awsS3CompatibleDataSource(project string, dataSourceBucketName string, dataSinkBucketName string) string { return fmt.Sprintf(` - data "google_storage_transfer_project_service_account" "default" { - project = "%s" - } +data "google_storage_transfer_project_service_account" "default" { + project = "%s" +} - resource "google_storage_bucket" "data_source" { - name = "%s" - project = "%s" - location = "US" - force_destroy = true - uniform_bucket_level_access = true - } +resource "google_storage_bucket" "data_sink" { + name = "%s" + project = "%s" + location = "US" + force_destroy = true +} - resource "google_storage_bucket_iam_member" "data_source" { - bucket = google_storage_bucket.data_source.name - role = "roles/storage.admin" - member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" - } +resource "google_storage_bucket_iam_member" "data_sink" { + bucket = google_storage_bucket.data_sink.name + role = "roles/storage.admin" + member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" +} - resource "google_storage_bucket" "data_sink" { - name = "%s" - project = "%s" - location = "US" - force_destroy = true - uniform_bucket_level_access = true +resource "google_storage_transfer_job" "transfer_job" { + description = "A test job using an aws s3 compatible data source" + project = "%s" + + transfer_spec { + aws_s3_compatible_data_source { + bucket_name = "%s" + path = "foo/bar/" + endpoint = "https://sample.r2.cloudflarestorage.com/" + region = "enam" + s3_metadata { + auth_method = "AUTH_METHOD_AWS_SIGNATURE_V4" + list_api = "LIST_OBJECTS_V2" + protocol = "NETWORK_PROTOCOL_HTTP" + request_model = "REQUEST_MODEL_PATH_STYLE" + } + } + gcs_data_sink { + bucket_name = google_storage_bucket.data_sink.name + path = "foo/bar/" + } } - resource "google_storage_bucket_iam_member" "data_sink" { - bucket = google_storage_bucket.data_sink.name - role = "roles/storage.admin" - member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" + schedule { + schedule_start_date { + year = 2018 + month = 10 + day = 1 + } + schedule_end_date { + year = 2019 + month = 10 + day = 1 + } + start_time_of_day { + hours = 0 + minutes = 30 + seconds = 0 + nanos = 0 + } + repeat_interval = "604800s" } - resource "google_storage_transfer_job" "transfer_job" { - name = "transferJobs/%s" - description = "%s" - project = "%s" + depends_on = [ + google_storage_bucket_iam_member.data_sink, + ] +} +`, project, dataSinkBucketName, project, project, dataSourceBucketName) +} - transfer_spec { - gcs_data_source { - bucket_name = google_storage_bucket.data_source.name - path = "foo/" - } - gcs_data_sink { - bucket_name = google_storage_bucket.data_sink.name - path = "%s" - } - } +func testAccStorageTransferJob_updateAwsS3CompatibleDataSource(project string, dataSourceBucketName string, dataSinkBucketName string) string { + return 
fmt.Sprintf(` +data "google_storage_transfer_project_service_account" "default" { + project = "%s" +} - schedule { - schedule_start_date { - year = 2018 - month = 10 - day = 1 - } - schedule_end_date { - year = 2019 - month = 10 - day = 1 - } - start_time_of_day { - hours = 0 - minutes = 30 - seconds = 0 - nanos = 0 +resource "google_storage_bucket" "data_sink" { + name = "%s" + project = "%s" + location = "US" + force_destroy = true +} + +resource "google_storage_bucket_iam_member" "data_sink" { + bucket = google_storage_bucket.data_sink.name + role = "roles/storage.admin" + member = "serviceAccount:${data.google_storage_transfer_project_service_account.default.email}" +} + +resource "google_storage_transfer_job" "transfer_job" { + description = "A test job using an aws s3 compatible data source" + project = "%s" + + transfer_spec { + aws_s3_compatible_data_source { + bucket_name = "%s" + path = "foo/bar/foo/" + endpoint = "https://sample-update.r2.cloudflarestorage.com/" + s3_metadata { + auth_method = "AUTH_METHOD_AWS_SIGNATURE_V2" + list_api = "LIST_OBJECTS" + protocol = "NETWORK_PROTOCOL_HTTPS" + request_model = "REQUEST_MODEL_VIRTUAL_HOSTED_STYLE" } - repeat_interval = "604800s" } + gcs_data_sink { + bucket_name = google_storage_bucket.data_sink.name + path = "foo/bar/" + } + } - depends_on = [ - google_storage_bucket_iam_member.data_source, - google_storage_bucket_iam_member.data_sink, - ] + schedule { + schedule_start_date { + year = 2018 + month = 10 + day = 1 + } + schedule_end_date { + year = 2019 + month = 10 + day = 1 + } + start_time_of_day { + hours = 0 + minutes = 30 + seconds = 0 + nanos = 0 + } + repeat_interval = "604800s" } - `, project, dataSourceBucketName, project, dataSinkBucketName, project, testTransferJobName, transferJobDescription, project, gcsPath) + + depends_on = [ + google_storage_bucket_iam_member.data_sink, + ] +} +`, project, dataSinkBucketName, project, project, dataSourceBucketName) } func testAccStorageTransferJob_withServiceAccount(description, dataSourceBucketName, dataSinkBucketName, serviceAccountId, project string) string { diff --git a/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown b/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown index 65bd53d0a055..cea989eb0021 100644 --- a/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/storage_transfer_job.html.markdown @@ -174,6 +174,8 @@ The following arguments are supported: * `hdfs_data_source` - (Optional) An HDFS data source. Structure [documented below](#nested_hdfs_data_source). +* `aws_s3_compatible_data_source` - (Optional) An AWS S3 Compatible data source. Structure [documented below](#nested_aws_s3_compatible_data_source). + The `replication_spec` block supports: * `gcs_data_sink` - (Optional) A Google Cloud Storage data sink. Structure [documented below](#nested_gcs_data_sink). @@ -252,6 +254,29 @@ A duration in seconds with up to nine fractional digits, terminated by 's'. Exam * `path` - (Required) Root directory path to the filesystem. +The `aws_s3_compatible_data_source` block supports: + +* `bucket_name` - (Required) Name of the bucket. + +* `path` - (Optional) Specifies the path to transfer objects. + +* `endpoint` - (Required) Endpoint of the storage service. + +* `region` - (Optional) Specifies the region to sign requests with. This can be left blank if requests should be signed with an empty region. + +* `s3_metadata` - (Optional) S3 compatible metadata. 
[documented below](#nested_s3_metadata). + +The `s3_metadata` block supports: + +* `auth_method` - (Optional) Authentication and authorization method used by the storage service. When not specified, Transfer Service will attempt to determine right auth method to use. + +* `request_model` - (Optional) API request model used to call the storage service. When not specified, the default value of RequestModel REQUEST_MODEL_VIRTUAL_HOSTED_STYLE is used. + +* `protocol` - (Optional) The network protocol of the agent. When not specified, the default value of NetworkProtocol NETWORK_PROTOCOL_HTTPS is used. + +* `list_api` - (Optional) The Listing API to use for discovering objects. When not specified, Transfer Service will attempt to determine the right API to use. + + The `aws_s3_data_source` block supports: * `bucket_name` - (Required) S3 Bucket name. From f5e24c444fcd78be0e9ef6ac2fa06061869c7334 Mon Sep 17 00:00:00 2001 From: Cameron Thornton Date: Tue, 2 Sep 2025 12:50:40 -0500 Subject: [PATCH 119/201] Correct documentation for disable_on_destroy default breaking change (#15014) --- .../docs/guides/version_7_upgrade.html.markdown | 15 ++++++++++++--- .../docs/r/google_project_service.html.markdown | 7 +++---- 2 files changed, 15 insertions(+), 7 deletions(-) diff --git a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown index e19675a49741..925ef3d5884d 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_7_upgrade.html.markdown @@ -241,11 +241,20 @@ This resource is not functional and can safely be removed from your configuratio ## Resource: `google_project_service` -### `disable_on_destroy` now defaults to `false` +### `disable_on_destroy` no longer defaults to `true` -The default value for `disable_on_destroy` has been changed to `false`. The previous default (`true`) created a risk of unintended service disruptions, as destroying a single `google_project_service` resource would disable the API for the entire project. +The default value for `disable_on_destroy` has been removed. The previous default (`true`) created a risk of unintended service disruptions, as destroying a single `google_project_service` resource would disable the API for the entire project. -Now, destroying the resource will only remove it from Terraform's state and leave the service enabled. To disable a service when the resource is destroyed, you must now make an explicit decision by setting `disable_on_destroy = true`. +Now, destroying the resource will only remove it from Terraform's state and leave the service enabled. For resources that did not explicitly have `disable_on_destroy` set, users will see a diff similar to the following: + +```hcl + ~ resource "google_project_service" "main" { + - disable_on_destroy = true -> null +``` + +Applying this change is the same as setting the value to `false`. Terraform will not make any additional API requests or change the project service enablement. It will only affect any future removal of this resource. + +To enable the previous default behavior, you must now make an explicit decision by setting `disable_on_destroy = true`. 
## Resource: `google_redis_cluster` diff --git a/mmv1/third_party/terraform/website/docs/r/google_project_service.html.markdown b/mmv1/third_party/terraform/website/docs/r/google_project_service.html.markdown index f5a53f913637..c14565cf3e09 100644 --- a/mmv1/third_party/terraform/website/docs/r/google_project_service.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/google_project_service.html.markdown @@ -45,9 +45,9 @@ The following arguments are supported: * `project` - (Optional) The project ID. If not provided, the provider project is used. -* `disable_on_destroy` - (Optional) If `true` or unset, disable the service when the -Terraform resource is destroyed. If `false`, the service will be left enabled when -the Terraform resource is destroyed. Defaults to `false`. It should generally only +* `disable_on_destroy` - (Optional) If `true`, disable the service when the +Terraform resource is destroyed. If `false` or unset, the service will be left enabled when +the Terraform resource is destroyed. It should generally only be `true` or unset in configurations that manage the `google_project` resource itself. * `disable_dependent_services` - (Optional) If `true`, services that are enabled @@ -59,7 +59,6 @@ services depend on this service when attempting to destroy it. [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html) If `true`, the usage of the service to be disabled will be checked and an error will be returned if the service to be disabled has usage in last 30 days. -Defaults to `false`. ## Attributes Reference From 2dab0114b8902dc04187886335d81ca272bd9ea1 Mon Sep 17 00:00:00 2001 From: Riley Karson Date: Tue, 2 Sep 2025 11:10:52 -0700 Subject: [PATCH 120/201] Bump UA to dev7 (#15049) --- mmv1/third_party/terraform/version/version.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/version/version.go b/mmv1/third_party/terraform/version/version.go index f21ad9d7203c..051ad6bdf6a4 100644 --- a/mmv1/third_party/terraform/version/version.go +++ b/mmv1/third_party/terraform/version/version.go @@ -2,5 +2,5 @@ package version var ( // ProviderVersion is set during the release process to the release version of the binary - ProviderVersion = "dev6" + ProviderVersion = "dev7" ) From 78ad34e2dbe6679c108ab9cf33de9bf0d08f96c8 Mon Sep 17 00:00:00 2001 From: Abhijeet Dargude <144316709+dargudear-google@users.noreply.github.com> Date: Tue, 2 Sep 2025 23:47:50 +0530 Subject: [PATCH 121/201] (GA) support for SM-GKE Auto rotation (#15040) --- .../services/container/resource_container_cluster.go.tmpl | 6 ------ .../container/resource_container_cluster_test.go.tmpl | 6 ------ .../website/docs/r/container_cluster.html.markdown | 2 +- 3 files changed, 1 insertion(+), 13 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl index 16456ccafaf0..f08838fa09b0 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl @@ -1678,7 +1678,6 @@ func ResourceContainerCluster() *schema.Resource { Required: true, Description: `Enable the Secret manager csi component.`, }, - {{- if ne $.TargetVersionName "ga" }} "rotation_config" : { Type: schema.TypeList, Optional: true, @@ -1701,7 +1700,6 @@ func ResourceContainerCluster() *schema.Resource { }, }, }, - {{- end }} }, }, }, @@ -6408,7 
+6406,6 @@ func expandSecretManagerConfig(configured interface{}) *container.SecretManagerC Enabled: config["enabled"].(bool), ForceSendFields: []string{"Enabled"}, } - {{- if ne $.TargetVersionName "ga" }} if autoRotation, ok := config["rotation_config"]; ok { if autoRotationList, ok := autoRotation.([]interface{}); ok { if len(autoRotationList) > 0 { @@ -6426,7 +6423,6 @@ func expandSecretManagerConfig(configured interface{}) *container.SecretManagerC } } } - {{- end }} return sc } @@ -7485,7 +7481,6 @@ func flattenSecretManagerConfig(c *container.SecretManagerConfig) []map[string]i result["enabled"] = c.Enabled - {{- if ne $.TargetVersionName "ga" }} rotationList := []map[string]interface{}{} if c.RotationConfig != nil { rotationConfigMap := map[string]interface{}{ @@ -7497,7 +7492,6 @@ func flattenSecretManagerConfig(c *container.SecretManagerConfig) []map[string]i rotationList = append(rotationList, rotationConfigMap) } result["rotation_config"] = rotationList - {{- end }} return []map[string]interface{}{result} } diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index be38defe183f..30050a4ebba1 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -11713,12 +11713,10 @@ resource "google_container_cluster" "primary" { initial_node_count = 1 secret_manager_config { enabled = true -{{- if ne $.TargetVersionName "ga" }} rotation_config { enabled = true rotation_interval = "300s" } -{{- end }} } deletion_protection = false network = "%s" @@ -11741,12 +11739,10 @@ resource "google_container_cluster" "primary" { initial_node_count = 1 secret_manager_config { enabled = true -{{- if ne $.TargetVersionName "ga" }} rotation_config { enabled = true rotation_interval = "120s" } -{{- end }} } deletion_protection = false network = "%s" @@ -11769,12 +11765,10 @@ resource "google_container_cluster" "primary" { initial_node_count = 1 secret_manager_config { enabled = true -{{- if ne $.TargetVersionName "ga" }} rotation_config { enabled = false rotation_interval = "120s" } -{{- end }} } deletion_protection = false network = "%s" diff --git a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown index bd0d63961dac..d9f74bc0875e 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown @@ -1285,7 +1285,7 @@ notification_config { The `secret_manager_config` block supports: * `enabled` (Required) - Enable the Secret Manager add-on for this cluster. -* `rotation_config` (Optional, Beta) - config for secret manager auto rotation. Structure is [docuemented below](#rotation_config) +* `rotation_config` (Optional) - config for secret manager auto rotation. 
Structure is [docuemented below](#rotation_config) The `rotation_config` block supports: From 9fd482b38f178f15560eab4c6acc1b99b3bb944b Mon Sep 17 00:00:00 2001 From: maayanbeltzer Date: Tue, 2 Sep 2025 18:32:10 +0000 Subject: [PATCH 122/201] VPCFlowLogs - Add Organization Support (#14885) --- .../OrganizationVpcFlowLogsConfig.yaml | 153 ++++++++++++++++++ ...ent_org_vpc_flow_logs_config_basic.tf.tmpl | 7 + ...ment_org_vpc_flow_logs_config_test.go.tmpl | 75 +++++++++ 3 files changed, 235 insertions(+) create mode 100644 mmv1/products/networkmanagement/OrganizationVpcFlowLogsConfig.yaml create mode 100644 mmv1/templates/terraform/examples/network_management_org_vpc_flow_logs_config_basic.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/networkmanagement/resource_network_management_org_vpc_flow_logs_config_test.go.tmpl diff --git a/mmv1/products/networkmanagement/OrganizationVpcFlowLogsConfig.yaml b/mmv1/products/networkmanagement/OrganizationVpcFlowLogsConfig.yaml new file mode 100644 index 000000000000..b362bd376827 --- /dev/null +++ b/mmv1/products/networkmanagement/OrganizationVpcFlowLogsConfig.yaml @@ -0,0 +1,153 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'OrganizationVpcFlowLogsConfig' +description: + VPC Flow Logs Config is a resource that lets you configure + Flow Logs for Organization. +min_version: beta +id_format: 'organizations/{{organization}}/locations/{{location}}/vpcFlowLogsConfigs/{{vpc_flow_logs_config_id}}' +base_url: 'organizations/{{organization}}/locations/{{location}}/vpcFlowLogsConfigs' +self_link: 'organizations/{{organization}}/locations/{{location}}/vpcFlowLogsConfigs/{{vpc_flow_logs_config_id}}' +create_url: 'organizations/{{organization}}/locations/{{location}}/vpcFlowLogsConfigs?vpcFlowLogsConfigId={{vpc_flow_logs_config_id}}' +update_verb: 'PATCH' +update_mask: true +import_format: + - 'organizations/{{organization}}/locations/{{location}}/vpcFlowLogsConfigs/{{vpc_flow_logs_config_id}}' +timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 +autogen_async: true +async: + actions: ['create', 'delete', 'update'] + type: 'OpAsync' + operation: + base_url: '{{op_id}}' + result: + resource_inside_response: true + include_project: true +custom_code: +sweeper: + url_substitutions: + - parent: "organizations/${ORG_ID}" + - region: "global" +examples: + - name: 'network_management_org_vpc_flow_logs_config_basic' + primary_resource_id: 'org-test' + min_version: 'beta' + vars: + vpc_flow_logs_config_id: 'basic-org-test-id' + test_env_vars: + org_id: 'ORG_ID' +parameters: + - name: 'organization' + type: String + description: Resource ID segment making up resource `name`. It identifies the resource within its parent collection as described in https://google.aip.dev/122. + immutable: true + url_param_only: true + required: true + - name: 'location' + type: String + description: | + Resource ID segment making up resource `name`. 
It identifies the resource + within its parent collection as described in https://google.aip.dev/122. See documentation + for resource type `networkmanagement.googleapis.com/VpcFlowLogsConfig`. + url_param_only: true + required: true + immutable: true + - name: 'vpcFlowLogsConfigId' + type: String + description: | + Required. ID of the `VpcFlowLogsConfig`. + url_param_only: true + required: true + immutable: true +properties: + - name: 'name' + type: String + description: | + Identifier. Unique name of the configuration using the form: `organizations/{org_id}/locations/global/vpcFlowLogsConfigs/{vpc_flow_logs_config_id}` + output: true + - name: 'description' + type: String + description: | + Optional. The user-supplied description of the VPC Flow Logs configuration. Maximum + of 512 characters. + - name: 'state' + type: String + default_from_api: true + description: | + Optional. The state of the VPC Flow Log configuration. Default value + is ENABLED. When creating a new configuration, it must be enabled. + Possible values: ENABLED DISABLED + - name: 'aggregationInterval' + type: String + default_from_api: true + description: | + Optional. The aggregation interval for the logs. Default value is + INTERVAL_5_SEC. Possible values: INTERVAL_5_SEC INTERVAL_30_SEC INTERVAL_1_MIN INTERVAL_5_MIN INTERVAL_10_MIN INTERVAL_15_MIN + - name: 'flowSampling' + type: Double + default_from_api: true + description: | + Optional. The value of the field must be in (0, 1]. The sampling rate + of VPC Flow Logs where 1.0 means all collected logs are reported. Setting the + sampling rate to 0.0 is not allowed. If you want to disable VPC Flow Logs, use + the state field instead. Default value is 1.0. + - name: 'metadata' + type: String + default_from_api: true + description: | + Optional. Configures whether all, none or a subset of metadata fields + should be added to the reported VPC flow logs. Default value is INCLUDE_ALL_METADATA. + Possible values: METADATA_UNSPECIFIED INCLUDE_ALL_METADATA EXCLUDE_ALL_METADATA CUSTOM_METADATA + - name: 'metadataFields' + type: Array + description: | + Optional. Custom metadata fields to include in the reported VPC flow + logs. Can only be specified if \"metadata\" was set to CUSTOM_METADATA. + item_type: + type: String + - name: 'filterExpr' + type: String + description: | + Optional. Export filter used to define which VPC Flow Logs should be logged. + - name: 'labels' + type: KeyValueLabels + description: | + Optional. Resource labels to represent the user-provided metadata. + - name: 'createTime' + type: String + description: | + Output only. The time the config was created. + output: true + - name: 'updateTime' + type: String + description: | + Output only. The time the config was updated. + output: true + - name: 'crossProjectMetadata' + type: Enum + default_from_api: true + description: |- + Determines whether to include cross project annotations in the logs. + This field is available only for organization configurations. If not + specified in org configs will be set to CROSS_PROJECT_METADATA_ENABLED. 
+ Possible values: + CROSS_PROJECT_METADATA_ENABLED + CROSS_PROJECT_METADATA_DISABLED + enum_values: + - CROSS_PROJECT_METADATA_ENABLED + - CROSS_PROJECT_METADATA_DISABLED diff --git a/mmv1/templates/terraform/examples/network_management_org_vpc_flow_logs_config_basic.tf.tmpl b/mmv1/templates/terraform/examples/network_management_org_vpc_flow_logs_config_basic.tf.tmpl new file mode 100644 index 000000000000..bee8f66f1be9 --- /dev/null +++ b/mmv1/templates/terraform/examples/network_management_org_vpc_flow_logs_config_basic.tf.tmpl @@ -0,0 +1,7 @@ +resource "google_network_management_organization_vpc_flow_logs_config" "{{$.PrimaryResourceId}}" { + provider = google-beta + vpc_flow_logs_config_id = "{{index $.Vars "vpc_flow_logs_config_id"}}" + location = "global" + organization = "{{index $.TestEnvVars "org_id"}}" +} + diff --git a/mmv1/third_party/terraform/services/networkmanagement/resource_network_management_org_vpc_flow_logs_config_test.go.tmpl b/mmv1/third_party/terraform/services/networkmanagement/resource_network_management_org_vpc_flow_logs_config_test.go.tmpl new file mode 100644 index 000000000000..3a85ef174fdf --- /dev/null +++ b/mmv1/third_party/terraform/services/networkmanagement/resource_network_management_org_vpc_flow_logs_config_test.go.tmpl @@ -0,0 +1,75 @@ +package networkmanagement_test + +{{ if ne $.TargetVersionName "ga" -}} +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccNetworkManagementOrganizationVpcFlowLogsConfig_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "org_id": envvar.GetTestOrgFromEnv(t), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckNetworkManagementVpcFlowLogsConfigDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccNetworkManagementOrganizationVpcFlowLogsConfig_basic(context), + }, + { + ResourceName: "google_network_management_organization_vpc_flow_logs_config.org-test-update", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"organization", "location", "vpc_flow_logs_config_id"}, + }, + { + Config: testAccNetworkManagementOrganizationVpcFlowLogsConfig_update(context), + }, + { + ResourceName: "google_network_management_organization_vpc_flow_logs_config.org-test-update", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"organization", "location", "vpc_flow_logs_config_id"}, + }, + }, + }) +} + +func testAccNetworkManagementOrganizationVpcFlowLogsConfig_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_network_management_organization_vpc_flow_logs_config" "org-test-update" { + provider = google-beta + vpc_flow_logs_config_id = "tf-test-update-org-id-%{random_suffix}" + organization = "%{org_id}" + location = "global" +} +`, context) +} + +func testAccNetworkManagementOrganizationVpcFlowLogsConfig_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_network_management_organization_vpc_flow_logs_config" "org-test-update" { + provider = google-beta + vpc_flow_logs_config_id = "tf-test-update-org-id-%{random_suffix}" + organization = "%{org_id}" + location = "global" + + state = 
"DISABLED" + aggregation_interval = "INTERVAL_30_SEC" + description = "This is an updated description" + flow_sampling = 0.5 + metadata = "EXCLUDE_ALL_METADATA" + cross_project_metadata = "CROSS_PROJECT_METADATA_DISABLED" +} +`, context) +} +{{ end }} From cc70cc8f8613b9382904844bf56032345f83cece Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Tue, 2 Sep 2025 22:10:51 +0200 Subject: [PATCH 123/201] osconfig: fix permadiff where `patch_config.yum.minimal` doesn't send `false` for empty values (#15046) --- mmv1/products/osconfig/PatchDeployment.yaml | 1 + .../os_config_patch_deployment.go.tmpl | 8 +++ ...esource_os_config_patch_deployment_test.go | 58 +++++++++++++++++++ 3 files changed, 67 insertions(+) create mode 100644 mmv1/third_party/terraform/services/osconfig/resource_os_config_patch_deployment_test.go diff --git a/mmv1/products/osconfig/PatchDeployment.yaml b/mmv1/products/osconfig/PatchDeployment.yaml index 607bd9f4dca4..03c2ecac01ac 100644 --- a/mmv1/products/osconfig/PatchDeployment.yaml +++ b/mmv1/products/osconfig/PatchDeployment.yaml @@ -254,6 +254,7 @@ properties: - 'patch_config.0.yum.0.excludes' - 'patch_config.0.yum.0.exclusive_packages' - name: 'minimal' + send_empty_value: true type: Boolean description: | Will cause patch to run yum update-minimal instead. diff --git a/mmv1/templates/terraform/decoders/os_config_patch_deployment.go.tmpl b/mmv1/templates/terraform/decoders/os_config_patch_deployment.go.tmpl index fce9e9366fc3..13bce0526feb 100644 --- a/mmv1/templates/terraform/decoders/os_config_patch_deployment.go.tmpl +++ b/mmv1/templates/terraform/decoders/os_config_patch_deployment.go.tmpl @@ -4,6 +4,14 @@ if res["patchConfig"] != nil { patchConfig["goo"].(map[string]interface{})["enabled"] = true res["patchConfig"] = patchConfig } + + if patchConfig["yum"] != nil { + patchConfigYum := patchConfig["yum"].(map[string]interface{}) + if _, ok := patchConfigYum["minimal"]; !ok { + patchConfigYum["minimal"] = false + } + patchConfig["yum"] = patchConfigYum + } } return res, nil diff --git a/mmv1/third_party/terraform/services/osconfig/resource_os_config_patch_deployment_test.go b/mmv1/third_party/terraform/services/osconfig/resource_os_config_patch_deployment_test.go new file mode 100644 index 000000000000..3b2e93158de8 --- /dev/null +++ b/mmv1/third_party/terraform/services/osconfig/resource_os_config_patch_deployment_test.go @@ -0,0 +1,58 @@ +package osconfig_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccOSConfigPatchDeployment_osConfigPatchDeployment_yum_basic(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckOSConfigPatchDeploymentDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccOSConfigPatchDeployment_osConfigPatchDeployment_yum_basic(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_os_config_patch_deployment.patch", "patch_config.0.yum.0.minimal", "false"), + ), + }, + { + ResourceName: "google_os_config_patch_deployment.patch", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"patch_deployment_id"}, + }, + }, + }) +} + +func 
testAccOSConfigPatchDeployment_osConfigPatchDeployment_yum_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_os_config_patch_deployment" "patch" { + patch_deployment_id = "tf-test-patch-deploy%{random_suffix}" + + instance_filter { + all = true + } + + patch_config { + yum { + minimal = false + } + } + + one_time_schedule { + execute_time = "2999-10-10T10:10:10.045123456Z" + } +} +`, context) +} From afd50116b0d8d000c6616cbd8471cd581f4deefa Mon Sep 17 00:00:00 2001 From: "copybara-service[bot]" <56741989+copybara-service[bot]@users.noreply.github.com> Date: Tue, 2 Sep 2025 22:16:00 +0000 Subject: [PATCH 124/201] Migrate from gsutil to gcloud storage (#15053) Co-authored-by: Modular Magician --- .../examples/cloudbuild_trigger_build.tf.tmpl | 4 +-- .../resource_cloudbuild_trigger_test.go | 36 +++++++++---------- .../guides/version_2_upgrade.html.markdown | 4 +-- 3 files changed, 22 insertions(+), 22 deletions(-) diff --git a/mmv1/templates/terraform/examples/cloudbuild_trigger_build.tf.tmpl b/mmv1/templates/terraform/examples/cloudbuild_trigger_build.tf.tmpl index 735b33a71ae5..665147f8ee79 100644 --- a/mmv1/templates/terraform/examples/cloudbuild_trigger_build.tf.tmpl +++ b/mmv1/templates/terraform/examples/cloudbuild_trigger_build.tf.tmpl @@ -9,8 +9,8 @@ resource "google_cloudbuild_trigger" "{{$.PrimaryResourceId}}" { build { step { - name = "gcr.io/cloud-builders/gsutil" - args = ["cp", "gs://mybucket/remotefile.zip", "localfile.zip"] + name = "gcr.io/cloud-builders/gcloud" + args = ["storage", "cp", "gs://mybucket/remotefile.zip", "localfile.zip"] timeout = "120s" secret_env = ["MY_SECRET"] } diff --git a/mmv1/third_party/terraform/services/cloudbuild/resource_cloudbuild_trigger_test.go b/mmv1/third_party/terraform/services/cloudbuild/resource_cloudbuild_trigger_test.go index 2e90d9a0fd0e..38134de03ea8 100644 --- a/mmv1/third_party/terraform/services/cloudbuild/resource_cloudbuild_trigger_test.go +++ b/mmv1/third_party/terraform/services/cloudbuild/resource_cloudbuild_trigger_test.go @@ -245,8 +245,8 @@ resource "google_cloudbuild_trigger" "build_trigger" { tags = ["team-a", "service-b"] timeout = "1800s" step { - name = "gcr.io/cloud-builders/gsutil" - args = ["cp", "gs://mybucket/remotefile.zip", "localfile.zip"] + name = "gcr.io/cloud-builders/gcloud" + args = ["storage", "cp", "gs://mybucket/remotefile.zip", "localfile.zip"] timeout = "300s" } step { @@ -323,8 +323,8 @@ resource "google_cloudbuild_trigger" "build_trigger" { images = ["gcr.io/$PROJECT_ID/$REPO_NAME:$COMMIT_SHA"] tags = ["team-a", "service-b"] step { - name = "gcr.io/cloud-builders/gsutil" - args = ["cp", "gs://mybucket/remotefile.zip", "localfile.zip"] + name = "gcr.io/cloud-builders/gcloud" + args = ["storage", "cp", "gs://mybucket/remotefile.zip", "localfile.zip"] } step { name = "gcr.io/cloud-builders/go" @@ -382,8 +382,8 @@ resource "google_cloudbuild_trigger" "build_trigger" { tags = ["team-a", "service-b", "updated"] timeout = "2100s" step { - name = "gcr.io/cloud-builders/gsutil" - args = ["cp", "gs://mybucket/remotefile.zip", "localfile-updated.zip"] + name = "gcr.io/cloud-builders/gcloud" + args = ["storage", "cp", "gs://mybucket/remotefile.zip", "localfile-updated.zip"] timeout = "300s" } step { @@ -424,8 +424,8 @@ resource "google_cloudbuild_trigger" "build_trigger" { tags = ["team-a", "service-b"] timeout = "1800s" step { - name = "gcr.io/cloud-builders/gsutil" - args = ["cp", "gs://mybucket/remotefile.zip", "localfile.zip"] + name = "gcr.io/cloud-builders/gcloud" 
+ args = ["storage", "cp", "gs://mybucket/remotefile.zip", "localfile.zip"] timeout = "300s" } available_secrets { @@ -452,8 +452,8 @@ resource "google_cloudbuild_trigger" "build_trigger" { tags = ["team-a", "service-b"] timeout = "1800s" step { - name = "gcr.io/cloud-builders/gsutil" - args = ["cp", "gs://mybucket/remotefile.zip", "localfile.zip"] + name = "gcr.io/cloud-builders/gcloud" + args = ["storage", "cp", "gs://mybucket/remotefile.zip", "localfile.zip"] timeout = "300s" } } @@ -477,8 +477,8 @@ resource "google_cloudbuild_trigger" "build_trigger" { tags = ["team-a", "service-b"] timeout = "1800s" step { - name = "gcr.io/cloud-builders/gsutil" - args = ["cp", "gs://mybucket/remotefile.zip", "localfile.zip"] + name = "gcr.io/cloud-builders/gcloud" + args = ["storage", "cp", "gs://mybucket/remotefile.zip", "localfile.zip"] timeout = "300s" } } @@ -505,8 +505,8 @@ resource "google_cloudbuild_trigger" "build_trigger" { tags = ["team-a", "service-b"] timeout = "1800s" step { - name = "gcr.io/cloud-builders/gsutil" - args = ["cp", "gs://mybucket/remotefile.zip", "localfile.zip"] + name = "gcr.io/cloud-builders/gcloud" + args = ["storage", "cp", "gs://mybucket/remotefile.zip", "localfile.zip"] timeout = "300s" } } @@ -661,8 +661,8 @@ resource "google_cloudbuild_trigger" "build_trigger" { tags = ["team-a", "service-b"] timeout = "900s" step { - name = "gcr.io/cloud-builders/gsutil" - args = ["cp", "gs://mybucket/remotefile.zip", "localfile.zip"] + name = "gcr.io/cloud-builders/gcloud" + args = ["storage", "cp", "gs://mybucket/remotefile.zip", "localfile.zip"] timeout = "500s" } step { @@ -695,8 +695,8 @@ resource "google_cloudbuild_trigger" "build_trigger" { tags = ["team-a", "service-b"] timeout = "1200" step { - name = "gcr.io/cloud-builders/gsutil" - args = ["cp", "gs://mybucket/remotefile.zip", "localfile.zip"] + name = "gcr.io/cloud-builders/gcloud" + args = ["storage", "cp", "gs://mybucket/remotefile.zip", "localfile.zip"] timeout = "500s" } } diff --git a/mmv1/third_party/terraform/website/docs/guides/version_2_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_2_upgrade.html.markdown index f7a3fafb7a67..9b80b31083d1 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_2_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_2_upgrade.html.markdown @@ -300,8 +300,8 @@ resource "google_cloudbuild_trigger" "build_trigger" { tags = ["team-a", "service-b", "updated"] step { - name = "gcr.io/cloud-builders/gsutil" - args = ["cp", "gs://mybucket/remotefile.zip", "localfile-updated.zip"] + name = "gcr.io/cloud-builders/gcloud" + args = ["storage", "cp", "gs://mybucket/remotefile.zip", "localfile-updated.zip"] } step { From 58550b1c4dde492e4bea6c4e20551d3d022ffdec Mon Sep 17 00:00:00 2001 From: Ankit Goyal <51757072+ankitgoyal0301@users.noreply.github.com> Date: Wed, 3 Sep 2025 05:11:31 +0530 Subject: [PATCH 125/201] Fixes issue #24052 related to google_chronicle_reference_list resource (#15036) --- mmv1/products/chronicle/ReferenceList.yaml | 4 +- .../chronicle_referencelist_basic.tf.tmpl | 33 +++++++++++---- .../resource_chronicle_reference_list_test.go | 40 ++++++++++++++++++- 3 files changed, 65 insertions(+), 12 deletions(-) diff --git a/mmv1/products/chronicle/ReferenceList.yaml b/mmv1/products/chronicle/ReferenceList.yaml index dc9df28c2745..b84aa401556b 100644 --- a/mmv1/products/chronicle/ReferenceList.yaml +++ b/mmv1/products/chronicle/ReferenceList.yaml @@ -34,6 +34,7 @@ examples: primary_resource_id: 'example' 
vars: reference_list_id: reference_list_id + data_access_scope_id: scope-id test_env_vars: chronicle_id: 'CHRONICLE_ID' @@ -92,13 +93,11 @@ properties: required: true - name: scopeInfo type: NestedObject - output: true description: ScopeInfo specifies the scope info of the reference list. properties: - name: referenceListScope type: NestedObject description: ReferenceListScope specifies the list of scope names of the reference list. - required: true properties: - name: scopeNames type: Array @@ -108,6 +107,7 @@ properties: "projects/{project}/locations/{location}/instances/{instance}/dataAccessScopes/{scope_name}". item_type: type: String + diff_suppress_func: 'tpgresource.ProjectNumberDiffSuppress' - name: displayName type: String description: Output only. The unique display name of the reference list. diff --git a/mmv1/templates/terraform/examples/chronicle_referencelist_basic.tf.tmpl b/mmv1/templates/terraform/examples/chronicle_referencelist_basic.tf.tmpl index 025d46cb53e2..0c9e41387f39 100644 --- a/mmv1/templates/terraform/examples/chronicle_referencelist_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/chronicle_referencelist_basic.tf.tmpl @@ -1,10 +1,27 @@ +resource "google_chronicle_data_access_scope" "test_scope" { + location = "us" + instance = "{{index $.TestEnvVars "chronicle_id"}}" + data_access_scope_id = "{{index $.Vars "data_access_scope_id"}}" + description = "test scope description" + allowed_data_access_labels { + log_type = "GCP_CLOUDAUDIT" + } +} + resource "google_chronicle_reference_list" "{{$.PrimaryResourceId}}" { - location = "us" - instance = "{{index $.TestEnvVars "chronicle_id"}}" - reference_list_id = "{{index $.Vars "reference_list_id"}}" - description = "referencelist-description" - entries { - value = "referencelist-entry-value" - } - syntax_type = "REFERENCE_LIST_SYNTAX_TYPE_PLAIN_TEXT_STRING" + location = "us" + instance = "{{index $.TestEnvVars "chronicle_id"}}" + reference_list_id = "{{index $.Vars "reference_list_id"}}" + description = "referencelist-description" + entries { + value = "referencelist-entry-value" + } + syntax_type = "REFERENCE_LIST_SYNTAX_TYPE_PLAIN_TEXT_STRING" + scope_info { + reference_list_scope { + scope_names = [ + google_chronicle_data_access_scope.test_scope.name + ] + } + } } diff --git a/mmv1/third_party/terraform/services/chronicle/resource_chronicle_reference_list_test.go b/mmv1/third_party/terraform/services/chronicle/resource_chronicle_reference_list_test.go index fcd5bc9740bf..5d8e0ff11380 100644 --- a/mmv1/third_party/terraform/services/chronicle/resource_chronicle_reference_list_test.go +++ b/mmv1/third_party/terraform/services/chronicle/resource_chronicle_reference_list_test.go @@ -13,8 +13,10 @@ func TestAccChronicleReferenceList_chronicleReferencelistBasicExample_update(t * t.Parallel() context := map[string]interface{}{ - "chronicle_id": envvar.GetTestChronicleInstanceIdFromEnv(t), - "random_suffix": acctest.RandString(t, 10), + "chronicle_id": envvar.GetTestChronicleInstanceIdFromEnv(t), + "random_suffix": acctest.RandString(t, 10), + "data_access_scope_id": "test-scope-id" + acctest.RandString(t, 5), + "data_access_scope_id_new": "new-test-scope-id" + acctest.RandString(t, 5), } acctest.VcrTest(t, resource.TestCase{ @@ -45,6 +47,16 @@ func TestAccChronicleReferenceList_chronicleReferencelistBasicExample_update(t * func testAccChronicleReferenceList_chronicleReferencelistBasicExample_basic(context map[string]interface{}) string { return acctest.Nprintf(` +resource "google_chronicle_data_access_scope" 
"test_scope" { + location = "us" + instance = "%{chronicle_id}" + data_access_scope_id = "%{data_access_scope_id}" + description = "test scope description" + allowed_data_access_labels { + log_type = "GCP_CLOUDAUDIT" + } +} + resource "google_chronicle_reference_list" "example" { location = "us" instance = "%{chronicle_id}" @@ -54,12 +66,29 @@ resource "google_chronicle_reference_list" "example" { value = "referencelist-entry-value" } syntax_type = "REFERENCE_LIST_SYNTAX_TYPE_PLAIN_TEXT_STRING" + scope_info { + reference_list_scope { + scope_names = [ + google_chronicle_data_access_scope.test_scope.name + ] + } + } } `, context) } func testAccChronicleReferenceList_chronicleReferencelistBasicExample_update(context map[string]interface{}) string { return acctest.Nprintf(` +resource "google_chronicle_data_access_scope" "test_scope" { + location = "us" + instance = "%{chronicle_id}" + data_access_scope_id = "%{data_access_scope_id_new}" + description = "test scope description" + allowed_data_access_labels { + log_type = "GITHUB" + } +} + resource "google_chronicle_reference_list" "example" { location = "us" instance = "%{chronicle_id}" @@ -69,6 +98,13 @@ resource "google_chronicle_reference_list" "example" { value = "referencelist-entry-value-updated" } syntax_type = "REFERENCE_LIST_SYNTAX_TYPE_REGEX" + scope_info { + reference_list_scope { + scope_names = [ + google_chronicle_data_access_scope.test_scope.name + ] + } + } } `, context) } From a80f496000c3bd1afcb7ef5ac795bba033622426 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Wed, 3 Sep 2025 09:26:23 -0700 Subject: [PATCH 126/201] Wait for SA creation in bigquery connection tests (#15031) --- .../bigquery/resource_bigquery_table_test.go | 53 ++++++++++++++++--- 1 file changed, 46 insertions(+), 7 deletions(-) diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go index 08f8863cfff8..4111b60d77e9 100644 --- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go +++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go @@ -443,7 +443,10 @@ func TestAccBigQueryBigLakeManagedTable(t *testing.T) { acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccBigLakeManagedTable(bucketName, connectionID, datasetID, tableID, TEST_SIMPLE_CSV_SCHEMA), @@ -472,10 +475,16 @@ func testAccBigLakeManagedTable(bucketName, connectionID, datasetID, tableID, sc location = "US" cloud_resource {} } + // wait for SA creation + resource "time_sleep" "wait_120_seconds" { + create_duration = "120s" + depends_on = [google_bigquery_connection.test] + } resource "google_project_iam_member" "test" { role = "roles/storage.objectAdmin" project = data.google_project.project.id member = "serviceAccount:${google_bigquery_connection.test.cloud_resource[0].service_account_id}" + depends_on = [time_sleep.wait_120_seconds] } resource "google_bigquery_dataset" "test" { dataset_id = "%s" @@ -987,7 +996,10 @@ func TestAccBigQueryExternalDataTable_queryAcceleration(t *testing.T) { acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, 
ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccBigQueryTableFromGCSParquetWithQueryAcceleration(connectionID, datasetID, tableID, bucketName, objectName, metadataCacheMode, maxStaleness), @@ -1043,7 +1055,10 @@ func TestAccBigQueryExternalDataTable_connectionIdDiff_UseNameReference(t *testi acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccBigQueryTableExternalDataConfigurationConnectionID(location, connectionID, datasetID, tableID, bucketName, objectName, connection_id_reference), @@ -1068,7 +1083,10 @@ func TestAccBigQueryExternalDataTable_connectionIdDiff_UseIdReference(t *testing acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccBigQueryTableExternalDataConfigurationConnectionID(location, connectionID, datasetID, tableID, bucketName, objectName, connection_id_reference), @@ -1093,7 +1111,10 @@ func TestAccBigQueryExternalDataTable_connectionIdDiff_UseIdReference_UsCentral1 acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccBigQueryTableExternalDataConfigurationConnectionID(location, connectionID, datasetID, tableID, bucketName, objectName, connection_id_reference), @@ -1118,7 +1139,10 @@ func TestAccBigQueryExternalDataTable_connectionIdDiff_UseIdReference_UsEast1(t acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccBigQueryTableExternalDataConfigurationConnectionID(location, connectionID, datasetID, tableID, bucketName, objectName, connection_id_reference), @@ -1143,7 +1167,10 @@ func TestAccBigQueryExternalDataTable_connectionIdDiff_UseIdReference_EuropeWest acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t), Steps: []resource.TestStep{ { Config: 
testAccBigQueryTableExternalDataConfigurationConnectionID(location, connectionID, datasetID, tableID, bucketName, objectName, connection_id_reference), @@ -3154,6 +3181,11 @@ resource "google_bigquery_connection" "test" { location = "US" cloud_resource {} } +// wait for SA creation +resource "time_sleep" "wait_120_seconds" { + create_duration = "120s" + depends_on = [google_bigquery_connection.test] +} locals { connection_id_split = split("/", google_bigquery_connection.test.name) @@ -3166,6 +3198,7 @@ locals { role = "roles/storage.objectViewer" project = data.google_project.project.id member = "serviceAccount:${google_bigquery_connection.test.cloud_resource[0].service_account_id}" + depends_on = [time_sleep.wait_120_seconds] } resource "google_bigquery_dataset" "test" { @@ -3430,6 +3463,11 @@ resource "google_bigquery_connection" "test" { location = "%s" cloud_resource {} } +// wait for SA creation +resource "time_sleep" "wait_120_seconds" { + create_duration = "120s" + depends_on = [google_bigquery_connection.test] +} data "google_project" "project" {} @@ -3437,6 +3475,7 @@ resource "google_project_iam_member" "test" { role = "roles/storage.objectViewer" project = data.google_project.project.id member = "serviceAccount:${google_bigquery_connection.test.cloud_resource[0].service_account_id}" + depends_on = [time_sleep.wait_120_seconds] } resource "google_bigquery_dataset" "test" { From fd2ed72e3d1d047911c33fb1def4d3526f2501f2 Mon Sep 17 00:00:00 2001 From: victorsantos-cit Date: Wed, 3 Sep 2025 13:31:40 -0300 Subject: [PATCH 127/201] REGION URL MAP: add support for header_action and path_matcher.header_action in google_compute_region_url_map (#15052) --- mmv1/products/compute/RegionUrlMap.yaml | 116 +++++++++++++ .../resource_compute_region_url_map_test.go | 160 ++++++++++++++++++ 2 files changed, 276 insertions(+) diff --git a/mmv1/products/compute/RegionUrlMap.yaml b/mmv1/products/compute/RegionUrlMap.yaml index 4fe1431636ef..7ea0ac7cdb59 100644 --- a/mmv1/products/compute/RegionUrlMap.yaml +++ b/mmv1/products/compute/RegionUrlMap.yaml @@ -270,6 +270,64 @@ properties: item_type: type: NestedObject properties: + - name: 'headerAction' + type: NestedObject + description: | + Specifies changes to request and response headers that need to take effect for the selected backendService. + headerAction specified here take effect before headerAction in the enclosing HttpRouteRule, PathMatcher and UrlMap. + headerAction is not supported for load balancers that have their loadBalancingScheme set to EXTERNAL. + Not supported when the URL map is bound to a target gRPC proxy that has validateForProxyless field set to true. + properties: + - name: 'requestHeadersToRemove' + type: Array + description: | + A list of header names for headers that need to be removed from the request before forwarding the request to the backendService. + item_type: + type: String + - name: 'requestHeadersToAdd' + type: Array + description: | + Headers to add to a matching request before forwarding the request to the backendService. + item_type: + type: NestedObject + properties: + - name: 'headerName' + type: String + description: 'The name of the header.' + - name: 'headerValue' + type: String + description: 'The value of the header to add.' + - name: 'replace' + type: Boolean + description: | + If false, headerValue is appended to any values that already exist for the header. If true, headerValue is set for the header, discarding any values that were set for that header. + The default value is false. 
+ default_value: false + - name: 'responseHeadersToRemove' + type: Array + description: | + A list of header names for headers that need to be removed from the response before sending the response back to the client. + item_type: + type: String + - name: 'responseHeadersToAdd' + type: Array + description: | + Headers to add the response before sending the response back to the client. + item_type: + type: NestedObject + properties: + - name: 'headerName' + type: String + description: 'The name of the header.' + - name: 'headerValue' + type: String + description: 'The value of the header to add.' + - name: 'replace' + type: Boolean + description: | + If false, headerValue is appended to any values that already exist for the header. If true, headerValue is set for the header, discarding any values that were set for that header. + The default value is false. + default_value: false - name: 'defaultService' type: ResourceRef description: | @@ -2571,3 +2629,61 @@ properties: - 'default_route_action.0.fault_injection_policy.0.abort.0.percentage' validation: function: 'validation.FloatBetween(0, 100)' + - name: 'headerAction' + type: NestedObject + description: | + Specifies changes to request and response headers that need to take effect for the selected backendService. + headerAction specified here take effect before headerAction in the enclosing HttpRouteRule, PathMatcher and UrlMap. + headerAction is not supported for load balancers that have their loadBalancingScheme set to EXTERNAL. + Not supported when the URL map is bound to a target gRPC proxy that has validateForProxyless field set to true. + properties: + - name: 'requestHeadersToRemove' + type: Array + description: | + A list of header names for headers that need to be removed from the request before forwarding the request to the backendService. + item_type: + type: String + - name: 'requestHeadersToAdd' + type: Array + description: | + Headers to add to a matching request before forwarding the request to the backendService. + item_type: + type: NestedObject + properties: + - name: 'headerName' + type: String + description: 'The name of the header.' + - name: 'headerValue' + type: String + description: 'The value of the header to add.' + - name: 'replace' + type: Boolean + description: | + If false, headerValue is appended to any values that already exist for the header. If true, headerValue is set for the header, discarding any values that were set for that header. + The default value is false. + default_value: false + - name: 'responseHeadersToRemove' + type: Array + description: | + A list of header names for headers that need to be removed from the response before sending the response back to the client. + item_type: + type: String + - name: 'responseHeadersToAdd' + type: Array + description: | + Headers to add the response before sending the response back to the client. + item_type: + type: NestedObject + properties: + - name: 'headerName' + type: String + description: 'The name of the header.' + - name: 'headerValue' + type: String + description: 'The value of the header to add.' + - name: 'replace' + type: Boolean + description: | + If false, headerValue is appended to any values that already exist for the header. If true, headerValue is set for the header, discarding any values that were set for that header. + The default value is false. 
+ default_value: false diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_url_map_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_region_url_map_test.go index 2602db0de647..2273ce70b284 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_url_map_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_url_map_test.go @@ -8,6 +8,166 @@ import ( "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) +func TestAccComputeRegionUrlMap_headerAction(t *testing.T) { + t.Parallel() + + randomSuffix := acctest.RandString(t, 10) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeUrlMapDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeRegionUrlMap_headerAction1(randomSuffix), + }, + { + ResourceName: "google_compute_region_url_map.foobar", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComputeRegionUrlMap_headerAction2(randomSuffix), + }, + { + ResourceName: "google_compute_region_url_map.foobar", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccComputeRegionUrlMap_headerAction1(randomSuffix string) string { + return fmt.Sprintf(` +resource "google_compute_region_backend_service" "foobar" { + region = "us-central1" + name = "regionurlmap-headeraction-%s" + protocol = "HTTP" + load_balancing_scheme = "INTERNAL_MANAGED" + health_checks = [google_compute_region_health_check.zero.self_link] +} + +resource "google_compute_region_health_check" "zero" { + region = "us-central1" + name = "regionurlmap-headeraction-%s" + http_health_check { + port = 80 + } +} + +resource "google_compute_region_url_map" "foobar" { + region = "us-central1" + name = "regionurlmap-headeraction-%s" + default_service = google_compute_region_backend_service.foobar.self_link + + // root-level header_action + header_action { + request_headers_to_add { + header_name = "x-root-test" + header_value = "foo" + replace = true + } + } + + host_rule { + hosts = ["mysite.com"] + path_matcher = "with-headers" + } + + path_matcher { + name = "with-headers" + default_service = google_compute_region_backend_service.foobar.self_link + + // path_matcher-level header_action + header_action { + request_headers_to_add { + header_name = "x-path-test" + header_value = "bar" + replace = true + } + } + + path_rule { + paths = ["/home"] + service = google_compute_region_backend_service.foobar.self_link + } + } + + test { + host = "mysite.com" + path = "/home" + service = google_compute_region_backend_service.foobar.self_link + } +} +`, randomSuffix, randomSuffix, randomSuffix) +} + +func testAccComputeRegionUrlMap_headerAction2(randomSuffix string) string { + return fmt.Sprintf(` +resource "google_compute_region_backend_service" "foobar" { + region = "us-central1" + name = "regionurlmap-headeraction-%s" + protocol = "HTTP" + load_balancing_scheme = "INTERNAL_MANAGED" + health_checks = [google_compute_region_health_check.zero.self_link] +} + +resource "google_compute_region_health_check" "zero" { + region = "us-central1" + name = "regionurlmap-headeraction-%s" + http_health_check { + port = 80 + } +} + +resource "google_compute_region_url_map" "foobar" { + region = "us-central1" + name = "regionurlmap-headeraction-%s" + default_service = google_compute_region_backend_service.foobar.self_link + + // root-level 
header_action updated + header_action { + request_headers_to_add { + header_name = "x-root-test" + header_value = "baz" + replace = true + } + } + + host_rule { + hosts = ["mysite.com"] + path_matcher = "with-headers" + } + + path_matcher { + name = "with-headers" + default_service = google_compute_region_backend_service.foobar.self_link + + // path_matcher-level header_action updated + header_action { + request_headers_to_add { + header_name = "x-path-test" + header_value = "qux" + replace = true + } + } + + path_rule { + paths = ["/home", "/alt"] + service = google_compute_region_backend_service.foobar.self_link + } + } + + test { + host = "mysite.com" + path = "/alt" + service = google_compute_region_backend_service.foobar.self_link + } +} +`, randomSuffix, randomSuffix, randomSuffix) +} + func TestAccComputeRegionUrlMap_update_path_matcher(t *testing.T) { t.Parallel() From a8d5fd7c57df6a395624bf7895e104796edac202 Mon Sep 17 00:00:00 2001 From: Joel Shapiro Date: Wed, 3 Sep 2025 09:48:03 -0700 Subject: [PATCH 128/201] Added consentConfig to healthcare FHIR store. (#15039) --- mmv1/products/healthcare/FhirStore.yaml | 61 +++++++++++++++++++ ...althcare_fhir_store_consent_config.tf.tmpl | 46 ++++++++++++++ ...esource_healthcare_fhir_store_test.go.tmpl | 47 ++++++++++++++ 3 files changed, 154 insertions(+) create mode 100644 mmv1/templates/terraform/examples/healthcare_fhir_store_consent_config.tf.tmpl diff --git a/mmv1/products/healthcare/FhirStore.yaml b/mmv1/products/healthcare/FhirStore.yaml index 3dcaa8b5b31e..2db30de02f4d 100644 --- a/mmv1/products/healthcare/FhirStore.yaml +++ b/mmv1/products/healthcare/FhirStore.yaml @@ -69,6 +69,13 @@ examples: dataset_name: 'example-dataset' fhir_store_name: 'example-fhir-store' pubsub_topic: 'fhir-notifications' + - name: 'healthcare_fhir_store_consent_config' + primary_resource_id: 'default' + min_version: "beta" + vars: + dataset_name: 'example-dataset' + fhir_store_name: 'example-fhir-store' + pubsub_topic: 'fhir-notifications' parameters: - name: 'dataset' type: ResourceRef @@ -113,6 +120,60 @@ properties: - 'DSTU2' - 'STU3' - 'R4' + - name: 'consentConfig' + type: NestedObject + description: | + Specifies whether this store has consent enforcement. Not available for DSTU2 FHIR version due to absence of Consent resources. Not supported for R5 FHIR version. + min_version: beta + properties: + - name: 'version' + type: Enum + description: | + Specifies which consent enforcement version is being used for this FHIR store. This field can only be set once by either [fhirStores.create][] or [fhirStores.patch][]. After that, you must call [fhirStores.applyConsents][] to change the version. + required: true + enum_values: + - 'CONSENT_ENFORCEMENT_VERSION_UNSPECIFIED' + - 'V1' + - name: 'accessEnforced' + type: Boolean + description: | + The default value is false. If set to true, when accessing FHIR resources, the consent headers will be verified against consents given by patients. See the ConsentEnforcementVersion for the supported consent headers. + - name: 'consentHeaderHandling' + type: NestedObject + description: | + Different options to configure the behaviour of the server when handling the X-Consent-Scope header. + properties: + - name: 'profile' + type: Enum + description: | + Specifies the default server behavior when the header is empty. If not specified, the ScopeProfile.PERMIT_EMPTY_SCOPE option is used. 
+ enum_values: + - 'SCOPE_PROFILE_UNSPECIFIED' + - 'PERMIT_EMPTY_SCOPE' + - 'REQUIRED_ON_READ' + default_value: "PERMIT_EMPTY_SCOPE" + - name: 'accessDeterminationLogConfig' + type: NestedObject + description: | + Specifies how the server logs the consent-aware requests. If not specified, the AccessDeterminationLogConfig.LogLevel.MINIMUM option is used. + properties: + - name: 'logLevel' + type: Enum + description: | + Controls the amount of detail to include as part of the audit logs. + enum_values: + - 'LOG_LEVEL_UNSPECIFIED' + - 'DISABLED' + - 'MINIMUM' + - 'VERBOSE' + default_value: "MINIMUM" + - name: 'enforcedAdminConsents' + type: Array + description: | + The versioned names of the enforced admin Consent resource(s), in the format projects/{projectId}/locations/{location}/datasets/{datasetId}/fhirStores/{fhirStoreId}/fhir/Consent/{resourceId}/_history/{version_id}. For FHIR stores with disableResourceVersioning=true, the format is projects/{projectId}/locations/{location}/datasets/{datasetId}/fhirStores/{fhirStoreId}/fhir/Consent/{resourceId}. This field can only be updated using [fhirStores.applyAdminConsents][]. + output: true + item_type: + type: String - name: 'complexDataTypeReferenceParsing' type: Enum description: | diff --git a/mmv1/templates/terraform/examples/healthcare_fhir_store_consent_config.tf.tmpl b/mmv1/templates/terraform/examples/healthcare_fhir_store_consent_config.tf.tmpl new file mode 100644 index 000000000000..699e7084f64c --- /dev/null +++ b/mmv1/templates/terraform/examples/healthcare_fhir_store_consent_config.tf.tmpl @@ -0,0 +1,46 @@ +resource "google_healthcare_fhir_store" "default" { + name = "{{index $.Vars "fhir_store_name"}}" + dataset = google_healthcare_dataset.dataset.id + version = "R4" + complex_data_type_reference_parsing = "DISABLED" + + enable_update_create = false + disable_referential_integrity = false + disable_resource_versioning = false + enable_history_import = false + default_search_handling_strict = false + + notification_configs { + pubsub_topic = google_pubsub_topic.topic.id + } + + labels = { + label1 = "labelvalue1" + } + + consent_config { + version = "V1" + access_enforced = true + consent_header_handling { + profile = "REQUIRED_ON_READ" + } + access_determination_log_config { + log_level = "VERBOSE" + } + } + + provider = google-beta +} + +resource "google_pubsub_topic" "topic" { + name = "{{index $.Vars "pubsub_topic"}}" + + provider = google-beta +} + +resource "google_healthcare_dataset" "dataset" { + name = "{{index $.Vars "dataset_name"}}" + location = "us-central1" + + provider = google-beta +} diff --git a/mmv1/third_party/terraform/services/healthcare/resource_healthcare_fhir_store_test.go.tmpl b/mmv1/third_party/terraform/services/healthcare/resource_healthcare_fhir_store_test.go.tmpl index 9e51446d98e2..30418b32bd1c 100644 --- a/mmv1/third_party/terraform/services/healthcare/resource_healthcare_fhir_store_test.go.tmpl +++ b/mmv1/third_party/terraform/services/healthcare/resource_healthcare_fhir_store_test.go.tmpl @@ -135,6 +135,19 @@ resource "google_healthcare_fhir_store" "default" { {{- if ne $.TargetVersionName "ga" }} enable_history_modifications = false {{- end }} + +{{- if ne $.TargetVersionName "ga" }} + consent_config { + version = "V1" + access_enforced = false + consent_header_handling { + profile = "PERMIT_EMPTY_SCOPE" + } + access_determination_log_config { + log_level = "DISABLED" + } + } +{{- end }} } resource "google_healthcare_dataset" "dataset" { @@ -163,6 +176,19 @@ resource 
"google_healthcare_fhir_store" "default" { enable_history_modifications = true {{- end }} +{{- if ne $.TargetVersionName "ga" }} + consent_config { + version = "V1" + access_enforced = true + consent_header_handling { + profile = "REQUIRED_ON_READ" + } + access_determination_log_config { + log_level = "VERBOSE" + } + } +{{- end }} + labels = { label1 = "labelvalue1" } @@ -200,6 +226,27 @@ func testAccCheckGoogleHealthcareFhirStoreUpdate(t *testing.T, pubsubTopic strin return fmt.Errorf("Unexpected failure while verifying 'updated' dataset: %s", err) } + {{- if ne $.TargetVersionName "ga" }} + if response.ConsentConfig == nil { + return fmt.Errorf("fhirStore 'ConsentConfig' missing: %s", gcpResourceUri) + } + if !response.ConsentConfig.AccessEnforced { + return fmt.Errorf("fhirStore 'ConsentConfig.AccessEnforced' not updated: %s", gcpResourceUri) + } + if response.ConsentConfig.ConsentHeaderHandling == nil { + return fmt.Errorf("fhirStore 'ConsentConfig.ConsentHeaderHandling' missing: %s", gcpResourceUri) + } + if response.ConsentConfig.ConsentHeaderHandling.Profile != "REQUIRED_ON_READ" { + return fmt.Errorf("fhirStore 'ConsentConfig.ConsentHeaderHandling.Profile' not updated: %s", gcpResourceUri) + } + if response.ConsentConfig.AccessDeterminationLogConfig == nil { + return fmt.Errorf("fhirStore 'ConsentConfig.AccessDeterminationLogConfig' missing: %s", gcpResourceUri) + } + if response.ConsentConfig.AccessDeterminationLogConfig.LogLevel != "VERBOSE" { + return fmt.Errorf("fhirStore 'ConsentConfig.AccessDeterminationLogConfig.LogLevel' not updated: %s", gcpResourceUri) + } + {{- end }} + if !response.EnableUpdateCreate { return fmt.Errorf("fhirStore 'EnableUpdateCreate' not updated: %s", gcpResourceUri) } From daa32c04729be3b5ab905e7cbe3a05d61327a788 Mon Sep 17 00:00:00 2001 From: Thomas Rodgers Date: Wed, 3 Sep 2025 10:01:00 -0700 Subject: [PATCH 129/201] Make contributor membership checker warn instead of fail (#15051) --- .ci/magician/cloudbuild/community.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.ci/magician/cloudbuild/community.go b/.ci/magician/cloudbuild/community.go index 9c01d7c94f01..e72e8569d913 100644 --- a/.ci/magician/cloudbuild/community.go +++ b/.ci/magician/cloudbuild/community.go @@ -30,7 +30,8 @@ func (cb *Client) ApproveDownstreamGenAndTest(prNumber, commitSha string) error } if buildId == "" { - return fmt.Errorf("Failed to find pending build for PR %s", prNumber) + fmt.Printf("WARNING: Failed to find pending build for PR %s\nThis build may have been approved already.\n", prNumber) + return nil } err = approveBuild(PROJECT_ID, buildId) From b175083dcccfb400adfa5a9a397bcf4414b26034 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Wed, 3 Sep 2025 20:41:39 +0200 Subject: [PATCH 130/201] bigtable: IAM resources were not able to upgrade to `v7.X.X` without state upgrader (#15027) Co-authored-by: Thomas Rodgers --- .../provider/provider_mmv1_resources.go.tmpl | 6 +-- .../bigtable/iam_bigtable_table_migrate.go | 51 +++++++++++++++++++ .../terraform/tpgiamresource/iam.go.tmpl | 14 +++++ .../tpgiamresource/resource_iam_binding.go | 5 +- .../tpgiamresource/resource_iam_member.go | 4 +- .../tpgiamresource/resource_iam_policy.go | 4 +- 6 files changed, 77 insertions(+), 7 deletions(-) create mode 100644 mmv1/third_party/terraform/services/bigtable/iam_bigtable_table_migrate.go diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index 
06f8767048a9..a87ffaa2fafd 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -456,9 +456,9 @@ var handwrittenIAMResources = map[string]*schema.Resource{ "google_bigtable_instance_iam_binding": tpgiamresource.ResourceIamBinding(bigtable.IamBigtableInstanceSchema, bigtable.NewBigtableInstanceUpdater, bigtable.BigtableInstanceIdParseFunc), "google_bigtable_instance_iam_member": tpgiamresource.ResourceIamMember(bigtable.IamBigtableInstanceSchema, bigtable.NewBigtableInstanceUpdater, bigtable.BigtableInstanceIdParseFunc), "google_bigtable_instance_iam_policy": tpgiamresource.ResourceIamPolicy(bigtable.IamBigtableInstanceSchema, bigtable.NewBigtableInstanceUpdater, bigtable.BigtableInstanceIdParseFunc), - "google_bigtable_table_iam_binding": tpgiamresource.ResourceIamBinding(bigtable.IamBigtableTableSchema, bigtable.NewBigtableTableUpdater, bigtable.BigtableTableIdParseFunc), - "google_bigtable_table_iam_member": tpgiamresource.ResourceIamMember(bigtable.IamBigtableTableSchema, bigtable.NewBigtableTableUpdater, bigtable.BigtableTableIdParseFunc), - "google_bigtable_table_iam_policy": tpgiamresource.ResourceIamPolicy(bigtable.IamBigtableTableSchema, bigtable.NewBigtableTableUpdater, bigtable.BigtableTableIdParseFunc), + "google_bigtable_table_iam_binding": tpgiamresource.ResourceIamBinding(bigtable.IamBigtableTableSchema, bigtable.NewBigtableTableUpdater, bigtable.BigtableTableIdParseFunc, tpgiamresource.IamWithStateUpgraders(bigtable.BigtableTableIamStateUpgraders), tpgiamresource.IamWithSchemaVersion(1)), + "google_bigtable_table_iam_member": tpgiamresource.ResourceIamMember(bigtable.IamBigtableTableSchema, bigtable.NewBigtableTableUpdater, bigtable.BigtableTableIdParseFunc, tpgiamresource.IamWithStateUpgraders(bigtable.BigtableTableIamStateUpgraders), tpgiamresource.IamWithSchemaVersion(1)), + "google_bigtable_table_iam_policy": tpgiamresource.ResourceIamPolicy(bigtable.IamBigtableTableSchema, bigtable.NewBigtableTableUpdater, bigtable.BigtableTableIdParseFunc, tpgiamresource.IamWithStateUpgraders(bigtable.BigtableTableIamStateUpgraders), tpgiamresource.IamWithSchemaVersion(1)), "google_bigquery_dataset_iam_binding": tpgiamresource.ResourceIamBinding(bigquery.IamBigqueryDatasetSchema, bigquery.NewBigqueryDatasetIamUpdater, bigquery.BigqueryDatasetIdParseFunc), "google_bigquery_dataset_iam_member": tpgiamresource.ResourceIamMember(bigquery.IamMemberBigqueryDatasetSchema, bigquery.NewBigqueryDatasetIamMemberUpdater, bigquery.BigqueryDatasetIdParseFunc), "google_bigquery_dataset_iam_policy": tpgiamresource.ResourceIamPolicy(bigquery.IamBigqueryDatasetSchema, bigquery.NewBigqueryDatasetIamUpdater, bigquery.BigqueryDatasetIdParseFunc), diff --git a/mmv1/third_party/terraform/services/bigtable/iam_bigtable_table_migrate.go b/mmv1/third_party/terraform/services/bigtable/iam_bigtable_table_migrate.go new file mode 100644 index 000000000000..bb6140d36f3b --- /dev/null +++ b/mmv1/third_party/terraform/services/bigtable/iam_bigtable_table_migrate.go @@ -0,0 +1,51 @@ +package bigtable + +import ( + "context" + "log" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +var BigtableTableIamStateUpgraders = []schema.StateUpgrader{ + { + Type: resourceBigtableTableIAMV0().CoreConfigSchema().ImpliedType(), + Upgrade: ResourceBigtableTableIAMUpgradeV0, + Version: 0, + }, +} + +func resourceBigtableTableIAMV0() *schema.Resource { + return &schema.Resource{ + Schema: 
map[string]*schema.Schema{ + "instance": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + "project": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + }, + "table": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + }, + UseJSONNumber: true, + } +} + +func ResourceBigtableTableIAMUpgradeV0(_ context.Context, rawState map[string]interface{}, meta interface{}) (map[string]interface{}, error) { + log.Printf("[DEBUG] Attributes before migration: %#v", rawState) + + if _, ok := rawState["instance"]; ok { + rawState["instance_name"] = rawState["instance"] + } + + log.Printf("[DEBUG] Attributes after migration: %#v", rawState) + return rawState, nil +} diff --git a/mmv1/third_party/terraform/tpgiamresource/iam.go.tmpl b/mmv1/third_party/terraform/tpgiamresource/iam.go.tmpl index 3222c6adfaee..6abe5a12745d 100644 --- a/mmv1/third_party/terraform/tpgiamresource/iam.go.tmpl +++ b/mmv1/third_party/terraform/tpgiamresource/iam.go.tmpl @@ -467,6 +467,8 @@ func CompareAuditConfigs(a, b []*cloudresourcemanager.AuditConfig) bool { type IamSettings struct { DeprecationMessage string EnableBatching bool + StateUpgraders []schema.StateUpgrader + SchemaVersion int } func NewIamSettings(options ...func(*IamSettings)) *IamSettings { @@ -495,6 +497,18 @@ func IamWithBatching (s *IamSettings) { s.EnableBatching = true } +func IamWithStateUpgraders(upgraders []schema.StateUpgrader) func(*IamSettings) { + return func(s *IamSettings) { + s.StateUpgraders = upgraders + } +} + +func IamWithSchemaVersion(version int) func(*IamSettings) { + return func(s *IamSettings) { + s.SchemaVersion = version + } +} + // Util to deref and print auditConfigs func DebugPrintAuditConfigs(bs []*cloudresourcemanager.AuditConfig) string { v, _ := json.MarshalIndent(bs, "", "\t") diff --git a/mmv1/third_party/terraform/tpgiamresource/resource_iam_binding.go b/mmv1/third_party/terraform/tpgiamresource/resource_iam_binding.go index ca5df060bb07..cb814c8fe902 100644 --- a/mmv1/third_party/terraform/tpgiamresource/resource_iam_binding.go +++ b/mmv1/third_party/terraform/tpgiamresource/resource_iam_binding.go @@ -75,8 +75,9 @@ func ResourceIamBinding(parentSpecificSchema map[string]*schema.Schema, newUpdat // if non-empty, this will be used to send a deprecation message when the // resource is used. DeprecationMessage: settings.DeprecationMessage, - - Schema: tpgresource.MergeSchemas(iamBindingSchema, parentSpecificSchema), + Schema: tpgresource.MergeSchemas(iamBindingSchema, parentSpecificSchema), + SchemaVersion: settings.SchemaVersion, + StateUpgraders: settings.StateUpgraders, Importer: &schema.ResourceImporter{ State: iamBindingImport(newUpdaterFunc, resourceIdParser), }, diff --git a/mmv1/third_party/terraform/tpgiamresource/resource_iam_member.go b/mmv1/third_party/terraform/tpgiamresource/resource_iam_member.go index d7ad3021ab7c..6a7b83d03ff2 100644 --- a/mmv1/third_party/terraform/tpgiamresource/resource_iam_member.go +++ b/mmv1/third_party/terraform/tpgiamresource/resource_iam_member.go @@ -181,7 +181,9 @@ func ResourceIamMember(parentSpecificSchema map[string]*schema.Schema, newUpdate // resource is used. 
DeprecationMessage: settings.DeprecationMessage, - Schema: tpgresource.MergeSchemas(IamMemberBaseSchema, parentSpecificSchema), + Schema: tpgresource.MergeSchemas(IamMemberBaseSchema, parentSpecificSchema), + SchemaVersion: settings.SchemaVersion, + StateUpgraders: settings.StateUpgraders, Importer: &schema.ResourceImporter{ State: iamMemberImport(newUpdaterFunc, resourceIdParser), }, diff --git a/mmv1/third_party/terraform/tpgiamresource/resource_iam_policy.go b/mmv1/third_party/terraform/tpgiamresource/resource_iam_policy.go index 13cb30292021..ae02c2eb1373 100644 --- a/mmv1/third_party/terraform/tpgiamresource/resource_iam_policy.go +++ b/mmv1/third_party/terraform/tpgiamresource/resource_iam_policy.go @@ -53,7 +53,9 @@ func ResourceIamPolicy(parentSpecificSchema map[string]*schema.Schema, newUpdate // resource is used. DeprecationMessage: settings.DeprecationMessage, - Schema: tpgresource.MergeSchemas(IamPolicyBaseSchema, parentSpecificSchema), + Schema: tpgresource.MergeSchemas(IamPolicyBaseSchema, parentSpecificSchema), + SchemaVersion: settings.SchemaVersion, + StateUpgraders: settings.StateUpgraders, Importer: &schema.ResourceImporter{ State: iamPolicyImport(resourceIdParser), }, From 227a097e9ab6a46168a86785955e0e304615c1a8 Mon Sep 17 00:00:00 2001 From: Harshal Neelkamal <17376513+HarshalNeelkamal@users.noreply.github.com> Date: Wed, 3 Sep 2025 11:48:45 -0700 Subject: [PATCH 131/201] fix bug that causes updates on no-cp clusters without sa keys changing (#14980) --- .../resource_container_cluster.go.tmpl | 10 ++++- .../resource_container_cluster_test.go.tmpl | 37 ++++++++++++++++++- 2 files changed, 43 insertions(+), 4 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl index f08838fa09b0..e61bbefc9887 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster.go.tmpl @@ -6547,11 +6547,17 @@ func expandUserManagedKeysConfig(configured interface{}) *container.UserManagedK } if v, ok := config["service_account_signing_keys"]; ok { sk := v.(*schema.Set) - umkc.ServiceAccountSigningKeys = tpgresource.ConvertStringSet(sk) + skss := tpgresource.ConvertStringSet(sk) + if len(skss) > 0 { + umkc.ServiceAccountSigningKeys = skss + } } if v, ok := config["service_account_verification_keys"]; ok { vk := v.(*schema.Set) - umkc.ServiceAccountVerificationKeys = tpgresource.ConvertStringSet(vk) + vkss := tpgresource.ConvertStringSet(vk) + if len(vkss) > 0 { + umkc.ServiceAccountVerificationKeys = vkss + } } return umkc } diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index 30050a4ebba1..962bca482b11 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -63,14 +63,14 @@ func TestAccContainerCluster_basic(t *testing.T) { ImportStateVerifyIgnore: []string{"deletion_protection"}, }, { - ResourceName: "google_container_cluster.primary", + ResourceName: "google_container_cluster.primary", ImportStateId: fmt.Sprintf("%s/us-central1-a/%s", envvar.GetTestProjectFromEnv(), clusterName), ImportState: true, ImportStateVerify: true, ImportStateVerifyIgnore: 
[]string{"deletion_protection"}, }, { - ResourceName: "google_container_cluster.primary", + ResourceName: "google_container_cluster.primary", ImportState: true, ImportStateVerify: true, ImportStateVerifyIgnore: []string{"deletion_protection"}, @@ -79,6 +79,39 @@ func TestAccContainerCluster_basic(t *testing.T) { }) } +// This is to ensure that updates don't get trigerred with incorrect interpration of +// nil serviceAccount keys as empty array. +func TestAccContainerCluster_basic_noCpaUpgrade(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) + networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") + subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_basic(clusterName, networkName, subnetworkName), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttrSet("google_container_cluster.primary", "services_ipv4_cidr"), + resource.TestCheckResourceAttrSet("google_container_cluster.primary", "self_link"), + resource.TestCheckResourceAttr("google_container_cluster.primary", "networking_mode", "VPC_NATIVE"), + ), + }, + { + Config: testAccContainerCluster_basic(clusterName, networkName, subnetworkName), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_container_cluster.primary", plancheck.ResourceActionNoop), + }, + }, + }, + }, + }) +} + func TestAccContainerCluster_resourceManagerTags(t *testing.T) { t.Parallel() From c166d337a46b036611ac4062f38a05ce30c20dd2 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Wed, 3 Sep 2025 13:59:06 -0700 Subject: [PATCH 132/201] Added missing google-beta lines to beta test (#15034) --- .../terraform/examples/shared_future_reservation.tf.tmpl | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/mmv1/templates/terraform/examples/shared_future_reservation.tf.tmpl b/mmv1/templates/terraform/examples/shared_future_reservation.tf.tmpl index 1cc7fd2342c5..772f6611362c 100644 --- a/mmv1/templates/terraform/examples/shared_future_reservation.tf.tmpl +++ b/mmv1/templates/terraform/examples/shared_future_reservation.tf.tmpl @@ -1,4 +1,5 @@ resource "google_project" "owner_project" { + provider = google-beta project_id = "tf-test%{random_suffix}" name = "tf-test%{random_suffix}" org_id = "{{index $.TestEnvVars "org_id"}}" @@ -7,11 +8,13 @@ resource "google_project" "owner_project" { } resource "google_project_service" "compute" { + provider = google-beta project = google_project.owner_project.project_id service = "compute.googleapis.com" } resource "google_project" "guest_project" { + provider = google-beta project_id = "tf-test-2%{random_suffix}" name = "tf-test-2%{random_suffix}" org_id = "{{index $.TestEnvVars "org_id"}}" @@ -19,6 +22,7 @@ resource "google_project" "guest_project" { } resource "google_org_policy_policy" "shared_reservation_org_policy" { + provider = google-beta name = "projects/${google_project.owner_project.project_id}/policies/compute.sharedReservationsOwnerProjects" parent = "projects/${google_project.owner_project.project_id}" @@ -30,6 +34,7 @@ resource "google_org_policy_policy" "shared_reservation_org_policy" { } resource "google_compute_future_reservation" 
"{{$.PrimaryResourceId}}" { + provider = google-beta project = google_project.owner_project.project_id name = "{{index $.Vars "future_reservation_name"}}" time_window { From 1a00987c8a6d6b93701564a0d17516bc1beee7df Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Wed, 3 Sep 2025 14:32:38 -0700 Subject: [PATCH 133/201] tgc-revival: add cloudbuild resources (#14953) --- mmv1/api/resource.go | 2 +- mmv1/api/type.go | 2 ++ mmv1/products/cloudbuild/BitbucketServerConfig.yaml | 1 + mmv1/products/cloudbuildv2/Connection.yaml | 4 ++++ mmv1/products/cloudbuildv2/Repository.yaml | 9 +++++++++ .../custom_expand/cloudbuildv2_connection_name.go.tmpl | 7 +++++++ .../custom_expand/cloudbuildv2_repository_name.go.tmpl | 7 +++++++ 7 files changed, 31 insertions(+), 1 deletion(-) create mode 100644 mmv1/templates/tgc_next/custom_expand/cloudbuildv2_connection_name.go.tmpl create mode 100644 mmv1/templates/tgc_next/custom_expand/cloudbuildv2_repository_name.go.tmpl diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index 5468905cddae..4d88776422c8 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -2164,7 +2164,7 @@ func (r Resource) TGCTestIgnorePropertiesToStrings(e resource.Examples) []string // Filters out computed properties during cai2hcl func (r Resource) ReadPropertiesForTgc() []*Type { return google.Reject(r.AllUserProperties(), func(v *Type) bool { - return v.Output || v.UrlParamOnly + return v.Output || v.UrlParamOnly || v.TGCIgnoreRead }) } diff --git a/mmv1/api/type.go b/mmv1/api/type.go index 341b31d44ee7..049539b18168 100644 --- a/mmv1/api/type.go +++ b/mmv1/api/type.go @@ -327,6 +327,8 @@ type Type struct { // If true, the custom flatten function is not applied during cai2hcl TGCIgnoreTerraformCustomFlatten bool `yaml:"tgc_ignore_terraform_custom_flatten,omitempty"` + + TGCIgnoreRead bool `yaml:"tgc_ignore_read,omitempty"` } const MAX_NAME = 20 diff --git a/mmv1/products/cloudbuild/BitbucketServerConfig.yaml b/mmv1/products/cloudbuild/BitbucketServerConfig.yaml index a16fc8c7c4dd..3bf302c626cb 100644 --- a/mmv1/products/cloudbuild/BitbucketServerConfig.yaml +++ b/mmv1/products/cloudbuild/BitbucketServerConfig.yaml @@ -40,6 +40,7 @@ async: base_url: '{{op_id}}' result: resource_inside_response: true +include_in_tgc_next_DO_NOT_USE: true custom_code: encoder: 'templates/terraform/encoders/cloudbuild_bitbucketserver_config.go.tmpl' post_create: 'templates/terraform/post_create/cloudbuild_bitbucketserver_config.go.tmpl' diff --git a/mmv1/products/cloudbuildv2/Connection.yaml b/mmv1/products/cloudbuildv2/Connection.yaml index 5c3193efbb2b..a727e98c6190 100644 --- a/mmv1/products/cloudbuildv2/Connection.yaml +++ b/mmv1/products/cloudbuildv2/Connection.yaml @@ -49,6 +49,7 @@ iam_policy: custom_code: exclude_tgc: true legacy_long_form_project: true +include_in_tgc_next_DO_NOT_USE: true sweeper: url_substitutions: - region: "us-central1" @@ -59,6 +60,8 @@ examples: primary_resource_name: 'fmt.Sprintf("tf-test-connection%s", context["random_suffix"])' vars: connection_name: 'tf-test-connection' + tgc_test_ignore_extra: + - github_config.app_installation_id # It has 0 in Terraform configuration - name: 'cloudbuildv2_connection_ghe' exclude_test: true - name: 'cloudbuildv2_connection_github' @@ -70,6 +73,7 @@ parameters: url_param_only: true required: true immutable: true + custom_tgc_expand: 'templates/tgc_next/custom_expand/cloudbuildv2_repository_name.go.tmpl' - name: 'location' type: String description: The location for the resource diff --git 
a/mmv1/products/cloudbuildv2/Repository.yaml b/mmv1/products/cloudbuildv2/Repository.yaml index 31e65c9c70bd..9fd46c146af7 100644 --- a/mmv1/products/cloudbuildv2/Repository.yaml +++ b/mmv1/products/cloudbuildv2/Repository.yaml @@ -39,6 +39,7 @@ custom_code: encoder: 'templates/terraform/encoders/cloudbuildv2_repository.go.tmpl' exclude_tgc: true legacy_long_form_project: true +include_in_tgc_next_DO_NOT_USE: true examples: # These tests depend on secrets stored in a separate project, so we prefer not # to show them in the docs. @@ -48,18 +49,24 @@ examples: connection_name: 'connection' repository_name: 'repository' exclude_docs: true + tgc_test_ignore_extra: + - annotations # It has empty object in Terraform configuration - name: 'cloudbuildv2_repository_github' primary_resource_id: 'primary' vars: connection_name: 'connection' repository_name: 'repository' exclude_docs: true + tgc_test_ignore_extra: + - annotations # It has empty object in Terraform configuration - name: 'cloudbuildv2_repository_gle' primary_resource_id: 'primary' vars: connection_name: 'connection' repository_name: 'repository' exclude_docs: true + tgc_test_ignore_extra: + - annotations # It has empty object in Terraform configuration - name: 'cloudbuildv2_repository_ghe_doc' exclude_test: true - name: 'cloudbuildv2_repository_github_doc' @@ -88,6 +95,8 @@ properties: required: true immutable: true ignore_read: true + tgc_ignore_read: true + custom_tgc_expand: 'templates/tgc_next/custom_expand/cloudbuildv2_repository_name.go.tmpl' - name: 'remoteUri' type: String description: Required. Git Clone HTTPS URI. diff --git a/mmv1/templates/tgc_next/custom_expand/cloudbuildv2_connection_name.go.tmpl b/mmv1/templates/tgc_next/custom_expand/cloudbuildv2_connection_name.go.tmpl new file mode 100644 index 000000000000..a6b8b8823c23 --- /dev/null +++ b/mmv1/templates/tgc_next/custom_expand/cloudbuildv2_connection_name.go.tmpl @@ -0,0 +1,7 @@ +func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + m, err := tpgresource.ReplaceVarsForId(d, config, "{{"projects/{{project}}/locations/{{location}}/connections/{{name}}"}}") + if err != nil { + return nil, fmt.Errorf("Error constructing name: %s", err) + } + return m, nil +} \ No newline at end of file diff --git a/mmv1/templates/tgc_next/custom_expand/cloudbuildv2_repository_name.go.tmpl b/mmv1/templates/tgc_next/custom_expand/cloudbuildv2_repository_name.go.tmpl new file mode 100644 index 000000000000..c746be1bf512 --- /dev/null +++ b/mmv1/templates/tgc_next/custom_expand/cloudbuildv2_repository_name.go.tmpl @@ -0,0 +1,7 @@ +func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + m, err := tpgresource.ReplaceVarsForId(d, config, "{{"projects/{{project}}/locations/{{location}}/connections/{{parent_connection}}/repositories/{{name}}"}}") + if err != nil { + return nil, fmt.Errorf("Error constructing name: %s", err) + } + return m, nil +} \ No newline at end of file From 488e4ed776bea4cea24fce8f630e98b96a495dfe Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Wed, 3 Sep 2025 16:53:23 -0700 Subject: [PATCH 134/201] skip TestAccResourceFWPubsubLiteReservation_basic test (#15056) --- .../pubsublite/fw_resource_pubsub_lite_reservation_test.go | 1 + 1 file changed, 1 insertion(+) diff --git a/mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation_test.go 
b/mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation_test.go index e4507dfaec41..004e143e106a 100644 --- a/mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation_test.go +++ b/mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation_test.go @@ -8,6 +8,7 @@ import ( ) func TestAccResourceFWPubsubLiteReservation_basic(t *testing.T) { + acctest.SkipIfVcr(t) t.Parallel() context := map[string]interface{}{ From 158793fc764fc999dd281df93ee7a56332ccaf53 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B6rn?= <81525627+bestefreund@users.noreply.github.com> Date: Thu, 4 Sep 2025 18:24:32 +0200 Subject: [PATCH 135/201] Add singular data source for retrieving a Python package from an Artifact Registry repository (#14611) --- .../provider/provider_mmv1_resources.go.tmpl | 1 + ...source_artifact_registry_python_package.go | 275 ++++++++++++++++++ ...e_artifact_registry_python_package_test.go | 67 +++++ ...fact_registry_python_package.html.markdown | 63 ++++ 4 files changed, 406 insertions(+) create mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_python_package.go create mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_python_package_test.go create mode 100644 mmv1/third_party/terraform/website/docs/d/artifact_registry_python_package.html.markdown diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index a87ffaa2fafd..174ccccad9cc 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -31,6 +31,7 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_artifact_registry_locations": artifactregistry.DataSourceGoogleArtifactRegistryLocations(), "google_artifact_registry_npm_package": artifactregistry.DataSourceArtifactRegistryNpmPackage(), "google_artifact_registry_package": artifactregistry.DataSourceArtifactRegistryPackage(), + "google_artifact_registry_python_package": artifactregistry.DataSourceArtifactRegistryPythonPackage(), "google_artifact_registry_repositories": artifactregistry.DataSourceArtifactRegistryRepositories(), "google_artifact_registry_repository": artifactregistry.DataSourceArtifactRegistryRepository(), "google_artifact_registry_tag": artifactregistry.DataSourceArtifactRegistryTag(), diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_python_package.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_python_package.go new file mode 100644 index 000000000000..5c53c0140ecb --- /dev/null +++ b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_python_package.go @@ -0,0 +1,275 @@ +package artifactregistry + +import ( + "fmt" + "net/url" + "sort" + "strings" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +type PythonPackage struct { + name string + packageName string + version string + createTime string + updateTime string +} + +func DataSourceArtifactRegistryPythonPackage() *schema.Resource { + return &schema.Resource{ + Read: DataSourceArtifactRegistryPythonPackageRead, + + Schema: map[string]*schema.Schema{ + 
"project": { + Type: schema.TypeString, + Optional: true, + Description: "Project ID of the project.", + }, + "location": { + Type: schema.TypeString, + Required: true, + Description: "The region of the Artifact Registry repository.", + }, + "repository_id": { + Type: schema.TypeString, + Required: true, + Description: "The repository ID containing the Python package.", + }, + "package_name": { + Type: schema.TypeString, + Required: true, + Description: "The name of the Python package.", + }, + "version": { + Type: schema.TypeString, + Computed: true, + Description: "The version of the Python package.", + }, + "name": { + Type: schema.TypeString, + Computed: true, + Description: "The fully qualified name of the Python package.", + }, + "create_time": { + Type: schema.TypeString, + Computed: true, + Description: "The time the package was created.", + }, + "update_time": { + Type: schema.TypeString, + Computed: true, + Description: "The time the package was last updated.", + }, + }, + } +} + +func DataSourceArtifactRegistryPythonPackageRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + + var res PythonPackage + + packageName, version := parsePythonPackage(d.Get("package_name").(string)) + + if version != "" { + // fetch package by version + // https://cloud.google.com/artifact-registry/docs/reference/rest/v1/projects.locations.repositories.pythonPackages/get + packageUrlSafe := url.QueryEscape(packageName) + urlRequest, err := tpgresource.ReplaceVars(d, config, fmt.Sprintf("{{ArtifactRegistryBasePath}}projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/pythonPackages/%s:%s", packageUrlSafe, version)) + if err != nil { + return fmt.Errorf("Error setting api endpoint") + } + + resGet, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + RawURL: urlRequest, + UserAgent: userAgent, + }) + if err != nil { + return err + } + + res = convertPythonPackageResponseToStruct(resGet) + } else { + // fetch the list of packages, ordered by update time + // https://cloud.google.com/artifact-registry/docs/reference/rest/v1/projects.locations.repositories.pythonPackages/list + urlRequest, err := tpgresource.ReplaceVars(d, config, "{{ArtifactRegistryBasePath}}projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/pythonPackages") + if err != nil { + return fmt.Errorf("Error setting api endpoint") + } + + // to reduce the number of pages we need to fetch, we set the pageSize to 1000(max) + urlRequest, err = transport_tpg.AddQueryParams(urlRequest, map[string]string{"pageSize": "1000"}) + if err != nil { + return err + } + + res, err = retrieveAndFilterPythonPackages(d, config, urlRequest, userAgent, packageName, version) + if err != nil { + return err + } + } + + // Set Terraform schema fields + if err := d.Set("project", project); err != nil { + return err + } + if err := d.Set("name", res.name); err != nil { + return err + } + if err := d.Set("version", res.version); err != nil { + return err + } + if err := d.Set("create_time", res.createTime); err != nil { + return err + } + if err := d.Set("update_time", res.updateTime); err != nil { + return err + } + + d.SetId(res.name) + + return nil +} + +func parsePythonPackage(pkg string) (packageName string, version string) { + 
splitByColon := strings.Split(pkg, ":") + + if len(splitByColon) == 2 { + packageName = splitByColon[0] + version = splitByColon[1] + } else { + packageName = pkg + } + + return packageName, version +} + +func retrieveAndFilterPythonPackages(d *schema.ResourceData, config *transport_tpg.Config, urlRequest string, userAgent string, packageName string, version string) (PythonPackage, error) { + // Paging through the list method until either: + // if a version was provided, the matching package name and version pair + // otherwise, return the first matching package name + + var allPackages []PythonPackage + + for { + resListPythonPackages, token, err := retrieveListOfPythonPackages(config, urlRequest, userAgent) + if err != nil { + return PythonPackage{}, err + } + + for _, pkg := range resListPythonPackages { + if strings.Contains(pkg.name, "/"+url.QueryEscape(packageName)+":") { + allPackages = append(allPackages, pkg) + } + } + + if token == "" { + break + } + + urlRequest, err = transport_tpg.AddQueryParams(urlRequest, map[string]string{"pageToken": token}) + if err != nil { + return PythonPackage{}, err + } + } + + if len(allPackages) == 0 { + return PythonPackage{}, fmt.Errorf("Requested Python package was not found.") + } + + // Client-side sort by updateTime descending (latest first) + sort.Slice(allPackages, func(i, j int) bool { + // Parse RFC3339 timestamps, fallback to string compare if parse fails + ti, err1 := time.Parse(time.RFC3339, allPackages[i].updateTime) + tj, err2 := time.Parse(time.RFC3339, allPackages[j].updateTime) + if err1 == nil && err2 == nil { + return ti.After(tj) + } + return allPackages[i].updateTime > allPackages[j].updateTime + }) + + if version != "" { + for _, pkg := range allPackages { + if pkg.version == version { + return pkg, nil + } + } + return PythonPackage{}, fmt.Errorf("Requested version was not found.") + } + + // Return the latest package if no version specified + return allPackages[0], nil +} + +func retrieveListOfPythonPackages(config *transport_tpg.Config, urlRequest string, userAgent string) ([]PythonPackage, string, error) { + resList, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + RawURL: urlRequest, + UserAgent: userAgent, + }) + if err != nil { + return make([]PythonPackage, 0), "", err + } + + if nextPageToken, ok := resList["nextPageToken"].(string); ok { + return flattenPythonPackageDataSourceListResponse(resList), nextPageToken, nil + } else { + return flattenPythonPackageDataSourceListResponse(resList), "", nil + } +} + +func flattenPythonPackageDataSourceListResponse(res map[string]interface{}) []PythonPackage { + var pythonPackages []PythonPackage + + resPythonPackages, _ := res["pythonPackages"].([]interface{}) + + for _, resPackage := range resPythonPackages { + pkg, _ := resPackage.(map[string]interface{}) + pythonPackages = append(pythonPackages, convertPythonPackageResponseToStruct(pkg)) + } + + return pythonPackages +} + +func convertPythonPackageResponseToStruct(res map[string]interface{}) PythonPackage { + var pythonPackage PythonPackage + + if name, ok := res["name"].(string); ok { + pythonPackage.name = name + } + + if packageName, ok := res["packageName"].(string); ok { + pythonPackage.packageName = packageName + } + + if version, ok := res["version"].(string); ok { + pythonPackage.version = version + } + + if createTime, ok := res["createTime"].(string); ok { + pythonPackage.createTime = createTime + } + + if updateTime, ok := res["updateTime"].(string); ok { + 
pythonPackage.updateTime = updateTime
+    }
+
+    return pythonPackage
+}
diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_python_package_test.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_python_package_test.go
new file mode 100644
index 000000000000..1d8b08c04cfb
--- /dev/null
+++ b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_python_package_test.go
@@ -0,0 +1,67 @@
+package artifactregistry_test
+
+import (
+    "fmt"
+    "testing"
+
+    "github.com/hashicorp/terraform-plugin-testing/helper/resource"
+    "github.com/hashicorp/terraform-plugin-testing/terraform"
+    "github.com/hashicorp/terraform-provider-google/google/acctest"
+)
+
+func TestAccDataSourceArtifactRegistryPythonPackage_basic(t *testing.T) {
+    acctest.SkipIfVcr(t)
+    t.Parallel()
+
+    // At the moment there are no public Python packages available in Artifact Registry.
+    // This test is skipped to avoid unnecessary failures.
+    // As soon as there are public packages available, this test can be enabled by removing the skip and adjusting the configuration accordingly.
+    t.Skip("No public Python packages available in Artifact Registry")
+
+    resourceName := "data.google_artifact_registry_python_package.test"
+
+    acctest.VcrTest(t, resource.TestCase{
+        PreCheck:                 func() { acctest.AccTestPreCheck(t) },
+        ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t),
+        Steps: []resource.TestStep{
+            {
+                Config: testAccDataSourceArtifactRegistryPythonPackageConfig,
+                Check: resource.ComposeTestCheckFunc(
+                    resource.TestCheckResourceAttrSet(resourceName, "project"),
+                    resource.TestCheckResourceAttrSet(resourceName, "location"),
+                    resource.TestCheckResourceAttrSet(resourceName, "repository_id"),
+                    resource.TestCheckResourceAttrSet(resourceName, "package_name"),
+                    resource.TestCheckResourceAttrSet(resourceName, "name"),
+                    resource.TestCheckResourceAttrSet(resourceName, "version"),
+                    validatePythonPackageTimestamps(resourceName),
+                ),
+            },
+        },
+    })
+}
+
+const testAccDataSourceArtifactRegistryPythonPackageConfig = `
+data "google_artifact_registry_python_package" "test" {
+  project       = "example-project"
+  location      = "us"
+  repository_id = "example-repo"
+  package_name  = "example-package"
+}
+`
+
+func validatePythonPackageTimestamps(dataSourceName string) resource.TestCheckFunc {
+    return func(s *terraform.State) error {
+        res, ok := s.RootModule().Resources[dataSourceName]
+        if !ok {
+            return fmt.Errorf("can't find %s in state", dataSourceName)
+        }
+
+        for _, attr := range []string{"create_time", "update_time"} {
+            if ts, ok := res.Primary.Attributes[attr]; !ok || !isRFC3339(ts) {
+                return fmt.Errorf("%s is not RFC3339: %s", attr, ts)
+            }
+        }
+
+        return nil
+    }
+}
diff --git a/mmv1/third_party/terraform/website/docs/d/artifact_registry_python_package.html.markdown b/mmv1/third_party/terraform/website/docs/d/artifact_registry_python_package.html.markdown
new file mode 100644
index 000000000000..27840873c7ad
--- /dev/null
+++ b/mmv1/third_party/terraform/website/docs/d/artifact_registry_python_package.html.markdown
@@ -0,0 +1,63 @@
+---
+subcategory: "Artifact Registry"
+description: |-
+  Get information about a Python package within a Google Artifact Registry Repository.
+---
+
+# google_artifact_registry_python_package
+
+This data source fetches information from a provided Artifact Registry repository, based on the latest version of the package or an optionally specified version. 
+ +## Example Usage + +```hcl +resource "google_artifact_registry_repository" "python_repo" { + location = "us-central1" + repository_id = "my-python-repo" + format = "PYTHON" +} + +data "google_artifact_registry_python_package" "latest" { + location = google_artifact_registry_repository.python_repo.location + repository_id = google_artifact_registry_repository.python_repo.repository_id + package_name = "example_pkg" +} + +data "google_artifact_registry_python_package" "with_version" { + location = google_artifact_registry_repository.python_repo.location + repository_id = google_artifact_registry_repository.python_repo.repository_id + package_name = "example_pkg:1.0.0" +} +``` + +## Argument Reference + +The following arguments are supported: + +* `location` – (Required) The location of the Artifact Registry repository. + +* `repository_id` – (Required) The ID of the repository containing the Python package. + +* `package_name` – (Required) The name of the package to fetch. Can optionally include a specific version (e.g., `my_pkg:1.2.3`). If no version is provided, the latest version is used. + +* `project` – (Optional) The ID of the project that owns the repository. If not provided, the provider-level project is used. + +## Attributes Reference + +The following computed attributes are exported: + +* `id` – The fully qualified name of the fetched package. Format: + ``` + projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/pythonPackages/{{package}}:{{version}} + ``` + +* `name` – The fully qualified name of the fetched package. Format: + ``` + projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/pythonPackages/{{package}}:{{version}} + ``` + +* `version` – The version of the Python package. + +* `create_time` – The time the package was created. + +* `update_time` – The time the package was last updated. 
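+
+A minimal sketch of referencing the exported attributes from the example above (the output name here is illustrative only):
+
+```hcl
+output "example_pkg_latest_version" {
+  value = data.google_artifact_registry_python_package.latest.version
+}
+```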
From 22350553354dd17b816e799d5bb05ad3daf63599 Mon Sep 17 00:00:00 2001 From: Kushal Lunkad Date: Thu, 4 Sep 2025 22:00:18 +0530 Subject: [PATCH 136/201] API implementation for fetch Resource Type of data source references (#15048) --- .../provider/provider_mmv1_resources.go.tmpl | 1 + ..._source_backup_dr_data_source_reference.go | 167 ++++++++++++++++++ ...ce_backup_dr_data_source_reference_test.go | 148 ++++++++++++++++ ...up_dr_data_source_references.html.markdown | 60 +++++++ 4 files changed, 376 insertions(+) create mode 100644 mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_data_source_reference.go create mode 100644 mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_data_source_reference_test.go create mode 100644 mmv1/third_party/terraform/website/docs/d/backup_dr_data_source_references.html.markdown diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index 174ccccad9cc..5bf6eaaaea97 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -47,6 +47,7 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_backup_dr_backup": backupdr.DataSourceGoogleCloudBackupDRBackup(), "google_backup_dr_data_source": backupdr.DataSourceGoogleCloudBackupDRDataSource(), "google_backup_dr_backup_vault": backupdr.DataSourceGoogleCloudBackupDRBackupVault(), + "google_backup_dr_data_source_references": backupdr.DataSourceGoogleCloudBackupDRDataSourceReferences(), "google_beyondcorp_app_connection": beyondcorp.DataSourceGoogleBeyondcorpAppConnection(), "google_beyondcorp_app_connector": beyondcorp.DataSourceGoogleBeyondcorpAppConnector(), "google_beyondcorp_app_gateway": beyondcorp.DataSourceGoogleBeyondcorpAppGateway(), diff --git a/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_data_source_reference.go b/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_data_source_reference.go new file mode 100644 index 000000000000..b541fa7e44c6 --- /dev/null +++ b/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_data_source_reference.go @@ -0,0 +1,167 @@ +package backupdr + +import ( + "fmt" + "strconv" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func DataSourceGoogleCloudBackupDRDataSourceReferences() *schema.Resource { + return &schema.Resource{ + Read: dataSourceGoogleCloudBackupDRDataSourceReferencesRead, + Schema: map[string]*schema.Schema{ + "location": { + Type: schema.TypeString, + Required: true, + Description: "The location to list the data source references from.", + }, + "project": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: "The ID of the project in which the resource belongs.", + }, + "resource_type": { + Type: schema.TypeString, + Required: true, + Description: `The resource type to get the data source references for. 
Examples include, "compute.googleapis.com/Instance", "sqladmin.googleapis.com/Instance".`, + }, + + // Output: a computed list of the data source references found + "data_source_references": { + Type: schema.TypeList, + Computed: true, + Description: "A list of the data source references found.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Computed: true, + }, + "data_source": { + Type: schema.TypeString, + Computed: true, + Description: "The underlying data source resource.", + }, + "gcp_resource_name": { + Type: schema.TypeString, + Computed: true, + Description: "The GCP resource name for the data source.", + }, + "backup_config_state": { + Type: schema.TypeString, + Computed: true, + Description: "The state of the backup config for the data source.", + }, + "backup_count": { + Type: schema.TypeInt, + Computed: true, + Description: "The number of backups for the data source.", + }, + "last_backup_state": { + Type: schema.TypeString, + Computed: true, + Description: "The state of the last backup.", + }, + "last_successful_backup_time": { + Type: schema.TypeString, + Computed: true, + Description: "The last time a successful backup was made.", + }, + "resource_type": { + Type: schema.TypeString, + Computed: true, + }, + }, + }, + }, + }, + } +} + +func dataSourceGoogleCloudBackupDRDataSourceReferencesRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + + location := d.Get("location").(string) + resourceType := d.Get("resource_type").(string) + + url := fmt.Sprintf("%sprojects/%s/locations/%s/dataSourceReferences:fetchForResourceType?resourceType=%s", config.BackupDRBasePath, project, location, resourceType) + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: project, + RawURL: url, + UserAgent: userAgent, + }) + if err != nil { + return fmt.Errorf("Error reading DataSourceReferences: %s", err) + } + + items, ok := res["dataSourceReferences"].([]interface{}) + if !ok { + items = make([]interface{}, 0) + } + + flattenedDataSourceReferences, err := flattenDataSourceReferences(items) + if err != nil { + return err + } + + if err := d.Set("data_source_references", flattenedDataSourceReferences); err != nil { + return fmt.Errorf("Error setting data_source_references: %s", err) + } + + d.SetId(url) + + return nil +} + +func flattenDataSourceReferences(items []interface{}) ([]map[string]interface{}, error) { + references := make([]map[string]interface{}, 0, len(items)) + for _, item := range items { + data, ok := item.(map[string]interface{}) + if !ok { + return nil, fmt.Errorf("cannot cast item to map[string]interface{}") + } + + ref := map[string]interface{}{ + "name": data["name"], + "data_source": data["dataSource"], + "backup_config_state": data["dataSourceBackupConfigState"], + } + + // The API returns backup count as a string, so we parse it to an integer. + if v, ok := data["dataSourceBackupCount"].(string); ok { + if i, err := strconv.Atoi(v); err == nil { + ref["backup_count"] = i + } + } + + // Flatten the nested dataSourceBackupConfigInfo object. 
+ if configInfo, ok := data["dataSourceBackupConfigInfo"].(map[string]interface{}); ok { + ref["last_backup_state"] = configInfo["lastBackupState"] + ref["last_successful_backup_time"] = configInfo["lastSuccessfulBackupConsistencyTime"] + } + + if resourceInfo, ok := data["dataSourceGcpResourceInfo"].(map[string]interface{}); ok { + ref["gcp_resource_name"] = resourceInfo["gcpResourcename"] + ref["resource_type"] = resourceInfo["type"] + } + + references = append(references, ref) + } + return references, nil +} diff --git a/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_data_source_reference_test.go b/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_data_source_reference_test.go new file mode 100644 index 000000000000..a3c4e2175b75 --- /dev/null +++ b/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_data_source_reference_test.go @@ -0,0 +1,148 @@ +package backupdr_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccDataSourceGoogleBackupDRDataSourceReferences_basic(t *testing.T) { + t.Parallel() + + projectDsName := "data.google_project.project" + var projectID string + context := map[string]interface{}{ + "location": "us-central1", + "resource_type": "sqladmin.googleapis.com/Instance", + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceGoogleBackupDRDataSourceReferences_basic(context), + Check: func(s *terraform.State) error { + // Extract project ID from the project data source in the state + project, ok := s.RootModule().Resources[projectDsName] + if !ok { + return fmt.Errorf("project data source not found: %s", projectDsName) + } + projectID = project.Primary.Attributes["project_id"] + + return resource.ComposeTestCheckFunc( + // Basic attribute checks + resource.TestCheckResourceAttr("data.google_backup_dr_data_source_references.default", "project", projectID), + resource.TestCheckResourceAttr("data.google_backup_dr_data_source_references.default", "location", context["location"].(string)), + resource.TestCheckResourceAttr("data.google_backup_dr_data_source_references.default", "resource_type", context["resource_type"].(string)), + + // Check that the list itself is populated + resource.TestCheckResourceAttrSet("data.google_backup_dr_data_source_references.default", "data_source_references.#"), + + // Checks for existing and new fields within the list + resource.TestCheckResourceAttrSet("data.google_backup_dr_data_source_references.default", "data_source_references.0.name"), + resource.TestCheckResourceAttrSet("data.google_backup_dr_data_source_references.default", "data_source_references.0.data_source"), + resource.TestCheckResourceAttrSet("data.google_backup_dr_data_source_references.default", "data_source_references.0.backup_config_state"), + resource.TestCheckResourceAttrSet("data.google_backup_dr_data_source_references.default", "data_source_references.0.gcp_resource_name"), + resource.TestCheckResourceAttrSet("data.google_backup_dr_data_source_references.default", "data_source_references.0.resource_type"), + )(s) + }, + }, + }, + }) +} + +func testAccDataSourceGoogleBackupDRDataSourceReferences_basic(context 
map[string]interface{}) string { + return acctest.Nprintf(` + + +data "google_project" "project" {} + + + resource "google_service_account" "default" { + account_id = "tf-test-my-custom-%{random_suffix}" + display_name = "Custom SA for VM Instance" +} + + +resource "google_sql_database_instance" "instance" { + name = "default-%{random_suffix}" + database_version = "MYSQL_8_0" + region = "us-central1" + deletion_protection = false + settings { + tier = "db-f1-micro" + availability_type = "ZONAL" + activation_policy = "ALWAYS" + } +} + + +resource "google_backup_dr_backup_vault" "my-backup-vault" { + location ="us-central1" + backup_vault_id = "tf-test-bv-%{random_suffix}" + description = "This is a second backup vault built by Terraform." + backup_minimum_enforced_retention_duration = "100000s" + labels = { + foo = "bar1" + bar = "baz1" + } + annotations = { + annotations1 = "bar1" + annotations2 = "baz1" + } + force_update = "true" + force_delete = "true" + allow_missing = "true" +} + + +resource "google_backup_dr_backup_plan" "foo" { + location = "us-central1" + backup_plan_id = "tf-test-bp-test-%{random_suffix}" + resource_type = "sqladmin.googleapis.com/Instance" + backup_vault = google_backup_dr_backup_vault.my-backup-vault.name + + + backup_rules { + rule_id = "rule-1" + backup_retention_days = 2 + + + standard_schedule { + recurrence_type = "HOURLY" + hourly_frequency = 6 + time_zone = "UTC" + + + backup_window { + start_hour_of_day = 12 + end_hour_of_day = 18 + } + } + } +} + + +resource "google_backup_dr_backup_plan_association" "bpa" { + location = "us-central1" + backup_plan_association_id = "tf-test-bpa-test-%{random_suffix}" + resource = "projects/${data.google_project.project.project_id}/instances/${google_sql_database_instance.instance.name}" + resource_type= "sqladmin.googleapis.com/Instance" + backup_plan = google_backup_dr_backup_plan.foo.name + depends_on = [ google_sql_database_instance.instance ] +} + + +data "google_backup_dr_data_source_references" "default" { + project = data.google_project.project.project_id + location = "%{location}" + resource_type = "%{resource_type}" + depends_on= [ google_backup_dr_backup_plan_association.bpa ] + } +`, context) +} diff --git a/mmv1/third_party/terraform/website/docs/d/backup_dr_data_source_references.html.markdown b/mmv1/third_party/terraform/website/docs/d/backup_dr_data_source_references.html.markdown new file mode 100644 index 000000000000..cec4edb86c3a --- /dev/null +++ b/mmv1/third_party/terraform/website/docs/d/backup_dr_data_source_references.html.markdown @@ -0,0 +1,60 @@ +--- +subcategory: "Backup and DR Service" +description: |- + Get information about Backup and DR data source references. +--- + +# google_backup_dr_data_source_references + +A list of Backup and DR data source references. + +~> **Warning:** This resource is in beta, and should be used with the terraform-provider-google-beta provider. +See [Provider Versions](https://terraform.io/docs/providers/google/guides/provider_versions.html) for more details on beta resources. + +## Example Usage + +```hcl +data "google_backup_dr_data_source_references" "my_sql_references" { + location = "us-central1" + resource_type = "sqladmin.googleapis.com/Instance" +} + +output "first_sql_reference_name" { + name = data.google_backup_dr_data_source_references.my_sql_references.data_source_references[0].name +} +``` + +## Argument Reference + +The following arguments are supported: + +* `location `- (Required) The location of the data source references. 
+ +* `resource_type` - (Required) The resource type to get the data source references for. Examples include, "sqladmin.googleapis.com/Instance" , "compute.googleapis.com/Instance" (right now this service not available for compute Instances , it will be added soon ) + +* `project` - (Optional) The ID of the project in which the resource belongs. If it is not provided, the provider project is used. + + +## Attributes Reference + +In addition to the arguments listed above, the following attributes are exported: + +* data\_source\_references - A list of the data source references found. Each element of this list has the following attributes: + + 1. `name`- The full name of the data source reference. + + 2. `data_source`- The underlying data source resource. + + 3. `backup_config_state`- The state of the backup config for the data source. + + 4. `backup_count`- The number of backups for the data source. + + 5. `last_backup_state`- The state of the last backup. + + 6. `last_successful_backup_time`- The last time a successful backup was made. + + 7. `gcp_resource_name`- The GCP resource name for the data source. + + 8. `resource_type`- The type of the referenced resource. + +See [google_backup_dr_data_source_references](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/backup_dr_data_source_references) resource for details of the available attributes. From e0af072a428aeacce51b4441e16896f649ad16c6 Mon Sep 17 00:00:00 2001 From: "Laurenz K." <45950275+laurenz-k@users.noreply.github.com> Date: Thu, 4 Sep 2025 19:55:27 +0200 Subject: [PATCH 137/201] Add support for `networksecurity.googleapis.com/BackendAuthenticationConfig` to TGC cai2hcl (#15044) --- mmv1/third_party/cai2hcl/convert_test.go | 1 + mmv1/third_party/cai2hcl/converter_map.go | 6 +- .../backend_authentication_config.go | 94 +++++++++ .../backend_authentication_config_test.go | 13 ++ .../networksecurity/server_tls_policy.go | 14 -- .../backend_authentication_config.json | 195 ++++++++++++++++++ .../testdata/backend_authentication_config.tf | 75 +++++++ .../cai2hcl/services/networksecurity/utils.go | 16 ++ 8 files changed, 398 insertions(+), 16 deletions(-) create mode 100644 mmv1/third_party/cai2hcl/services/networksecurity/backend_authentication_config.go create mode 100644 mmv1/third_party/cai2hcl/services/networksecurity/backend_authentication_config_test.go create mode 100644 mmv1/third_party/cai2hcl/services/networksecurity/testdata/backend_authentication_config.json create mode 100644 mmv1/third_party/cai2hcl/services/networksecurity/testdata/backend_authentication_config.tf create mode 100644 mmv1/third_party/cai2hcl/services/networksecurity/utils.go diff --git a/mmv1/third_party/cai2hcl/convert_test.go b/mmv1/third_party/cai2hcl/convert_test.go index 0d8fc7bac6bb..84092e17fe62 100644 --- a/mmv1/third_party/cai2hcl/convert_test.go +++ b/mmv1/third_party/cai2hcl/convert_test.go @@ -29,5 +29,6 @@ func TestConvertNetworksecurity(t *testing.T) { "./services/networksecurity/testdata", []string{ "server_tls_policy", + "backend_authentication_config", }) } diff --git a/mmv1/third_party/cai2hcl/converter_map.go b/mmv1/third_party/cai2hcl/converter_map.go index 81f909ff4e24..7998c8e8f8c5 100644 --- a/mmv1/third_party/cai2hcl/converter_map.go +++ b/mmv1/third_party/cai2hcl/converter_map.go @@ -24,7 +24,8 @@ var AssetTypeToConverter = map[string]string{ resourcemanager.ProjectAssetType: "google_project", resourcemanager.ProjectBillingAssetType: "google_project", - networksecurity.ServerTLSPolicyAssetType: 
"google_network_security_server_tls_policy", + networksecurity.ServerTLSPolicyAssetType: "google_network_security_server_tls_policy", + networksecurity.BackendAuthenticationConfigAssetType: "google_network_security_backend_authentication_config", } // ConverterMap is a collection of converters instances, indexed by name. @@ -39,5 +40,6 @@ var ConverterMap = map[string]common.Converter{ "google_project": resourcemanager.NewProjectConverter(provider), - "google_network_security_server_tls_policy": networksecurity.NewServerTLSPolicyConverter(provider), + "google_network_security_server_tls_policy": networksecurity.NewServerTLSPolicyConverter(provider), + "google_network_security_backend_authentication_config": networksecurity.NewBackendAuthenticationConfigConverter(provider), } diff --git a/mmv1/third_party/cai2hcl/services/networksecurity/backend_authentication_config.go b/mmv1/third_party/cai2hcl/services/networksecurity/backend_authentication_config.go new file mode 100644 index 000000000000..9590dbf3dd41 --- /dev/null +++ b/mmv1/third_party/cai2hcl/services/networksecurity/backend_authentication_config.go @@ -0,0 +1,94 @@ +package networksecurity + +import ( + "errors" + "fmt" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/cai2hcl/common" + "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/caiasset" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + netsecapi "google.golang.org/api/networksecurity/v1" +) + +// BackendAuthenticationConfigAssetType is the CAI asset type name. +const BackendAuthenticationConfigAssetType string = "networksecurity.googleapis.com/BackendAuthenticationConfig" + +// BackendAuthenticationConfigSchemaName is the TF resource schema name. +const BackendAuthenticationConfigSchemaName string = "google_network_security_backend_authentication_config" + +// BackendAuthenticationConfigConverter for networksecurity backend authentication config resource. +type BackendAuthenticationConfigConverter struct { + name string + schema map[string]*schema.Schema +} + +// NewBackendAuthenticationConfigConverter returns an HCL converter. 
+func NewBackendAuthenticationConfigConverter(provider *schema.Provider) common.Converter { + schema := provider.ResourcesMap[BackendAuthenticationConfigSchemaName].Schema + + return &BackendAuthenticationConfigConverter{ + name: BackendAuthenticationConfigSchemaName, + schema: schema, + } +} + +// Convert converts CAI assets to HCL resource blocks (Provider version: 7.0.1) +func (c *BackendAuthenticationConfigConverter) Convert(assets []*caiasset.Asset) ([]*common.HCLResourceBlock, error) { + var blocks []*common.HCLResourceBlock + var err error + + for _, asset := range assets { + if asset == nil { + continue + } else if asset.Resource == nil || asset.Resource.Data == nil { + return nil, fmt.Errorf("INVALID_ARGUMENT: Asset resource data is nil") + } else if asset.Type != BackendAuthenticationConfigAssetType { + return nil, fmt.Errorf("INVALID_ARGUMENT: Expected asset of type %s, but received %s", BackendAuthenticationConfigAssetType, asset.Type) + } + block, errConvert := c.convertResourceData(asset) + blocks = append(blocks, block) + if errConvert != nil { + err = errors.Join(err, errConvert) + } + } + return blocks, err +} + +func (c *BackendAuthenticationConfigConverter) convertResourceData(asset *caiasset.Asset) (*common.HCLResourceBlock, error) { + if asset == nil || asset.Resource == nil || asset.Resource.Data == nil { + return nil, fmt.Errorf("INVALID_ARGUMENT: Asset resource data is nil") + } + + hcl, _ := flattenBackendAuthenticationConfig(asset.Resource) + + ctyVal, err := common.MapToCtyValWithSchema(hcl, c.schema) + if err != nil { + return nil, err + } + + resourceName := hcl["name"].(string) + return &common.HCLResourceBlock{ + Labels: []string{c.name, resourceName}, + Value: ctyVal, + }, nil +} + +func flattenBackendAuthenticationConfig(resource *caiasset.AssetResource) (map[string]any, error) { + result := make(map[string]any) + + var backendAuthenticationConfig *netsecapi.BackendAuthenticationConfig + if err := common.DecodeJSON(resource.Data, &backendAuthenticationConfig); err != nil { + return nil, err + } + + result["name"] = flattenName(backendAuthenticationConfig.Name) + result["labels"] = backendAuthenticationConfig.Labels + result["description"] = backendAuthenticationConfig.Description + result["client_certificate"] = backendAuthenticationConfig.ClientCertificate + result["trust_config"] = backendAuthenticationConfig.TrustConfig + result["well_known_roots"] = backendAuthenticationConfig.WellKnownRoots + result["project"] = flattenProjectName(backendAuthenticationConfig.Name) + + result["location"] = resource.Location + + return result, nil +} diff --git a/mmv1/third_party/cai2hcl/services/networksecurity/backend_authentication_config_test.go b/mmv1/third_party/cai2hcl/services/networksecurity/backend_authentication_config_test.go new file mode 100644 index 000000000000..a5518b621cca --- /dev/null +++ b/mmv1/third_party/cai2hcl/services/networksecurity/backend_authentication_config_test.go @@ -0,0 +1,13 @@ +package networksecurity_test + +import ( + cai2hcl_testing "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/cai2hcl/testing" + "testing" +) + +func TestBackendAuthenticationConfig(t *testing.T) { + cai2hcl_testing.AssertTestFiles( + t, + "./testdata", + []string{"backend_authentication_config"}) +} diff --git a/mmv1/third_party/cai2hcl/services/networksecurity/server_tls_policy.go b/mmv1/third_party/cai2hcl/services/networksecurity/server_tls_policy.go index 23b476b006f4..10c0969c0bc0 100644 --- 
a/mmv1/third_party/cai2hcl/services/networksecurity/server_tls_policy.go +++ b/mmv1/third_party/cai2hcl/services/networksecurity/server_tls_policy.go @@ -7,7 +7,6 @@ import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/caiasset" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" netsecapi "google.golang.org/api/networksecurity/v1" - "strings" ) // ServerTLSPolicyAssetType is the CAI asset type name. @@ -94,11 +93,6 @@ func flattenServerTLSPolicy(resource *caiasset.AssetResource) (map[string]any, e return result, nil } -func flattenName(name string) string { - tokens := strings.Split(name, "/") - return tokens[len(tokens)-1] -} - func flattenServerCertificate(certificate *netsecapi.GoogleCloudNetworksecurityV1CertificateProvider) []map[string]any { if certificate == nil { return nil @@ -163,11 +157,3 @@ func flattenClientValidationCA(cas []*netsecapi.ValidationCA) []map[string]any { return result } - -func flattenProjectName(name string) string { - tokens := strings.Split(name, "/") - if len(tokens) < 2 || tokens[0] != "projects" { - return "" - } - return tokens[1] -} diff --git a/mmv1/third_party/cai2hcl/services/networksecurity/testdata/backend_authentication_config.json b/mmv1/third_party/cai2hcl/services/networksecurity/testdata/backend_authentication_config.json new file mode 100644 index 000000000000..f282f25f93e3 --- /dev/null +++ b/mmv1/third_party/cai2hcl/services/networksecurity/testdata/backend_authentication_config.json @@ -0,0 +1,195 @@ +[ + { + "ancestors": ["projects/307841421122"], + "asset_type": "networksecurity.googleapis.com/BackendAuthenticationConfig", + "name": "//networksecurity.googleapis.com/projects/ccm-breakit/locations/global/backendAuthenticationConfigs/laurenzk-test1", + "resource": { + "data": { + "createTime": "2025-09-01T12:06:59.449575828Z", + "etag": "tyOJj9L43CxYuKifz5lEwq4SkQqs5426-4H7BCgyUMw", + "name": "projects/ccm-breakit/locations/global/backendAuthenticationConfigs/laurenzk-test1", + "updateTime": "2025-09-01T12:07:03.557427913Z", + "wellKnownRoots": "PUBLIC_ROOTS" + }, + "discovery_document_uri": "https://networksecurity.googleapis.com/$discovery/rest", + "discovery_name": "BackendAuthenticationConfig", + "location": "global", + "parent": "//cloudresourcemanager.googleapis.com/projects/307841421122", + "version": "v1" + }, + "updateTime": "2025-09-01T12:07:03.557427913Z" + }, + { + "ancestors": ["projects/307841421122"], + "asset_type": "networksecurity.googleapis.com/BackendAuthenticationConfig", + "name": "//networksecurity.googleapis.com/projects/ccm-breakit/locations/global/backendAuthenticationConfigs/laurenzk-test2", + "resource": { + "data": { + "createTime": "2025-09-01T12:22:50.489447184Z", + "etag": "hI87OGITW_38twEfrG1qMbgXTjulOs0PvGVm5zgpNfQ", + "name": "projects/ccm-breakit/locations/global/backendAuthenticationConfigs/laurenzk-test2", + "trustConfig": "projects/ccm-breakit/locations/global/trustConfigs/id-2de0d4b7-89cf-476f-893d-4567b3791ca9", + "updateTime": "2025-09-01T12:22:56.430273835Z", + "wellKnownRoots": "PUBLIC_ROOTS" + }, + + "discovery_document_uri": "https://networksecurity.googleapis.com/$discovery/rest", + "discovery_name": "BackendAuthenticationConfig", + "location": "global", + "parent": "//cloudresourcemanager.googleapis.com/projects/307841421122", + "version": "v1" + }, + "updateTime": "2025-09-01T12:22:56.430273835Z" + }, + { + "ancestors": ["projects/307841421122"], + "asset_type": "networksecurity.googleapis.com/BackendAuthenticationConfig", + "name": 
"//networksecurity.googleapis.com/projects/ccm-breakit/locations/global/backendAuthenticationConfigs/laurenzk-test3", + "resource": { + "data": { + "clientCertificate": "projects/ccm-breakit/locations/global/certificates/anatolisaukhin-27101", + "createTime": "2025-09-01T12:23:21.187162159Z", + "etag": "TbNENVDPeneynkqLnTmLvn757xA-GnuI_XTsk2F00y0", + "name": "projects/ccm-breakit/locations/global/backendAuthenticationConfigs/laurenzk-test3", + "updateTime": "2025-09-01T12:23:25.982840761Z", + "wellKnownRoots": "PUBLIC_ROOTS" + }, + "discovery_document_uri": "https://networksecurity.googleapis.com/$discovery/rest", + "discovery_name": "BackendAuthenticationConfig", + "location": "global", + "parent": "//cloudresourcemanager.googleapis.com/projects/307841421122", + "version": "v1" + }, + "updateTime": "2025-09-01T12:23:25.982840761Z" + }, + { + "ancestors": ["projects/307841421122"], + "asset_type": "networksecurity.googleapis.com/BackendAuthenticationConfig", + "name": "//networksecurity.googleapis.com/projects/ccm-breakit/locations/global/backendAuthenticationConfigs/laurenzk-test4", + "resource": { + "data": { + "clientCertificate": "projects/ccm-breakit/locations/global/certificates/anatolisaukhin-27101", + "createTime": "2025-09-01T12:23:59.175425527Z", + "etag": "MtMLKQSPwOIjp25H_ndWUe9zKCcVvtHdoHxv5XvBvHU", + "name": "projects/ccm-breakit/locations/global/backendAuthenticationConfigs/laurenzk-test4", + "trustConfig": "projects/ccm-breakit/locations/global/trustConfigs/id-2de0d4b7-89cf-476f-893d-4567b3791ca9", + "updateTime": "2025-09-01T12:24:09.189436548Z", + "wellKnownRoots": "PUBLIC_ROOTS" + }, + "discovery_document_uri": "https://networksecurity.googleapis.com/$discovery/rest", + "discovery_name": "BackendAuthenticationConfig", + "location": "global", + "parent": "//cloudresourcemanager.googleapis.com/projects/307841421122", + "version": "v1" + }, + "updateTime": "2025-09-01T12:24:09.189436548Z" + }, + { + "ancestors": ["projects/307841421122"], + "asset_type": "networksecurity.googleapis.com/BackendAuthenticationConfig", + "name": "//networksecurity.googleapis.com/projects/ccm-breakit/locations/global/backendAuthenticationConfigs/laurenzk-test5", + "resource": { + "data": { + "createTime": "2025-09-01T12:24:38.165237290Z", + "etag": "q-3pJ_Ae7LorXoNfPMbuxSiwH4JiS4KMlk6Ojm50qbo", + "name": "projects/ccm-breakit/locations/global/backendAuthenticationConfigs/laurenzk-test5", + "trustConfig": "projects/ccm-breakit/locations/global/trustConfigs/id-2de0d4b7-89cf-476f-893d-4567b3791ca9", + "updateTime": "2025-09-01T12:24:42.574599551Z", + "wellKnownRoots": "NONE" + }, + "discovery_document_uri": "https://networksecurity.googleapis.com/$discovery/rest", + "discovery_name": "BackendAuthenticationConfig", + "location": "global", + "parent": "//cloudresourcemanager.googleapis.com/projects/307841421122", + "version": "v1" + }, + "updateTime": "2025-09-01T12:24:42.574599551Z" + }, + { + "ancestors": ["projects/307841421122"], + "asset_type": "networksecurity.googleapis.com/BackendAuthenticationConfig", + "name": "//networksecurity.googleapis.com/projects/ccm-breakit/locations/global/backendAuthenticationConfigs/laurenzk-test7", + "resource": { + "data": { + "clientCertificate": "projects/ccm-breakit/locations/global/certificates/anatolisaukhin-27101", + "createTime": "2025-09-01T12:25:29.338526364Z", + "etag": "5dYcNBll7z2KaHuJd2nxr9Qp4U1JPuPBzJtFkVdRO_k", + "name": "projects/ccm-breakit/locations/global/backendAuthenticationConfigs/laurenzk-test7", + "trustConfig": 
"projects/ccm-breakit/locations/global/trustConfigs/id-2de0d4b7-89cf-476f-893d-4567b3791ca9", + "updateTime": "2025-09-01T12:25:36.419856961Z", + "wellKnownRoots": "NONE" + }, + "discovery_document_uri": "https://networksecurity.googleapis.com/$discovery/rest", + "discovery_name": "BackendAuthenticationConfig", + "location": "global", + "parent": "//cloudresourcemanager.googleapis.com/projects/307841421122", + "version": "v1" + }, + "updateTime": "2025-09-01T12:25:36.419856961Z" + }, + { + "ancestors": ["projects/307841421122"], + "asset_type": "networksecurity.googleapis.com/BackendAuthenticationConfig", + "name": "//networksecurity.googleapis.com/projects/ccm-breakit/locations/global/backendAuthenticationConfigs/laurenzk-test8", + "resource": { + "data": { + "createTime": "2025-09-01T12:28:43.012225935Z", + "description": "My test description", + "etag": "jAgYExhvS1-odwm8v6WzKxXcWqnMgOqyQNxz0LpLzcE", + "labels": { + "foo": "bar" + }, + "name": "projects/ccm-breakit/locations/global/backendAuthenticationConfigs/laurenzk-test8", + "updateTime": "2025-09-01T12:28:48.571512164Z", + "wellKnownRoots": "PUBLIC_ROOTS" + }, + "discovery_document_uri": "https://networksecurity.googleapis.com/$discovery/rest", + "discovery_name": "BackendAuthenticationConfig", + "location": "global", + "parent": "//cloudresourcemanager.googleapis.com/projects/307841421122", + "version": "v1" + }, + "updateTime": "2025-09-01T12:28:48.571512164Z" + }, + + { + "ancestors": ["projects/307841421122"], + "asset_type": "networksecurity.googleapis.com/BackendAuthenticationConfig", + "name": "//networksecurity.googleapis.com/projects/ccm-breakit/locations/europe-west1/backendAuthenticationConfigs/laurenzk-test9", + "resource": { + "data": { + "createTime": "2025-09-01T12:37:43.341940613Z", + "etag": "DQgzWLri0AvaD72f8Xk5SBtT6nEoH4B3krtcsjS7V2A", + "name": "projects/ccm-breakit/locations/europe-west1/backendAuthenticationConfigs/laurenzk-test9", + "updateTime": "2025-09-01T12:37:43.402977101Z", + "wellKnownRoots": "PUBLIC_ROOTS" + }, + "discovery_document_uri": "https://networksecurity.googleapis.com/$discovery/rest", + "discovery_name": "BackendAuthenticationConfig", + "location": "europe-west1", + "parent": "//cloudresourcemanager.googleapis.com/projects/307841421122", + "version": "v1" + }, + "updateTime": "2025-09-01T12:37:43.402977101Z" + }, + { + "ancestors": ["projects/307841421122"], + "asset_type": "networksecurity.googleapis.com/BackendAuthenticationConfig", + "name": "//networksecurity.googleapis.com/projects/ccm-breakit/locations/global/backendAuthenticationConfigs/laurenzk-test10", + "resource": { + "data": { + "createTime": "2025-09-01T12:28:42.936655254Z", + "etag": "7hxmlYo8sLeaML92DAHdBZGpNDehxn6ksGJsTQDXcHE", + "name": "projects/ccm-breakit/locations/global/backendAuthenticationConfigs/laurenzk-test10", + "updateTime": "2025-09-01T12:28:46.761498301Z", + "wellKnownRoots": "PUBLIC_ROOTS" + }, + "discovery_document_uri": "https://networksecurity.googleapis.com/$discovery/rest", + "discovery_name": "BackendAuthenticationConfig", + "location": "global", + "parent": "//cloudresourcemanager.googleapis.com/projects/307841421122", + "version": "v1" + }, + "updateTime": "2025-09-01T12:28:46.761498301Z" + } +] \ No newline at end of file diff --git a/mmv1/third_party/cai2hcl/services/networksecurity/testdata/backend_authentication_config.tf b/mmv1/third_party/cai2hcl/services/networksecurity/testdata/backend_authentication_config.tf new file mode 100644 index 000000000000..c70539f3ae4b --- /dev/null +++ 
b/mmv1/third_party/cai2hcl/services/networksecurity/testdata/backend_authentication_config.tf @@ -0,0 +1,75 @@ +resource "google_network_security_backend_authentication_config" "laurenzk-test1" { + location = "global" + name = "laurenzk-test1" + project = "ccm-breakit" + well_known_roots = "PUBLIC_ROOTS" +} + +resource "google_network_security_backend_authentication_config" "laurenzk-test2" { + location = "global" + name = "laurenzk-test2" + project = "ccm-breakit" + trust_config = "projects/ccm-breakit/locations/global/trustConfigs/id-2de0d4b7-89cf-476f-893d-4567b3791ca9" + well_known_roots = "PUBLIC_ROOTS" +} + +resource "google_network_security_backend_authentication_config" "laurenzk-test3" { + client_certificate = "projects/ccm-breakit/locations/global/certificates/anatolisaukhin-27101" + location = "global" + name = "laurenzk-test3" + project = "ccm-breakit" + well_known_roots = "PUBLIC_ROOTS" +} + +resource "google_network_security_backend_authentication_config" "laurenzk-test4" { + client_certificate = "projects/ccm-breakit/locations/global/certificates/anatolisaukhin-27101" + location = "global" + name = "laurenzk-test4" + project = "ccm-breakit" + trust_config = "projects/ccm-breakit/locations/global/trustConfigs/id-2de0d4b7-89cf-476f-893d-4567b3791ca9" + well_known_roots = "PUBLIC_ROOTS" +} + +resource "google_network_security_backend_authentication_config" "laurenzk-test5" { + location = "global" + name = "laurenzk-test5" + project = "ccm-breakit" + trust_config = "projects/ccm-breakit/locations/global/trustConfigs/id-2de0d4b7-89cf-476f-893d-4567b3791ca9" + well_known_roots = "NONE" +} + +resource "google_network_security_backend_authentication_config" "laurenzk-test7" { + client_certificate = "projects/ccm-breakit/locations/global/certificates/anatolisaukhin-27101" + location = "global" + name = "laurenzk-test7" + project = "ccm-breakit" + trust_config = "projects/ccm-breakit/locations/global/trustConfigs/id-2de0d4b7-89cf-476f-893d-4567b3791ca9" + well_known_roots = "NONE" +} + +resource "google_network_security_backend_authentication_config" "laurenzk-test8" { + description = "My test description" + + labels = { + foo = "bar" + } + + location = "global" + name = "laurenzk-test8" + project = "ccm-breakit" + well_known_roots = "PUBLIC_ROOTS" +} + +resource "google_network_security_backend_authentication_config" "laurenzk-test9" { + location = "europe-west1" + name = "laurenzk-test9" + project = "ccm-breakit" + well_known_roots = "PUBLIC_ROOTS" +} + +resource "google_network_security_backend_authentication_config" "laurenzk-test10" { + location = "global" + name = "laurenzk-test10" + project = "ccm-breakit" + well_known_roots = "PUBLIC_ROOTS" +} diff --git a/mmv1/third_party/cai2hcl/services/networksecurity/utils.go b/mmv1/third_party/cai2hcl/services/networksecurity/utils.go new file mode 100644 index 000000000000..97a4b53ffaed --- /dev/null +++ b/mmv1/third_party/cai2hcl/services/networksecurity/utils.go @@ -0,0 +1,16 @@ +package networksecurity + +import "strings" + +func flattenName(name string) string { + tokens := strings.Split(name, "/") + return tokens[len(tokens)-1] +} + +func flattenProjectName(name string) string { + tokens := strings.Split(name, "/") + if len(tokens) < 2 || tokens[0] != "projects" { + return "" + } + return tokens[1] +} From 5a0cf38c5279b2ca96d7473918cd4ab23beaa79c Mon Sep 17 00:00:00 2001 From: kminsu-google <87674088+kminsu-google@users.noreply.github.com> Date: Thu, 4 Sep 2025 18:01:46 +0000 Subject: [PATCH 138/201] Update launch stage for GPU on 
WorkerPools from ALPHA to BETA (#15058) --- .../terraform/examples/cloudrunv2_worker_pool_gpu.tf.tmpl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_gpu.tf.tmpl b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_gpu.tf.tmpl index fe3c2dab66e7..9a737ed6a9a2 100644 --- a/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_gpu.tf.tmpl +++ b/mmv1/templates/terraform/examples/cloudrunv2_worker_pool_gpu.tf.tmpl @@ -2,7 +2,7 @@ resource "google_cloud_run_v2_worker_pool" "{{$.PrimaryResourceId}}" { name = "{{index $.Vars "cloud_run_worker_pool_name"}}" location = "us-central1" deletion_protection = false - launch_stage = "ALPHA" + launch_stage = "BETA" template { containers { From b0ae3d70ef686bbaf3a963745c03e48a81b20c71 Mon Sep 17 00:00:00 2001 From: Eric Pang Date: Thu, 4 Sep 2025 14:51:46 -0400 Subject: [PATCH 139/201] Update Secure Source Manager deletion policy comment (#14998) --- mmv1/products/securesourcemanager/Instance.yaml | 2 +- mmv1/products/securesourcemanager/Repository.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/mmv1/products/securesourcemanager/Instance.yaml b/mmv1/products/securesourcemanager/Instance.yaml index b8078c6d244d..8a721289143f 100644 --- a/mmv1/products/securesourcemanager/Instance.yaml +++ b/mmv1/products/securesourcemanager/Instance.yaml @@ -188,7 +188,7 @@ virtual_fields: to be abandoned, rather than deleted. Setting `DELETE` deletes the resource and all its contents. Setting `PREVENT` prevents the resource from accidental deletion by erroring out during plan. - Default is `DELETE`. Possible values are: + Default is `PREVENT`. Possible values are: * DELETE * PREVENT * ABANDON diff --git a/mmv1/products/securesourcemanager/Repository.yaml b/mmv1/products/securesourcemanager/Repository.yaml index 49b481949da9..9532488b3a7c 100644 --- a/mmv1/products/securesourcemanager/Repository.yaml +++ b/mmv1/products/securesourcemanager/Repository.yaml @@ -98,7 +98,7 @@ virtual_fields: to be abandoned, rather than deleted. Setting `DELETE` deletes the resource and all its contents. Setting `PREVENT` prevents the resource from accidental deletion by erroring out during plan. - Default is `DELETE`. Possible values are: + Default is `PREVENT`. Possible values are: * DELETE * PREVENT * ABANDON From defbc8d07bc1c0e896471cb81036571b1f1c6870 Mon Sep 17 00:00:00 2001 From: Jiongxin Ye <48576162+JessieYee@users.noreply.github.com> Date: Thu, 4 Sep 2025 12:20:56 -0700 Subject: [PATCH 140/201] add final_backup_config field to google_sql_database_instance (#14891) --- .../resource_sql_database_instance.go.tmpl | 62 +++++++++++++++- ...esource_sql_database_instance_test.go.tmpl | 73 +++++++++++++++++++ .../r/sql_database_instance.html.markdown | 8 ++ 3 files changed, 142 insertions(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl index 6abe0b4d3d4e..70289f8b647d 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance.go.tmpl @@ -172,6 +172,12 @@ func ResourceSqlDatabaseInstance() *schema.Resource { Optional: true, Description: `Used to block Terraform from deleting a SQL Instance. 
Defaults to true.`, }, + "final_backup_description": { + Type: schema.TypeString, + Optional: true, + Description: `The description of final backup if instance enable create final backup during instance deletion. `, + + }, "settings": { Type: schema.TypeList, Optional: true, @@ -792,6 +798,28 @@ API (for read pools, effective_availability_type may differ from availability_ty Optional: true, Description: `When this parameter is set to true, Cloud SQL retains backups of the instance even after the instance is deleted. The ON_DEMAND backup will be retained until customer deletes the backup or the project. The AUTOMATED backup will be retained based on the backups retention setting.`, }, + "final_backup_config": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "enabled": { + Type: schema.TypeBool, + Optional: true, + Description: `When this parameter is set to true, the final backup is enabled for the instance`, + }, + "retention_days": { + Type: schema.TypeInt, + Optional: true, + ValidateFunc: validation.IntBetween(1, 36135), + Description: `The number of days to retain the final backup after the instance deletion. The valid range is between 1 and 365. For instances managed by BackupDR, the valid range is between 1 day and 99 years. The final backup will be purged at (time_of_instance_deletion + retention_days).`, + }, + }, + }, + Description: `Config used to determine the final backup settings for the instance`, + + }, }, }, Description: `The settings to use for the database. The configuration is detailed below.`, @@ -1507,6 +1535,7 @@ func expandSqlDatabaseInstanceSettings(configured []interface{}, databaseVersion EnableGoogleMlIntegration: _settings["enable_google_ml_integration"].(bool), EnableDataplexIntegration: _settings["enable_dataplex_integration"].(bool), RetainBackupsOnDelete: _settings["retain_backups_on_delete"].(bool), + FinalBackupConfig: expandFinalBackupConfig(_settings["final_backup_config"].([]interface{})), UserLabels: tpgresource.ConvertStringMap(_settings["user_labels"].(map[string]interface{})), BackupConfiguration: expandBackupConfiguration(_settings["backup_configuration"].([]interface{})), DatabaseFlags: expandDatabaseFlags(_settings["database_flags"].(*schema.Set).List()), @@ -1749,6 +1778,19 @@ func expandBackupConfiguration(configured []interface{}) *sqladmin.BackupConfigu } } +func expandFinalBackupConfig(configured []interface{}) *sqladmin.FinalBackupConfig{ + if len(configured) == 0 || configured[0] == nil { + return nil + } + + _finalBackupConfig := configured[0].(map[string]interface{}) + return &sqladmin.FinalBackupConfig{ + Enabled: _finalBackupConfig["enabled"].(bool), + RetentionDays: int64(_finalBackupConfig["retention_days"].(int)), + ForceSendFields: []string{"Enabled"}, + } +} + func expandBackupRetentionSettings(configured interface{}) *sqladmin.BackupRetentionSettings { l := configured.([]interface{}) if len(l) == 0 { @@ -2377,9 +2419,13 @@ func resourceSqlDatabaseInstanceDelete(d *schema.ResourceData, meta interface{}) } var op *sqladmin.Operation + finalBackupDescription := "" + if v, ok := d.GetOk("finalBackupDescription"); ok { + finalBackupDescription = v.(string) + } err = transport_tpg.Retry(transport_tpg.RetryOptions{ RetryFunc: func() (rerr error) { - op, rerr = config.NewSqlAdminClient(userAgent).Instances.Delete(project, d.Get("name").(string)).Do() + op, rerr = config.NewSqlAdminClient(userAgent).Instances.Delete(project, 
d.Get("name").(string)).FinalBackupDescription(finalBackupDescription).Do() if rerr != nil { return rerr } @@ -2443,6 +2489,7 @@ func flattenSettings(settings *sqladmin.Settings, iType string, d *schema.Resour "time_zone": settings.TimeZone, "deletion_protection_enabled": settings.DeletionProtectionEnabled, "retain_backups_on_delete": settings.RetainBackupsOnDelete, + "final_backup_config": settings.FinalBackupConfig, } if data["availability_type"] == "" { @@ -2473,6 +2520,10 @@ func flattenSettings(settings *sqladmin.Settings, iType string, d *schema.Resour data["backup_configuration"] = flattenBackupConfiguration(settings.BackupConfiguration) } + if settings.FinalBackupConfig != nil { + data["final_backup_config"] = flattenFinalBackupConfig(settings.FinalBackupConfig) + } + if settings.DatabaseFlags != nil { data["database_flags"] = flattenDatabaseFlags(settings.DatabaseFlags) } @@ -2557,6 +2608,15 @@ func flattenBackupConfiguration(backupConfiguration *sqladmin.BackupConfiguratio return []map[string]interface{}{data} } +func flattenFinalBackupConfig(finalBackupConfig *sqladmin.FinalBackupConfig) []map[string]interface{} { + data := map[string]interface{}{ + "enabled": finalBackupConfig.Enabled, + "retention_days": finalBackupConfig.RetentionDays, + } + + return []map[string]interface{}{data} +} + func flattenBackupRetentionSettings(b *sqladmin.BackupRetentionSettings) []map[string]interface{} { if b == nil { return nil diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl index 5bf0eeeeace0..4e572f28ad0a 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl @@ -1700,6 +1700,38 @@ func TestAccSqlDatabaseInstance_RetainBackupOnDelete(t *testing.T) { }) } +func TestAccSqlDatabaseInstance_FinalBackupConfig(t *testing.T) { + t.Parallel() + + masterID := acctest.RandInt(t) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccSqlDatabaseInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testGoogleSqlDatabaseInstance_FinalBackupConfig(masterID, true, 10), + }, + { + ResourceName: "google_sql_database_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection", "final_backup_description"}, + }, + { + Config: testGoogleSqlDatabaseInstance_FinalBackupConfig(masterID, false, -1), + }, + { + ResourceName: "google_sql_database_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection", "final_backup_description"}, + }, + }, + }) +} + func TestAccSqlDatabaseInstance_PointInTimeRecoveryEnabled(t *testing.T) { t.Parallel() @@ -6432,6 +6464,47 @@ resource "google_sql_database_instance" "instance" { `, masterID, retainBackupOnDelete) } +func testGoogleSqlDatabaseInstance_FinalBackupConfig(masterID int, enabled bool, retention_days int64) string { + retentionSetting := "" + if retention_days >= 0 { + retentionSetting = fmt.Sprintf(`retention_days = %d`, retention_days) + } + + finalBackupConfig := fmt.Sprintf(`final_backup_config { + enabled = %v + %v + }`, enabled, retentionSetting) + + finalBackupDescription := `""` + if enabled { + finalBackupDescription = `"Test 
FinalBackup Description"` + } + + return fmt.Sprintf(` +resource "google_sql_database_instance" "instance" { + name = "tf-test-%d" + region = "us-central1" + database_version = "MYSQL_8_0" + deletion_protection = false + final_backup_description = %v + + settings { + tier = "db-g1-small" + backup_configuration { + enabled = true + start_time = "00:00" + binary_log_enabled = true + transaction_log_retention_days = 2 + backup_retention_settings { + retained_backups = 4 + } + } +%v + } +} +`, masterID, finalBackupDescription, finalBackupConfig) +} + func testAccSqlDatabaseInstance_beforeBackup(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_sql_database_instance" "instance" { diff --git a/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown b/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown index f3c424607478..2e110a6ed166 100644 --- a/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown @@ -327,6 +327,8 @@ includes an up-to-date reference of supported versions. or `terraform destroy` that would delete the instance will fail. When the field is set to false, deleting the instance is allowed. +* `final_backup_description` - (Optional) The description of final backup. Only set this field when `final_backup_config.enabled` is true. + ~> **NOTE:** This flag only protects instances from deletion within Terraform. To protect your instances from accidental deletion across all surfaces (API, gcloud, Cloud Console and Terraform), use the API flag `settings.deletion_protection_enabled`. * `restore_backup_context` - (optional) The context needed to restore the database to a backup run. This field will @@ -395,6 +397,12 @@ The `settings` block supports: * `retain_backups_on_delete` - (Optional) When this parameter is set to true, Cloud SQL retains backups of the instance even after the instance is deleted. The `ON_DEMAND` backup will be retained until customer deletes the backup or the project. The `AUTOMATED` backup will be retained based on the backups retention setting. +The optional `final_backup_config` subblock supports: + +* `enabled` - (Optional) True if enabled final backup. + +* `retention_days` - (Optional) The number of days we retain the final backup after instance deletion. The valid range is between 1 and 365. For instances managed by BackupDR, the valid range is between 1 day and 99 years. + The optional `settings.advanced_machine_features` subblock supports: * `threads_per_core` - (Optional) The number of threads per core. The value of this flag can be 1 or 2. To disable SMT, set this flag to 1. Only available in Cloud SQL for SQL Server instances. See [smt](https://cloud.google.com/sql/docs/sqlserver/create-instance#smt-create-instance) for more details. 
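+
+As an illustrative sketch of the `final_backup_description` argument and the `final_backup_config` subblock described above (the instance name, tier, and retention value are placeholders, not values taken from the provider documentation):
+
+```hcl
+resource "google_sql_database_instance" "example" {
+  name             = "example-instance"
+  region           = "us-central1"
+  database_version = "MYSQL_8_0"
+
+  # Description recorded on the final backup taken when the instance is deleted.
+  final_backup_description = "Final backup taken before instance deletion"
+
+  settings {
+    tier = "db-g1-small"
+
+    final_backup_config {
+      enabled        = true
+      retention_days = 14
+    }
+  }
+}
+```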
From 983a15d9edb637381bead542f940edccd0d13581 Mon Sep 17 00:00:00 2001 From: Shuya Ma <87669292+shuyama1@users.noreply.github.com> Date: Thu, 4 Sep 2025 13:58:35 -0700 Subject: [PATCH 141/201] fix TestAccContainerCluster_withNodeConfigKubeletConfigSettingsInNodePool (#15067) --- .../resource_container_cluster_test.go.tmpl | 65 ++++++++++--------- 1 file changed, 33 insertions(+), 32 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index 962bca482b11..26b33ad2d2e0 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -8537,50 +8537,51 @@ resource "google_container_cluster" "with_node_config_kubelet_config_settings_in node_pool { name = "%s" initial_node_count = 1 - machine_type = "n1-standard-1" + node_config { + machine_type = "n1-standard-1" kubelet_config { max_parallel_image_pulls = 5 - eviction_max_pod_grace_period_seconds = 200 - eviction_soft { - memory_available = "200Mi" - nodefs_available = "10%%" - nodefs_inodes_free = "20%%" - imagefs_available = "30%%" - imagefs_inodes_free = "40%%" - pid_available = "50%%" - } - eviction_soft_grace_period { - memory_available = "1m" - nodefs_available = "2s" - nodefs_inodes_free = "3m" - imagefs_available = "100s" - imagefs_inodes_free = "2m" - pid_available = "3m2.6s" - } - eviction_minimum_reclaim { - memory_available = "10%%" - nodefs_available = "8.5%%" - nodefs_inodes_free = "5.0%%" - imagefs_available = "3%%" - imagefs_inodes_free = "9%%" - pid_available = "5%%" - } + eviction_max_pod_grace_period_seconds = 200 + eviction_soft { + memory_available = "200Mi" + nodefs_available = "10%%" + nodefs_inodes_free = "20%%" + imagefs_available = "30%%" + imagefs_inodes_free = "40%%" + pid_available = "50%%" + } + eviction_soft_grace_period { + memory_available = "1m" + nodefs_available = "2s" + nodefs_inodes_free = "3m" + imagefs_available = "100s" + imagefs_inodes_free = "2m" + pid_available = "3m2.6s" + } + eviction_minimum_reclaim { + memory_available = "10%%" + nodefs_available = "8.5%%" + nodefs_inodes_free = "5.0%%" + imagefs_available = "3%%" + imagefs_inodes_free = "9%%" + pid_available = "5%%" + } } disk_size_gb = 15 disk_type = "pd-ssd" node_group = google_compute_node_group.group.name sole_tenant_config { node_affinity { - key = "compute.googleapis.com/node-group-name" - operator = "IN" - values = [google_compute_node_group.group.name] - } - min_node_cpus = 1 + key = "compute.googleapis.com/node-group-name" + operator = "IN" + values = [google_compute_node_group.group.name] + } + min_node_cpus = 1 } linux_node_config { transparent_hugepage_defrag = "%s" - transparent_hugepage_enabled = "%s" + transparent_hugepage_enabled = "%s" } } } From 44ced68fef0244e834f330ab539685d7ceeb8777 Mon Sep 17 00:00:00 2001 From: jialei-chen <147877028+jialei-chen@users.noreply.github.com> Date: Thu, 4 Sep 2025 14:43:10 -0700 Subject: [PATCH 142/201] Add a new resource google_discovery_engine_acl_config (#14875) --- mmv1/products/discoveryengine/AclConfig.yaml | 71 +++++++++++++++++++ .../discoveryengine_aclconfig_basic.tf.tmpl | 9 +++ ...source_discovery_engine_acl_config_test.go | 66 +++++++++++++++++ 3 files changed, 146 insertions(+) create mode 100644 mmv1/products/discoveryengine/AclConfig.yaml create mode 100644 
mmv1/templates/terraform/examples/discoveryengine_aclconfig_basic.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/discoveryengine/resource_discovery_engine_acl_config_test.go diff --git a/mmv1/products/discoveryengine/AclConfig.yaml b/mmv1/products/discoveryengine/AclConfig.yaml new file mode 100644 index 000000000000..777306bb2009 --- /dev/null +++ b/mmv1/products/discoveryengine/AclConfig.yaml @@ -0,0 +1,71 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: 'AclConfig' +description: | + Access Control Configuration. +references: + api: 'https://cloud.google.com/generative-ai-app-builder/docs/reference/rpc/google.cloud.discoveryengine.v1alpha#aclconfigservice' +base_url: 'projects/{{project}}/locations/{{location}}/aclConfig' +self_link: 'projects/{{project}}/locations/{{location}}/aclConfig' +create_url: 'projects/{{project}}/locations/{{location}}/aclConfig' +create_verb: 'PATCH' +update_url: 'projects/{{project}}/locations/{{location}}/aclConfig' +update_verb: 'PATCH' +exclude_delete: true +import_format: + - 'projects/{{project}}/locations/{{location}}/aclConfig' +timeouts: + insert_minutes: 5 + update_minutes: 5 +examples: + - name: 'discoveryengine_aclconfig_basic' + primary_resource_id: 'basic' +parameters: + - name: 'location' + type: String + description: | + The geographic location where the data store should reside. The value can + only be one of "global", "us" and "eu". + url_param_only: true + required: true + immutable: true +properties: + - name: 'name' + type: String + description: | + The unique full resource name of the aclConfig. Values are of the format + `projects/{project}/locations/{location}/aclConfig`. + output: true + - name: 'idpConfig' + type: NestedObject + description: | + Identity provider config. + properties: + - name: 'idpType' + type: Enum + description: | + Identity provider type. + enum_values: + - 'GSUITE' + - 'THIRD_PARTY' + - name: 'externalIdpConfig' + type: NestedObject + description: | + External third party identity provider config. 
+ properties: + - name: 'workforcePoolName' + type: String + description: | + Workforce pool name: "locations/global/workforcePools/pool_id" diff --git a/mmv1/templates/terraform/examples/discoveryengine_aclconfig_basic.tf.tmpl b/mmv1/templates/terraform/examples/discoveryengine_aclconfig_basic.tf.tmpl new file mode 100644 index 000000000000..47d8b58ebeef --- /dev/null +++ b/mmv1/templates/terraform/examples/discoveryengine_aclconfig_basic.tf.tmpl @@ -0,0 +1,9 @@ +resource "google_discovery_engine_acl_config" "basic" { + location = "global" + idp_config { + idp_type = "THIRD_PARTY" + external_idp_config { + workforce_pool_name = "locations/global/workforcePools/cloud-console-pool-manual" + } + } +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/discoveryengine/resource_discovery_engine_acl_config_test.go b/mmv1/third_party/terraform/services/discoveryengine/resource_discovery_engine_acl_config_test.go new file mode 100644 index 000000000000..c6b385d7a19b --- /dev/null +++ b/mmv1/third_party/terraform/services/discoveryengine/resource_discovery_engine_acl_config_test.go @@ -0,0 +1,66 @@ +package discoveryengine_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccDiscoveryEngineAclConfig_discoveryengineAclconfigBasicExample_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDiscoveryEngineAclConfig_discoveryengineAclconfigBasicExample_basic(context), + }, + { + ResourceName: "google_discovery_engine_acl_config.basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location"}, + }, + { + Config: testAccDiscoveryEngineAclConfig_discoveryengineAclconfigBasicExample_update(context), + }, + { + ResourceName: "google_discovery_engine_acl_config.basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location"}, + }, + }, + }) +} + +func testAccDiscoveryEngineAclConfig_discoveryengineAclconfigBasicExample_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_discovery_engine_acl_config" "basic" { + location = "global" + idp_config { + idp_type = "THIRD_PARTY" + external_idp_config { + workforce_pool_name = "locations/global/workforcePools/cloud-console-pool-manual" + } + } +} +`, context) +} + +func testAccDiscoveryEngineAclConfig_discoveryengineAclconfigBasicExample_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_discovery_engine_acl_config" "basic" { + location = "global" + idp_config { + idp_type = "GSUITE" + } +} +`, context) +} From 70c707704a4a783329a02020090902d3c08a0c48 Mon Sep 17 00:00:00 2001 From: Dixita Date: Thu, 4 Sep 2025 16:50:47 -0700 Subject: [PATCH 143/201] Add support for topology manager and memory manager (#14754) --- .../services/container/node_config.go.tmpl | 121 ++++++++++++++ .../resource_container_cluster_test.go.tmpl | 152 +++++++++++++++++- .../resource_container_node_pool_test.go.tmpl | 31 +++- .../docs/r/container_cluster.html.markdown | 21 ++- 4 files changed, 318 insertions(+), 7 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/node_config.go.tmpl 
b/mmv1/third_party/terraform/services/container/node_config.go.tmpl index 9525e93f6c99..86a64d852c0f 100644 --- a/mmv1/third_party/terraform/services/container/node_config.go.tmpl +++ b/mmv1/third_party/terraform/services/container/node_config.go.tmpl @@ -615,6 +615,48 @@ func schemaNodeConfig() *schema.Schema { ValidateFunc: validation.StringInSlice([]string{"static", "none", ""}, false), Description: `Control the CPU management policy on the node.`, }, + "memory_manager": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: `Configuration for the Memory Manager on the node. The memory manager optimizes memory and hugepages allocation for pods, especially those in the Guaranteed QoS class, by influencing NUMA affinity.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "policy": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `The Memory Manager policy to use. This policy guides how memory and hugepages are allocated and managed for pods on the node, influencing NUMA affinity.`, + ValidateFunc: validation.StringInSlice([]string{"None", "Static", ""}, false), + }, + }, + }, + }, + "topology_manager": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Description: `Configuration for the Topology Manager on the node. The Topology Manager aligns CPU, memory, and device resources on a node to optimize performance, especially for NUMA-aware workloads, by ensuring resource co-location.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "policy": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `The Topology Manager policy to use. This policy dictates how resource alignment is handled on the node.`, + ValidateFunc: validation.StringInSlice([]string{"none", "restricted", "single-numa-node", "best-effort", ""}, false), + + }, + "scope": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `The Topology Manager scope, defining the granularity at which policy decisions are applied. 
Valid values are "container" (resources are aligned per container within a pod) or "pod" (resources are aligned for the entire pod).`, + ValidateFunc: validation.StringInSlice([]string{"container", "pod", ""}, false), + }, + }, + }, + }, "cpu_cfs_quota": { Type: schema.TypeBool, Optional: true, @@ -1667,6 +1709,14 @@ func expandKubeletConfig(v interface{}) *container.NodeKubeletConfig { } kConfig.EvictionSoft = evictionSoft } + + if v, ok := cfg["memory_manager"]; ok { + kConfig.MemoryManager = expandMemoryManager(v) + } + if v, ok := cfg["topology_manager"]; ok { + kConfig.TopologyManager = expandTopologyManager(v) + } + if v, ok := cfg["eviction_soft_grace_period"]; ok && len(v.([]interface{})) > 0 { es := v.([]interface{})[0].(map[string]interface{}) periods := &container.EvictionGracePeriod{} @@ -1716,6 +1766,54 @@ func expandKubeletConfig(v interface{}) *container.NodeKubeletConfig { return kConfig } +func expandTopologyManager(v interface{}) *container.TopologyManager { + if v == nil { + return nil + } + ls := v.([]interface{}) + if len(ls) == 0 { + return nil + } + if ls[0] == nil { + return &container.TopologyManager{} + } + cfg := ls[0].(map[string]interface{}) + + topologyManager := &container.TopologyManager{} + + if v, ok := cfg["policy"]; ok { + topologyManager.Policy = v.(string) + } + + if v, ok := cfg["scope"]; ok { + topologyManager.Scope = v.(string) + } + + return topologyManager +} + +func expandMemoryManager(v interface{}) *container.MemoryManager { + if v == nil { + return nil + } + ls := v.([]interface{}) + if len(ls) == 0 { + return nil + } + if ls[0] == nil { + return &container.MemoryManager{} + } + cfg := ls[0].(map[string]interface{}) + + memoryManager := &container.MemoryManager{} + + if v, ok := cfg["policy"]; ok { + memoryManager.Policy = v.(string) + } + + return memoryManager +} + func expandLinuxNodeConfig(v interface{}) *container.LinuxNodeConfig { if v == nil { return nil @@ -2364,6 +2462,8 @@ func flattenKubeletConfig(c *container.NodeKubeletConfig) []map[string]interface "cpu_cfs_quota": c.CpuCfsQuota, "cpu_cfs_quota_period": c.CpuCfsQuotaPeriod, "cpu_manager_policy": c.CpuManagerPolicy, + "memory_manager": flattenMemoryManager(c.MemoryManager), + "topology_manager": flattenTopologyManager(c.TopologyManager), "insecure_kubelet_readonly_port_enabled": flattenInsecureKubeletReadonlyPortEnabled(c), "pod_pids_limit": c.PodPidsLimit, "container_log_max_size": c.ContainerLogMaxSize, @@ -2384,6 +2484,27 @@ func flattenKubeletConfig(c *container.NodeKubeletConfig) []map[string]interface return result } +func flattenTopologyManager(c *container.TopologyManager) []map[string]interface{} { + result := []map[string]interface{}{} + if c != nil { + result = append(result, map[string]interface{}{ + "policy": c.Policy, + "scope": c.Scope, + }) + } + return result +} + +func flattenMemoryManager(c *container.MemoryManager) []map[string]interface{} { + result := []map[string]interface{}{} + if c != nil { + result = append(result, map[string]interface{}{ + "policy": c.Policy, + }) + } + return result +} + func flattenNodePoolAutoConfigNodeKubeletConfig(c *container.NodeKubeletConfig) []map[string]interface{} { result := []map[string]interface{}{} if c != nil { diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl index 26b33ad2d2e0..b3c5ed3ab4bf 100644 --- 
a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.tmpl @@ -2057,6 +2057,68 @@ func TestAccContainerCluster_withNodeConfigLinuxNodeConfig(t *testing.T) { }) } +func TestAccContainerCluster_withKubeletConfig(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) + networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") + subnetworkName := acctest.BootstrapSubnet(t, "gke-cluster", networkName) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withKubeletConfig(clusterName, networkName, subnetworkName, "none", "None", "best-effort", "pod"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_container_cluster.with_kubelet_config", + "node_config.0.kubelet_config.0.cpu_manager_policy", "none"), + resource.TestCheckResourceAttr( + "google_container_cluster.with_kubelet_config", + "node_config.0.kubelet_config.0.memory_manager.0.policy", "None"), + resource.TestCheckResourceAttr( + "google_container_cluster.with_kubelet_config", + "node_config.0.kubelet_config.0.topology_manager.0.policy", "best-effort"), + resource.TestCheckResourceAttr( + "google_container_cluster.with_kubelet_config", + "node_config.0.kubelet_config.0.topology_manager.0.scope", "pod"), + ), + }, + { + ResourceName: "google_container_cluster.with_kubelet_config", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + { + Config: testAccContainerCluster_withKubeletConfig(clusterName, networkName, subnetworkName, "static", "Static", "single-numa-node", "pod"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_container_cluster.with_kubelet_config", + "node_config.0.kubelet_config.0.cpu_manager_policy", "static"), + resource.TestCheckResourceAttr( + "google_container_cluster.with_kubelet_config", + "node_config.0.kubelet_config.0.memory_manager.0.policy", "Static"), + resource.TestCheckResourceAttr( + "google_container_cluster.with_kubelet_config", + "node_config.0.kubelet_config.0.topology_manager.0.policy", "single-numa-node"), + resource.TestCheckResourceAttr( + "google_container_cluster.with_kubelet_config", + "node_config.0.kubelet_config.0.topology_manager.0.scope", "pod"), + ), + }, + { + ResourceName: "google_container_cluster.with_kubelet_config", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + }, + }) +} + func TestAccContainerCluster_withNodeConfigFastSocket(t *testing.T) { t.Parallel() @@ -8537,7 +8599,6 @@ resource "google_container_cluster" "with_node_config_kubelet_config_settings_in node_pool { name = "%s" initial_node_count = 1 - node_config { machine_type = "n1-standard-1" kubelet_config { @@ -14795,3 +14856,92 @@ resource "google_container_cluster" "primary" { } `, clusterName, networkName, subnetworkName, unauthenticated, authenticated) } + +func TestAccContainerCluster_withKubeletResourceManagerConfig(t *testing.T) { + t.Parallel() + + clusterName := fmt.Sprintf("tf-test-cluster-%s", acctest.RandString(t, 10)) + networkName := acctest.BootstrapSharedTestNetwork(t, "gke-cluster") + subnetworkName 
:= acctest.BootstrapSubnet(t, "gke-cluster", networkName) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccContainerCluster_withKubeletConfig(clusterName, networkName, subnetworkName, "none", "None", "best-effort", "container"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_container_cluster.with_kubelet_config", + "node_config.0.kubelet_config.0.cpu_manager_policy", "none"), + resource.TestCheckResourceAttr( + "google_container_cluster.with_kubelet_config", + "node_config.0.kubelet_config.0.memory_manager.0.policy", "None"), + resource.TestCheckResourceAttr( + "google_container_cluster.with_kubelet_config", + "node_config.0.kubelet_config.0.topology_manager.0.policy", "best-effort"), + resource.TestCheckResourceAttr( + "google_container_cluster.with_kubelet_config", + "node_config.0.kubelet_config.0.topology_manager.0.scope", "container"), + ), + }, + { + ResourceName: "google_container_cluster.with_kubelet_config", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + { + Config: testAccContainerCluster_withKubeletConfig(clusterName, networkName, subnetworkName, "static", "Static", "single-numa-node", "container"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_container_cluster.with_kubelet_config", + "node_config.0.kubelet_config.0.cpu_manager_policy", "static"), + resource.TestCheckResourceAttr( + "google_container_cluster.with_kubelet_config", + "node_config.0.kubelet_config.0.memory_manager.0.policy", "Static"), + resource.TestCheckResourceAttr( + "google_container_cluster.with_kubelet_config", + "node_config.0.kubelet_config.0.topology_manager.0.policy", "single-numa-node"), + resource.TestCheckResourceAttr( + "google_container_cluster.with_kubelet_config", + "node_config.0.kubelet_config.0.topology_manager.0.scope", "container"), + ), + }, + { + ResourceName: "google_container_cluster.with_kubelet_config", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + }, + }) +} + +func testAccContainerCluster_withKubeletConfig(clusterName, networkName, subnetworkName, cpuManagerPolicy, memoryManagerPolicy, topologyManagerPolicy , topologyManagerScope string) string { + return fmt.Sprintf(` +resource "google_container_cluster" "with_kubelet_config" { + name = %q + location = "us-central1-a" + initial_node_count = 1 + network = %q + subnetwork = %q + deletion_protection = false + + node_config { + machine_type = "c4-standard-2" + kubelet_config { + cpu_manager_policy = %q + memory_manager { + policy = %q + } + topology_manager { + policy = %q + scope = %q + } + } + } +} +`, clusterName, networkName, subnetworkName, cpuManagerPolicy, memoryManagerPolicy, topologyManagerPolicy, topologyManagerScope) +} diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl index 571f99164a7a..017a91975d27 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.tmpl @@ -947,7 +947,7 @@ func 
TestAccContainerNodePool_withKubeletConfig(t *testing.T) { CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccContainerNodePool_withKubeletConfig(cluster, np, "static", "100ms", networkName, subnetworkName, "TRUE", "100Mi", "1m", "10m", true, true, 2048, 10, 10, 85), + Config: testAccContainerNodePool_withKubeletConfig(cluster, np, "static", "None", "best-effort", "pod", "100ms", networkName, subnetworkName, "TRUE", "100Mi", "1m", "10m", true, true, 2048, 10, 10, 85), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ acctest.ExpectNoDelete(), @@ -974,6 +974,12 @@ func TestAccContainerNodePool_withKubeletConfig(t *testing.T) { "node_config.0.kubelet_config.0.image_minimum_gc_age", "1m"), resource.TestCheckResourceAttr("google_container_node_pool.with_kubelet_config", "node_config.0.kubelet_config.0.image_maximum_gc_age", "10m"), + resource.TestCheckResourceAttr("google_container_node_pool.with_kubelet_config", + "node_config.0.kubelet_config.0.memory_manager.0.policy", "None"), + resource.TestCheckResourceAttr("google_container_node_pool.with_kubelet_config", + "node_config.0.kubelet_config.0.topology_manager.0.policy", "best-effort"), + resource.TestCheckResourceAttr("google_container_node_pool.with_kubelet_config", + "node_config.0.kubelet_config.0.topology_manager.0.scope", "pod"), // resource.TestCheckResourceAttr("google_container_node_pool.with_kubelet_config", // "node_config.0.kubelet_config.0.allowed_unsafe_sysctls.0", "kernel.shm*"), ), @@ -984,7 +990,7 @@ func TestAccContainerNodePool_withKubeletConfig(t *testing.T) { ImportStateVerify: true, }, { - Config: testAccContainerNodePool_withKubeletConfig(cluster, np, "", "", networkName, subnetworkName, "FALSE", "200Mi", "30s", "", false, true, 1024, 5, 50, 80), + Config: testAccContainerNodePool_withKubeletConfig(cluster, np, "", "Static", "single-numa-node", "container", "", networkName, subnetworkName, "FALSE", "200Mi", "30s", "", false, true, 1024, 5, 50, 80), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ acctest.ExpectNoDelete(), @@ -995,6 +1001,13 @@ func TestAccContainerNodePool_withKubeletConfig(t *testing.T) { "node_config.0.kubelet_config.0.cpu_cfs_quota", "false"), resource.TestCheckResourceAttr("google_container_node_pool.with_kubelet_config", "node_config.0.kubelet_config.0.insecure_kubelet_readonly_port_enabled", "FALSE"), + resource.TestCheckResourceAttr("google_container_node_pool.with_kubelet_config", + "node_config.0.kubelet_config.0.memory_manager.0.policy", "Static"), + resource.TestCheckResourceAttr("google_container_node_pool.with_kubelet_config", + "node_config.0.kubelet_config.0.topology_manager.0.policy", "single-numa-node"), + resource.TestCheckResourceAttr( + "google_container_node_pool.with_kubelet_config", + "node_config.0.kubelet_config.0.topology_manager.0.scope", "container"), ), }, { @@ -1022,7 +1035,7 @@ func TestAccContainerNodePool_withInvalidKubeletCpuManagerPolicy(t *testing.T) { CheckDestroy: testAccCheckContainerClusterDestroyProducer(t), Steps: []resource.TestStep{ { - Config: testAccContainerNodePool_withKubeletConfig(cluster, np, "dontexist", "100us", networkName, subnetworkName,"TRUE", "", "", "", false, true, 1024, 2, 70, 75), + Config: testAccContainerNodePool_withKubeletConfig(cluster, np, "dontexist", "", "", "", "100us", networkName, subnetworkName,"TRUE", "", "", "", false, true, 1024, 2, 70, 75), ExpectError: regexp.MustCompile(`.*to be one of \["?static"? "?none"? 
"?"?\].*`), }, }, @@ -3856,7 +3869,7 @@ resource "google_container_node_pool" "with_sandbox_config" { } {{- end }} -func testAccContainerNodePool_withKubeletConfig(cluster, np, policy, period, networkName, subnetworkName, insecureKubeletReadonlyPortEnabled, containerLogMaxSize, imageMinimumGcAge, imageMaximumGcAge string, quota, singleProcessOomKill bool, podPidsLimit, containerLogMaxFiles, imageGcLowThresholdPercent, imageGcHighThresholdPercent int) string { +func testAccContainerNodePool_withKubeletConfig(cluster, np, policy, memoryManagerPolicy, topologyManagerPolicy, topologyManagerScope, period, networkName, subnetworkName, insecureKubeletReadonlyPortEnabled, containerLogMaxSize, imageMinimumGcAge, imageMaximumGcAge string, quota, singleProcessOomKill bool, podPidsLimit, containerLogMaxFiles, imageGcLowThresholdPercent, imageGcHighThresholdPercent int) string { return fmt.Sprintf(` data "google_container_engine_versions" "central1a" { location = "us-central1-a" @@ -3880,9 +3893,17 @@ resource "google_container_node_pool" "with_kubelet_config" { cluster = google_container_cluster.cluster.name initial_node_count = 1 node_config { + machine_type = "c4-standard-2" image_type = "COS_CONTAINERD" kubelet_config { cpu_manager_policy = %q + memory_manager { + policy = %q + } + topology_manager { + policy = %q + scope = %q + } cpu_cfs_quota = %v cpu_cfs_quota_period = %q insecure_kubelet_readonly_port_enabled = "%s" @@ -3929,7 +3950,7 @@ resource "google_container_node_pool" "with_kubelet_config" { logging_variant = "DEFAULT" } } -`, cluster, networkName, subnetworkName, np, policy, quota, period, insecureKubeletReadonlyPortEnabled, podPidsLimit, containerLogMaxSize, containerLogMaxFiles, imageGcLowThresholdPercent, imageGcHighThresholdPercent, imageMinimumGcAge, imageMaximumGcAge, singleProcessOomKill) +`, cluster, networkName, subnetworkName, np, policy, memoryManagerPolicy, topologyManagerPolicy, topologyManagerScope, quota, period, insecureKubeletReadonlyPortEnabled, podPidsLimit, containerLogMaxSize, containerLogMaxFiles, imageGcLowThresholdPercent, imageGcHighThresholdPercent, imageMinimumGcAge, imageMaximumGcAge, singleProcessOomKill) } func testAccContainerNodePool_withLinuxNodeConfig(cluster, np, tcpMem, networkName, subnetworkName string) string { diff --git a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown index d9f74bc0875e..9308e70ba7a1 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown @@ -1212,7 +1212,7 @@ Structure is [documented below](#nested_node_kubelet_config). The `node_kubelet_config` block supports: * `insecure_kubelet_readonly_port_enabled` - (Optional) Controls whether the kubelet read-only port is enabled. It is strongly recommended to set this to `FALSE`. Possible values: `TRUE`, `FALSE`. - + The `network_tags` block supports: * `tags` (Optional) - List of network tags applied to auto-provisioned node pools. @@ -1493,6 +1493,12 @@ such as `"300ms"`. Valid time units are "ns", "us" (or "µs"), "ms", "s", "m", * `single_process_oom_kill` - (Optional) Defines whether to enable single process OOM killer. If true, the processes in the container will be OOM killed individually instead of as a group. 
+* `topology_manager` - (Optional) These settings control the kubelet's [Topology Manager policy](https://kubernetes.io/docs/tasks/administer-cluster/topology-manager/#topology-manager-policies), which coordinates the set of components responsible for performance optimizations related to CPU isolation, memory, and device locality. Structure is [documented below](#nested_topology_manager). + +* `memory_manager` - (Optional) Configuration for the [memory manager](https://kubernetes.io/docs/tasks/administer-cluster/memory-manager/) on the node. +The memory manager optimizes memory and hugepages allocation for pods, especially +those in the Guaranteed QoS class, by influencing NUMA affinity. Structure is [documented below](#nested_memory_manager). + * `max_parallel_image_pulls` - (Optional) Set the maximum number of image pulls in parallel. The integer must be between 2 and 5, inclusive. * `eviction_max_pod_grace_period_seconds` - (Optional) Defines the maximum allowed grace period (in seconds) to use when terminating pods in response to a soft eviction threshold being met. The integer must be positive and not exceed 300. @@ -1530,6 +1536,19 @@ such as `"300ms"`. Valid time units are "ns", "us" (or "µs"), "ms", "s", "m", * `imagefs_inodes_free` - (Optional) Defines percentage of minimum reclaim for imagefs.inodesFree. The value must be a percentage no more than `"10%"`, such as `"5%"`. * `pid_available` - (Optional) Defines percentage of minimum reclaim for pid.available. The value must be a percentage no more than `"10%"`, such as `"5%"`. +The `topology_manager` block supports: + +* `policy` - (Optional) The Topology Manager policy controls resource alignment on the node and can be set to one of the following: none (default), best-effort, restricted, or single-numa-node. If unset (or set to the empty string `""`), the API will treat the field as if set to "none". +* `scope` - (Optional) The Topology Manager scope, defining the granularity at which + policy decisions are applied. Valid values are "container" (resources are aligned + per container within a pod which is set by default) or "pod" (resources are aligned for the entire pod). If unset (or set to the empty string `""`), the API will treat the field as if set to "container". + +The `memory_manager` block supports: + +* `policy` - (Optional) The [Memory + Manager](https://kubernetes.io/docs/tasks/administer-cluster/memory-manager/) + policy can be set to None (default) or Static. This policy dictates how memory alignment is handled on the node. If unset (or set to the empty string `""`), the API will treat the field as if set to "None". 
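A combined configuration may make it easier to see how these blocks fit together. The sketch below mirrors the acceptance test added in this change (`testAccContainerCluster_withKubeletConfig`); the cluster name, location, machine type, and the specific policy values are illustrative assumptions rather than requirements.

```hcl
# Sketch only: names, location, and machine type are placeholders.
resource "google_container_cluster" "with_kubelet_config" {
  name                = "numa-aware-cluster"
  location            = "us-central1-a"
  initial_node_count  = 1
  deletion_protection = false

  node_config {
    machine_type = "c4-standard-2"

    kubelet_config {
      cpu_manager_policy = "static"

      memory_manager {
        policy = "Static"
      }

      topology_manager {
        policy = "single-numa-node"
        scope  = "pod"
      }
    }
  }
}
```

With this combination, the kubelet pins exclusive CPUs for Guaranteed pods, the Static memory policy reserves memory with NUMA affinity for those pods, and the `single-numa-node` topology policy only admits pods whose resources can all be placed on one NUMA node.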
+ The `linux_node_config` block supports: * `sysctls` - (Optional) The Linux kernel parameters to be applied to the nodes From d74edc196d16052716bebf3038fbe191c17a1d7b Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Fri, 5 Sep 2025 19:01:27 +0200 Subject: [PATCH 144/201] publicca: use `RawURLEncoding` instead of `URLEncoding` for unpadded base64 encoding (#15010) --- .../terraform/custom_flatten/publicca_b64url_mac_key.go.tmpl | 4 ++-- mmv1/third_party/terraform/verify/validation.go | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/mmv1/templates/terraform/custom_flatten/publicca_b64url_mac_key.go.tmpl b/mmv1/templates/terraform/custom_flatten/publicca_b64url_mac_key.go.tmpl index 02ed64f6491a..9ba46218de03 100644 --- a/mmv1/templates/terraform/custom_flatten/publicca_b64url_mac_key.go.tmpl +++ b/mmv1/templates/terraform/custom_flatten/publicca_b64url_mac_key.go.tmpl @@ -15,10 +15,10 @@ func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.Reso return "" } - dec, err := base64.StdEncoding.DecodeString(v.(string)) + dec, err := base64.RawStdEncoding.DecodeString(v.(string)) if err != nil { return "" } - return base64.URLEncoding.EncodeToString(dec) + return base64.RawURLEncoding.EncodeToString(dec) } diff --git a/mmv1/third_party/terraform/verify/validation.go b/mmv1/third_party/terraform/verify/validation.go index 5f4393137f69..004a8bc0caa6 100644 --- a/mmv1/third_party/terraform/verify/validation.go +++ b/mmv1/third_party/terraform/verify/validation.go @@ -290,7 +290,7 @@ func ValidateBase64String(i interface{}, val string) ([]string, []error) { } func ValidateBase64URLString(i interface{}, val string) ([]string, []error) { - _, err := base64.URLEncoding.DecodeString(i.(string)) + _, err := base64.RawURLEncoding.DecodeString(i.(string)) if err != nil { return nil, []error{fmt.Errorf("could not decode %q as a valid base64URL value.", val)} } From ca38991aa9a6cff767e5f256cb5aee2ffc9c9513 Mon Sep 17 00:00:00 2001 From: wj-chen Date: Fri, 5 Sep 2025 10:05:50 -0700 Subject: [PATCH 145/201] Update google_bigquery_table schema change detection to take into account presence of row access policy (#15028) --- .../bigquery/resource_bigquery_table.go.tmpl | 62 ++++++- ...ource_bigquery_table_internal_test.go.tmpl | 21 ++- .../bigquery/resource_bigquery_table_test.go | 168 +++++++++++++++++- 3 files changed, 232 insertions(+), 19 deletions(-) diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl index e6abc6db1021..9427ca19fab8 100644 --- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl +++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table.go.tmpl @@ -271,9 +271,38 @@ func bigQueryTableNormalizePolicyTags(val interface{}) interface{} { return val } +func bigQueryTableHasRowAccessPolicy(config *transport_tpg.Config, project, datasetId, tableId string) (bool, error) { + url := fmt.Sprintf("https://bigquery.googleapis.com/bigquery/v2/projects/%s/datasets/%s/tables/%s/rowAccessPolicies", project, datasetId, tableId) + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + RawURL: url, + UserAgent: config.UserAgent, + }) + + if err != nil { + return false, err + } + + if policies, ok := res["rowAccessPolicies"]; ok { + if policiesList, ok := policies.([]interface{}); ok && len(policiesList) > 0 { + log.Printf("[INFO] Table has row access 
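From the practitioner's side, the effect of this commit may be easier to see as a configuration sketch: once a table carries at least one row access policy (the provider now consults the table's `rowAccessPolicies` collection before deciding), a schema edit that drops a column forces the table to be recreated rather than updated in place. All names below are placeholders, not part of this patch, and the row access policy itself is assumed to already exist on the table.

```hcl
# Sketch only: with a row access policy present on this table, removing the
# "city" column from the schema now triggers recreation instead of an
# in-place schema update.
resource "google_bigquery_dataset" "example" {
  dataset_id = "example_dataset"
}

resource "google_bigquery_table" "example" {
  deletion_protection = false
  dataset_id          = google_bigquery_dataset.example.dataset_id
  table_id            = "example_table"

  schema = jsonencode([
    { name = "id",   type = "INTEGER", mode = "NULLABLE" },
    { name = "city", type = "STRING",  mode = "NULLABLE" },
  ])
}
```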
policies, schema change detected dropped columns and will force the table to recreate.") + return true, nil + } + } + + return false, nil +} + +func bigQueryTableHasRowAccessPolicyFunc(config *transport_tpg.Config, project, datasetId, tableId string) func() (bool, error) { + return func() (bool, error) { + return bigQueryTableHasRowAccessPolicy(config, project, datasetId, tableId) + } +} + // Compares two existing schema implementations and decides if // it is changeable.. pairs with a force new on not changeable -func resourceBigQueryTableSchemaIsChangeable(old, new interface{}, isExternalTable bool, topLevel bool) (bool, error) { +func resourceBigQueryTableSchemaIsChangeable(old, new interface{}, isExternalTable bool, topLevel bool, hasRowAccessPolicyFunc func() (bool, error)) (bool, error) { switch old.(type) { case []interface{}: arrayOld := old.([]interface{}) @@ -314,7 +343,7 @@ func resourceBigQueryTableSchemaIsChangeable(old, new interface{}, isExternalTab continue } - isChangable, err := resourceBigQueryTableSchemaIsChangeable(mapOld[key], mapNew[key], isExternalTable, false) + isChangable, err := resourceBigQueryTableSchemaIsChangeable(mapOld[key], mapNew[key], isExternalTable, false, hasRowAccessPolicyFunc) if err != nil || !isChangable { return false, err } else if isChangable && topLevel { @@ -325,7 +354,18 @@ func resourceBigQueryTableSchemaIsChangeable(old, new interface{}, isExternalTab // in-place column dropping alongside column additions is not allowed // as of now because user intention can be ambiguous (e.g. column renaming) newColumns := len(arrayNew) - sameNameColumns - return (droppedColumns == 0) || (newColumns == 0), nil + isSchemaChangeable := (droppedColumns == 0) || (newColumns == 0) + if isSchemaChangeable && topLevel { + hasRowAccessPolicy, err := hasRowAccessPolicyFunc() + if err != nil { + // Default behavior when we can't get row access policies data. 
+ return isSchemaChangeable, nil + } + if hasRowAccessPolicy { + isSchemaChangeable = false + } + } + return isSchemaChangeable, nil case map[string]interface{}: objectOld := old.(map[string]interface{}) objectNew, ok := new.(map[string]interface{}) @@ -371,7 +411,7 @@ func resourceBigQueryTableSchemaIsChangeable(old, new interface{}, isExternalTab return false, nil } case "fields": - return resourceBigQueryTableSchemaIsChangeable(valOld, valNew, isExternalTable, false) + return resourceBigQueryTableSchemaIsChangeable(valOld, valNew, isExternalTable, false, hasRowAccessPolicyFunc) // other parameters: description, policyTags and // policyTags.names[] are changeable @@ -388,7 +428,7 @@ func resourceBigQueryTableSchemaIsChangeable(old, new interface{}, isExternalTab } } -func resourceBigQueryTableSchemaCustomizeDiffFunc(d tpgresource.TerraformResourceDiff) error { +func resourceBigQueryTableSchemaCustomizeDiffFunc(d tpgresource.TerraformResourceDiff, hasRowAccessPolicyFunc func() (bool, error)) error { if _, hasSchema := d.GetOk("schema"); hasSchema { oldSchema, newSchema := d.GetChange("schema") oldSchemaText := oldSchema.(string) @@ -411,7 +451,7 @@ func resourceBigQueryTableSchemaCustomizeDiffFunc(d tpgresource.TerraformResourc log.Printf("[DEBUG] unable to unmarshal json customized diff - %v", err) } _, isExternalTable := d.GetOk("external_data_configuration") - isChangeable, err := resourceBigQueryTableSchemaIsChangeable(old, new, isExternalTable, true) + isChangeable, err := resourceBigQueryTableSchemaIsChangeable(old, new, isExternalTable, true, hasRowAccessPolicyFunc) if err != nil { return err } @@ -426,7 +466,15 @@ func resourceBigQueryTableSchemaCustomizeDiffFunc(d tpgresource.TerraformResourc } func resourceBigQueryTableSchemaCustomizeDiff(_ context.Context, d *schema.ResourceDiff, meta interface{}) error { - return resourceBigQueryTableSchemaCustomizeDiffFunc(d) + config := meta.(*transport_tpg.Config) + project, err := tpgresource.GetProjectFromDiff(d, config) + if err != nil { + return err + } + datasetId := d.Get("dataset_id").(string) + tableId := d.Get("table_id").(string) + hasRowAccessPolicyFunc := bigQueryTableHasRowAccessPolicyFunc(config, project, datasetId, tableId) + return resourceBigQueryTableSchemaCustomizeDiffFunc(d, hasRowAccessPolicyFunc) } func validateBigQueryTableSchema(v interface{}, k string) (warnings []string, errs []error) { diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_internal_test.go.tmpl b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_internal_test.go.tmpl index 0771224f6cfc..fe403abc987f 100644 --- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_internal_test.go.tmpl +++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_internal_test.go.tmpl @@ -411,13 +411,6 @@ func (testcase *testUnitBigQueryDataTableJSONChangeableTestCase) check(t *testin if err := json.Unmarshal([]byte(testcase.jsonNew), &new); err != nil { t.Fatalf("unable to unmarshal json - %v", err) } - changeable, err := resourceBigQueryTableSchemaIsChangeable(old, new, testcase.isExternalTable, true) - if err != nil { - t.Errorf("%s failed unexpectedly: %s", testcase.name, err) - } - if changeable != testcase.changeable { - t.Errorf("expected changeable result of %v but got %v for testcase %s", testcase.changeable, changeable, testcase.name) - } d := &tpgresource.ResourceDiffMock{ Before: map[string]interface{}{}, @@ -432,7 +425,19 @@ func (testcase 
*testUnitBigQueryDataTableJSONChangeableTestCase) check(t *testin d.After["external_data_configuration"] = "" } - err = resourceBigQueryTableSchemaCustomizeDiffFunc(d) + hasRowAccessPolicyFunc := func() (bool, error) { + return false, nil + } + + changeable, err := resourceBigQueryTableSchemaIsChangeable(old, new, testcase.isExternalTable, true, hasRowAccessPolicyFunc) + if err != nil { + t.Errorf("%s failed unexpectedly: %s", testcase.name, err) + } + if changeable != testcase.changeable { + t.Errorf("expected changeable result of %v but got %v for testcase %s", testcase.changeable, changeable, testcase.name) + } + + err = resourceBigQueryTableSchemaCustomizeDiffFunc(d, hasRowAccessPolicyFunc) if err != nil { t.Errorf("error on testcase %s - %v", testcase.name, err) } diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go index 4111b60d77e9..d2d8b540bf29 100644 --- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go +++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_table_test.go @@ -2,14 +2,13 @@ package bigquery_test import ( "fmt" - "regexp" - "strings" - "testing" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/terraform" "github.com/hashicorp/terraform-provider-google/google/acctest" "github.com/hashicorp/terraform-provider-google/google/envvar" + "regexp" + "strings" + "testing" ) func TestAccBigQueryTable_Basic(t *testing.T) { @@ -1803,7 +1802,74 @@ func TestAccBigQueryTable_invalidSchemas(t *testing.T) { }) } +func TestAccBigQueryTable_schemaColumnDropWithRowAccessPolicy(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project_id": envvar.GetTestProjectFromEnv(), + "dataset_id": fmt.Sprintf("tf_test_dataset_%s", acctest.RandString(t, 10)), + "table_id": fmt.Sprintf("tf_test_table_%s", acctest.RandString(t, 10)), + "policy_id": fmt.Sprintf("tf_test_policy_%s", acctest.RandString(t, 10)), + } + + var tableCreationTime string + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccBigQueryTableWithSchemaAndRowAccessPolicy(context), + Check: resource.ComposeTestCheckFunc( + // Store the creationTime of the original table + func(s *terraform.State) error { + rs, ok := s.RootModule().Resources["google_bigquery_table.test"] + if !ok { + return fmt.Errorf("Not found: google_bigquery_table.test") + } + tableCreationTime = rs.Primary.Attributes["creation_time"] + return nil + }, + ), + }, + { + ResourceName: "google_bigquery_table.test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + }, + { + Config: testAccBigQueryTableWithSchemaColumnDroppedAndRowAccessPolicy(context), // Change column to trigger ForceNew + Check: resource.ComposeTestCheckFunc( + // Verify that creationTime has changed, implying that the table was recreated. 
+ func(s *terraform.State) error { + rs, ok := s.RootModule().Resources["google_bigquery_table.test"] + if !ok { + return fmt.Errorf("Not found: google_bigquery_table.test") + } + newTimeCreated := rs.Primary.Attributes["creation_time"] + if newTimeCreated == tableCreationTime { + return fmt.Errorf("creationTime should have changed on recreation, but it's still %s", newTimeCreated) + } + return nil + }, + ), + ExpectNonEmptyPlan: true, + }, + { + ResourceName: "google_bigquery_table.test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection", "ignore_auto_generated_schema", "generated_schema_columns"}, + }, + }, + }) +} + func TestAccBigQueryTable_schemaWithRequiredFieldAndView(t *testing.T) { + t.Parallel() + datasetID := fmt.Sprintf("tf_test_%s", acctest.RandString(t, 10)) tableID := fmt.Sprintf("tf_test_%s", acctest.RandString(t, 10)) @@ -4672,6 +4738,100 @@ resource "google_bigquery_table" "test" { `, datasetID, tableID, schema) } +func testAccBigQueryTableWithSchemaAndRowAccessPolicy(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_bigquery_dataset" "test" { + dataset_id = "%{dataset_id}" +} + +resource "google_bigquery_table" "test" { + deletion_protection = false + dataset_id = google_bigquery_dataset.test.dataset_id + table_id = "%{table_id}" + + schema = < Date: Fri, 5 Sep 2025 11:18:26 -0700 Subject: [PATCH 146/201] tgc-revival: add google_clouddeploy_custom_target_type (#15071) --- mmv1/products/clouddeploy/CustomTargetType.yaml | 1 + .../tgc_next/cai2hcl/flatten_property_method_tgc.go.tmpl | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/mmv1/products/clouddeploy/CustomTargetType.yaml b/mmv1/products/clouddeploy/CustomTargetType.yaml index 1034fcf7fe1f..0cecb472e268 100644 --- a/mmv1/products/clouddeploy/CustomTargetType.yaml +++ b/mmv1/products/clouddeploy/CustomTargetType.yaml @@ -50,6 +50,7 @@ iam_policy: - 'projects/{{project}}/locations/{{location}}/customTargetTypes/{{name}}' - '{{name}}' custom_code: +include_in_tgc_next_DO_NOT_USE: true examples: - name: 'clouddeploy_custom_target_type_basic' primary_resource_id: 'custom-target-type' diff --git a/mmv1/templates/tgc_next/cai2hcl/flatten_property_method_tgc.go.tmpl b/mmv1/templates/tgc_next/cai2hcl/flatten_property_method_tgc.go.tmpl index 4cb7d48d8b92..8b7974f179cc 100644 --- a/mmv1/templates/tgc_next/cai2hcl/flatten_property_method_tgc.go.tmpl +++ b/mmv1/templates/tgc_next/cai2hcl/flatten_property_method_tgc.go.tmpl @@ -18,6 +18,10 @@ {{- else if $.IsA "KeyValueLabels" }} func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { return tgcresource.RemoveTerraformAttributionLabel(v) +} + {{- else if $.IsA "KeyValueAnnotations" }} +func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v } {{- else if or (and (eq $.Name "zone") $.ResourceMetadata.HasZone) (and (eq $.Name "region") $.ResourceMetadata.HasRegion) -}} func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { From f261d7106cba0de4d4d8c0e98ce09be18b3362e5 Mon Sep 17 00:00:00 2001 From: g-dreva Date: Fri, 5 Sep 2025 22:08:15 +0000 Subject: [PATCH 147/201] Saasruntime autogen release (#15061) --- mmv1/products/saasservicemgmt/Release.yaml | 221 ++++++++++++++++++ .../saas_runtime_release_basic.tf.tmpl | 41 ++++ 
...resource_saas_runtime_release_test.go.tmpl | 160 +++++++++++++ 3 files changed, 422 insertions(+) create mode 100644 mmv1/products/saasservicemgmt/Release.yaml create mode 100644 mmv1/templates/terraform/examples/saas_runtime_release_basic.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/saasruntime/resource_saas_runtime_release_test.go.tmpl diff --git a/mmv1/products/saasservicemgmt/Release.yaml b/mmv1/products/saasservicemgmt/Release.yaml new file mode 100644 index 000000000000..1baf50398bd8 --- /dev/null +++ b/mmv1/products/saasservicemgmt/Release.yaml @@ -0,0 +1,221 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: Release +description: A version to be propagated and deployed to Units. It points to a specific version of a Blueprint that can be applied to Units, for example, via a Rollout. +base_url: projects/{{project}}/locations/{{location}}/releases +update_mask: true +self_link: projects/{{project}}/locations/{{location}}/releases/{{release_id}} +create_url: projects/{{project}}/locations/{{location}}/releases?releaseId={{release_id}} +update_verb: PATCH +id_format: projects/{{project}}/locations/{{location}}/releases/{{release_id}} +import_format: + - projects/{{project}}/locations/{{location}}/releases/{{release_id}} +min_version: beta +examples: + - name: saas_runtime_release_basic + primary_resource_id: "example" + min_version: "beta" + vars: + saas_name: example-saas + unitkind_name: example-unitkind + release_name: example-release + previous_release_name: previous-release + test_env_vars: + project: "PROJECT_NAME" + bootstrap_iam: + - member: "serviceAccount:service-{project_number}@gcp-sa-saasservicemgmt.iam.gserviceaccount.com" + role: "roles/saasservicemgmt.serviceAgent" +autogen_async: false +autogen_status: UmVsZWFzZQ== +parameters: + - name: location + type: String + description: Resource ID segment making up resource `name`. It identifies the resource within its parent collection as described in https://google.aip.dev/122. + immutable: true + url_param_only: true + required: true + - name: releaseId + type: String + description: The ID value for the new release. + immutable: true + url_param_only: true + required: true +properties: + - name: annotations + type: KeyValueAnnotations + description: |- + Annotations is an unstructured key-value map stored with a resource that + may be set by external tools to store and retrieve arbitrary metadata. + They are not queryable and should be preserved when modifying objects. + + More info: https://kubernetes.io/docs/user-guide/annotations + - name: blueprint + type: NestedObject + description: |- + Blueprints are OCI Images that contain all of the artifacts needed to + provision a unit. Metadata such as, type of the engine used to actuate the + blueprint (e.g. terraform, helm etc) and version will come from the image + manifest. If the hostname is omitted, it will be assumed to be the regional + path to Artifact Registry (eg. us-east1-docker.pkg.dev). 
+ properties: + - name: engine + type: String + description: Type of the engine used to actuate the blueprint. e.g. terraform, helm etc. + output: true + - name: package + type: String + description: |- + URI to a blueprint used by the Unit (required unless unitKind or release is + set). + immutable: true + - name: version + type: String + description: Version metadata if present on the blueprint. + output: true + - name: createTime + type: String + description: The timestamp when the resource was created. + output: true + - name: etag + type: String + description: |- + An opaque value that uniquely identifies a version or + generation of a resource. It can be used to confirm that the client + and server agree on the ordering of a resource being written. + output: true + - name: inputVariableDefaults + type: Array + description: Mapping of input variables to default values. Maximum 100 + item_type: + type: NestedObject + properties: + - name: type + type: Enum + description: "Name of a supported variable type. Supported types are STRING, INT, BOOL." + enum_values: + - "TYPE_UNSPECIFIED" + - "STRING" + - "INT" + - "BOOL" + immutable: true + - name: value + type: String + description: String encoded value for the variable. + - name: variable + type: String + description: Name of the variable from actuation configs. + immutable: true + required: true + - name: inputVariables + type: Array + description: |- + List of input variables declared on the blueprint and can be present with + their values on the unit spec + output: true + item_type: + type: NestedObject + properties: + - name: type + type: Enum + description: "Name of a supported variable type. Supported types are STRING, INT, BOOL." + enum_values: + - "TYPE_UNSPECIFIED" + - "STRING" + - "INT" + - "BOOL" + immutable: true + - name: value + type: String + description: String encoded value for the variable. + - name: variable + type: String + description: Name of the variable from actuation configs. + immutable: true + required: true + - name: labels + type: KeyValueLabels + description: |- + The labels on the resource, which can be used for categorization. + similar to Kubernetes resource labels. + - name: name + type: String + description: |- + Identifier. The resource name (full URI of the resource) following the standard naming + scheme: + + "projects/{project}/locations/{location}/releases/{release}" + output: true + - name: outputVariables + type: Array + description: |- + List of output variables declared on the blueprint and can be present with + their values on the unit status + output: true + item_type: + type: NestedObject + properties: + - name: type + type: Enum + description: "Name of a supported variable type. Supported types are STRING, INT, BOOL." + enum_values: + - "TYPE_UNSPECIFIED" + - "STRING" + - "INT" + - "BOOL" + immutable: true + - name: value + type: String + description: String encoded value for the variable. + - name: variable + type: String + description: Name of the variable from actuation configs. + immutable: true + required: true + - name: releaseRequirements + type: NestedObject + description: Set of requirements to be fulfilled on the Unit when using this Release. + properties: + - name: upgradeableFromReleases + type: Array + description: |- + A list of releases from which a unit can be upgraded to this one + (optional). If left empty no constraints will be applied. When provided, + unit upgrade requests to this release will check and enforce this + constraint. 
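The `upgradeableFromReleases` constraint defined here is exercised by the update acceptance test added later in this commit; a trimmed HCL sketch of that pattern follows. The resource names reuse the ones from the commit's example and test fixtures, while the blueprint digest is a placeholder.

```hcl
# Sketch only: restrict upgrades into this release to units currently on the
# previous release. The package digest below is a placeholder.
resource "google_saas_runtime_release" "example" {
  provider   = google-beta
  location   = "global"
  release_id = "example-release"
  unit_kind  = google_saas_runtime_unit_kind.example_unitkind.id

  blueprint {
    package = "us-central1-docker.pkg.dev/my-project/my-repo/my-blueprint@sha256:<digest>"
  }

  release_requirements {
    upgradeable_from_releases = [google_saas_runtime_release.example_previous.id]
  }
}
```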
+ item_type: + type: String + - name: uid + type: String + description: |- + The unique identifier of the resource. UID is unique in the time + and space for this resource within the scope of the service. It is + typically generated by the server on successful creation of a resource + and must not be changed. UID is used to uniquely identify resources + with resource name reuses. This should be a UUID4. + output: true + - name: unitKind + type: String + description: |- + Reference to the UnitKind this Release corresponds to (required and + immutable once created). + immutable: true + required: true + - name: updateTime + type: String + description: |- + The timestamp when the resource was last updated. Any + change to the resource made by users must refresh this value. + Changes to a resource made by the service should refresh this value. + output: true diff --git a/mmv1/templates/terraform/examples/saas_runtime_release_basic.tf.tmpl b/mmv1/templates/terraform/examples/saas_runtime_release_basic.tf.tmpl new file mode 100644 index 000000000000..deedebb1fb6d --- /dev/null +++ b/mmv1/templates/terraform/examples/saas_runtime_release_basic.tf.tmpl @@ -0,0 +1,41 @@ +resource "google_saas_runtime_saas" "example_saas" { + provider = google-beta + saas_id = "{{index $.Vars "saas_name"}}" + location = "global" + + locations { + name = "us-central1" + } +} + +resource "google_saas_runtime_unit_kind" "example_unitkind" { + provider = google-beta + location = "global" + unit_kind_id = "{{index $.Vars "unitkind_name"}}" + saas = google_saas_runtime_saas.example_saas.id +} + +resource "google_saas_runtime_release" "example_previous" { + provider = google-beta + location = "global" + release_id = "{{index $.Vars "previous_release_name"}}" + unit_kind = google_saas_runtime_unit_kind.example_unitkind.id + blueprint { + package = "us-central1-docker.pkg.dev/ci-test-project-188019/test-repo/tf-test-easysaas-alpha-image@sha256:7992fdbaeaf998ecd31a7f937bb26e38a781ecf49b24857a6176c1e9bfc299ee" + } +} + +resource "google_saas_runtime_release" "{{$.PrimaryResourceId}}" { + provider = google-beta + location = "global" + release_id = "{{index $.Vars "release_name"}}" + unit_kind = google_saas_runtime_unit_kind.example_unitkind.id + blueprint { + package = "us-central1-docker.pkg.dev/ci-test-project-188019/test-repo/tf-test-easysaas-beta-image@sha256:7bba0fa85b2956df7768f7b32e715b6fe11f4f4193e2a70a35bf3f286a6cdf9e" + } + input_variable_defaults { + variable = "name" + value = "test" + type = "STRING" + } +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/saasruntime/resource_saas_runtime_release_test.go.tmpl b/mmv1/third_party/terraform/services/saasruntime/resource_saas_runtime_release_test.go.tmpl new file mode 100644 index 000000000000..d6f37ef0a683 --- /dev/null +++ b/mmv1/third_party/terraform/services/saasruntime/resource_saas_runtime_release_test.go.tmpl @@ -0,0 +1,160 @@ +package saasruntime_test + +{{ if ne $.TargetVersionName `ga` -}} + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccSaasRuntimeRelease_update(t *testing.T) { + t.Parallel() + acctest.BootstrapIamMembers(t, []acctest.IamMember{ + { + Member: "serviceAccount:service-{project_number}@gcp-sa-saasservicemgmt.iam.gserviceaccount.com", + Role: 
"roles/saasservicemgmt.serviceAgent", + }, + }) + + context := map[string]interface{}{ + "project": envvar.GetTestProjectFromEnv(), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccSaasRuntimeRelease_basic(context), + }, + { + ResourceName: "google_saas_runtime_release.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "release_id", "terraform_labels"}, + }, + { + Config: testAccSaasRuntimeRelease_update(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_saas_runtime_release.example", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_saas_runtime_release.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations", "labels", "location", "release_id", "terraform_labels"}, + }, + }, + }) +} + +func testAccSaasRuntimeRelease_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_saas_runtime_saas" "example_saas" { + provider = google-beta + saas_id = "tf-test-example-saas%{random_suffix}" + location = "global" + + locations { + name = "us-central1" + } +} + +resource "google_saas_runtime_unit_kind" "example_unitkind" { + provider = google-beta + location = "global" + unit_kind_id = "tf-test-example-unitkind%{random_suffix}" + saas = google_saas_runtime_saas.example_saas.id +} + +resource "google_saas_runtime_release" "example_previous" { + provider = google-beta + location = "global" + release_id = "tf-test-previous-release%{random_suffix}" + unit_kind = google_saas_runtime_unit_kind.example_unitkind.id + blueprint { + package = "us-central1-docker.pkg.dev/ci-test-project-188019/test-repo/tf-test-easysaas-alpha-image@sha256:7992fdbaeaf998ecd31a7f937bb26e38a781ecf49b24857a6176c1e9bfc299ee" + } +} + +resource "google_saas_runtime_release" "example" { + provider = google-beta + location = "global" + release_id = "tf-test-example-release%{random_suffix}" + unit_kind = google_saas_runtime_unit_kind.example_unitkind.id + blueprint { + package = "us-central1-docker.pkg.dev/ci-test-project-188019/test-repo/tf-test-easysaas-beta-image@sha256:7bba0fa85b2956df7768f7b32e715b6fe11f4f4193e2a70a35bf3f286a6cdf9e" + } + input_variable_defaults { + variable = "name" + value = "test" + type = "STRING" + } +} +`, context) +} + +func testAccSaasRuntimeRelease_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_saas_runtime_saas" "example_saas" { + provider = google-beta + saas_id = "tf-test-example-saas%{random_suffix}" + location = "global" + + locations { + name = "us-central1" + } +} + +resource "google_saas_runtime_unit_kind" "example_unitkind" { + provider = google-beta + location = "global" + unit_kind_id = "tf-test-example-unitkind%{random_suffix}" + saas = google_saas_runtime_saas.example_saas.id +} + +resource "google_saas_runtime_release" "example_previous" { + provider = google-beta + location = "global" + release_id = "tf-test-previous-release%{random_suffix}" + unit_kind = google_saas_runtime_unit_kind.example_unitkind.id + blueprint { + package = 
"us-central1-docker.pkg.dev/ci-test-project-188019/test-repo/tf-test-easysaas-alpha-image@sha256:7992fdbaeaf998ecd31a7f937bb26e38a781ecf49b24857a6176c1e9bfc299ee" + } +} + +resource "google_saas_runtime_release" "example" { + provider = google-beta + location = "global" + release_id = "tf-test-example-release%{random_suffix}" + unit_kind = google_saas_runtime_unit_kind.example_unitkind.id + blueprint { + package = "us-central1-docker.pkg.dev/ci-test-project-188019/test-repo/tf-test-easysaas-beta-image@sha256:7bba0fa85b2956df7768f7b32e715b6fe11f4f4193e2a70a35bf3f286a6cdf9e" + } + input_variable_defaults { + variable = "name" + value = "test" + type = "STRING" + } + labels = { + "key" = "value" + } + annotations = { + "key" = "value" + } + release_requirements { + upgradeable_from_releases = [google_saas_runtime_release.example_previous.id] + } +} +`, context) +} +{{- end }} From 53b287e6f06ce9beec9d6bfeee285a61348e34ac Mon Sep 17 00:00:00 2001 From: Kushal Lunkad Date: Mon, 8 Sep 2025 20:40:15 +0530 Subject: [PATCH 148/201] fetchforresourceType bpa added (#15043) Co-authored-by: Cameron Thornton --- .../provider/provider_mmv1_resources.go.tmpl | 1 + ...ource_backup_dr_backup_plan_association.go | 108 +++++++++++ ..._backup_dr_backup_plan_association_test.go | 172 +++++++++++++++++- ..._dr_backup_plan_associations.html.markdown | 48 +++++ 4 files changed, 328 insertions(+), 1 deletion(-) create mode 100644 mmv1/third_party/terraform/website/docs/d/backup_dr_backup_plan_associations.html.markdown diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index 5bf6eaaaea97..e33c780cfdb0 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -43,6 +43,7 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_apphub_discovered_service": apphub.DataSourceApphubDiscoveredService(), "google_backup_dr_management_server": backupdr.DataSourceGoogleCloudBackupDRService(), "google_backup_dr_backup_plan_association": backupdr.DataSourceGoogleCloudBackupDRBackupPlanAssociation(), + "google_backup_dr_backup_plan_associations": backupdr.DataSourceGoogleCloudBackupDRBackupPlanAssociations(), "google_backup_dr_backup_plan": backupdr.DataSourceGoogleCloudBackupDRBackupPlan(), "google_backup_dr_backup": backupdr.DataSourceGoogleCloudBackupDRBackup(), "google_backup_dr_data_source": backupdr.DataSourceGoogleCloudBackupDRDataSource(), diff --git a/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_backup_plan_association.go b/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_backup_plan_association.go index 50625e61448a..a38ce8e0903a 100644 --- a/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_backup_plan_association.go +++ b/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_backup_plan_association.go @@ -2,6 +2,7 @@ package backupdr import ( "fmt" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" @@ -44,3 +45,110 @@ func dataSourceGoogleCloudBackupDRBackupPlanAssociationRead(d *schema.ResourceDa } return nil } + +// Plural datasource to Fetch BackupPlanAssociations for a given resource type +func DataSourceGoogleCloudBackupDRBackupPlanAssociations() *schema.Resource { + return 
&schema.Resource{ + Read: dataSourceGoogleCloudBackupDRBackupPlanAssociationsRead, + Schema: map[string]*schema.Schema{ + "location": { + Type: schema.TypeString, + Required: true, + Description: "The location to list the backup plan associations from.", + }, + "project": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: "The ID of the project in which the resource belongs.", + }, + "resource_type": { + Type: schema.TypeString, + Required: true, + Description: `The resource type of workload on which backup plan is applied. Examples include, "compute.googleapis.com/Instance", "compute.googleapis.com/Disk".`, + }, + + "associations": { + Type: schema.TypeList, + Computed: true, + Description: "A list of the backup plan associations found.", + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Computed: true, + }, + "resource": { + Type: schema.TypeString, + Computed: true, + }, + "backup_plan": { + Type: schema.TypeString, + Computed: true, + }, + "create_time": { + Type: schema.TypeString, + Computed: true, + }, + }, + }, + }, + }, + } +} + +func dataSourceGoogleCloudBackupDRBackupPlanAssociationsRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + + location := d.Get("location").(string) + resourceType := d.Get("resource_type").(string) + + url := fmt.Sprintf("%sprojects/%s/locations/%s/backupPlanAssociations:fetchForResourceType?resourceType=%s", config.BackupDRBasePath, project, location, resourceType) + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: project, + RawURL: url, + UserAgent: userAgent, + }) + if err != nil { + return fmt.Errorf("Error reading BackupPlanAssociations: %s", err) + } + + // Adjust "backupPlanAssociations" to match the key in the actual API response. 
+ items, ok := res["backupPlanAssociations"].([]interface{}) + if !ok { + items = make([]interface{}, 0) + } + + // Flatten the list of items from the API response into the schema + associations := make([]map[string]interface{}, 0, len(items)) + for _, item := range items { + association := item.(map[string]interface{}) + flattened := map[string]interface{}{ + "name": association["name"], + "resource": association["resource"], + "backup_plan": association["backupPlan"], + "create_time": association["createTime"], + } + associations = append(associations, flattened) + } + + if err := d.Set("associations", associations); err != nil { + return fmt.Errorf("Error setting associations: %s", err) + } + + d.SetId(url) + + return nil +} diff --git a/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_backup_plan_association_test.go b/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_backup_plan_association_test.go index d2735c8e46e6..15c4397d1f4c 100644 --- a/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_backup_plan_association_test.go +++ b/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_backup_plan_association_test.go @@ -1,9 +1,14 @@ package backupdr_test import ( + "fmt" + "strconv" + "strings" + "testing" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" "github.com/hashicorp/terraform-provider-google/google/acctest" - "testing" ) func TestAccDataSourceGoogleBackupDRBackupPlanAssociation_basic(t *testing.T) { @@ -121,3 +126,168 @@ data "google_backup_dr_backup_plan_association" "bpa-test" { } `, context) } + +func TestAccDataSourceGoogleBackupDRBackupPlanAssociations(t *testing.T) { + t.Parallel() + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "bpa_id": "tf-test-bpa-plural-" + acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceGoogleBackupDRBackupPlanAssociations_config(context), + Check: testAccCheckBackupPlanAssociationInList( + "data.google_backup_dr_backup_plan_associations.bpas", + "google_compute_instance.default", + "google_backup_dr_backup_plan.foo1", + "data.google_project.project", + ), + }, + }, + }) +} + +func testAccCheckBackupPlanAssociationInList(dataSourceName, instanceName, backupPlanName, projectDsName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + ds, ok := s.RootModule().Resources[dataSourceName] + if !ok { + return fmt.Errorf("data source not found: %s", dataSourceName) + } + + instance, ok := s.RootModule().Resources[instanceName] + if !ok { + return fmt.Errorf("instance resource not found: %s", instanceName) + } + + backupPlan, ok := s.RootModule().Resources[backupPlanName] + if !ok { + return fmt.Errorf("backup plan resource not found: %s", backupPlanName) + } + backupPlanNameFromState := backupPlan.Primary.Attributes["name"] + + project, ok := s.RootModule().Resources[projectDsName] + if !ok { + return fmt.Errorf("project data source not found: %s", projectDsName) + } + projectID := project.Primary.Attributes["project_id"] + projectNumber := project.Primary.Attributes["number"] + + fmt.Printf("\n--- Performing Direct Association Check ---\n") + + // 1. 
Reconstruct the 'resource' string using the project NUMBER and instance ID + // to match the format returned by the BackupDR API. + instanceID := instance.Primary.Attributes["instance_id"] + zone := instance.Primary.Attributes["zone"] + expectedResource := fmt.Sprintf("projects/%s/zones/%s/instances/%s", projectNumber, zone, instanceID) + fmt.Printf("Expected Resource (constructed): %s\n", expectedResource) + + // 2. Normalize the backup plan name to also use the project NUMBER. + expectedBackupPlan := strings.Replace(backupPlanNameFromState, "projects/"+projectID, "projects/"+projectNumber, 1) + fmt.Printf("Expected Backup Plan (normalized): %s\n", expectedBackupPlan) + + associationsCount, _ := strconv.Atoi(ds.Primary.Attributes["associations.#"]) + fmt.Printf("Total associations found by data source: %d\n", associationsCount) + + for i := 0; i < associationsCount; i++ { + resourceAttr := ds.Primary.Attributes[fmt.Sprintf("associations.%d.resource", i)] + backupPlanAttr := ds.Primary.Attributes[fmt.Sprintf("associations.%d.backup_plan", i)] + + fmt.Printf("Found Association #%d: Resource='%s', BackupPlan='%s'\n", i, resourceAttr, backupPlanAttr) + + if resourceAttr == expectedResource && backupPlanAttr == expectedBackupPlan { + fmt.Println("--- Match found! Test successful. ---") + return nil + } + } + + fmt.Println("--- No match found after checking all associations. ---") + return fmt.Errorf("no matching backup plan association found in data source '%s' for resource '%s'", dataSourceName, expectedResource) + } +} + +func testAccDataSourceGoogleBackupDRBackupPlanAssociations_config(context map[string]interface{}) string { + return acctest.Nprintf(` + data "google_project" "project" {} + +resource "google_service_account" "default" { + account_id = "tf-test-my-custom1-%{random_suffix}" + display_name = "Custom SA for VM Instance" +} + +resource "google_compute_instance" "default" { + name = "tf-test-compute-instance1-%{random_suffix}" + machine_type = "n2-standard-2" + zone = "us-central1-a" + tags = ["foo", "bar"] + boot_disk { + initialize_params { + image = "debian-cloud/debian-11" + } + } + network_interface { + network = "default" + } + service_account { + email = google_service_account.default.email + scopes = ["cloud-platform"] + } +} + +resource "google_backup_dr_backup_vault" "my-backup-vault" { + location = "us-central1" + backup_vault_id = "tf-test-bv1-%{random_suffix}" + description = "This is a backup vault for list datasource test." 
+  backup_minimum_enforced_retention_duration = "100000s"
+  labels = {
+    foo = "bar1"
+    bar = "baz1"
+  }
+  annotations = {
+    annotations1 = "bar1"
+    annotations2 = "baz1"
+  }
+  force_update = "true"
+  force_delete = "true"
+  allow_missing = "true"
+}
+
+resource "google_backup_dr_backup_plan" "foo1" {
+  location       = "us-central1"
+  backup_plan_id = "tf-test-bp-test1-%{random_suffix}"
+  resource_type  = "compute.googleapis.com/Instance"
+  backup_vault   = google_backup_dr_backup_vault.my-backup-vault.name
+
+  backup_rules {
+    rule_id               = "rule-1"
+    backup_retention_days = 2
+    standard_schedule {
+      recurrence_type  = "HOURLY"
+      hourly_frequency = 6
+      time_zone        = "UTC"
+      backup_window {
+        start_hour_of_day = 12
+        end_hour_of_day   = 18
+      }
+    }
+  }
+}
+
+resource "google_backup_dr_backup_plan_association" "bpa" {
+  location                    = "us-central1"
+  backup_plan_association_id = "%{bpa_id}"
+  resource                    = google_compute_instance.default.id
+  resource_type               = "compute.googleapis.com/Instance"
+  backup_plan                 = google_backup_dr_backup_plan.foo1.name
+}
+
+data "google_backup_dr_backup_plan_associations" "bpas" {
+  location      = "us-central1"
+  resource_type = "compute.googleapis.com/Instance"
+  depends_on = [google_backup_dr_backup_plan_association.bpa]
+}
+`, context)
+}
diff --git a/mmv1/third_party/terraform/website/docs/d/backup_dr_backup_plan_associations.html.markdown b/mmv1/third_party/terraform/website/docs/d/backup_dr_backup_plan_associations.html.markdown
new file mode 100644
index 000000000000..72a8d434380e
--- /dev/null
+++ b/mmv1/third_party/terraform/website/docs/d/backup_dr_backup_plan_associations.html.markdown
@@ -0,0 +1,49 @@
+---
+subcategory: "Backup and DR Service"
+description: |-
+  Get information about a list of Backup and DR BackupPlanAssociations for a specific resource type.
+---
+
+# google_backup_dr_backup_plan_associations
+
+Provides a list of Backup and DR BackupPlanAssociations for a specific resource type.
+
+~> **Warning:** This datasource is in beta, and should be used with the terraform-provider-google-beta provider.
+See [Provider Versions](https://terraform.io/docs/providers/google/guides/provider_versions.html) for more details on beta datasources.
+
+## Example Usage
+
+```hcl
+data "google_backup_dr_backup_plan_associations" "compute_instance_associations" {
+  location       = "us-central1"
+  resource_type  = "compute.googleapis.com/Instance"
+}
+```
+
+## Argument Reference
+
+The following arguments are supported:
+
+* `location` - (Required) The location where the Backup Plan Association resources reside.
+* `resource_type` - (Required) The resource type of the workload. For example, sqladmin.googleapis.com/Instance or compute.googleapis.com/Instance.
+
+- - -
+
+* `project` - (Optional) The project in which the resource belongs. If it
+  is not provided, the provider project is used.
+
+## Attributes Reference
+
+In addition to the arguments listed above, the following computed attributes are exported:
+
+* `project` - The ID of the project in which the resource belongs.
+* `associations` - A list of the backup plan associations found.
+
+Each entry in the `associations` list contains the following fields:
+
+* `name` - The full name of the backup plan association resource.
+* `resource` - The resource to which the backup plan is applied.
+* `backup_plan` - The backup plan to which the resource is attached.
+* `create_time` - The time when the association was created.
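A minimal sketch of consuming the exported `associations` list: it reuses the values from the example usage above and only the attributes defined in the data source schema earlier in this patch; the output name is illustrative.

```hcl
data "google_backup_dr_backup_plan_associations" "compute_instance_associations" {
  location      = "us-central1"
  resource_type = "compute.googleapis.com/Instance"
}

# Collect the backup plans currently attached to Compute Engine instances
# in the selected location.
output "attached_backup_plans" {
  value = [
    for assoc in data.google_backup_dr_backup_plan_associations.compute_instance_associations.associations :
    assoc.backup_plan
  ]
}
```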
+ +See [google_backup_dr_backup_plan_associations](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/backup_dr_backup_plan_associations) resource for details of the available attributes. \ No newline at end of file From a5340099418bd1c14c8b710b11ea25982c473848 Mon Sep 17 00:00:00 2001 From: Rajesh Guptha Date: Mon, 8 Sep 2025 21:57:11 +0530 Subject: [PATCH 149/201] update DATA_GOVERNANCE field (#15045) --- mmv1/products/tags/TagKey.yaml | 1 + .../services/tags/resource_tags_test.go | 58 ++++++++++++++----- 2 files changed, 45 insertions(+), 14 deletions(-) diff --git a/mmv1/products/tags/TagKey.yaml b/mmv1/products/tags/TagKey.yaml index ee56ca730705..1d6c580b047b 100644 --- a/mmv1/products/tags/TagKey.yaml +++ b/mmv1/products/tags/TagKey.yaml @@ -117,6 +117,7 @@ properties: immutable: true enum_values: - 'GCE_FIREWALL' + - 'DATA_GOVERNANCE' - name: 'purposeData' type: KeyValuePairs description: | diff --git a/mmv1/third_party/terraform/services/tags/resource_tags_test.go b/mmv1/third_party/terraform/services/tags/resource_tags_test.go index 3501e8f3a342..9c197816cb45 100644 --- a/mmv1/third_party/terraform/services/tags/resource_tags_test.go +++ b/mmv1/third_party/terraform/services/tags/resource_tags_test.go @@ -19,20 +19,21 @@ import ( func TestAccTags(t *testing.T) { testCases := map[string]func(t *testing.T){ - "tagKeyBasic": testAccTagsTagKey_tagKeyBasic, - "tagKeyBasicWithPurposeGceFirewall": testAccTagsTagKey_tagKeyBasicWithPurposeGceFirewall, - "tagKeyUpdate": testAccTagsTagKey_tagKeyUpdate, - "tagKeyIamBinding": testAccTagsTagKeyIamBinding, - "tagKeyIamMember": testAccTagsTagKeyIamMember, - "tagKeyIamPolicy": testAccTagsTagKeyIamPolicy, - "tagValueBasic": testAccTagsTagValue_tagValueBasic, - "tagValueUpdate": testAccTagsTagValue_tagValueUpdate, - "tagBindingBasic": testAccTagsTagBinding_tagBindingBasic, - "tagValueIamBinding": testAccTagsTagValueIamBinding, - "tagValueIamMember": testAccTagsTagValueIamMember, - "tagValueIamPolicy": testAccTagsTagValueIamPolicy, - "tagsLocationTagBindingBasic": testAccTagsLocationTagBinding_locationTagBindingbasic, - "tagsLocationTagBindingZonal": TestAccTagsLocationTagBinding_locationTagBindingzonal, + "tagKeyBasic": testAccTagsTagKey_tagKeyBasic, + "tagKeyBasicWithPurposeGceFirewall": testAccTagsTagKey_tagKeyBasicWithPurposeGceFirewall, + "tagKeyBasicWithPurposeDataGovernance": testAccTagsTagKey_tagKeyBasicWithPurposeDataGovernance, + "tagKeyUpdate": testAccTagsTagKey_tagKeyUpdate, + "tagKeyIamBinding": testAccTagsTagKeyIamBinding, + "tagKeyIamMember": testAccTagsTagKeyIamMember, + "tagKeyIamPolicy": testAccTagsTagKeyIamPolicy, + "tagValueBasic": testAccTagsTagValue_tagValueBasic, + "tagValueUpdate": testAccTagsTagValue_tagValueUpdate, + "tagBindingBasic": testAccTagsTagBinding_tagBindingBasic, + "tagValueIamBinding": testAccTagsTagValueIamBinding, + "tagValueIamMember": testAccTagsTagValueIamMember, + "tagValueIamPolicy": testAccTagsTagValueIamPolicy, + "tagsLocationTagBindingBasic": testAccTagsLocationTagBinding_locationTagBindingbasic, + "tagsLocationTagBindingZonal": TestAccTagsLocationTagBinding_locationTagBindingzonal, } for name, tc := range testCases { @@ -114,6 +115,35 @@ resource "google_tags_tag_key" "key" { `, context) } +func testAccTagsTagKey_tagKeyBasicWithPurposeDataGovernance(t *testing.T) { + context := map[string]interface{}{ + "org_id": envvar.GetTestOrgFromEnv(t), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { 
acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckTagsTagKeyDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccTagsTagKey_tagKeyBasicWithPurposeDataGovernanceExample(context), + }, + }, + }) +} + +func testAccTagsTagKey_tagKeyBasicWithPurposeDataGovernanceExample(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_tags_tag_key" "key" { + parent = "organizations/%{org_id}" + short_name = "data-gov-%{random_suffix}" + description = "For data governance purposes." + purpose = "DATA_GOVERNANCE" +} +`, context) +} + func testAccTagsTagKey_tagKeyUpdate(t *testing.T) { context := map[string]interface{}{ "org_id": envvar.GetTestOrgFromEnv(t), From e3f9ae66643e38285a464c782f7dcc3c8f7f935b Mon Sep 17 00:00:00 2001 From: Ankush Jain Date: Mon, 8 Sep 2025 23:39:58 +0530 Subject: [PATCH 150/201] Adding support for extended attributes in workforce pool provider (#15077) --- .../WorkforcePoolProvider.yaml | 95 ++++ ..._oauth2_config_client_secret_value.go.tmpl | 29 ++ ...ributes_oauth2_config_client_basic.tf.tmpl | 37 ++ ...tributes_oauth2_config_client_full.tf.tmpl | 40 ++ .../iam_workforce_pool_provider.go.tmpl | 14 +- .../iam_workforce_pool_provider.go.tmpl | 13 +- ...e_iam_workforce_pool_provider_test.go.tmpl | 446 ++++++++++++++++++ 7 files changed, 672 insertions(+), 2 deletions(-) create mode 100644 mmv1/templates/terraform/custom_flatten/iam_workforce_pool_provider_extended_attributes_oauth2_config_client_secret_value.go.tmpl create mode 100644 mmv1/templates/terraform/examples/iam_workforce_pool_provider_extended_attributes_oauth2_config_client_basic.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/iam_workforce_pool_provider_extended_attributes_oauth2_config_client_full.tf.tmpl diff --git a/mmv1/products/iamworkforcepool/WorkforcePoolProvider.yaml b/mmv1/products/iamworkforcepool/WorkforcePoolProvider.yaml index 21e853f5e7ef..245e8e77d4e8 100644 --- a/mmv1/products/iamworkforcepool/WorkforcePoolProvider.yaml +++ b/mmv1/products/iamworkforcepool/WorkforcePoolProvider.yaml @@ -114,6 +114,26 @@ examples: ignore_read_extra: - 'oidc.0.client_secret.0.value.0.plain_text' - 'extra_attributes_oauth2_client.0.client_secret.0.value.0.plain_text' + - name: 'iam_workforce_pool_provider_extended_attributes_oauth2_config_client_basic' + primary_resource_id: 'example' + vars: + workforce_pool_id: 'example-pool' + provider_id: 'example-prvdr' + test_env_vars: + org_id: 'ORG_ID' + ignore_read_extra: + - 'oidc.0.client_secret.0.value.0.plain_text' + - 'extended_attributes_oauth2_client.0.client_secret.0.value.0.plain_text' + - name: 'iam_workforce_pool_provider_extended_attributes_oauth2_config_client_full' + primary_resource_id: 'example' + vars: + workforce_pool_id: 'example-pool' + provider_id: 'example-prvdr' + test_env_vars: + org_id: 'ORG_ID' + ignore_read_extra: + - 'oidc.0.client_secret.0.value.0.plain_text' + - 'extended_attributes_oauth2_client.0.client_secret.0.value.0.plain_text' parameters: properties: - name: 'location' @@ -461,3 +481,78 @@ properties: The filter used to request specific records from IdP. In case of attributes type as AZURE_AD_GROUPS_MAIL and AZURE_AD_GROUPS_ID, it represents the filter used to request specific groups for users from IdP. By default, all of the groups associated with the user are fetched. The groups should be security enabled. See https://learn.microsoft.com/en-us/graph/search-query-parameter for more details. 
+ - name: 'extendedAttributesOauth2Client' + type: NestedObject + description: | + The configuration for OAuth 2.0 client used to get the extended group + memberships for user identities. Only the `AZURE_AD_GROUPS_ID` attribute + type is supported. Extended groups supports a subset of Google Cloud + services. When the user accesses these services, extended group memberships + override the mapped `google.groups` attribute. Extended group memberships + cannot be used in attribute mapping or attribute condition expressions. + + To keep extended group memberships up to date, extended groups are + retrieved when the user signs in and at regular intervals during the user's + active session. Each user identity in the workforce identity pool must map + to a unique Microsoft Entra ID user. + properties: + - name: 'issuerUri' + type: String + description: | + The OIDC identity provider's issuer URI. Must be a valid URI using the `https` scheme. Required to get the OIDC discovery document. + required: true + - name: 'clientId' + type: String + description: | + The OAuth 2.0 client ID for retrieving extended attributes from the identity provider. Required to get the Access Token using client credentials grant flow. + required: true + - name: 'clientSecret' + type: NestedObject + description: | + The OAuth 2.0 client secret for retrieving extended attributes from the identity provider. Required to get the Access Token using client credentials grant flow. + required: true + properties: + - name: 'value' + type: NestedObject + description: | + The value of the client secret. + custom_flatten: 'templates/terraform/custom_flatten/iam_workforce_pool_provider_extended_attributes_oauth2_config_client_secret_value.go.tmpl' + properties: + - name: 'plainText' + type: String + description: | + The plain text of the client secret value. + required: true + validation: + function: 'validation.StringIsNotEmpty' + - name: 'thumbprint' + type: String + description: | + A thumbprint to represent the current client secret value. + output: true + - name: 'attributesType' + type: Enum + description: | + Represents the IdP and type of claims that should be fetched. + * AZURE_AD_GROUPS_ID: Used to get the user's group claims from the Azure AD identity provider + using configuration provided in ExtendedAttributesOAuth2Client and `id` + property of the `microsoft.graph.group` object is used for claim mapping. See + https://learn.microsoft.com/en-us/graph/api/resources/group?view=graph-rest-1.0#properties + for more details on `microsoft.graph.group` properties. The + group IDs obtained from Azure AD are present in `assertion.groups` for + OIDC providers and `assertion.attributes.groups` for SAML providers for + attribute mapping. + required: true + enum_values: + - 'AZURE_AD_GROUPS_ID' + - name: 'queryParameters' + type: NestedObject + description: | + Represents the parameters to control which claims are fetched from an IdP. + properties: + - name: 'filter' + type: String + description: | + The filter used to request specific records from IdP. In case of attributes type as AZURE_AD_GROUPS_ID, it represents the + filter used to request specific groups for users from IdP. By default, all of the groups associated with the user are fetched. The + groups should be security enabled. See https://learn.microsoft.com/en-us/graph/search-query-parameter for more details. 
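Rendered with concrete values in place of the template variables, the basic example template added later in this patch corresponds to roughly the following configuration; the organization number, pool and provider IDs, and the client secret are illustrative placeholders, not values taken from this change.

```hcl
resource "google_iam_workforce_pool" "pool" {
  workforce_pool_id = "example-pool"
  parent            = "organizations/123456789" # placeholder organization
  location          = "global"
}

resource "google_iam_workforce_pool_provider" "example" {
  workforce_pool_id = google_iam_workforce_pool.pool.workforce_pool_id
  location          = google_iam_workforce_pool.pool.location
  provider_id       = "example-prvdr"
  attribute_mapping = {
    "google.subject" = "assertion.sub"
  }
  oidc {
    issuer_uri = "https://login.microsoftonline.com/826602fe-2101-470c-9d71-ee1343668989/v2.0"
    client_id  = "https://analysis.windows.net/powerbi/connector/GoogleBigQuery"
    web_sso_config {
      response_type             = "CODE"
      assertion_claims_behavior = "MERGE_USER_INFO_OVER_ID_TOKEN_CLAIMS"
    }
    client_secret {
      value {
        plain_text = "client-secret" # placeholder secret
      }
    }
  }
  extended_attributes_oauth2_client {
    issuer_uri = "https://login.microsoftonline.com/826602fe-2101-470c-9d71-ee1343668989/v2.0"
    client_id  = "client-id" # placeholder client ID
    client_secret {
      value {
        plain_text = "client-secret" # placeholder secret
      }
    }
    attributes_type = "AZURE_AD_GROUPS_ID"
  }
}
```

Per the field descriptions above, group IDs retrieved through this block surface in `assertion.groups` for OIDC providers and `assertion.attributes.groups` for SAML providers, and for supported services they override the mapped `google.groups` attribute.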
diff --git a/mmv1/templates/terraform/custom_flatten/iam_workforce_pool_provider_extended_attributes_oauth2_config_client_secret_value.go.tmpl b/mmv1/templates/terraform/custom_flatten/iam_workforce_pool_provider_extended_attributes_oauth2_config_client_secret_value.go.tmpl new file mode 100644 index 000000000000..362ce873aa90 --- /dev/null +++ b/mmv1/templates/terraform/custom_flatten/iam_workforce_pool_provider_extended_attributes_oauth2_config_client_secret_value.go.tmpl @@ -0,0 +1,29 @@ +{{/* + The license inside this block applies to this file + Copyright 2024 Google Inc. + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ -}} +func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["thumbprint"] = original["thumbprint"] + // Trigger a diff based on the plain_text if there is no change in the thumbprint, + // otherwise leave plain_text empty to always trigger a diff. + if original["thumbprint"].(string) == d.Get("extended_attributes_oauth2_client.0.client_secret.0.value.0.thumbprint").(string) { + transformed["plain_text"] = d.Get("extended_attributes_oauth2_client.0.client_secret.0.value.0.plain_text") + } + return []interface{}{transformed} +} diff --git a/mmv1/templates/terraform/examples/iam_workforce_pool_provider_extended_attributes_oauth2_config_client_basic.tf.tmpl b/mmv1/templates/terraform/examples/iam_workforce_pool_provider_extended_attributes_oauth2_config_client_basic.tf.tmpl new file mode 100644 index 000000000000..ce4c20fc9fe9 --- /dev/null +++ b/mmv1/templates/terraform/examples/iam_workforce_pool_provider_extended_attributes_oauth2_config_client_basic.tf.tmpl @@ -0,0 +1,37 @@ +resource "google_iam_workforce_pool" "pool" { + workforce_pool_id = "{{index $.Vars "workforce_pool_id"}}" + parent = "organizations/{{index $.TestEnvVars "org_id"}}" + location = "global" +} + +resource "google_iam_workforce_pool_provider" "{{$.PrimaryResourceId}}" { + workforce_pool_id = google_iam_workforce_pool.pool.workforce_pool_id + location = google_iam_workforce_pool.pool.location + provider_id = "{{index $.Vars "provider_id"}}" + attribute_mapping = { + "google.subject" = "assertion.sub" + } + oidc { + issuer_uri = "https://login.microsoftonline.com/826602fe-2101-470c-9d71-ee1343668989/v2.0" + client_id = "https://analysis.windows.net/powerbi/connector/GoogleBigQuery" + web_sso_config { + response_type = "CODE" + assertion_claims_behavior = "MERGE_USER_INFO_OVER_ID_TOKEN_CLAIMS" + } + client_secret { + value { + plain_text = "client-secret" + } + } + } + extended_attributes_oauth2_client { + issuer_uri = "https://login.microsoftonline.com/826602fe-2101-470c-9d71-ee1343668989/v2.0" + client_id = "client-id" + client_secret { + value { + plain_text = "client-secret" + } + } + attributes_type = "AZURE_AD_GROUPS_ID" + } +} diff --git 
a/mmv1/templates/terraform/examples/iam_workforce_pool_provider_extended_attributes_oauth2_config_client_full.tf.tmpl b/mmv1/templates/terraform/examples/iam_workforce_pool_provider_extended_attributes_oauth2_config_client_full.tf.tmpl new file mode 100644 index 000000000000..2475d41787e9 --- /dev/null +++ b/mmv1/templates/terraform/examples/iam_workforce_pool_provider_extended_attributes_oauth2_config_client_full.tf.tmpl @@ -0,0 +1,40 @@ +resource "google_iam_workforce_pool" "pool" { + workforce_pool_id = "{{index $.Vars "workforce_pool_id"}}" + parent = "organizations/{{index $.TestEnvVars "org_id"}}" + location = "global" +} + +resource "google_iam_workforce_pool_provider" "{{$.PrimaryResourceId}}" { + workforce_pool_id = google_iam_workforce_pool.pool.workforce_pool_id + location = google_iam_workforce_pool.pool.location + provider_id = "{{index $.Vars "provider_id"}}" + attribute_mapping = { + "google.subject" = "assertion.sub" + } + oidc { + issuer_uri = "https://login.microsoftonline.com/826602fe-2101-470c-9d71-ee1343668989/v2.0" + client_id = "https://analysis.windows.net/powerbi/connector/GoogleBigQuery" + client_secret { + value { + plain_text = "client-secret" + } + } + web_sso_config { + response_type = "CODE" + assertion_claims_behavior = "MERGE_USER_INFO_OVER_ID_TOKEN_CLAIMS" + } + } + extended_attributes_oauth2_client { + issuer_uri = "https://login.microsoftonline.com/826602fe-2101-470c-9d71-ee1343668989/v2.0" + client_id = "client-id" + client_secret { + value { + plain_text = "client-secret" + } + } + attributes_type = "AZURE_AD_GROUPS_ID" + query_parameters { + filter = "mail:gcp" + } + } +} diff --git a/mmv1/templates/terraform/post_create/iam_workforce_pool_provider.go.tmpl b/mmv1/templates/terraform/post_create/iam_workforce_pool_provider.go.tmpl index 56594c36c4ac..803a87b21dbe 100644 --- a/mmv1/templates/terraform/post_create/iam_workforce_pool_provider.go.tmpl +++ b/mmv1/templates/terraform/post_create/iam_workforce_pool_provider.go.tmpl @@ -1,7 +1,8 @@ createdOidcClientSecret := d.Get("oidc.0.client_secret.0.value.0.plain_text") createdExtraAttributesClientSecret := d.Get("extra_attributes_oauth2_client.0.client_secret.0.value.0.plain_text") +createdExtendedAttributesClientSecret := d.Get("extended_attributes_oauth2_client.0.client_secret.0.value.0.plain_text") -if (createdOidcClientSecret != nil && createdOidcClientSecret != "") || (createdExtraAttributesClientSecret != nil && createdExtraAttributesClientSecret != "") { +if (createdOidcClientSecret != nil && createdOidcClientSecret != "") || (createdExtraAttributesClientSecret != nil && createdExtraAttributesClientSecret != "") || (createdExtendedAttributesClientSecret != nil && createdExtendedAttributesClientSecret != "") { // After the create, reading from the API returns a new thumbprint // for the client secret value, which clears the plain_text. We set the plain_text since // this case should not warrant a diff. 
@@ -20,6 +21,17 @@ if (createdOidcClientSecret != nil && createdOidcClientSecret != "") || (created } } + // Populate ExtendedAttributesOauth2Client the client secret plain text + if createdExtendedAttributesClientSecret != nil && createdExtendedAttributesClientSecret != "" { + extendedAttributesOauth2Client := d.Get("extended_attributes_oauth2_client") + clientSecret := extendedAttributesOauth2Client.([]interface{})[0].(map[string]interface{})["client_secret"] + clientSecretValue := clientSecret.([]interface{})[0].(map[string]interface{})["value"] + clientSecretValue.([]interface{})[0].(map[string]interface{})["plain_text"] = createdExtendedAttributesClientSecret + if err := d.Set("extended_attributes_oauth2_client", extendedAttributesOauth2Client); err != nil { + return err + } + } + // Populate OIDC the client secret plain text if createdOidcClientSecret != nil && createdOidcClientSecret != "" { oidc := d.Get("oidc") diff --git a/mmv1/templates/terraform/post_update/iam_workforce_pool_provider.go.tmpl b/mmv1/templates/terraform/post_update/iam_workforce_pool_provider.go.tmpl index 2dfb9e3db151..797d04eb09b2 100644 --- a/mmv1/templates/terraform/post_update/iam_workforce_pool_provider.go.tmpl +++ b/mmv1/templates/terraform/post_update/iam_workforce_pool_provider.go.tmpl @@ -1,6 +1,7 @@ -if d.HasChange("oidc") || d.HasChange("extra_attributes_oauth2_client") { +if d.HasChange("oidc") || d.HasChange("extra_attributes_oauth2_client") || d.HasChange("extended_attributes_oauth2_client") { updatedOidcClientSecret := d.Get("oidc.0.client_secret.0.value.0.plain_text") updatedExtraAttributesOauth2ClientSecret := d.Get("extra_attributes_oauth2_client.0.client_secret.0.value.0.plain_text") + updatedExtendedAttributesOauth2ClientSecret := d.Get("extended_attributes_oauth2_client.0.client_secret.0.value.0.plain_text") // After the update, reading from the API returns a different thumbprint // for the client secret value, which clears the plain_text. We set the plain_text since // this case should not warrant a diff. 
@@ -27,5 +28,15 @@ if d.HasChange("oidc") || d.HasChange("extra_attributes_oauth2_client") { return err } } + + if updatedExtendedAttributesOauth2ClientSecret != nil && updatedExtendedAttributesOauth2ClientSecret != "" { + extendedAttributesOauth2Client := d.Get("extended_attributes_oauth2_client") + clientSecret := extendedAttributesOauth2Client.([]interface{})[0].(map[string]interface{})["client_secret"] + clientSecretValue := clientSecret.([]interface{})[0].(map[string]interface{})["value"] + clientSecretValue.([]interface{})[0].(map[string]interface{})["plain_text"] = updatedExtendedAttributesOauth2ClientSecret + if err := d.Set("extended_attributes_oauth2_client", extendedAttributesOauth2Client); err != nil { + return err + } + } return nil } \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_provider_test.go.tmpl b/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_provider_test.go.tmpl index 133139f5933c..70dccc3f54bc 100644 --- a/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_provider_test.go.tmpl +++ b/mmv1/third_party/terraform/services/iamworkforcepool/resource_iam_workforce_pool_provider_test.go.tmpl @@ -241,6 +241,127 @@ func TestAccIAMWorkforcePoolWorkforcePoolSamlProvider_extraAttributesOauth2Clien }) } +func TestAccIAMWorkforcePoolWorkforcePoolOidcProvider_extendedAttributesOauth2Client(t *testing.T) { + t.Parallel() + + random_suffix := acctest.RandString(t, 10) + context := map[string]interface{}{ + "org_id": envvar.GetTestOrgFromEnv(t), + "random_suffix": random_suffix, + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckIAMWorkforcePoolWorkforcePoolDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccIAMWorkforcePoolWorkforcePoolOidcProvider_extendedAttributesOauth2Client_full(context), + }, + { + ResourceName: "google_iam_workforce_pool_provider.my_provider", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"oidc.0.client_secret.0.value.0.plain_text", "extended_attributes_oauth2_client.0.client_secret.0.value.0.plain_text"}, + }, + { + Config: testAccIAMWorkforcePoolWorkforcePoolOidcProvider_extendedAttributesOauth2Client_update(context), + }, + { + ResourceName: "google_iam_workforce_pool_provider.my_provider", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"oidc.0.client_secret.0.value.0.plain_text", "extended_attributes_oauth2_client.0.client_secret.0.value.0.plain_text"}, + }, + { + Config: testAccIAMWorkforcePoolWorkforcePoolOidcProvider_extendedAttributesOauth2Client_update_clearConfig(context), + }, + { + ResourceName: "google_iam_workforce_pool_provider.my_provider", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"oidc.0.client_secret.0.value.0.plain_text"}, + }, + { + Config: testAccIAMWorkforcePoolWorkforcePoolOidcProvider_extendedAttributesOauth2Client_basic(context), + }, + { + ResourceName: "google_iam_workforce_pool_provider.my_provider", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"oidc.0.client_secret.0.value.0.plain_text", "extended_attributes_oauth2_client.0.client_secret.0.value.0.plain_text"}, + }, + { + Config: testAccIAMWorkforcePoolWorkforcePoolProvider_destroy(context), + Check: resource.ComposeTestCheckFunc( + 
testAccCheckIAMWorkforcePoolWorkforcePoolProviderAccess(t, random_suffix), + ), + }, + }, + }) +} + + +func TestAccIAMWorkforcePoolWorkforcePoolSamlProvider_extendedAttributesOauth2Client(t *testing.T) { + t.Parallel() + + random_suffix := acctest.RandString(t, 10) + context := map[string]interface{}{ + "org_id": envvar.GetTestOrgFromEnv(t), + "random_suffix": random_suffix, + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckIAMWorkforcePoolWorkforcePoolDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccIAMWorkforcePoolWorkforcePoolSamlProvider_extendedAttributesOauth2Client_full(context), + }, + { + ResourceName: "google_iam_workforce_pool_provider.my_provider", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"extended_attributes_oauth2_client.0.client_secret.0.value.0.plain_text"}, + }, + { + Config: testAccIAMWorkforcePoolWorkforcePoolSamlProvider_extendedAttributesOauth2Client_update(context), + }, + { + ResourceName: "google_iam_workforce_pool_provider.my_provider", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"extended_attributes_oauth2_client.0.client_secret.0.value.0.plain_text"}, + }, + { + Config: testAccIAMWorkforcePoolWorkforcePoolSamlProvider_extendedAttributesOauth2Client_update_clearConfig(context), + }, + { + ResourceName: "google_iam_workforce_pool_provider.my_provider", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"oidc.0.client_secret.0.value.0.plain_text"}, + }, + { + Config: testAccIAMWorkforcePoolWorkforcePoolSamlProvider_extendedAttributesOauth2Client_basic(context), + }, + { + ResourceName: "google_iam_workforce_pool_provider.my_provider", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"extended_attributes_oauth2_client.0.client_secret.0.value.0.plain_text"}, + }, + { + Config: testAccIAMWorkforcePoolWorkforcePoolProvider_destroy(context), + Check: resource.ComposeTestCheckFunc( + testAccCheckIAMWorkforcePoolWorkforcePoolProviderAccess(t, random_suffix), + ), + }, + }, + }) +} + func testAccCheckIAMWorkforcePoolWorkforcePoolProviderAccess(t *testing.T, random_suffix string) resource.TestCheckFunc { return func(s *terraform.State) error { pool_resource_name := "google_iam_workforce_pool.my_pool" @@ -812,6 +933,331 @@ resource "google_iam_workforce_pool_provider" "my_provider" { `, context) } +func testAccIAMWorkforcePoolWorkforcePoolOidcProvider_extendedAttributesOauth2Client_full(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workforce_pool" "my_pool" { + workforce_pool_id = "my-pool-%{random_suffix}" + parent = "organizations/%{org_id}" + location = "global" +} + +resource "google_iam_workforce_pool_provider" "my_provider" { + workforce_pool_id = google_iam_workforce_pool.my_pool.workforce_pool_id + location = google_iam_workforce_pool.my_pool.location + provider_id = "my-provider-%{random_suffix}" + attribute_mapping = { + "google.subject" = "assertion.sub" + } + oidc { + issuer_uri = "https://login.microsoftonline.com/826602fe-2101-470c-9d71-ee1343668989/v2.0" + client_id = "https://analysis.windows.net/powerbi/connector/GoogleBigQuery" + client_secret { + value { + plain_text = "client-secret" + } + } + web_sso_config { + response_type = "CODE" + assertion_claims_behavior = "MERGE_USER_INFO_OVER_ID_TOKEN_CLAIMS" + 
additional_scopes = ["groups", "roles"] + } + } + extended_attributes_oauth2_client { + issuer_uri = "https://login.microsoftonline.com/826602fe-2101-470c-9d71-ee1343668989/v2.0" + client_id = "client-id" + client_secret { + value { + plain_text = "client-secret" + } + } + attributes_type = "AZURE_AD_GROUPS_ID" + query_parameters { + filter = "mail:gcp" + } + } + display_name = "Display name" + description = "A sample OIDC workforce pool provider." + disabled = false + attribute_condition = "true" +} +`, context) +} + +func testAccIAMWorkforcePoolWorkforcePoolOidcProvider_extendedAttributesOauth2Client_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workforce_pool" "my_pool" { + workforce_pool_id = "my-pool-%{random_suffix}" + parent = "organizations/%{org_id}" + location = "global" +} + +resource "google_iam_workforce_pool_provider" "my_provider" { + workforce_pool_id = google_iam_workforce_pool.my_pool.workforce_pool_id + location = google_iam_workforce_pool.my_pool.location + provider_id = "my-provider-%{random_suffix}" + attribute_mapping = { + "google.subject" = "false" + } + oidc { + issuer_uri = "https://login.microsoftonline.com/826602fe-2101-470c-9d71-ee1343668989/v2.0" + client_id = "https://analysis.windows.net/powerbi/connector/GoogleBigQuery" + client_secret { + value { + plain_text = "client-secret" + } + } + web_sso_config { + response_type = "CODE" + assertion_claims_behavior = "MERGE_USER_INFO_OVER_ID_TOKEN_CLAIMS" + additional_scopes = ["groups", "roles"] + } + } + extended_attributes_oauth2_client { + issuer_uri = "https://login.microsoftonline.com/826602fe-2101-470c-9d71-ee1343668989/v2.0" + client_id = "new-client-id" + client_secret { + value { + plain_text = "new-client-secret" + } + } + attributes_type = "AZURE_AD_GROUPS_ID" + query_parameters { + filter = "displayName:gcp" + } + } + display_name = "New Display name" + description = "A sample OIDC workforce pool provider with updated description." + disabled = true + attribute_condition = "false" +} +`, context) +} + +func testAccIAMWorkforcePoolWorkforcePoolOidcProvider_extendedAttributesOauth2Client_update_clearConfig(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workforce_pool" "my_pool" { + workforce_pool_id = "my-pool-%{random_suffix}" + parent = "organizations/%{org_id}" + location = "global" +} + +resource "google_iam_workforce_pool_provider" "my_provider" { + workforce_pool_id = google_iam_workforce_pool.my_pool.workforce_pool_id + location = google_iam_workforce_pool.my_pool.location + provider_id = "my-provider-%{random_suffix}" + attribute_mapping = { + "google.subject" = "false" + } + oidc { + issuer_uri = "https://login.microsoftonline.com/826602fe-2101-470c-9d71-ee1343668989/v2.0" + client_id = "https://analysis.windows.net/powerbi/connector/GoogleBigQuery" + client_secret { + value { + plain_text = "client-secret" + } + } + web_sso_config { + response_type = "CODE" + assertion_claims_behavior = "MERGE_USER_INFO_OVER_ID_TOKEN_CLAIMS" + additional_scopes = ["groups", "roles"] + } + } + display_name = "New Display name" + description = "A sample OIDC workforce pool provider with updated description." 
+ disabled = true + attribute_condition = "false" +} +`, context) +} + +func testAccIAMWorkforcePoolWorkforcePoolOidcProvider_extendedAttributesOauth2Client_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workforce_pool" "my_pool" { + workforce_pool_id = "my-pool-%{random_suffix}" + parent = "organizations/%{org_id}" + location = "global" +} + +resource "google_iam_workforce_pool_provider" "my_provider" { + workforce_pool_id = google_iam_workforce_pool.my_pool.workforce_pool_id + location = google_iam_workforce_pool.my_pool.location + provider_id = "my-provider-%{random_suffix}" + attribute_mapping = { + "google.subject" = "false" + } + oidc { + issuer_uri = "https://login.microsoftonline.com/826602fe-2101-470c-9d71-ee1343668989/v2.0" + client_id = "https://analysis.windows.net/powerbi/connector/GoogleBigQuery" + client_secret { + value { + plain_text = "client-secret" + } + } + web_sso_config { + response_type = "CODE" + assertion_claims_behavior = "MERGE_USER_INFO_OVER_ID_TOKEN_CLAIMS" + additional_scopes = ["groups", "roles"] + } + } + extended_attributes_oauth2_client { + issuer_uri = "https://login.microsoftonline.com/826602fe-2101-470c-9d71-ee1343668989/v2.0" + client_id = "client-id" + client_secret { + value { + plain_text = "client-secret" + } + } + attributes_type = "AZURE_AD_GROUPS_ID" + } + display_name = "New Display name" + description = "A sample OIDC workforce pool provider with updated description." + disabled = true + attribute_condition = "false" +} +`, context) +} + + +func testAccIAMWorkforcePoolWorkforcePoolSamlProvider_extendedAttributesOauth2Client_full(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workforce_pool" "my_pool" { + workforce_pool_id = "my-pool-%{random_suffix}" + parent = "organizations/%{org_id}" + location = "global" +} + +resource "google_iam_workforce_pool_provider" "my_provider" { + workforce_pool_id = google_iam_workforce_pool.my_pool.workforce_pool_id + location = google_iam_workforce_pool.my_pool.location + provider_id = "my-provider-%{random_suffix}" + attribute_mapping = { + "google.subject" = "assertion.sub" + } + saml { + idp_metadata_xml = " MIIDpDCCAoygAwIBAgIGAX7/5qPhMA0GCSqGSIb3DQEBCwUAMIGSMQswCQYDVQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEUMBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi00NTg0MjExHDAaBgkqhkiG9w0BCQEWDWluZm9Ab2t0YS5jb20wHhcNMjIwMjE2MDAxOTEyWhcNMzIwMjE2MDAyMDEyWjCBkjELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNVBAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtNDU4NDIxMRwwGgYJKoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxrBl7GKz52cRpxF9xCsirnRuMxnhFBaUrsHqAQrLqWmdlpNYZTVg+T9iQ+aq/iE68L+BRZcZniKIvW58wqqS0ltXVvIkXuDSvnvnkkI5yMIVErR20K8jSOKQm1FmK+fgAJ4koshFiu9oLiqu0Ejc0DuL3/XRsb4RuxjktKTb1khgBBtb+7idEk0sFR0RPefAweXImJkDHDm7SxjDwGJUubbqpdTxasPr0W+AHI1VUzsUsTiHAoyb0XDkYqHfDzhj/ZdIEl4zHQ3bEZvlD984ztAnmX2SuFLLKfXeAAGHei8MMixJvwxYkkPeYZ/5h8WgBZPP4heS2CPjwYExt29L8QIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQARjJFz++a9Z5IQGFzsZMrX2EDR5ML4xxUiQkbhld1S1PljOLcYFARDmUC2YYHOueU4ee8Jid9nPGEUebV/4Jok+b+oQh+dWMgiWjSLI7h5q4OYZ3VJtdlVwgMFt2iz+/4yBKMUZ50g3Qgg36vE34us+eKitg759JgCNsibxn0qtJgSPm0sgP2L6yTaLnoEUbXBRxCwynTSkp9ZijZqEzbhN0e2dWv7Rx/nfpohpDP6vEiFImKFHpDSv3M/5de1ytQzPFrZBYt9WlzlYwE1aD9FHCxdd+rWgYMVVoRaRmndpV/Rq3QUuDuFJtaoX11bC7ExkOpg9KstZzA63i3VcfYv" + } + extended_attributes_oauth2_client { + issuer_uri = 
"https://login.microsoftonline.com/3c75f51a-5393-4b53-8efe-fa85c311e533/v2.0" + client_id = "client-id" + client_secret { + value { + plain_text = "client-secret" + } + } + attributes_type = "AZURE_AD_GROUPS_ID" + query_parameters { + filter = "mail:gcp" + } + } + display_name = "Display name" + description = "A sample OIDC workforce pool provider." + disabled = false + attribute_condition = "true" +} +`, context) +} + +func testAccIAMWorkforcePoolWorkforcePoolSamlProvider_extendedAttributesOauth2Client_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workforce_pool" "my_pool" { + workforce_pool_id = "my-pool-%{random_suffix}" + parent = "organizations/%{org_id}" + location = "global" +} + +resource "google_iam_workforce_pool_provider" "my_provider" { + workforce_pool_id = google_iam_workforce_pool.my_pool.workforce_pool_id + location = google_iam_workforce_pool.my_pool.location + provider_id = "my-provider-%{random_suffix}" + attribute_mapping = { + "google.subject" = "false" + } + saml { + idp_metadata_xml = " MIIDpDCCAoygAwIBAgIGAX7/5qPhMA0GCSqGSIb3DQEBCwUAMIGSMQswCQYDVQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEUMBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi00NTg0MjExHDAaBgkqhkiG9w0BCQEWDWluZm9Ab2t0YS5jb20wHhcNMjIwMjE2MDAxOTEyWhcNMzIwMjE2MDAyMDEyWjCBkjELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNVBAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtNDU4NDIxMRwwGgYJKoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxrBl7GKz52cRpxF9xCsirnRuMxnhFBaUrsHqAQrLqWmdlpNYZTVg+T9iQ+aq/iE68L+BRZcZniKIvW58wqqS0ltXVvIkXuDSvnvnkkI5yMIVErR20K8jSOKQm1FmK+fgAJ4koshFiu9oLiqu0Ejc0DuL3/XRsb4RuxjktKTb1khgBBtb+7idEk0sFR0RPefAweXImJkDHDm7SxjDwGJUubbqpdTxasPr0W+AHI1VUzsUsTiHAoyb0XDkYqHfDzhj/ZdIEl4zHQ3bEZvlD984ztAnmX2SuFLLKfXeAAGHei8MMixJvwxYkkPeYZ/5h8WgBZPP4heS2CPjwYExt29L8QIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQARjJFz++a9Z5IQGFzsZMrX2EDR5ML4xxUiQkbhld1S1PljOLcYFARDmUC2YYHOueU4ee8Jid9nPGEUebV/4Jok+b+oQh+dWMgiWjSLI7h5q4OYZ3VJtdlVwgMFt2iz+/4yBKMUZ50g3Qgg36vE34us+eKitg759JgCNsibxn0qtJgSPm0sgP2L6yTaLnoEUbXBRxCwynTSkp9ZijZqEzbhN0e2dWv7Rx/nfpohpDP6vEiFImKFHpDSv3M/5de1ytQzPFrZBYt9WlzlYwE1aD9FHCxdd+rWgYMVVoRaRmndpV/Rq3QUuDuFJtaoX11bC7ExkOpg9KstZzA63i3VcfYv" + } + extended_attributes_oauth2_client { + issuer_uri = "https://login.microsoftonline.com/3c75f51a-5393-4b53-8efe-fa85c311e533/v2.0" + client_id = "new-client-id" + client_secret { + value { + plain_text = "new-client-secret" + } + } + attributes_type = "AZURE_AD_GROUPS_ID" + query_parameters { + filter = "displayName:gcp" + } + } + display_name = "New Display name" + description = "A sample OIDC workforce pool provider with updated description." 
+ disabled = true + attribute_condition = "false" +} +`, context) +} + +func testAccIAMWorkforcePoolWorkforcePoolSamlProvider_extendedAttributesOauth2Client_update_clearConfig(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workforce_pool" "my_pool" { + workforce_pool_id = "my-pool-%{random_suffix}" + parent = "organizations/%{org_id}" + location = "global" +} + +resource "google_iam_workforce_pool_provider" "my_provider" { + workforce_pool_id = google_iam_workforce_pool.my_pool.workforce_pool_id + location = google_iam_workforce_pool.my_pool.location + provider_id = "my-provider-%{random_suffix}" + attribute_mapping = { + "google.subject" = "false" + } + saml { + idp_metadata_xml = " MIIDpDCCAoygAwIBAgIGAX7/5qPhMA0GCSqGSIb3DQEBCwUAMIGSMQswCQYDVQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEUMBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi00NTg0MjExHDAaBgkqhkiG9w0BCQEWDWluZm9Ab2t0YS5jb20wHhcNMjIwMjE2MDAxOTEyWhcNMzIwMjE2MDAyMDEyWjCBkjELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNVBAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtNDU4NDIxMRwwGgYJKoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxrBl7GKz52cRpxF9xCsirnRuMxnhFBaUrsHqAQrLqWmdlpNYZTVg+T9iQ+aq/iE68L+BRZcZniKIvW58wqqS0ltXVvIkXuDSvnvnkkI5yMIVErR20K8jSOKQm1FmK+fgAJ4koshFiu9oLiqu0Ejc0DuL3/XRsb4RuxjktKTb1khgBBtb+7idEk0sFR0RPefAweXImJkDHDm7SxjDwGJUubbqpdTxasPr0W+AHI1VUzsUsTiHAoyb0XDkYqHfDzhj/ZdIEl4zHQ3bEZvlD984ztAnmX2SuFLLKfXeAAGHei8MMixJvwxYkkPeYZ/5h8WgBZPP4heS2CPjwYExt29L8QIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQARjJFz++a9Z5IQGFzsZMrX2EDR5ML4xxUiQkbhld1S1PljOLcYFARDmUC2YYHOueU4ee8Jid9nPGEUebV/4Jok+b+oQh+dWMgiWjSLI7h5q4OYZ3VJtdlVwgMFt2iz+/4yBKMUZ50g3Qgg36vE34us+eKitg759JgCNsibxn0qtJgSPm0sgP2L6yTaLnoEUbXBRxCwynTSkp9ZijZqEzbhN0e2dWv7Rx/nfpohpDP6vEiFImKFHpDSv3M/5de1ytQzPFrZBYt9WlzlYwE1aD9FHCxdd+rWgYMVVoRaRmndpV/Rq3QUuDuFJtaoX11bC7ExkOpg9KstZzA63i3VcfYv" + } + display_name = "New Display name" + description = "A sample OIDC workforce pool provider with updated description." 
+ disabled = true + attribute_condition = "false" +} +`, context) +} + +func testAccIAMWorkforcePoolWorkforcePoolSamlProvider_extendedAttributesOauth2Client_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workforce_pool" "my_pool" { + workforce_pool_id = "my-pool-%{random_suffix}" + parent = "organizations/%{org_id}" + location = "global" +} + +resource "google_iam_workforce_pool_provider" "my_provider" { + workforce_pool_id = google_iam_workforce_pool.my_pool.workforce_pool_id + location = google_iam_workforce_pool.my_pool.location + provider_id = "my-provider-%{random_suffix}" + attribute_mapping = { + "google.subject" = "false" + } + saml { + idp_metadata_xml = " MIIDpDCCAoygAwIBAgIGAX7/5qPhMA0GCSqGSIb3DQEBCwUAMIGSMQswCQYDVQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEUMBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi00NTg0MjExHDAaBgkqhkiG9w0BCQEWDWluZm9Ab2t0YS5jb20wHhcNMjIwMjE2MDAxOTEyWhcNMzIwMjE2MDAyMDEyWjCBkjELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNVBAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtNDU4NDIxMRwwGgYJKoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxrBl7GKz52cRpxF9xCsirnRuMxnhFBaUrsHqAQrLqWmdlpNYZTVg+T9iQ+aq/iE68L+BRZcZniKIvW58wqqS0ltXVvIkXuDSvnvnkkI5yMIVErR20K8jSOKQm1FmK+fgAJ4koshFiu9oLiqu0Ejc0DuL3/XRsb4RuxjktKTb1khgBBtb+7idEk0sFR0RPefAweXImJkDHDm7SxjDwGJUubbqpdTxasPr0W+AHI1VUzsUsTiHAoyb0XDkYqHfDzhj/ZdIEl4zHQ3bEZvlD984ztAnmX2SuFLLKfXeAAGHei8MMixJvwxYkkPeYZ/5h8WgBZPP4heS2CPjwYExt29L8QIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQARjJFz++a9Z5IQGFzsZMrX2EDR5ML4xxUiQkbhld1S1PljOLcYFARDmUC2YYHOueU4ee8Jid9nPGEUebV/4Jok+b+oQh+dWMgiWjSLI7h5q4OYZ3VJtdlVwgMFt2iz+/4yBKMUZ50g3Qgg36vE34us+eKitg759JgCNsibxn0qtJgSPm0sgP2L6yTaLnoEUbXBRxCwynTSkp9ZijZqEzbhN0e2dWv7Rx/nfpohpDP6vEiFImKFHpDSv3M/5de1ytQzPFrZBYt9WlzlYwE1aD9FHCxdd+rWgYMVVoRaRmndpV/Rq3QUuDuFJtaoX11bC7ExkOpg9KstZzA63i3VcfYv" + } + extended_attributes_oauth2_client { + issuer_uri = "https://login.microsoftonline.com/3c75f51a-5393-4b53-8efe-fa85c311e533/v2.0" + client_id = "client-id" + client_secret { + value { + plain_text = "client-secret" + } + } + attributes_type = "AZURE_AD_GROUPS_ID" + } + display_name = "New Display name" + description = "A sample OIDC workforce pool provider with updated description." + disabled = true + attribute_condition = "false" +} +`, context) +} + func testAccIAMWorkforcePoolWorkforcePoolProvider_destroy(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_iam_workforce_pool" "my_pool" { From c58392b24ac4e21789ba1bc05b5643804e50a79c Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Mon, 8 Sep 2025 20:32:19 +0200 Subject: [PATCH 151/201] diff-processor: add doc detector for ephemeral attributes (#15075) --- .../documentparser/document_parser.go | 7 +++-- .../documentparser/document_parser_test.go | 4 +++ .../testdata/resource.html.markdown | 28 +++++++++++++++++++ 3 files changed, 37 insertions(+), 2 deletions(-) diff --git a/tools/diff-processor/documentparser/document_parser.go b/tools/diff-processor/documentparser/document_parser.go index 2950c3e971e7..1ab5b6a0ba76 100644 --- a/tools/diff-processor/documentparser/document_parser.go +++ b/tools/diff-processor/documentparser/document_parser.go @@ -66,7 +66,7 @@ func traverse(paths *[]string, path string, n *node) { // Parse parse a resource document markdown's arguments and attributes section. 
// The parsed file format is defined in mmv1/templates/terraform/resource.html.markdown.tmpl. func (d *DocumentParser) Parse(src []byte) error { - var argument, attribute string + var argument, attribute, ephemeralAttribute string for _, p := range strings.Split(string(src), "\n"+sectionSeparator) { if strings.HasPrefix(p, "Attributes Reference") { attribute = p @@ -74,8 +74,11 @@ func (d *DocumentParser) Parse(src []byte) error { if strings.HasPrefix(p, "Argument Reference") { argument = p } + if strings.HasPrefix(p, "Ephemeral Attributes Reference") { + ephemeralAttribute = p + } } - for _, text := range []string{argument, attribute} { + for _, text := range []string{argument, attribute, ephemeralAttribute} { if len(text) != 0 { sections := horizontalLineRegex.Split(text, -1) var allTopLevelFieldSections string diff --git a/tools/diff-processor/documentparser/document_parser_test.go b/tools/diff-processor/documentparser/document_parser_test.go index 0144eeabfb87..cfb9ae91fec4 100644 --- a/tools/diff-processor/documentparser/document_parser_test.go +++ b/tools/diff-processor/documentparser/document_parser_test.go @@ -78,6 +78,10 @@ func TestParse(t *testing.T) { "workload_identity_config.issuer_uri", "workload_identity_config.workload_pool", "errors.message", + // The below are from the ephemeral attributes section. + "shared_secret_wo", + "sensitive_params", + "sensitive_params.secret_access_key_wo", } got := parser.FlattenFields() // gotAttributes := parser.Attributes() diff --git a/tools/diff-processor/testdata/resource.html.markdown b/tools/diff-processor/testdata/resource.html.markdown index 2f5a29f6def4..14e6c150b9e2 100644 --- a/tools/diff-processor/testdata/resource.html.markdown +++ b/tools/diff-processor/testdata/resource.html.markdown @@ -275,6 +275,34 @@ exported: (Optional) Human-friendly description of the error. +## Ephemeral Attributes Reference + +The following write-only attributes are supported: + +* `shared_secret_wo` - + (Optional) + Shared secret used to set the secure session between the Cloud VPN + gateway and the peer VPN gateway. + Note: This property is write-only and will not be read from the API. For more info see [updating write-only attributes](/docs/providers/google/guides/using_write_only_attributes.html#updating-write-only-attributes) + **Note**: This property is write-only and will not be read from the API. + +* `sensitive_params` - + (Optional) + Different parameters are configured primarily using the the `params` field on this + resource. This block contains the parameters which contain secrets or passwords so that they can be marked + sensitive and hidden from plan output. The name of the field, eg: secret_access_key, will be the key + in the `params` map in the api request. + Credentials may not be specified in both locations and will cause an error. Changing from one location + to a different credential configuration in the config will require an apply to update state. + Structure is [documented below](#nested_sensitive_params). + +The `sensitive_params` block supports: + +* `secret_access_key_wo` - + (Optional) + The Secret Access Key of the AWS account transferring data from. + **Note**: This property is write-only and will not be read from the API. 
+ ## Timeouts Lorem ipsum From fe64002aba3ae9f3e3d5f54a5931a257ef5ab89d Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Mon, 8 Sep 2025 12:54:06 -0700 Subject: [PATCH 152/201] Remove send_empty_value for suspended and disableRollbackIfRolloutPending fields (#14982) --- mmv1/products/clouddeploy/Automation.yaml | 2 -- mmv1/products/clouddeploy/DeployPolicy.yaml | 1 - .../resource_clouddeploy_automation_test.go | 9 +++++++++ .../resource_clouddeploy_deploy_policy_test.go | 10 ++++++++++ 4 files changed, 19 insertions(+), 3 deletions(-) diff --git a/mmv1/products/clouddeploy/Automation.yaml b/mmv1/products/clouddeploy/Automation.yaml index 1947ccf7ceec..5ab9b46f2b64 100644 --- a/mmv1/products/clouddeploy/Automation.yaml +++ b/mmv1/products/clouddeploy/Automation.yaml @@ -104,7 +104,6 @@ properties: - name: 'suspended' type: Boolean description: "Optional. When Suspended, automation is deactivated from execution." - send_empty_value: true - name: 'serviceAccount' type: String description: "Required. Email address of the user-managed IAM service account that creates Cloud Deploy release and rollout resources." @@ -242,7 +241,6 @@ properties: description: "Optional. The starting phase ID for the Rollout. If unspecified, the Rollout will start in the stable phase." - name: 'disableRollbackIfRolloutPending' type: Boolean - send_empty_value: true description: "Optional. If pending rollout exists on the target, the rollback operation will be aborted." - name: 'timedPromoteReleaseRule' type: NestedObject diff --git a/mmv1/products/clouddeploy/DeployPolicy.yaml b/mmv1/products/clouddeploy/DeployPolicy.yaml index 56dc1bf0633a..e18f864f3b86 100644 --- a/mmv1/products/clouddeploy/DeployPolicy.yaml +++ b/mmv1/products/clouddeploy/DeployPolicy.yaml @@ -94,7 +94,6 @@ properties: - name: "suspended" type: Boolean description: "When suspended, the policy will not prevent actions from occurring, even if the action violates the policy." - send_empty_value: true - name: "selectors" type: Array description: "Selected resources to which the policy will be applied. At least one selector is required. If one selector matches the resource the policy applies. For example, if there are two selectors and the action being attempted matches one of them, the policy will apply to that action." 
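For context, a minimal sketch of an automation that sets `suspended` explicitly; the layout loosely follows the provider's basic automation example rather than anything added in this change, and the pipeline name and service account are placeholders. A reasonable reading of removing `send_empty_value` is that an unset `suspended` is now omitted from API requests instead of being sent as an explicit `false`; treat that as an assumption about MMV1's generated behavior, exercised indirectly by the revert steps added to the tests below.

```hcl
resource "google_clouddeploy_automation" "suspended_example" {
  name              = "example-automation"         # placeholder name
  location          = "us-central1"
  delivery_pipeline = "example-delivery-pipeline"  # placeholder pipeline
  service_account   = "deployer@example-project.iam.gserviceaccount.com" # placeholder
  selector {
    targets {
      id = "*"
    }
  }
  # Explicitly set values are still sent as before; with send_empty_value
  # removed, an omitted suspended is expected to be left out of the request
  # (assumption about generated provider behavior).
  suspended = true
  rules {
    promote_release_rule {
      id = "promote-release"
    }
  }
}
```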
diff --git a/mmv1/third_party/terraform/services/clouddeploy/resource_clouddeploy_automation_test.go b/mmv1/third_party/terraform/services/clouddeploy/resource_clouddeploy_automation_test.go index e4c414598744..2b6d9930c08b 100644 --- a/mmv1/third_party/terraform/services/clouddeploy/resource_clouddeploy_automation_test.go +++ b/mmv1/third_party/terraform/services/clouddeploy/resource_clouddeploy_automation_test.go @@ -40,6 +40,15 @@ func TestAccClouddeployAutomation_update(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"location", "delivery_pipeline", "annotations", "labels", "terraform_labels"}, }, + { + Config: testAccClouddeployAutomation_basic(context), + }, + { + ResourceName: "google_clouddeploy_automation.automation", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "delivery_pipeline", "annotations", "labels", "terraform_labels"}, + }, }, }) } diff --git a/mmv1/third_party/terraform/services/clouddeploy/resource_clouddeploy_deploy_policy_test.go b/mmv1/third_party/terraform/services/clouddeploy/resource_clouddeploy_deploy_policy_test.go index eecdaf1a4fb1..8cca70af4a88 100644 --- a/mmv1/third_party/terraform/services/clouddeploy/resource_clouddeploy_deploy_policy_test.go +++ b/mmv1/third_party/terraform/services/clouddeploy/resource_clouddeploy_deploy_policy_test.go @@ -38,6 +38,15 @@ func TestAccClouddeployDeployPolicy_update(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"location", "annotations", "labels", "terraform_labels"}, }, + { + Config: testAccClouddeployDeployPolicy_basic(context), + }, + { + ResourceName: "google_clouddeploy_deploy_policy.deploy_policy", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "annotations", "labels", "terraform_labels"}, + }, }, }) } @@ -85,6 +94,7 @@ resource "google_clouddeploy_deploy_policy" "deploy_policy" { id = "tf-test-cd-pipeline%{random_suffix}" } } + suspended = true rules { rollout_restriction { id = "rule" From 6f1f501695345ad25f094e8976b18c092f1d6835 Mon Sep 17 00:00:00 2001 From: bcreddy-gcp <123543489+bcreddy-gcp@users.noreply.github.com> Date: Mon, 8 Sep 2025 13:11:20 -0700 Subject: [PATCH 153/201] workbench: Make report-notebook-metrics but unmodifiable (#15080) --- mmv1/templates/terraform/constants/workbench_instance.go.tmpl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/templates/terraform/constants/workbench_instance.go.tmpl b/mmv1/templates/terraform/constants/workbench_instance.go.tmpl index 817474e89de7..ff829cd5b9d1 100644 --- a/mmv1/templates/terraform/constants/workbench_instance.go.tmpl +++ b/mmv1/templates/terraform/constants/workbench_instance.go.tmpl @@ -26,6 +26,7 @@ func WorkbenchInstanceLabelsDiffSuppress(k, old, new string, d *schema.ResourceD var WorkbenchInstanceSettableUnmodifiableDefaultMetadata = []string{ "install-monitoring-agent", "serial-port-logging-enable", + "report-notebook-metrics", } var WorkbenchInstanceEUCProvidedAdditionalMetadata = []string{ @@ -84,7 +85,6 @@ var WorkbenchInstanceProvidedMetadata = []string{ "proxy-user-mail", "report-container-health", "report-event-url", - "report-notebook-metrics", "report-system-health", "report-system-status", "resource-url", From b834fdcbc2d24075fa9bde7008971498115e2f32 Mon Sep 17 00:00:00 2001 From: bcreddy-gcp <123543489+bcreddy-gcp@users.noreply.github.com> Date: Mon, 8 Sep 2025 13:41:11 -0700 Subject: [PATCH 154/201] workbench: Fix issue with apply failing with labels was 
null (#15055) --- mmv1/products/workbench/Instance.yaml | 1 - .../constants/workbench_instance.go.tmpl | 25 ------------------- .../resource_workbench_instance_test.go | 10 ++++++++ 3 files changed, 10 insertions(+), 26 deletions(-) diff --git a/mmv1/products/workbench/Instance.yaml b/mmv1/products/workbench/Instance.yaml index bfdf2ff10181..0e45b2137a91 100644 --- a/mmv1/products/workbench/Instance.yaml +++ b/mmv1/products/workbench/Instance.yaml @@ -615,7 +615,6 @@ properties: description: | Optional. Labels to apply to this instance. These can be later modified by the UpdateInstance method. - diff_suppress_func: 'WorkbenchInstanceLabelsDiffSuppress' - name: 'enableThirdPartyIdentity' type: Boolean description: | diff --git a/mmv1/templates/terraform/constants/workbench_instance.go.tmpl b/mmv1/templates/terraform/constants/workbench_instance.go.tmpl index ff829cd5b9d1..325039f410db 100644 --- a/mmv1/templates/terraform/constants/workbench_instance.go.tmpl +++ b/mmv1/templates/terraform/constants/workbench_instance.go.tmpl @@ -1,28 +1,3 @@ -var WorkbenchInstanceProvidedLabels = []string{ - "consumer-project-id", - "consumer-project-number", - "notebooks-product", - "resource-name", -} - -func WorkbenchInstanceLabelsDiffSuppress(k, old, new string, d *schema.ResourceData) bool { - // Suppress diffs for the labels - for _, label := range WorkbenchInstanceProvidedLabels { - if strings.Contains(k, label) && new == "" { - return true - } - } - - // Let diff be determined by labels (above) - if strings.Contains(k, "labels.%") { - return true - } - - // For other keys, don't suppress diff. - return false -} - - var WorkbenchInstanceSettableUnmodifiableDefaultMetadata = []string{ "install-monitoring-agent", "serial-port-logging-enable", diff --git a/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_test.go b/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_test.go index 1ad111258b41..5a280ef140d2 100644 --- a/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_test.go +++ b/mmv1/third_party/terraform/services/workbench/resource_workbench_instance_test.go @@ -732,6 +732,9 @@ resource "google_workbench_instance" "instance" { } func TestAccWorkbenchInstance_updatelabels(t *testing.T) { + // Skip it in VCR test because of the randomness of uuid in "labels" field + // which causes the replaying mode after recording mode failing in VCR test + acctest.SkipIfVcr(t) t.Parallel() context := map[string]interface{}{ @@ -741,6 +744,9 @@ func TestAccWorkbenchInstance_updatelabels(t *testing.T) { acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "random": {}, + }, Steps: []resource.TestStep{ { Config: testAccWorkbenchInstance_label(context), @@ -787,11 +793,15 @@ func TestAccWorkbenchInstance_updatelabels(t *testing.T) { func testAccWorkbenchInstance_label(context map[string]interface{}) string { return acctest.Nprintf(` +resource "random_uuid" "test" { +} + resource "google_workbench_instance" "instance" { name = "tf-test-workbench-instance%{random_suffix}" location = "us-central1-a" labels = { k = "val" + computed_label = "${random_uuid.test.result}" } } `, context) From 2f06c6fc926e1f36364cc100d34a1e52035576e4 Mon Sep 17 00:00:00 2001 From: James Duncan Date: Mon, 8 Sep 2025 17:27:09 -0400 Subject: [PATCH 155/201] Make allow_global_access conditionally immutable for 
INTERNAL_MANAGED forwarding rules (#15079) --- .../constants/compute_forwarding_rule.go.tmpl | 8 + ...ource_compute_forwarding_rule_test.go.tmpl | 180 ++++++++++++++++++ 2 files changed, 188 insertions(+) diff --git a/mmv1/templates/terraform/constants/compute_forwarding_rule.go.tmpl b/mmv1/templates/terraform/constants/compute_forwarding_rule.go.tmpl index cef8664a7914..17d3697ffb1d 100644 --- a/mmv1/templates/terraform/constants/compute_forwarding_rule.go.tmpl +++ b/mmv1/templates/terraform/constants/compute_forwarding_rule.go.tmpl @@ -12,6 +12,14 @@ func forwardingRuleCustomizeDiff(_ context.Context, diff *schema.ResourceDiff, v } } } + + // Force recreation if allow_global_access changes for INTERNAL_MANAGED load balancing scheme + if diff.Id() != "" && diff.HasChange("allow_global_access") { + if loadBalancingScheme, ok := diff.Get("load_balancing_scheme").(string); ok && loadBalancingScheme == "INTERNAL_MANAGED" { + diff.ForceNew("allow_global_access") + } + } + return nil } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_forwarding_rule_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_forwarding_rule_test.go.tmpl index 46b742cb59e2..d79450627547 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_forwarding_rule_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_forwarding_rule_test.go.tmpl @@ -920,3 +920,183 @@ resource "google_compute_network" "custom-test" { } `, context) } + +func TestAccComputeForwardingRule_allowGlobalAccessUpdate_Internal(t *testing.T) { + t.Parallel() + + suffix := acctest.RandString(t, 10) + poolName := fmt.Sprintf("tf-test-%s", suffix) + ruleName := fmt.Sprintf("tf-test-%s", suffix) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeForwardingRuleDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeForwardingRule_allowGlobalAccess_Internal(poolName, ruleName, false), + }, + { + ResourceName: "google_compute_forwarding_rule.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"backend_service", "network", "subnetwork", "region"}, + }, + { + Config: testAccComputeForwardingRule_allowGlobalAccess_Internal(poolName, ruleName, true), + }, + { + ResourceName: "google_compute_forwarding_rule.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"backend_service", "network", "subnetwork", "region"}, + }, + }, + }) +} + +func TestAccComputeForwardingRule_allowGlobalAccessUpdate_InternalManaged(t *testing.T) { + t.Parallel() + + suffix := acctest.RandString(t, 10) + poolName := fmt.Sprintf("tf-test-%s", suffix) + ruleName := fmt.Sprintf("tf-test-%s", suffix) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeForwardingRuleDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeForwardingRule_allowGlobalAccess_InternalManaged(poolName, ruleName, false), + }, + { + ResourceName: "google_compute_forwarding_rule.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"target", "network", "subnetwork", "region"}, + }, + { + // This should trigger recreation due to immutability for INTERNAL_MANAGED + Config: 
testAccComputeForwardingRule_allowGlobalAccess_InternalManaged(poolName, ruleName, true), + }, + { + ResourceName: "google_compute_forwarding_rule.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"target", "network", "subnetwork", "region"}, + }, + }, + }) +} + +func testAccComputeForwardingRule_allowGlobalAccess_Internal(poolName, ruleName string, allowGlobalAccess bool) string { + return fmt.Sprintf(` +resource "google_compute_network" "default" { + name = "%s-network" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + name = "%s-subnet" + ip_cidr_range = "10.0.0.0/16" + region = "us-central1" + network = google_compute_network.default.id +} + +resource "google_compute_health_check" "default" { + name = "%s-hc" + check_interval_sec = 1 + timeout_sec = 1 + tcp_health_check { + port = "80" + } +} + +resource "google_compute_region_backend_service" "default" { + name = "%s-backend" + region = "us-central1" + health_checks = [google_compute_health_check.default.id] + load_balancing_scheme = "INTERNAL" + protocol = "TCP" +} + +resource "google_compute_forwarding_rule" "foobar" { + name = "%s" + region = "us-central1" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + load_balancing_scheme = "INTERNAL" + backend_service = google_compute_region_backend_service.default.id + all_ports = true + allow_global_access = %t +} +`, poolName, poolName, poolName, poolName, ruleName, allowGlobalAccess) +} + +func testAccComputeForwardingRule_allowGlobalAccess_InternalManaged(poolName, ruleName string, allowGlobalAccess bool) string { + return fmt.Sprintf(` +resource "google_compute_network" "default" { + name = "%s-network" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + name = "%s-subnet" + ip_cidr_range = "10.0.0.0/16" + region = "us-central1" + network = google_compute_network.default.id +} + +resource "google_compute_subnetwork" "proxy" { + name = "%s-proxy-subnet" + ip_cidr_range = "10.1.0.0/24" + region = "us-central1" + network = google_compute_network.default.id + purpose = "REGIONAL_MANAGED_PROXY" + role = "ACTIVE" +} + +resource "google_compute_health_check" "default" { + name = "%s-hc" + check_interval_sec = 1 + timeout_sec = 1 + http_health_check { + port = "80" + } +} + +resource "google_compute_region_backend_service" "default" { + name = "%s-backend" + region = "us-central1" + health_checks = [google_compute_health_check.default.id] + load_balancing_scheme = "INTERNAL_MANAGED" + protocol = "HTTP" +} + +resource "google_compute_region_url_map" "default" { + name = "%s-url-map" + region = "us-central1" + default_service = google_compute_region_backend_service.default.id +} + +resource "google_compute_region_target_http_proxy" "default" { + name = "%s-http-proxy" + region = "us-central1" + url_map = google_compute_region_url_map.default.id +} + +resource "google_compute_forwarding_rule" "foobar" { + name = "%s" + region = "us-central1" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + load_balancing_scheme = "INTERNAL_MANAGED" + target = google_compute_region_target_http_proxy.default.id + port_range = "80" + allow_global_access = %t + + depends_on = [google_compute_subnetwork.proxy] +} +`, poolName, poolName, poolName, poolName, poolName, poolName, poolName, ruleName, allowGlobalAccess) +} From d6be8f03fe6b7e39e3b2ff647dbb2b6149949ca6 Mon Sep 17 00:00:00 2001 From: Ankit 
Sharma <41139383+ankitiit84@users.noreply.github.com> Date: Mon, 8 Sep 2025 14:45:54 -0700 Subject: [PATCH 156/201] Add Big Query Data Policy V2 resource (#14979) --- .../bigquerydatapolicyv2/DataPolicy.yaml | 168 ++++++++++++++++++ .../bigquerydatapolicyv2/product.yaml | 23 +++ .../bigquery_datapolicyv2_datapolicy.go.tmpl | 6 + ...uery_datapolicyv2_datapolicy_basic.tf.tmpl | 5 + ...cyv2_datapolicy_predefined_masking.tf.tmpl | 8 + ...ry_datapolicyv2_datapolicy_routine.tf.tmpl | 28 +++ ...tapolicyv2_datapolicy_withgrantees.tf.tmpl | 8 + ...icyv2_datapolicy_withgrantees_test.tf.tmpl | 8 + .../bigquery_datapolicyv2_datapolicy.go.tmpl | 1 + .../components/inputs/services_beta.kt | 5 + .../components/inputs/services_ga.kt | 5 + ..._bigquery_datapolicyv2_data_policy_test.go | 79 ++++++++ 12 files changed, 344 insertions(+) create mode 100644 mmv1/products/bigquerydatapolicyv2/DataPolicy.yaml create mode 100644 mmv1/products/bigquerydatapolicyv2/product.yaml create mode 100644 mmv1/templates/terraform/encoders/bigquery_datapolicyv2_datapolicy.go.tmpl create mode 100644 mmv1/templates/terraform/examples/bigquery_datapolicyv2_datapolicy_basic.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/bigquery_datapolicyv2_datapolicy_predefined_masking.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/bigquery_datapolicyv2_datapolicy_routine.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/bigquery_datapolicyv2_datapolicy_withgrantees.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/bigquery_datapolicyv2_datapolicy_withgrantees_test.tf.tmpl create mode 100644 mmv1/templates/terraform/update_encoder/bigquery_datapolicyv2_datapolicy.go.tmpl create mode 100644 mmv1/third_party/terraform/services/bigquerydatapolicyv2/resource_bigquery_datapolicyv2_data_policy_test.go diff --git a/mmv1/products/bigquerydatapolicyv2/DataPolicy.yaml b/mmv1/products/bigquerydatapolicyv2/DataPolicy.yaml new file mode 100644 index 000000000000..64adfe0ed315 --- /dev/null +++ b/mmv1/products/bigquerydatapolicyv2/DataPolicy.yaml @@ -0,0 +1,168 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +--- +name: DataPolicy +description: BigQuery Data Policy +references: + guides: + 'Official Documentation': 'https://cloud.google.com/bigquery/docs/column-data-masking-intro' + api: 'https://cloud.google.com/bigquery/docs/reference/bigquerydatapolicy/rest/v2/projects.locations.dataPolicies' +docs: null +id_format: 'projects/{{project}}/locations/{{location}}/dataPolicies/{{data_policy_id}}' +base_url: 'projects/{{project}}/locations/{{location}}/dataPolicies' +self_link: 'projects/{{project}}/locations/{{location}}/dataPolicies/{{data_policy_id}}' +create_url: 'projects/{{project}}/locations/{{location}}/dataPolicies' +update_verb: 'PATCH' +update_mask: true +import_format: + - 'projects/{{project}}/locations/{{location}}/dataPolicies/{{data_policy_id}}' + - '{{project}}/{{location}}/{{data_policy_id}}' + - '{{location}}/{{data_policy_id}}' +timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 +iam_policy: + method_name_separator: ':' + fetch_iam_policy_verb: 'POST' + parent_resource_attribute: 'data_policy_id' + example_config_body: 'templates/terraform/iam/iam_attributes.go.tmpl' + import_format: + - 'projects/{{project}}/locations/{{location}}/dataPolicies/{{data_policy_id}}' + - '{{data_policy_id}}' +custom_code: + encoder: templates/terraform/encoders/bigquery_datapolicyv2_datapolicy.go.tmpl + update_encoder: templates/terraform/update_encoder/bigquery_datapolicyv2_datapolicy.go.tmpl +examples: + - name: 'bigquery_datapolicyv2_datapolicy_basic' + primary_resource_id: 'basic_data_policy' + primary_resource_name: 'fmt.Sprintf("tf_test_basic_data_policy%s", context["random_suffix"])' + vars: + data_policy_id: 'basic_data_policy' + - name: 'bigquery_datapolicyv2_datapolicy_predefined_masking' + primary_resource_id: 'predefined_masking_data_policy' + primary_resource_name: 'fmt.Sprintf("tf_test_predefined_masking_data_policy%s", context["random_suffix"])' + vars: + data_policy_id: 'predefined_masking_data_policy' + - name: 'bigquery_datapolicyv2_datapolicy_routine' + primary_resource_id: 'routine_data_policy' + primary_resource_name: 'fmt.Sprintf("tf_test_routine_data_policy%s", context["random_suffix"])' + vars: + data_policy_id: 'routine_data_policy' + dataset_id: 'dataset_id' + - name: 'bigquery_datapolicyv2_datapolicy_withgrantees' + primary_resource_id: 'data_policy_with_grantees' + primary_resource_name: 'fmt.Sprintf("tf_test_data_policy_with_grantees%s", context["random_suffix"])' + exclude_test: true + vars: + data_policy_id: 'data_policy_with_grantees' + - name: 'bigquery_datapolicyv2_datapolicy_withgrantees_test' + primary_resource_id: 'data_policy_with_grantees' + primary_resource_name: 'fmt.Sprintf("tf_test_data_policy_with_grantees%s", context["random_suffix"])' + exclude_docs: true + vars: + data_policy_id: 'data_policy_with_grantees' +parameters: + - name: location + type: String + description: Resource ID segment making up resource `name`. It identifies the resource + within its parent collection as described in https://google.aip.dev/122. + immutable: true + url_param_only: true + required: true +properties: + - name: dataMaskingPolicy + type: NestedObject + description: The policy used to specify data masking rule. + properties: + - name: predefinedExpression + type: String + description: |- + A predefined masking expression. 
+ Possible values: + SHA256 + ALWAYS_NULL + DEFAULT_MASKING_VALUE + LAST_FOUR_CHARACTERS + FIRST_FOUR_CHARACTERS + EMAIL_MASK + DATE_YEAR_MASK + RANDOM_HASH + - name: routine + type: String + description: |- + The name of the BigQuery routine that contains the custom masking + routine, in the format of + `projects/{project_number}/datasets/{dataset_id}/routines/{routine_id}`. + diff_suppress_func: 'tpgresource.ProjectNumberDiffSuppress' + - name: dataPolicyType + type: String + description: |- + Type of data policy. + Possible values: + DATA_MASKING_POLICY + RAW_DATA_ACCESS_POLICY + COLUMN_LEVEL_SECURITY_POLICY + required: true + - name: etag + type: Fingerprint + description: |- + The etag for this Data Policy. + This field is used for UpdateDataPolicy calls. If Data Policy exists, this + field is required and must match the server's etag. It will also be + populated in the response of GetDataPolicy, CreateDataPolicy, and + UpdateDataPolicy calls. + default_from_api: true + - name: grantees + type: Array + description: |- + The list of IAM principals that have Fine Grained Access to the underlying + data goverened by this data policy. + + Uses the [IAM V2 principal + syntax](https://cloud.google.com/iam/docs/principal-identifiers#v2) Only + supports principal types users, groups, serviceaccounts, cloudidentity. + This field is supported in V2 Data Policy only. In case of V1 data policies + (i.e. verion = 1 and policy_tag is set), this field is not populated. + item_type: + type: String + default_from_api: true + - name: name + type: String + description: |- + Identifier. Resource name of this data policy, in the format of + `projects/{project_number}/locations/{location_id}/dataPolicies/{data_policy_id}`. + output: true + - name: policyTag + type: String + description: |- + Policy tag resource name, in the format of + `projects/{project_number}/locations/{location_id}/taxonomies/{taxonomy_id}/policyTags/{policyTag_id}`. + policy_tag is supported only for V1 data policies. + output: true + - name: version + type: String + description: |- + The version of the Data Policy resource. + Possible values: + V1 + V2 + output: true + - name: dataPolicyId + type: String + description: |- + User-assigned (human readable) ID of the data policy that needs to be + unique within a project. Used as {data_policy_id} in part of the resource + name. + required: true diff --git a/mmv1/products/bigquerydatapolicyv2/product.yaml b/mmv1/products/bigquerydatapolicyv2/product.yaml new file mode 100644 index 000000000000..017161717bac --- /dev/null +++ b/mmv1/products/bigquerydatapolicyv2/product.yaml @@ -0,0 +1,23 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +--- +name: BigqueryDatapolicyv2 +display_name: BigQuery Data Policy V2 +scopes: + - https://www.googleapis.com/auth/cloud-platform +versions: + - base_url: https://bigquerydatapolicy.googleapis.com/v2/ + name: ga +caibaseurl: "" +resourceswithcaiassettype: {} diff --git a/mmv1/templates/terraform/encoders/bigquery_datapolicyv2_datapolicy.go.tmpl b/mmv1/templates/terraform/encoders/bigquery_datapolicyv2_datapolicy.go.tmpl new file mode 100644 index 000000000000..6d740c9773df --- /dev/null +++ b/mmv1/templates/terraform/encoders/bigquery_datapolicyv2_datapolicy.go.tmpl @@ -0,0 +1,6 @@ +// The create request is not in the same format as the resource. +// The API request needs resource to be nested inside the "data_policy" field. +newObj := make(map[string]interface{}) +newObj["dataPolicy"] = obj +newObj["dataPolicyId"] = obj["dataPolicyId"].(string) +return newObj, nil \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/bigquery_datapolicyv2_datapolicy_basic.tf.tmpl b/mmv1/templates/terraform/examples/bigquery_datapolicyv2_datapolicy_basic.tf.tmpl new file mode 100644 index 000000000000..bc0263aa7a6e --- /dev/null +++ b/mmv1/templates/terraform/examples/bigquery_datapolicyv2_datapolicy_basic.tf.tmpl @@ -0,0 +1,5 @@ +resource "google_bigquery_datapolicyv2_data_policy" "{{$.PrimaryResourceId}}" { + location = "us-central1" + data_policy_type = "RAW_DATA_ACCESS_POLICY" + data_policy_id = "{{index $.Vars "data_policy_id"}}" +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/bigquery_datapolicyv2_datapolicy_predefined_masking.tf.tmpl b/mmv1/templates/terraform/examples/bigquery_datapolicyv2_datapolicy_predefined_masking.tf.tmpl new file mode 100644 index 000000000000..2636e766cdc9 --- /dev/null +++ b/mmv1/templates/terraform/examples/bigquery_datapolicyv2_datapolicy_predefined_masking.tf.tmpl @@ -0,0 +1,8 @@ +resource "google_bigquery_datapolicyv2_data_policy" "{{$.PrimaryResourceId}}" { + location = "us-central1" + data_policy_type = "DATA_MASKING_POLICY" + data_masking_policy { + predefined_expression = "SHA256" + } + data_policy_id = "{{index $.Vars "data_policy_id"}}" +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/bigquery_datapolicyv2_datapolicy_routine.tf.tmpl b/mmv1/templates/terraform/examples/bigquery_datapolicyv2_datapolicy_routine.tf.tmpl new file mode 100644 index 000000000000..c07deb96eb08 --- /dev/null +++ b/mmv1/templates/terraform/examples/bigquery_datapolicyv2_datapolicy_routine.tf.tmpl @@ -0,0 +1,28 @@ +resource "google_bigquery_datapolicyv2_data_policy" "{{$.PrimaryResourceId}}" { + location = "us-central1" + data_policy_id = "{{index $.Vars "data_policy_id"}}" + data_policy_type = "DATA_MASKING_POLICY" + data_masking_policy { + routine = google_bigquery_routine.custom_masking_routine.id + } +} + +resource "google_bigquery_dataset" "test" { + dataset_id = "{{index $.Vars "dataset_id"}}" + location = "us-central1" +} + +resource "google_bigquery_routine" "custom_masking_routine" { + dataset_id = google_bigquery_dataset.test.dataset_id + routine_id = "custom_masking_routine" + routine_type = "SCALAR_FUNCTION" + language = "SQL" + data_governance_type = "DATA_MASKING" + definition_body = "SAFE.REGEXP_REPLACE(ssn, '[0-9]', 'X')" + return_type = "{\"typeKind\" : \"STRING\"}" + + arguments { + name = "ssn" + data_type = "{\"typeKind\" : \"STRING\"}" + } +} diff --git a/mmv1/templates/terraform/examples/bigquery_datapolicyv2_datapolicy_withgrantees.tf.tmpl 
b/mmv1/templates/terraform/examples/bigquery_datapolicyv2_datapolicy_withgrantees.tf.tmpl new file mode 100644 index 000000000000..37fd9c79c074 --- /dev/null +++ b/mmv1/templates/terraform/examples/bigquery_datapolicyv2_datapolicy_withgrantees.tf.tmpl @@ -0,0 +1,8 @@ +resource "google_bigquery_datapolicyv2_data_policy" "{{$.PrimaryResourceId}}" { + location = "us-central1" + data_policy_type = "RAW_DATA_ACCESS_POLICY" + grantees = [ + "principal://goog/subject/jane@example.com" + ] + data_policy_id = "{{index $.Vars "data_policy_id"}}" +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/bigquery_datapolicyv2_datapolicy_withgrantees_test.tf.tmpl b/mmv1/templates/terraform/examples/bigquery_datapolicyv2_datapolicy_withgrantees_test.tf.tmpl new file mode 100644 index 000000000000..f710e3e08a15 --- /dev/null +++ b/mmv1/templates/terraform/examples/bigquery_datapolicyv2_datapolicy_withgrantees_test.tf.tmpl @@ -0,0 +1,8 @@ +resource "google_bigquery_datapolicyv2_data_policy" "{{$.PrimaryResourceId}}" { + location = "us-central1" + data_policy_type = "RAW_DATA_ACCESS_POLICY" + grantees = [ + "principalSet://goog/group/bigquery-datamasking-swe@google.com" + ] + data_policy_id = "{{index $.Vars "data_policy_id"}}" +} \ No newline at end of file diff --git a/mmv1/templates/terraform/update_encoder/bigquery_datapolicyv2_datapolicy.go.tmpl b/mmv1/templates/terraform/update_encoder/bigquery_datapolicyv2_datapolicy.go.tmpl new file mode 100644 index 000000000000..0a9ee8f70bb4 --- /dev/null +++ b/mmv1/templates/terraform/update_encoder/bigquery_datapolicyv2_datapolicy.go.tmpl @@ -0,0 +1 @@ +return obj, nil \ No newline at end of file diff --git a/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt b/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt index 5c2b40ad838b..d8f244434eaf 100644 --- a/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt +++ b/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt @@ -111,6 +111,11 @@ var ServicesListBeta = mapOf( "displayName" to "Bigquerydatapolicy", "path" to "./google-beta/services/bigquerydatapolicy" ), + "bigquerydatapolicyv2" to mapOf( + "name" to "bigquerydatapolicyv2", + "displayName" to "Bigquerydatapolicyv2", + "path" to "./google-beta/services/bigquerydatapolicyv2" + ), "bigquerydatatransfer" to mapOf( "name" to "bigquerydatatransfer", "displayName" to "Bigquerydatatransfer", diff --git a/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt b/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt index 9b3f94e065da..0b522a66c56d 100644 --- a/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt +++ b/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt @@ -111,6 +111,11 @@ var ServicesListGa = mapOf( "displayName" to "Bigquerydatapolicy", "path" to "./google/services/bigquerydatapolicy" ), + "bigquerydatapolicyv2" to mapOf( + "name" to "bigquerydatapolicyv2", + "displayName" to "Bigquerydatapolicyv2", + "path" to "./google/services/bigquerydatapolicyv2" + ), "bigquerydatatransfer" to mapOf( "name" to "bigquerydatatransfer", "displayName" to "Bigquerydatatransfer", diff --git a/mmv1/third_party/terraform/services/bigquerydatapolicyv2/resource_bigquery_datapolicyv2_data_policy_test.go b/mmv1/third_party/terraform/services/bigquerydatapolicyv2/resource_bigquery_datapolicyv2_data_policy_test.go new file mode 100644 index 000000000000..9354f98dda1c --- /dev/null +++ 
b/mmv1/third_party/terraform/services/bigquerydatapolicyv2/resource_bigquery_datapolicyv2_data_policy_test.go @@ -0,0 +1,79 @@ +package bigquerydatapolicyv2_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccBigqueryDatapolicyv2DataPolicy_bigqueryDatapolicyv2DatapolicyBasicExample_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + // CheckDestroy: testAccCheckBigqueryDatapolicyv2DataPolicyDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccBigqueryDatapolicyv2DataPolicy_bigqueryDatapolicyv2DatapolicyBasicExample_basic(context), + }, + { + ResourceName: "google_bigquery_datapolicyv2_data_policy.basic_data_policy_update", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location"}, + }, + { + Config: testAccBigqueryDatapolicyv2DataPolicy_bigqueryDatapolicyv2DatapolicyBasicExample_update(context), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("google_bigquery_datapolicyv2_data_policy.basic_data_policy_update", plancheck.ResourceActionUpdate), + }, + }, + }, + { + ResourceName: "google_bigquery_datapolicyv2_data_policy.basic_data_policy_update", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location"}, + }, + }, + }) +} + +func testAccBigqueryDatapolicyv2DataPolicy_bigqueryDatapolicyv2DatapolicyBasicExample_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_bigquery_datapolicyv2_data_policy" "basic_data_policy_update" { + location = "us-central1" + data_policy_type = "DATA_MASKING_POLICY" + data_masking_policy { + predefined_expression = "SHA256" + } + grantees = [] + data_policy_id = "tf_test_basic_data_policy_update%{random_suffix}" +} +`, context) +} + +func testAccBigqueryDatapolicyv2DataPolicy_bigqueryDatapolicyv2DatapolicyBasicExample_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_bigquery_datapolicyv2_data_policy" "basic_data_policy_update" { + location = "us-central1" + data_policy_type = "DATA_MASKING_POLICY" + data_masking_policy { + predefined_expression = "ALWAYS_NULL" + } + grantees = [ + "principalSet://goog/group/bigquery-datamasking-swe@google.com" + ] + data_policy_id = "tf_test_basic_data_policy_update%{random_suffix}" +} +`, context) +} From a0f35aaa450f77be882860141f738fe8f075d878 Mon Sep 17 00:00:00 2001 From: jialei-chen <147877028+jialei-chen@users.noreply.github.com> Date: Tue, 9 Sep 2025 09:06:02 -0700 Subject: [PATCH 157/201] Add a new field app_type to resource search engine. 
(#14874) --- .../discoveryengine/SearchEngine.yaml | 11 +++++++++++ ...gine_searchengine_agentspace_basic.tf.tmpl | 19 +++++++++++++++++++ 2 files changed, 30 insertions(+) create mode 100644 mmv1/templates/terraform/examples/discoveryengine_searchengine_agentspace_basic.tf.tmpl diff --git a/mmv1/products/discoveryengine/SearchEngine.yaml b/mmv1/products/discoveryengine/SearchEngine.yaml index 4511e3b1032b..9d9ec538d3c5 100644 --- a/mmv1/products/discoveryengine/SearchEngine.yaml +++ b/mmv1/products/discoveryengine/SearchEngine.yaml @@ -49,6 +49,11 @@ examples: vars: engine_id: 'example-engine-id' data_store_id: 'example-datastore-id' + - name: 'discoveryengine_searchengine_agentspace_basic' + primary_resource_id: 'agentspace_basic' + vars: + engine_id: 'example-engine-id' + data_store_id: 'example-datastore-id' parameters: - name: 'engineId' type: String @@ -147,3 +152,9 @@ properties: description: | The name of the company, business or entity that is associated with the engine. Setting this may help improve LLM related features.cd immutable: true + - name: 'appType' + type: String + description: | + This is the application type this engine resource represents. + The supported values: 'APP_TYPE_UNSPECIFIED', 'APP_TYPE_INTRANET'. + immutable: true diff --git a/mmv1/templates/terraform/examples/discoveryengine_searchengine_agentspace_basic.tf.tmpl b/mmv1/templates/terraform/examples/discoveryengine_searchengine_agentspace_basic.tf.tmpl new file mode 100644 index 000000000000..f5dd060cec12 --- /dev/null +++ b/mmv1/templates/terraform/examples/discoveryengine_searchengine_agentspace_basic.tf.tmpl @@ -0,0 +1,19 @@ +resource "google_discovery_engine_data_store" "agentspace_basic" { + location = "global" + data_store_id = "{{index $.Vars "data_store_id"}}" + display_name = "tf-test-structured-datastore" + industry_vertical = "GENERIC" + content_config = "NO_CONTENT" + solution_types = ["SOLUTION_TYPE_SEARCH"] + create_advanced_site_search = false +} +resource "google_discovery_engine_search_engine" "agentspace_basic" { + engine_id = "{{index $.Vars "engine_id"}}" + collection_id = "default_collection" + location = google_discovery_engine_data_store.agentspace_basic.location + display_name = "tf-test-agentspace-search-engine" + data_store_ids = [google_discovery_engine_data_store.agentspace_basic.data_store_id] + industry_vertical = "GENERIC" + search_engine_config { + } +} From 8dc43a2d0f4f4ebf5c25afa9d96731a8b1365b95 Mon Sep 17 00:00:00 2001 From: Bryan Kendall Date: Tue, 9 Sep 2025 10:07:54 -0700 Subject: [PATCH 158/201] Fixes a test issue for Firebase App Hosting (#15069) --- mmv1/products/firebaseapphosting/Domain.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/mmv1/products/firebaseapphosting/Domain.yaml b/mmv1/products/firebaseapphosting/Domain.yaml index f27993655a87..b86ed11e742b 100644 --- a/mmv1/products/firebaseapphosting/Domain.yaml +++ b/mmv1/products/firebaseapphosting/Domain.yaml @@ -55,6 +55,9 @@ examples: service_act_id: '"tf-test-domain-m"' - name: firebase_app_hosting_domain_full primary_resource_id: example + ignore_read_extra: + # This isn't guaranteed to be set right away. 
+ - custom_domain_status vars: backend_id: 'domain-full' service_act_id: 'sa-id' From f9e335833183179c4d0601f7246be95dd95bb020 Mon Sep 17 00:00:00 2001 From: Cezary Sobczak <57288981+Cezarus27@users.noreply.github.com> Date: Tue, 9 Sep 2025 19:12:05 +0200 Subject: [PATCH 159/201] Feature gap: Add labels and label_fingerprint fields to `google_compute_security_policy` (#14821) Signed-off-by: Cezary Sobczak --- .../resource_compute_security_policy.go.tmpl | 113 +++++++++++++++++- ...ource_compute_security_policy_rule_test.go | 3 +- ...ource_compute_security_policy_test.go.tmpl | 61 ++++++++++ .../r/compute_security_policy.html.markdown | 10 ++ 4 files changed, 181 insertions(+), 6 deletions(-) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_security_policy.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_security_policy.go.tmpl index a91284724b3f..6614d068d3a5 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_security_policy.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_security_policy.go.tmpl @@ -65,6 +65,7 @@ func ResourceComputeSecurityPolicy() *schema.Resource { }, CustomizeDiff: customdiff.All( tpgresource.DefaultProviderProject, + tpgresource.SetLabelsDiff, rulesCustomizeDiff, ), @@ -701,8 +702,36 @@ func ResourceComputeSecurityPolicy() *schema.Resource { }, }, }, - }, + "labels": { + Type: schema.TypeMap, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + Description: `Labels to apply to this address. A list of key->value pairs. + +**Note**: This field is non-authoritative, and will only manage the labels present in your configuration. +Please refer to the field 'effective_labels' for all of the labels present on the resource.`, + }, + "terraform_labels": { + Type: schema.TypeMap, + Computed: true, + Description: `The combination of labels configured directly on the resource and default labels configured on the provider.`, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + "effective_labels": { + Type: schema.TypeMap, + Computed: true, + Description: `All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Terraform, other clients and services.`, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + "label_fingerprint": { + Type: schema.TypeString, + Computed: true, + Description: `The unique fingerprint of the labels.`, + }, + }, UseJSONNumber: true, } } @@ -748,7 +777,7 @@ func rulesCustomizeDiff(_ context.Context, diff *schema.ResourceDiff, _ interfac func resourceComputeSecurityPolicyCreate(d *schema.ResourceData, meta interface{}) error { config := meta.(*transport_tpg.Config) - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) if err != nil { return err } @@ -772,17 +801,17 @@ func resourceComputeSecurityPolicyCreate(d *schema.ResourceData, meta interface{ securityPolicy.Rules = expandSecurityPolicyRules(v.(*schema.Set).List()) } - if v, ok := d.GetOk("advanced_options_config"); ok{ + if v, ok := d.GetOk("advanced_options_config"); ok { securityPolicy.AdvancedOptionsConfig = expandSecurityPolicyAdvancedOptionsConfig(v.([]interface{})) } - if v, ok := d.GetOk("adaptive_protection_config"); ok{ + if v, ok := d.GetOk("adaptive_protection_config"); ok { securityPolicy.AdaptiveProtectionConfig = expandSecurityPolicyAdaptiveProtectionConfig(v.([]interface{})) } log.Printf("[DEBUG] SecurityPolicy insert 
request: %#v", securityPolicy) - if v, ok := d.GetOk("recaptcha_options_config"); ok{ + if v, ok := d.GetOk("recaptcha_options_config"); ok { securityPolicy.RecaptchaOptionsConfig = expandSecurityPolicyRecaptchaOptionsConfig(v.([]interface{}), d) } @@ -805,6 +834,48 @@ func resourceComputeSecurityPolicyCreate(d *schema.ResourceData, meta interface{ return err } + if effectiveLabels := tpgresource.ExpandEffectiveLabels(d); effectiveLabels != nil { + userLabels := d.Get("labels") + terraformLabels := d.Get("terraform_labels") + + // Labels cannot be set in a create. We'll have to set them here. + err = resourceComputeSecurityPolicyRead(d, meta) + if err != nil { + return err + } + + // Now we can set the labels + setLabels := &compute.GlobalSetLabelsRequest{ + Labels: effectiveLabels, + LabelFingerprint: d.Get("label_fingerprint").(string), + } + + op, err = client.SecurityPolicies.SetLabels(project, sp, setLabels).Do() + if err != nil { + return err + } + + err = ComputeOperationWaitTime(config, op, project, fmt.Sprintf("Creating SecurityPolicy.Labels %q", sp), userAgent, d.Timeout(schema.TimeoutCreate)) + if err != nil { + return err + } + + // Set back the labels field, as it is needed to decide the value of "labels" in the state in the read function. + if err := d.Set("labels", userLabels); err != nil { + return fmt.Errorf("Error setting back labels: %s", err) + } + + // Set back the terraform_labels field, as it is needed to decide the value of "terraform_labels" in the state in the read function. + if err := d.Set("terraform_labels", terraformLabels); err != nil { + return fmt.Errorf("Error setting back terraform_labels: %s", err) + } + + // Set back the effective_labels field, as it is needed to decide the value of "effective_labels" in the state in the read function. 
+ if err := d.Set("effective_labels", effectiveLabels); err != nil { + return fmt.Errorf("Error setting back effective_labels: %s", err) + } + } + return resourceComputeSecurityPolicyRead(d, meta) } @@ -862,6 +933,22 @@ func resourceComputeSecurityPolicyRead(d *schema.ResourceData, meta interface{}) return fmt.Errorf("Error setting recaptcha_options_config: %s", err) } + if err := tpgresource.SetLabels(securityPolicy.Labels, d, "labels"); err != nil { + return err + } + + if err := tpgresource.SetLabels(securityPolicy.Labels, d, "terraform_labels"); err != nil { + return err + } + + if err := d.Set("effective_labels", securityPolicy.Labels); err != nil { + return err + } + + if err := d.Set("label_fingerprint", securityPolicy.LabelFingerprint); err != nil { + return fmt.Errorf("Error setting label_fingerprint: %s", err) + } + return nil } @@ -923,6 +1010,22 @@ func resourceComputeSecurityPolicyUpdate(d *schema.ResourceData, meta interface{ securityPolicy.ForceSendFields = append(securityPolicy.ForceSendFields, "RecaptchaOptionsConfig") } + if d.HasChange("effective_labels") { + labels := tpgresource.ExpandEffectiveLabels(d) + labelFingerprint := d.Get("label_fingerprint").(string) + req := compute.GlobalSetLabelsRequest{Labels: labels, LabelFingerprint: labelFingerprint} + + op, err := config.NewComputeClient(userAgent).SecurityPolicies.SetLabels(project, sp, &req).Do() + if err != nil { + return fmt.Errorf("Error updating labels: %s", err) + } + + opErr := ComputeOperationWaitTime(config, op, project, "labels to update", userAgent, d.Timeout(schema.TimeoutUpdate)) + if opErr != nil { + return opErr + } + } + if len(securityPolicy.ForceSendFields) > 0 { client := config.NewComputeClient(userAgent) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_rule_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_rule_test.go index bed35a86c641..18b8e2d229fa 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_rule_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_rule_test.go @@ -2,10 +2,11 @@ package compute_test import ( "fmt" - "github.com/hashicorp/terraform-provider-google/google/acctest" "regexp" "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_test.go.tmpl index 59d7bdb90a4a..22f69caf7dfe 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_security_policy_test.go.tmpl @@ -774,6 +774,38 @@ func TestAccComputeSecurityPolicy_modifyExprOptions(t *testing.T) { }) } +func TestAccComputeSecurityPolicy_labels(t *testing.T) { + t.Parallel() + + spName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeSecurityPolicyDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeSecurityPolicy_basicLabels(spName), + }, + { + ResourceName: "google_compute_security_policy.policy", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: 
[]string{"labels", "terraform_labels"}, + }, + { + Config: testAccComputeSecurityPolicy_updateLabels(spName), + }, + { + ResourceName: "google_compute_security_policy.policy", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + }, + }) +} + func testAccComputeSecurityPolicy_withRecaptchaOptionsConfig(project, spName string) string { return fmt.Sprintf(` resource "google_recaptcha_enterprise_key" "primary" { @@ -2230,3 +2262,32 @@ resource "google_compute_security_policy" "policy" { } `, spName) } + +func testAccComputeSecurityPolicy_basicLabels(spName string) string { + return fmt.Sprintf(` +resource "google_compute_security_policy" "policy" { + name = "%s" + description = "basic security policy" + type = "CLOUD_ARMOR" + + labels = { + "env" = "test" + } +} +`, spName) +} + +func testAccComputeSecurityPolicy_updateLabels(spName string) string { + return fmt.Sprintf(` +resource "google_compute_security_policy" "policy" { + name = "%s" + description = "basic security policy" + type = "CLOUD_ARMOR" + + labels = { + "env" = "test", + "new_label" = "abcd1" + } +} +`, spName) +} diff --git a/mmv1/third_party/terraform/website/docs/r/compute_security_policy.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_security_policy.html.markdown index ba850c6a7007..581bfd5a4639 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_security_policy.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_security_policy.html.markdown @@ -191,6 +191,16 @@ The following arguments are supported: * `CLOUD_ARMOR_INTERNAL_SERVICE` - Cloud Armor internal service policies can be configured to filter HTTP requests targeting services managed by Traffic Director in a service mesh. They filter requests before the request is served from the application. +* `labels` - Labels to apply to this address. A list of key->value pairs. + **Note**: This field is non-authoritative, and will only manage the labels present in your configuration. + Please refer to the field `effective_labels` for all of the labels present on the resource. + +* `effective_labels` - All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Terraform, other clients and services. + +* `terraform_labels` - The combination of labels configured directly on the resource and default labels configured on the provider. + +* `label_fingerprint` - The unique fingerprint of the labels. + The `advanced_options_config` block supports: * `json_parsing` - Whether or not to JSON parse the payload body. Defaults to `DISABLED`. 
From 0cce7b69bd3fdb6f849851395f9abb15820d5d07 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Tue, 9 Sep 2025 10:12:53 -0700 Subject: [PATCH 160/201] tgc-revival: add networksecurity resources (#15073) --- mmv1/products/networksecurity/AddressGroup.yaml | 1 + mmv1/products/networksecurity/AuthorizationPolicy.yaml | 1 + mmv1/products/networksecurity/ClientTlsPolicy.yaml | 1 + mmv1/products/networksecurity/GatewaySecurityPolicy.yaml | 1 + mmv1/products/networksecurity/SecurityProfile.yaml | 1 + mmv1/products/networksecurity/SecurityProfileGroup.yaml | 1 + 6 files changed, 6 insertions(+) diff --git a/mmv1/products/networksecurity/AddressGroup.yaml b/mmv1/products/networksecurity/AddressGroup.yaml index f1454643f4e1..caef32e865a2 100644 --- a/mmv1/products/networksecurity/AddressGroup.yaml +++ b/mmv1/products/networksecurity/AddressGroup.yaml @@ -46,6 +46,7 @@ async: resource_inside_response: false include_project: true custom_code: +include_in_tgc_next_DO_NOT_USE: true examples: - name: 'network_security_address_groups_basic' primary_resource_id: 'default' diff --git a/mmv1/products/networksecurity/AuthorizationPolicy.yaml b/mmv1/products/networksecurity/AuthorizationPolicy.yaml index 3b41099e73ee..2ed82fcb97f9 100644 --- a/mmv1/products/networksecurity/AuthorizationPolicy.yaml +++ b/mmv1/products/networksecurity/AuthorizationPolicy.yaml @@ -45,6 +45,7 @@ async: sweeper: url_substitutions: - region: "global" +include_in_tgc_next_DO_NOT_USE: true examples: - name: 'network_security_authorization_policy_basic' primary_resource_id: 'default' diff --git a/mmv1/products/networksecurity/ClientTlsPolicy.yaml b/mmv1/products/networksecurity/ClientTlsPolicy.yaml index cb6a8772f575..aaacefa7fa08 100644 --- a/mmv1/products/networksecurity/ClientTlsPolicy.yaml +++ b/mmv1/products/networksecurity/ClientTlsPolicy.yaml @@ -43,6 +43,7 @@ async: result: resource_inside_response: false custom_code: +include_in_tgc_next_DO_NOT_USE: true sweeper: url_substitutions: - region: "global" diff --git a/mmv1/products/networksecurity/GatewaySecurityPolicy.yaml b/mmv1/products/networksecurity/GatewaySecurityPolicy.yaml index e9466502817e..e85d1a606b57 100644 --- a/mmv1/products/networksecurity/GatewaySecurityPolicy.yaml +++ b/mmv1/products/networksecurity/GatewaySecurityPolicy.yaml @@ -43,6 +43,7 @@ async: result: resource_inside_response: false custom_code: +include_in_tgc_next_DO_NOT_USE: true sweeper: dependencies: - "google_network_services_gateway" diff --git a/mmv1/products/networksecurity/SecurityProfile.yaml b/mmv1/products/networksecurity/SecurityProfile.yaml index 9e0a5c9fbf99..a23effe75f1d 100644 --- a/mmv1/products/networksecurity/SecurityProfile.yaml +++ b/mmv1/products/networksecurity/SecurityProfile.yaml @@ -40,6 +40,7 @@ async: resource_inside_response: false include_project: true custom_code: +include_in_tgc_next_DO_NOT_USE: true sweeper: url_substitutions: - parent: "organizations/${ORG_ID}" diff --git a/mmv1/products/networksecurity/SecurityProfileGroup.yaml b/mmv1/products/networksecurity/SecurityProfileGroup.yaml index ba7d51a1ddc7..072e665a4b04 100644 --- a/mmv1/products/networksecurity/SecurityProfileGroup.yaml +++ b/mmv1/products/networksecurity/SecurityProfileGroup.yaml @@ -41,6 +41,7 @@ async: resource_inside_response: false include_project: true custom_code: +include_in_tgc_next_DO_NOT_USE: true examples: - name: 'network_security_security_profile_group_basic' primary_resource_id: 'default' From 59c0dbaf57cf27acd90b32fa599e70f0d3fa0711 Mon Sep 17 00:00:00 2001 From: victorsantos-cit Date: Tue, 
9 Sep 2025 16:57:42 -0300 Subject: [PATCH 161/201] Secret Manager: Fix panic in SecretVersion flatten when AccessSecretVersion API call fails (#15082) --- .../secret_version_access.go.tmpl | 88 +++++++++++++------ 1 file changed, 63 insertions(+), 25 deletions(-) diff --git a/mmv1/templates/terraform/custom_flatten/secret_version_access.go.tmpl b/mmv1/templates/terraform/custom_flatten/secret_version_access.go.tmpl index 4f223a197f78..1806e596206c 100644 --- a/mmv1/templates/terraform/custom_flatten/secret_version_access.go.tmpl +++ b/mmv1/templates/terraform/custom_flatten/secret_version_access.go.tmpl @@ -11,50 +11,88 @@ limitations under the License. */ -}} func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { - transformed := make(map[string]interface{}) - // write-only attributes are null on reads, secret_data_wo_version is used instead to return empty transformed that resolves a diff. + // helper: always return []interface{}{map} with the safest value + safeTransformed := func(val interface{}) []interface{} { + m := make(map[string]interface{}) + if val != nil { + m["secret_data"] = val + } + return []interface{}{m} + } + + // write-only: during read, resolve diff with empty object if _, ok := d.GetOkExists("secret_data_wo_version"); ok { - return []interface{}{transformed} + return safeTransformed(nil) } - // if this secret version is disabled, the api will return an error, as the value cannot be accessed, return what we have - if d.Get("enabled").(bool) == false { - transformed["secret_data"] = d.Get("secret_data") - return []interface{}{transformed} + // if "enabled" does not exist or is false, preserve what we already have in the state + enabledVal, exists := d.GetOk("enabled") + if !exists { + return safeTransformed(d.Get("secret_data")) + } + if enabled, _ := enabledVal.(bool); !enabled { + return safeTransformed(d.Get("secret_data")) } + // build access URL; if it fails, preserve state url, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}SecretManagerBasePath{{"}}"}}{{"{{"}}name{{"}}"}}:access") if err != nil { - return err + log.Printf("[ERROR] Failed to build secret access URL: %v", err) + return safeTransformed(d.Get("secret_data")) } - parts := strings.Split(d.Get("name").(string), "/") + // safely extract project + nameStr, _ := d.Get("name").(string) + parts := strings.Split(nameStr, "/") + if len(parts) < 2 { + log.Printf("[WARN] Unexpected secret name format %q, preserving state", nameStr) + return safeTransformed(d.Get("secret_data")) + } project := parts[1] - userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + ua, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) if err != nil { - return err + log.Printf("[ERROR] Failed to generate user agent string: %v", err) + return safeTransformed(d.Get("secret_data")) } accessRes, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ - Config: config, - Method: "GET", - Project: project, - RawURL: url, - UserAgent: userAgent, + Config: config, + Method: "GET", + Project: project, + RawURL: url, + UserAgent: ua, }) if err != nil { - return err + // per review: add explicit log to diagnose underlying url/transport error + log.Printf("[ERROR] Failed to access secret version at %q: %v", url, err) + return safeTransformed(d.Get("secret_data")) } - if d.Get("is_secret_data_base64").(bool) { - transformed["secret_data"] = accessRes["payload"].(map[string]interface{})["data"].(string) - } else { - data, 
err := base64.StdEncoding.DecodeString(accessRes["payload"].(map[string]interface{})["data"].(string)) - if err != nil { - return err + // safely fetch payload.data + var dataB64 string + if payloadAny, ok := accessRes["payload"]; ok { + if payloadMap, ok := payloadAny.(map[string]interface{}); ok { + if s, ok := payloadMap["data"].(string); ok { + dataB64 = s + } } - transformed["secret_data"] = string(data) } - return []interface{}{transformed} + if dataB64 == "" { + log.Printf("[WARN] No payload.data found in secret access response for %q, preserving state", nameStr) + return safeTransformed(d.Get("secret_data")) + } + + // decide whether to keep pure base64 or decode it + isB64, _ := d.Get("is_secret_data_base64").(bool) + if isB64 { + return safeTransformed(dataB64) + } + + decoded, decErr := base64.StdEncoding.DecodeString(dataB64) + if decErr != nil { + log.Printf("[ERROR] Failed to decode base64 secret payload for %q: %v", nameStr, decErr) + return safeTransformed(d.Get("secret_data")) + } + return safeTransformed(string(decoded)) } From a71440718aca830a9504c23ef92e87e21e34e290 Mon Sep 17 00:00:00 2001 From: Rohan Chawla <73727454+rohanchawla23@users.noreply.github.com> Date: Tue, 9 Sep 2025 13:08:50 -0700 Subject: [PATCH 162/201] Add new encryption_spec field (#15068) --- mmv1/products/privateca/CaPool.yaml | 19 +++++++++++++++++++ .../privateca_capool_all_fields.tf.tmpl | 19 ++++++++++++++++++- 2 files changed, 37 insertions(+), 1 deletion(-) diff --git a/mmv1/products/privateca/CaPool.yaml b/mmv1/products/privateca/CaPool.yaml index d52bb335bd43..a884165d4b01 100644 --- a/mmv1/products/privateca/CaPool.yaml +++ b/mmv1/products/privateca/CaPool.yaml @@ -58,6 +58,11 @@ examples: primary_resource_id: 'default' vars: name: 'my-pool' + pool_location: 'asia-east1' + cloud_kms_key: 'projects/keys-project/locations/asia-east1/keyRings/key-ring/cryptoKeys/crypto-key' + test_vars_overrides: + 'pool_location': '"asia-east1"' + 'cloud_kms_key': 'acctest.BootstrapKMSKeyWithPurposeInLocation(t, "ENCRYPT_DECRYPT", "asia-east1").CryptoKey.Name' - name: 'privateca_quickstart' primary_resource_id: 'default' vars: @@ -507,3 +512,17 @@ properties: An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }. + - name: 'encryptionSpec' + type: NestedObject + description: | + Used when customer would like to encrypt data at rest. The customer-provided key will be used + to encrypt the Subject, SubjectAltNames and PEM-encoded certificate fields. When unspecified, + customer data will remain unencrypted. + immutable: true + properties: + - name: 'cloudKmsKey' + type: String + description: | + The resource name for an existing Cloud KMS key in the format + `projects/*/locations/*/keyRings/*/cryptoKeys/*`. 
+ immutable: true diff --git a/mmv1/templates/terraform/examples/privateca_capool_all_fields.tf.tmpl b/mmv1/templates/terraform/examples/privateca_capool_all_fields.tf.tmpl index e513a9e4b1c3..8771232e0274 100644 --- a/mmv1/templates/terraform/examples/privateca_capool_all_fields.tf.tmpl +++ b/mmv1/templates/terraform/examples/privateca_capool_all_fields.tf.tmpl @@ -1,6 +1,16 @@ +resource "google_project_service_identity" "privateca_sa" { + service = "privateca.googleapis.com" +} + +resource "google_kms_crypto_key_iam_member" "privateca_sa_keyuser_encrypterdecrypter" { + crypto_key_id = "{{index $.Vars "cloud_kms_key"}}" + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + member = google_project_service_identity.privateca_sa.member +} + resource "google_privateca_ca_pool" "{{$.PrimaryResourceId}}" { name = "{{index $.Vars "name"}}" - location = "us-central1" + location = "{{index $.Vars "pool_location"}}" tier = "ENTERPRISE" publishing_options { publish_ca_cert = false @@ -10,6 +20,9 @@ resource "google_privateca_ca_pool" "{{$.PrimaryResourceId}}" { labels = { foo = "bar" } + encryption_spec { + cloud_kms_key = "{{index $.Vars "cloud_kms_key"}}" + } issuance_policy { allowed_key_types { elliptic_curve { @@ -87,4 +100,8 @@ resource "google_privateca_ca_pool" "{{$.PrimaryResourceId}}" { } } } + + depends_on = [ + google_kms_crypto_key_iam_member.privateca_sa_keyuser_encrypterdecrypter, + ] } From 8eded531a1debf25538f1b19c81ce7c683366593 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Tue, 9 Sep 2025 15:53:27 -0700 Subject: [PATCH 163/201] Pinned codeql action to a specific hash (#15104) --- .github/workflows/codeql.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 59da3e72a7e6..94dea0078c6a 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -57,6 +57,6 @@ jobs: # ./location_of_script_within_repo/buildscript.sh - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3 + uses: github/codeql-action/analyze@3c3833e0f8c1c83d449a7478aa59c036a9165498 # v3.29.11 with: category: "/language:${{matrix.language}}" From 9d1ce866dc28898cbe94f470d4292b5afc7cef98 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Tue, 9 Sep 2025 16:33:12 -0700 Subject: [PATCH 164/201] Removed unused gitkeep file (#14599) --- mmv1/templates/terraform/custom_update/.gitkeep | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 mmv1/templates/terraform/custom_update/.gitkeep diff --git a/mmv1/templates/terraform/custom_update/.gitkeep b/mmv1/templates/terraform/custom_update/.gitkeep deleted file mode 100644 index e69de29bb2d1..000000000000 From 8564692e39b3683f0e8f13b5117152af00cdb51f Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Wed, 10 Sep 2025 09:46:06 -0700 Subject: [PATCH 165/201] Update enrolled_teams.yml (#15105) --- tools/issue-labeler/labeler/enrolled_teams.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tools/issue-labeler/labeler/enrolled_teams.yml b/tools/issue-labeler/labeler/enrolled_teams.yml index d7aaa4a2fca2..3289443f4d30 100755 --- a/tools/issue-labeler/labeler/enrolled_teams.yml +++ b/tools/issue-labeler/labeler/enrolled_teams.yml @@ -7,6 +7,9 @@ service/accessapproval: service/accesscontextmanager: resources: - google_access_context_manager_.* +service/aiplatform-agent-engine: + resources: + - google_vertex_ai_reasoning_engine service/aiplatform-colab-enterprise: resources: - google_colab_.* From 
64def5a9e1e7e4f20e9f4b171b56ade17717f944 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Wed, 10 Sep 2025 10:15:24 -0700 Subject: [PATCH 166/201] Made pinned versions for codeql steps match (#15110) --- .github/workflows/codeql.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 94dea0078c6a..18d8baf9ded8 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -30,7 +30,7 @@ jobs: # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@05963f47d870e2cb19a537396c1f668a348c7d8f # v3.24.8 + uses: github/codeql-action/init@3c3833e0f8c1c83d449a7478aa59c036a9165498 # v3.29.11 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. @@ -44,7 +44,7 @@ jobs: # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild - uses: github/codeql-action/autobuild@05963f47d870e2cb19a537396c1f668a348c7d8f # v3.24.8 + uses: github/codeql-action/autobuild@3c3833e0f8c1c83d449a7478aa59c036a9165498 # v3.29.11 # ℹ️ Command-line programs to run using the OS shell. # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun From 19190d895cb9eb37cb41fcbe081da971b9c69b60 Mon Sep 17 00:00:00 2001 From: Joel Shapiro Date: Wed, 10 Sep 2025 10:54:25 -0700 Subject: [PATCH 167/201] Added validationConfig to healthcare FHIR store. (#15057) --- mmv1/products/healthcare/FhirStore.yaml | 47 ++++++++++++++++++ ...hcare_fhir_store_validation_config.tf.tmpl | 37 ++++++++++++++ ...esource_healthcare_fhir_store_test.go.tmpl | 49 +++++++++++++++++++ 3 files changed, 133 insertions(+) create mode 100644 mmv1/templates/terraform/examples/healthcare_fhir_store_validation_config.tf.tmpl diff --git a/mmv1/products/healthcare/FhirStore.yaml b/mmv1/products/healthcare/FhirStore.yaml index 2db30de02f4d..e8d5fcdcd9b2 100644 --- a/mmv1/products/healthcare/FhirStore.yaml +++ b/mmv1/products/healthcare/FhirStore.yaml @@ -76,6 +76,12 @@ examples: dataset_name: 'example-dataset' fhir_store_name: 'example-fhir-store' pubsub_topic: 'fhir-notifications' + - name: 'healthcare_fhir_store_validation_config' + primary_resource_id: 'default' + vars: + dataset_name: 'example-dataset' + fhir_store_name: 'example-fhir-store' + pubsub_topic: 'fhir-notifications' parameters: - name: 'dataset' type: ResourceRef @@ -174,6 +180,47 @@ properties: output: true item_type: type: String + - name: 'validationConfig' + type: NestedObject + description: | + Configuration for how to validate incoming FHIR resources against configured profiles. + properties: + - name: 'disableProfileValidation' + type: Boolean + default_value: false + description: | + Whether to disable profile validation for this FHIR store. The default value is false. Set this to true to disable checking incoming resources for conformance against structure definitions in this FHIR store. + - name: 'enabledImplementationGuides' + type: Array + description: | + A list of implementation guide URLs in this FHIR store that are used to configure the profiles to use for validation. + When a URL cannot be resolved (for example, in a type assertion), the server does not return an error. 
+ For example, to use the US Core profiles for validation, set enabledImplementationGuides to ["http://hl7.org/fhir/us/core/ImplementationGuide/ig"]. If enabledImplementationGuides is empty or omitted, then incoming resources are only required to conform to the base FHIR profiles. Otherwise, a resource must conform to at least one profile listed in the global property of one of the enabled ImplementationGuides. + The Cloud Healthcare API does not currently enforce all of the rules in a StructureDefinition. The following rules are supported: + - min/max + - minValue/maxValue + - maxLength + - type + - fixed[x] + - pattern[x] on simple types + - slicing, when using "value" as the discriminator type + item_type: + type: String + - name: 'disableRequiredFieldValidation' + type: Boolean + default_value: false + description: | + Whether to disable required fields validation for incoming resources. The default value is false. Set this to true to disable checking incoming resources for conformance against required fields requirement defined in the FHIR specification. This property only affects resource types that do not have profiles configured for them, any rules in enabled implementation guides will still be enforced. + - name: 'disableReferenceTypeValidation' + type: Boolean + default_value: false + description: | + Whether to disable reference type validation for incoming resources. The default value is false. Set this to true to disable checking incoming resources for conformance against reference type requirement defined in the FHIR specification. This property only affects resource types that do not have profiles configured for them, any rules in enabled implementation guides will still be enforced. + - name: 'disableFhirpathValidation' + type: Boolean + default_value: false + description: | + Whether to disable FHIRPath validation for incoming resources. The default value is false. Set this to true to disable checking incoming resources for conformance against FHIRPath requirement defined in the FHIR specification. This property only affects resource types that do not have profiles configured for them, any rules in enabled implementation guides will still be enforced. 
- name: 'complexDataTypeReferenceParsing' type: Enum description: | diff --git a/mmv1/templates/terraform/examples/healthcare_fhir_store_validation_config.tf.tmpl b/mmv1/templates/terraform/examples/healthcare_fhir_store_validation_config.tf.tmpl new file mode 100644 index 000000000000..f6aae7ae71c4 --- /dev/null +++ b/mmv1/templates/terraform/examples/healthcare_fhir_store_validation_config.tf.tmpl @@ -0,0 +1,37 @@ +resource "google_healthcare_fhir_store" "default" { + name = "{{index $.Vars "fhir_store_name"}}" + dataset = google_healthcare_dataset.dataset.id + version = "R4" + complex_data_type_reference_parsing = "DISABLED" + + enable_update_create = false + disable_referential_integrity = false + disable_resource_versioning = false + enable_history_import = false + default_search_handling_strict = false + + notification_configs { + pubsub_topic = google_pubsub_topic.topic.id + } + + labels = { + label1 = "labelvalue1" + } + + validation_config { + disable_profile_validation = true + enabled_implementation_guides = [] + disable_required_field_validation = true + disable_reference_type_validation = true + disable_fhirpath_validation = true + } +} + +resource "google_pubsub_topic" "topic" { + name = "{{index $.Vars "pubsub_topic"}}" +} + +resource "google_healthcare_dataset" "dataset" { + name = "{{index $.Vars "dataset_name"}}" + location = "us-central1" +} diff --git a/mmv1/third_party/terraform/services/healthcare/resource_healthcare_fhir_store_test.go.tmpl b/mmv1/third_party/terraform/services/healthcare/resource_healthcare_fhir_store_test.go.tmpl index 30418b32bd1c..7cfb383afe97 100644 --- a/mmv1/third_party/terraform/services/healthcare/resource_healthcare_fhir_store_test.go.tmpl +++ b/mmv1/third_party/terraform/services/healthcare/resource_healthcare_fhir_store_test.go.tmpl @@ -189,6 +189,14 @@ resource "google_healthcare_fhir_store" "default" { } {{- end }} + validation_config { + disable_profile_validation = true + enabled_implementation_guides = ["http://hl7.org/fhir/us/core/ImplementationGuide/ig", "http://example.com/SomeCustomIG"] + disable_required_field_validation = true + disable_reference_type_validation = true + disable_fhirpath_validation = true + } + labels = { label1 = "labelvalue1" } @@ -247,6 +255,29 @@ func testAccCheckGoogleHealthcareFhirStoreUpdate(t *testing.T, pubsubTopic strin } {{- end }} + if response.ValidationConfig == nil { + return fmt.Errorf("fhirStore 'ValidationConfig' missing: %s", gcpResourceUri) + } + if !response.ValidationConfig.DisableProfileValidation { + return fmt.Errorf("fhirStore 'ValidationConfig.DisableProfileValidation' not updated: %s", gcpResourceUri) + } + if response.ValidationConfig.EnabledImplementationGuides == nil { + return fmt.Errorf("fhirStore 'ValidationConfig.EnabledImplementationGuides' missing: %s", gcpResourceUri) + } + expectedEnabledImplementationGuides := []string{"http://hl7.org/fhir/us/core/ImplementationGuide/ig", "http://example.com/SomeCustomIG"} + if !checkEnabledImplementationGuidesArraysAreEqual(expectedEnabledImplementationGuides, response.ValidationConfig.EnabledImplementationGuides) { + return fmt.Errorf("fhirStore 'ValidationConfig.EnabledImplementationGuides' not updated: %s", gcpResourceUri) + } + if !response.ValidationConfig.DisableRequiredFieldValidation { + return fmt.Errorf("fhirStore 'ValidationConfig.DisableRequiredFieldValidation' not updated: %s", gcpResourceUri) + } + if !response.ValidationConfig.DisableReferenceTypeValidation { + return fmt.Errorf("fhirStore 
'ValidationConfig.DisableReferenceTypeValidation' not updated: %s", gcpResourceUri) + } + if !response.ValidationConfig.DisableFhirpathValidation { + return fmt.Errorf("fhirStore 'ValidationConfig.DisableFhirpathValidation' not updated: %s", gcpResourceUri) + } + if !response.EnableUpdateCreate { return fmt.Errorf("fhirStore 'EnableUpdateCreate' not updated: %s", gcpResourceUri) } @@ -277,3 +308,21 @@ func testAccCheckGoogleHealthcareFhirStoreUpdate(t *testing.T, pubsubTopic strin return nil } } + +// Returns true if the size and contents (order dependent) of enabledImplementationGuides1 and enabledImplementationGuides2 are identical, +// else false. +func checkEnabledImplementationGuidesArraysAreEqual(enabledImplementationGuides1 []string, enabledImplementationGuides2 []string) bool { + // Same length? + if len(enabledImplementationGuides1) != len(enabledImplementationGuides2) { + return false + } + + // Same contents - do NOT sort to ensure order is identical and avoid permadiff risk + for i1, v1 := range enabledImplementationGuides1 { + if v1 != enabledImplementationGuides2[i1] { + return false + } + } + + return true +} From d480235cef1b76690f1cfa08670dda2f56754a6d Mon Sep 17 00:00:00 2001 From: victorsantos-cit Date: Wed, 10 Sep 2025 14:57:55 -0300 Subject: [PATCH 168/201] runV2: add support for value "connector" on field "vpcAccess" on resource "google_cloudrunv2_worker_pool" (#15108) --- mmv1/products/cloudrunv2/WorkerPool.yaml | 4 ++ ...resource_cloud_run_v2_service_test.go.tmpl | 72 +++++++++++++++++++ 2 files changed, 76 insertions(+) diff --git a/mmv1/products/cloudrunv2/WorkerPool.yaml b/mmv1/products/cloudrunv2/WorkerPool.yaml index 4f6958c4aa1e..96577ec01961 100644 --- a/mmv1/products/cloudrunv2/WorkerPool.yaml +++ b/mmv1/products/cloudrunv2/WorkerPool.yaml @@ -332,6 +332,10 @@ properties: description: |- VPC Access configuration to use for this Revision. For more information, visit https://cloud.google.com/run/docs/configuring/connecting-vpc. properties: + - name: 'connector' + type: String + description: |- + VPC Access connector name. Format: projects/{project}/locations/{location}/connectors/{connector}, where {project} can be project id or number. 
      - name: 'egress'
        type: Enum
        description: |-
diff --git a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl
index fd20578a8d20..0a51d58773d3 100644
--- a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl
+++ b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl
@@ -13,6 +13,78 @@ import (
 	"github.com/hashicorp/terraform-provider-google/google/services/cloudrunv2"
 )
 
+func TestAccCloudRunV2WorkerPool_vpcAccess_basic(t *testing.T) {
+	t.Parallel()
+
+	ctx := map[string]interface{}{
+		"rs":     acctest.RandString(t, 10),
+		"region": "us-central1",
+	}
+
+	acctest.VcrTest(t, resource.TestCase{
+		PreCheck:                 func() { acctest.AccTestPreCheck(t) },
+		ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t),
+		CheckDestroy:             testAccCheckCloudRunV2WorkerPoolDestroyProducer(t),
+		Steps: []resource.TestStep{
+			{
+				Config: testAccCloudRunV2WorkerPool_vpcAccess_basicConfig(ctx),
+			},
+			{
+				ResourceName:      "google_cloud_run_v2_worker_pool.primary",
+				ImportState:       true,
+				ImportStateVerify: true,
+				ImportStateVerifyIgnore: []string{
+					"name", "location", "annotations", "labels", "terraform_labels",
+					"deletion_protection",
+				},
+			},
+		},
+	})
+}
+
+func testAccCloudRunV2WorkerPool_vpcAccess_basicConfig(ctx map[string]interface{}) string {
+	return fmt.Sprintf(`
+resource "google_compute_network" "primary" {
+  name                    = "tf-crwp-vpc-%[1]s"
+  auto_create_subnetworks = false
+}
+
+resource "google_compute_subnetwork" "primary" {
+  name          = "tf-crwp-subnet-%[1]s"
+  ip_cidr_range = "10.0.0.0/16"
+  region        = "%[2]s"
+  network       = google_compute_network.primary.id
+}
+
+resource "google_vpc_access_connector" "primary" {
+  name          = "tf-crwp-conn-%[1]s"
+  region        = "%[2]s"
+  network       = google_compute_network.primary.name
+  ip_cidr_range = "10.8.0.0/28"
+
+  # Current API requirement: connector capacity must be set explicitly.
+  min_instances = 2
+  max_instances = 3
+}
+
+resource "google_cloud_run_v2_worker_pool" "primary" {
+  name                = "tf-crwp-%[1]s"
+  location            = "%[2]s"
+  deletion_protection = false
+  launch_stage        = "BETA"
+
+  template {
+    containers {
+      image = "gcr.io/cloudrun/hello"
+    }
+    vpc_access {
+      connector = google_vpc_access_connector.primary.id
+    }
+  }
+}
+`, ctx["rs"], ctx["region"])
+}
+
 func TestAccCloudRunV2Service_cloudrunv2ServiceFullUpdate(t *testing.T) {
 	t.Parallel()

From 7f60c4b961569ea98c8d78b7d2782eea40062c1a Mon Sep 17 00:00:00 2001
From: veraz0818
Date: Wed, 10 Sep 2025 11:16:50 -0700
Subject: [PATCH 169/201] Add proxy to VMware admin cluster resources. (#14908)

---
 mmv1/products/gkeonprem/VmwareAdminCluster.yaml    | 13 +++++++++++++
 .../gkeonprem_vmware_admin_cluster_full.tf.tmpl    |  4 ++++
 .../gkeonprem_vmware_admin_cluster_metallb.tf.tmpl |  4 ++++
 3 files changed, 21 insertions(+)

diff --git a/mmv1/products/gkeonprem/VmwareAdminCluster.yaml b/mmv1/products/gkeonprem/VmwareAdminCluster.yaml
index 9cc09f9d9a29..33768020f6d7 100644
--- a/mmv1/products/gkeonprem/VmwareAdminCluster.yaml
+++ b/mmv1/products/gkeonprem/VmwareAdminCluster.yaml
@@ -699,3 +699,16 @@ properties:
       - type: String
        name: 'caCert'
        description: The CA certificate public key for private registry.
+  - type: NestedObject
+    name: proxy
+    description: Configuration for proxy.
+    properties:
+      - type: String
+        name: 'url'
+        required: true
+        description: The proxy url.
+ - type: String + name: 'noProxy' + description: | + A comma-separated list of IP addresses, IP address ranges, + host names, and domain names that should not go through the proxy server. diff --git a/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_full.tf.tmpl b/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_full.tf.tmpl index e038d9b00553..5f7aa59aca20 100644 --- a/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_full.tf.tmpl @@ -88,4 +88,8 @@ resource "google_gkeonprem_vmware_admin_cluster" "{{$.PrimaryResourceId}}" { address = "test-address" ca_cert = "test-ca-cert" } + proxy { + url = "http://my-proxy.example.local:80" + no_proxy = "10.151.222.0/24,my-host.example.local,10.151.2.1" + } } diff --git a/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_metallb.tf.tmpl b/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_metallb.tf.tmpl index 609e2149036d..4a076c7cb302 100644 --- a/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_metallb.tf.tmpl +++ b/mmv1/templates/terraform/examples/gkeonprem_vmware_admin_cluster_metallb.tf.tmpl @@ -40,4 +40,8 @@ resource "google_gkeonprem_vmware_admin_cluster" "{{$.PrimaryResourceId}}" { address = "test-address" ca_cert = "test-ca-cert" } + proxy { + url = "http://my-proxy.example.local:80" + no_proxy = "10.151.222.0/24,my-host.example.local,10.151.2.1" + } } From aa289e7096997fce4b51ec8f64370a64608edb2e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Adam=20Sk=C3=A4rbo=20Jonsson?= Date: Wed, 10 Sep 2025 21:24:46 +0200 Subject: [PATCH 170/201] fix(google_sql_database_instance): Connection Pool requires Enterprise Plus (#15107) --- .../services/sql/resource_sql_database_instance_test.go.tmpl | 3 ++- .../website/docs/r/sql_database_instance.html.markdown | 5 +++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl index 4e572f28ad0a..72ed7806113e 100644 --- a/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/sql/resource_sql_database_instance_test.go.tmpl @@ -5470,7 +5470,8 @@ resource "google_sql_database_instance" "instance" { database_version = "POSTGRES_16" deletion_protection = false settings { - tier = "db-perf-optimized-N-2" + tier = "db-perf-optimized-N-2" + edition = "ENTERPRISE_PLUS" connection_pool_config { connection_pooling_enabled = true flags { diff --git a/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown b/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown index 2e110a6ed166..460a39ce9940 100644 --- a/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/sql_database_instance.html.markdown @@ -176,11 +176,12 @@ resource "google_sql_database_instance" "main" { ### Cloud SQL Instance with Managed Connection Pooling ```hcl resource "google_sql_database_instance" "instance" { - name: = "mcp-enabled-main-instance" + name = "mcp-enabled-main-instance" region = "us-central1" database_version = "POSTGRES_16" settings { - tier = "db-perf-optimized-N-2" + tier = "db-perf-optimized-N-2" + edition = "ENTERPRISE_PLUS" connection_pool_config { connection_pooling_enabled = true flags { From 
9b1a11e9d9d58895df0acf835427cd7e23a56abc Mon Sep 17 00:00:00 2001 From: Benjamin Maynard <36383062+benjamin-maynard@users.noreply.github.com> Date: Wed, 10 Sep 2025 20:43:26 +0100 Subject: [PATCH 171/201] google_datastream_stream: Add support for projectID field in SourceHierarchyDatasets (#14858) Co-authored-by: Nick Elliot --- mmv1/products/datastream/Stream.yaml | 24 ++++ ...uery_cross_project_source_hierachy.tf.tmpl | 129 ++++++++++++++++++ 2 files changed, 153 insertions(+) create mode 100644 mmv1/templates/terraform/examples/datastream_stream_bigquery_cross_project_source_hierachy.tf.tmpl diff --git a/mmv1/products/datastream/Stream.yaml b/mmv1/products/datastream/Stream.yaml index fa4df7b8b3e0..62087566d276 100644 --- a/mmv1/products/datastream/Stream.yaml +++ b/mmv1/products/datastream/Stream.yaml @@ -180,6 +180,26 @@ examples: external_providers: ["random", "time"] # Random provider skip_vcr: true + - name: 'datastream_stream_bigquery_cross_project_source_hierachy' + primary_resource_id: 'default' + vars: + stream_id: 'my-stream' + private_connection_id: 'my-connection' + network_name: 'my-network' + source_connection_profile_id: 'source-profile' + database_instance_name: 'my-instance' + deletion_protection: 'true' + destination_connection_profile_id: 'destination-profile' + test_env_vars: + org_id: 'ORG_ID' + billing_account: 'BILLING_ACCT' + test_vars_overrides: + 'deletion_protection': 'false' + oics_vars_overrides: + 'deletion_protection': 'false' + external_providers: ["random", "time"] + # Random provider + skip_vcr: true - name: 'datastream_stream_bigquery_append_only' primary_resource_id: 'default' vars: @@ -1337,6 +1357,10 @@ properties: encryption key. i.e. projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{cryptoKey}. See https://cloud.google.com/bigquery/docs/customer-managed-encryption for more information. immutable: true + - name: 'projectId' + type: string + description: | + Optional. The project id of the BigQuery dataset. If not specified, the project will be inferred from the stream resource. - name: 'blmtConfig' type: NestedObject description: 'BigLake Managed Tables configuration for BigQuery streams.' 
diff --git a/mmv1/templates/terraform/examples/datastream_stream_bigquery_cross_project_source_hierachy.tf.tmpl b/mmv1/templates/terraform/examples/datastream_stream_bigquery_cross_project_source_hierachy.tf.tmpl new file mode 100644 index 000000000000..23e2a5d30ec0 --- /dev/null +++ b/mmv1/templates/terraform/examples/datastream_stream_bigquery_cross_project_source_hierachy.tf.tmpl @@ -0,0 +1,129 @@ +data "google_project" "project" { +} + +resource "google_project" "cross-project-dataset" { + project_id = "tf-test%{random_suffix}" + name = "tf-test%{random_suffix}" + org_id = "{{index $.TestEnvVars "org_id"}}" + billing_account = "{{index $.TestEnvVars "billing_account"}}" + deletion_policy = "DELETE" +} + +resource "time_sleep" "wait_60_seconds" { + create_duration = "60s" + depends_on = [google_project.cross-project-dataset] +} + +resource "google_project_service" "bigquery" { + project = google_project.cross-project-dataset.project_id + service = "bigquery.googleapis.com" + disable_on_destroy = false + depends_on = [time_sleep.wait_60_seconds] +} + +resource "google_project_iam_member" "datastream_bigquery_admin" { + project = google_project.cross-project-dataset.project_id + role = "roles/bigquery.admin" + member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-datastream.iam.gserviceaccount.com" + depends_on = [time_sleep.wait_60_seconds] +} + +resource "google_sql_database_instance" "instance" { + name = "{{index $.Vars "database_instance_name"}}" + database_version = "MYSQL_8_0" + region = "us-central1" + settings { + tier = "db-f1-micro" + backup_configuration { + enabled = true + binary_log_enabled = true + } + + ip_configuration { + + // Datastream IPs will vary by region. + authorized_networks { + value = "34.71.242.81" + } + + authorized_networks { + value = "34.72.28.29" + } + + authorized_networks { + value = "34.67.6.157" + } + + authorized_networks { + value = "34.67.234.134" + } + + authorized_networks { + value = "34.72.239.218" + } + } + } + + deletion_protection = {{index $.Vars "deletion_protection"}} +} + +resource "google_sql_database" "db" { + instance = google_sql_database_instance.instance.name + name = "db" +} + +resource "random_password" "pwd" { + length = 16 + special = false +} + +resource "google_sql_user" "user" { + name = "user" + instance = google_sql_database_instance.instance.name + host = "%" + password = random_password.pwd.result +} + +resource "google_datastream_connection_profile" "source_connection_profile" { + display_name = "Source connection profile" + location = "us-central1" + connection_profile_id = "{{index $.Vars "source_connection_profile_id"}}" + + mysql_profile { + hostname = google_sql_database_instance.instance.public_ip_address + username = google_sql_user.user.name + password = google_sql_user.user.password + } +} + +resource "google_datastream_connection_profile" "destination_connection_profile" { + display_name = "Connection profile" + location = "us-central1" + connection_profile_id = "{{index $.Vars "destination_connection_profile_id"}}" + + bigquery_profile {} +} + +resource "google_datastream_stream" "{{$.PrimaryResourceId}}" { + stream_id = "{{index $.Vars "stream_id"}}" + location = "us-central1" + display_name = "my stream" + source_config { + source_connection_profile = google_datastream_connection_profile.source_connection_profile.id + mysql_source_config {} + } + destination_config { + destination_connection_profile = google_datastream_connection_profile.destination_connection_profile.id + 
bigquery_destination_config { + source_hierarchy_datasets { + dataset_template { + location = "us-central1" + } + project_id = google_project.cross-project-dataset.project_id + } + } + } + + backfill_none { + } +} From 93ef242a5c3d0ff48a44c67fdd15ee4801fa4ddb Mon Sep 17 00:00:00 2001 From: Weston Haught Date: Wed, 10 Sep 2025 14:58:56 -0700 Subject: [PATCH 172/201] Add subPath field to volume mounts (#15060) --- mmv1/products/cloudrun/Service.yaml | 4 ++++ mmv1/products/cloudrunv2/Job.yaml | 4 ++++ mmv1/products/cloudrunv2/Service.yaml | 4 ++++ mmv1/products/cloudrunv2/WorkerPool.yaml | 4 ++++ .../services/cloudrun/resource_cloud_run_service_test.go.tmpl | 1 + .../cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl | 1 + .../cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl | 1 + .../cloudrunv2/resource_cloud_run_v2_worker_pool_test.go.tmpl | 1 + 8 files changed, 20 insertions(+) diff --git a/mmv1/products/cloudrun/Service.yaml b/mmv1/products/cloudrun/Service.yaml index 6e9415daed92..5eb150c638d1 100644 --- a/mmv1/products/cloudrun/Service.yaml +++ b/mmv1/products/cloudrun/Service.yaml @@ -559,6 +559,10 @@ properties: Path within the container at which the volume should be mounted. Must not contain ':'. required: true + - name: 'subPath' + type: String + description: |- + Path within the volume from which the container's volume should be mounted. - name: 'name' type: String description: |- diff --git a/mmv1/products/cloudrunv2/Job.yaml b/mmv1/products/cloudrunv2/Job.yaml index 4e4b200d4cfe..a8b23e5e38bd 100644 --- a/mmv1/products/cloudrunv2/Job.yaml +++ b/mmv1/products/cloudrunv2/Job.yaml @@ -440,6 +440,10 @@ properties: description: |- Path within the container at which the volume should be mounted. Must not contain ':'. For Cloud SQL volumes, it can be left empty, or must otherwise be /cloudsql. All instances defined in the Volume will be available as /cloudsql/[instance]. For more information on Cloud SQL volumes, visit https://cloud.google.com/sql/docs/mysql/connect-run required: true + - name: 'subPath' + type: String + description: |- + Path within the volume from which the container's volume should be mounted. - name: 'workingDir' type: String description: |- diff --git a/mmv1/products/cloudrunv2/Service.yaml b/mmv1/products/cloudrunv2/Service.yaml index 9482b22054af..4b41110e7e4e 100644 --- a/mmv1/products/cloudrunv2/Service.yaml +++ b/mmv1/products/cloudrunv2/Service.yaml @@ -591,6 +591,10 @@ properties: description: |- Path within the container at which the volume should be mounted. Must not contain ':'. For Cloud SQL volumes, it can be left empty, or must otherwise be /cloudsql. All instances defined in the Volume will be available as /cloudsql/[instance]. For more information on Cloud SQL volumes, visit https://cloud.google.com/sql/docs/mysql/connect-run required: true + - name: 'subPath' + type: String + description: |- + Path within the volume from which the container's volume should be mounted. - name: 'workingDir' type: String description: |- diff --git a/mmv1/products/cloudrunv2/WorkerPool.yaml b/mmv1/products/cloudrunv2/WorkerPool.yaml index 96577ec01961..3252ee267404 100644 --- a/mmv1/products/cloudrunv2/WorkerPool.yaml +++ b/mmv1/products/cloudrunv2/WorkerPool.yaml @@ -478,6 +478,10 @@ properties: description: |- Path within the container at which the volume should be mounted. Must not contain ':'. For Cloud SQL volumes, it can be left empty, or must otherwise be /cloudsql. All instances defined in the Volume will be available as /cloudsql/[instance]. 
For more information on Cloud SQL volumes, visit https://cloud.google.com/sql/docs/mysql/connect-run required: true + - name: 'subPath' + type: String + description: |- + Path within the volume from which the container's volume should be mounted. - name: 'workingDir' type: String description: |- diff --git a/mmv1/third_party/terraform/services/cloudrun/resource_cloud_run_service_test.go.tmpl b/mmv1/third_party/terraform/services/cloudrun/resource_cloud_run_service_test.go.tmpl index e2370c664c30..61249e1febb6 100644 --- a/mmv1/third_party/terraform/services/cloudrun/resource_cloud_run_service_test.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudrun/resource_cloud_run_service_test.go.tmpl @@ -1406,6 +1406,7 @@ resource "google_cloud_run_service" "default" { volume_mounts { name = "vol1" mount_path = "/mnt/vol1" + sub_path = "/LM01" } } volumes { diff --git a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl index d58b6d7fede3..719625f20258 100644 --- a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_job_test.go.tmpl @@ -380,6 +380,7 @@ func testAccCloudRunV2Job_cloudrunv2JobWithGcsVolume(context map[string]interfac volume_mounts { name = "gcs" mount_path = "/mnt/gcs" + sub_path = "/LM01" } } volumes { diff --git a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl index 0a51d58773d3..f864687aafab 100644 --- a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.tmpl @@ -366,6 +366,7 @@ resource "google_cloud_run_v2_service" "default" { volume_mounts { name = "gcs" mount_path = "/mnt/landsat" + sub_path = "/LM01" } resources { cpu_idle = true diff --git a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_worker_pool_test.go.tmpl b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_worker_pool_test.go.tmpl index cd1640ba5ccd..1b23ffb825cf 100644 --- a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_worker_pool_test.go.tmpl +++ b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_worker_pool_test.go.tmpl @@ -238,6 +238,7 @@ resource "google_cloud_run_v2_worker_pool" "default" { volume_mounts { name = "gcs" mount_path = "/mnt/landsat" + sub_path = "/LM01" } resources { limits = { From e0452656352b8ffb16aa2a8651ecadb6f072cc21 Mon Sep 17 00:00:00 2001 From: ericzzzzzzz <102683393+ericzzzzzzz@users.noreply.github.com> Date: Thu, 11 Sep 2025 13:05:46 -0400 Subject: [PATCH 173/201] Add google secure source manager hook resource (#15050) --- mmv1/products/securesourcemanager/Hook.yaml | 158 ++++++++++++++++++ .../secure_source_manager_hook_basic.tf.tmpl | 25 +++ ...urce_manager_hook_pushoption_empty.tf.tmpl | 27 +++ ...re_source_manager_hook_with_fields.tf.tmpl | 29 ++++ ..._secure_source_manager_hook_update_test.go | 109 ++++++++++++ 5 files changed, 348 insertions(+) create mode 100644 mmv1/products/securesourcemanager/Hook.yaml create mode 100644 mmv1/templates/terraform/examples/secure_source_manager_hook_basic.tf.tmpl create mode 100644 mmv1/templates/terraform/examples/secure_source_manager_hook_pushoption_empty.tf.tmpl create 
mode 100644 mmv1/templates/terraform/examples/secure_source_manager_hook_with_fields.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_hook_update_test.go diff --git a/mmv1/products/securesourcemanager/Hook.yaml b/mmv1/products/securesourcemanager/Hook.yaml new file mode 100644 index 000000000000..81c3d0aa17c8 --- /dev/null +++ b/mmv1/products/securesourcemanager/Hook.yaml @@ -0,0 +1,158 @@ +# Copyright 2025 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- +name: Hook +description: Hook is a user-defined HTTP callback triggered by an event. +references: + guides: + Official Documentation: https://cloud.google.com/secure-source-manager/docs/overview +docs: null +id_format: projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/hooks/{{hook_id}} +base_url: projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/hooks?hook_id={{hook_id}} +self_link: projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/hooks/{{hook_id}} +import_format: + - projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/hooks/{{hook_id}} + - "{{hook_id}}" +timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 +autogen_async: true +update_verb: PATCH +update_mask: true +async: + actions: + - create + - delete + type: OpAsync + operation: + base_url: "{{op_id}}" + result: + resource_inside_response: false +custom_code: null +examples: + - name: secure_source_manager_hook_basic + primary_resource_id: basic + vars: + hook_id: my-basic-hook + repository_id: my-basic-repository + instance_id: my-basic-instance + prevent_destroy: "true" + deletion_policy: '"PREVENT"' + test_vars_overrides: + prevent_destroy: "false" + 'deletion_policy': '"DELETE"' + oics_vars_overrides: + prevent_destroy: "false" + 'deletion_policy': '"DELETE"' + ignore_read_extra: + - 'deletion_policy' + - name: secure_source_manager_hook_with_fields + primary_resource_id: default + vars: + hook_id: my-initial-hook + repository_id: my-initial-repository + instance_id: my-initial-instance + prevent_destroy: "true" + deletion_policy: '"PREVENT"' + test_vars_overrides: + prevent_destroy: "false" + 'deletion_policy': '"DELETE"' + oics_vars_overrides: + prevent_destroy: "false" + 'deletion_policy': '"DELETE"' + ignore_read_extra: + - 'deletion_policy' +parameters: + - name: hook_id + type: String + description: | + The ID for the Hook. + url_param_only: true + required: true + - name: location + type: String + description: | + The location for the Repository. + url_param_only: true + required: true + - name: repository_id + type: String + description: | + The ID for the Repository. + url_param_only: true + required: true +properties: + - name: name + type: String + description: | + A unique identifier for a Hook. 
The name should be of the format: + `projects/{project}/locations/{location_id}/repositories/{repository_id}/hooks/{hook_id}` + output: true + - name: targetUri + type: String + description: | + The target URI to which the payloads will be delivered. + required: true + - name: disabled + type: Boolean + description: | + Determines if the hook disabled or not. + Set to true to stop sending traffic. + - name: events + type: Array + description: | + The events that trigger hook on. + item_type: + type: Enum + enum_values: + - PUSH + - PULL_REQUEST + default_from_api: true + - name: createTime + type: Time + description: | + Create timestamp. + output: true + - name: updateTime + type: Time + description: | + Update timestamp. + output: true + - name: uid + type: String + description: | + Unique identifier of the hook. + output: true + - name: sensitiveQueryString + type: String + sensitive: true + ignore_read: true + description: | + The sensitive query string to be appended to the target URI. + - name: pushOption + type: NestedObject + allow_empty_object: true + send_empty_value: true + default_from_api: true + description: | + The trigger option for push events. + properties: + - name: branchFilter + type: String + description: | + Trigger hook for matching branches only. + Specified as glob pattern. If empty or *, events for all branches are + reported. Examples: main, {main,release*}. + See https://pkg.go.dev/github.com/gobwas/glob documentation. diff --git a/mmv1/templates/terraform/examples/secure_source_manager_hook_basic.tf.tmpl b/mmv1/templates/terraform/examples/secure_source_manager_hook_basic.tf.tmpl new file mode 100644 index 000000000000..f83439cbdd96 --- /dev/null +++ b/mmv1/templates/terraform/examples/secure_source_manager_hook_basic.tf.tmpl @@ -0,0 +1,25 @@ +resource "google_secure_source_manager_instance" "instance" { + location = "us-central1" + instance_id = "{{index $.Vars "instance_id"}}" + + # Prevent accidental deletions. + deletion_policy = "{{index $.Vars "deletion_policy"}}" +} + +resource "google_secure_source_manager_repository" "repository" { + repository_id = "{{index $.Vars "repository_id"}}" + location = google_secure_source_manager_instance.instance.location + instance = google_secure_source_manager_instance.instance.name + + # Prevent accidental deletions. + deletion_policy = "{{index $.Vars "deletion_policy"}}" +} + +resource "google_secure_source_manager_hook" "basic" { + hook_id = "{{index $.Vars "hook_id"}}" + repository_id = google_secure_source_manager_repository.repository.repository_id + location = google_secure_source_manager_repository.repository.location + target_uri = "https://www.example.com" + # default event + events = ["PUSH"] +} diff --git a/mmv1/templates/terraform/examples/secure_source_manager_hook_pushoption_empty.tf.tmpl b/mmv1/templates/terraform/examples/secure_source_manager_hook_pushoption_empty.tf.tmpl new file mode 100644 index 000000000000..eb9087657767 --- /dev/null +++ b/mmv1/templates/terraform/examples/secure_source_manager_hook_pushoption_empty.tf.tmpl @@ -0,0 +1,27 @@ +resource "google_secure_source_manager_instance" "instance" { + location = "us-central1" + instance_id = "{{index $.Vars "instance_id"}}" + + # Prevent accidental deletions. 
+ deletion_policy = "{{index $.Vars "deletion_policy"}}" +} + +resource "google_secure_source_manager_repository" "repository" { + repository_id = "{{index $.Vars "repository_id"}}" + location = google_secure_source_manager_instance.instance.location + instance = google_secure_source_manager_instance.instance.name + + # Prevent accidental deletions. + deletion_policy = "{{index $.Vars "deletion_policy"}}" +} + +resource "google_secure_source_manager_hook" "push_option_empty" { + hook_id = "{{index $.Vars "hook_id"}}" + repository_id = google_secure_source_manager_repository.repository.repository_id + location = google_secure_source_manager_repository.repository.location + target_uri = "https://www.example.com" + # default event + events = ["PUSH"] + push_option { + } +} diff --git a/mmv1/templates/terraform/examples/secure_source_manager_hook_with_fields.tf.tmpl b/mmv1/templates/terraform/examples/secure_source_manager_hook_with_fields.tf.tmpl new file mode 100644 index 000000000000..eb18fd0a8201 --- /dev/null +++ b/mmv1/templates/terraform/examples/secure_source_manager_hook_with_fields.tf.tmpl @@ -0,0 +1,29 @@ +resource "google_secure_source_manager_instance" "instance" { + location = "us-central1" + instance_id = "{{index $.Vars "instance_id"}}" + + # Prevent accidental deletions. + deletion_policy = "{{index $.Vars "deletion_policy"}}" +} + +resource "google_secure_source_manager_repository" "repository" { + repository_id = "{{index $.Vars "repository_id"}}" + instance = google_secure_source_manager_instance.instance.name + location = google_secure_source_manager_instance.instance.location + + # Prevent accidental deletions. + deletion_policy = "{{index $.Vars "deletion_policy"}}" +} + +resource "google_secure_source_manager_hook" "default" { + hook_id = "{{index $.Vars "hook_id"}}" + location = google_secure_source_manager_repository.repository.location + repository_id = google_secure_source_manager_repository.repository.repository_id + target_uri = "https://www.example.com" + sensitive_query_string = "auth=fake_token" + disabled = false + push_option { + branch_filter = "main" + } + events = ["PUSH", "PULL_REQUEST"] +} diff --git a/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_hook_update_test.go b/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_hook_update_test.go new file mode 100644 index 000000000000..4bd3d1f7537a --- /dev/null +++ b/mmv1/third_party/terraform/services/securesourcemanager/resource_secure_source_manager_hook_update_test.go @@ -0,0 +1,109 @@ +package securesourcemanager_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccSecureSourceManagerHook_secureSourceManagerHookWithFieldsExample_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "deletion_policy": "DELETE", + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccSecureSourceManagerHook_secureSourceManagerHookWithFieldsExample_full(context), + }, + { + ResourceName: "google_secure_source_manager_hook.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"hook_id", "location", "repository_id", "sensitive_query_string"}, + }, + { + Config: 
testAccSecureSourceManagerHook_secureSourceManagerHookWithFieldsExample_update(context), + }, + { + ResourceName: "google_secure_source_manager_hook.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"hook_id", "location", "repository_id", "sensitive_query_string"}, + }, + }, + }) +} + +func testAccSecureSourceManagerHook_secureSourceManagerHookWithFieldsExample_full(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_secure_source_manager_instance" "instance" { + location = "us-central1" + instance_id = "tf-test-my-initial-instance%{random_suffix}" + + # Prevent accidental deletions. + deletion_policy = "%{deletion_policy}" +} + +resource "google_secure_source_manager_repository" "repository" { + repository_id = "tf-test-my-initial-repository%{random_suffix}" + instance = google_secure_source_manager_instance.instance.name + location = google_secure_source_manager_instance.instance.location + + # Prevent accidental deletions. + deletion_policy = "%{deletion_policy}" +} + +resource "google_secure_source_manager_hook" "default" { + hook_id = "tf-test-my-initial-hook%{random_suffix}" + location = google_secure_source_manager_repository.repository.location + repository_id = google_secure_source_manager_repository.repository.repository_id + events = ["PUSH", "PULL_REQUEST"] + push_option { + branch_filter = "main" + } + target_uri = "https://www.example.com" + sensitive_query_string = "auth=fake_token" + disabled = false +} +`, context) +} + +func testAccSecureSourceManagerHook_secureSourceManagerHookWithFieldsExample_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_secure_source_manager_instance" "instance" { + location = "us-central1" + instance_id = "tf-test-my-initial-instance%{random_suffix}" + + # Prevent accidental deletions. + deletion_policy = "%{deletion_policy}" +} + +resource "google_secure_source_manager_repository" "repository" { + repository_id = "tf-test-my-initial-repository%{random_suffix}" + instance = google_secure_source_manager_instance.instance.name + location = google_secure_source_manager_instance.instance.location + + # Prevent accidental deletions. 
+ deletion_policy = "%{deletion_policy}" +} + +resource "google_secure_source_manager_hook" "default" { + hook_id = "tf-test-my-initial-hook%{random_suffix}" + location = google_secure_source_manager_repository.repository.location + repository_id = google_secure_source_manager_repository.repository.repository_id + events = ["PUSH", "PULL_REQUEST"] + push_option { + } + target_uri = "https://www.update.com" + sensitive_query_string = "auth=fake_token" + disabled = true +} +`, context) +} From 52e9c674a759cb0b31852adbde9ec65ec47a7e29 Mon Sep 17 00:00:00 2001 From: Kushal Lunkad Date: Thu, 11 Sep 2025 23:11:29 +0530 Subject: [PATCH 174/201] Added GET data source references functionality (#15087) --- .../provider/provider_mmv1_resources.go.tmpl | 1 + ..._source_backup_dr_data_source_reference.go | 134 ++++++++++++++++++ ...ce_backup_dr_data_source_reference_test.go | 126 ++++++++++++++++ ...kup_dr_data_source_reference.html.markdown | 64 +++++++++ 4 files changed, 325 insertions(+) create mode 100644 mmv1/third_party/terraform/website/docs/d/backup_dr_data_source_reference.html.markdown diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index e33c780cfdb0..5bde95fa7035 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -49,6 +49,7 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_backup_dr_data_source": backupdr.DataSourceGoogleCloudBackupDRDataSource(), "google_backup_dr_backup_vault": backupdr.DataSourceGoogleCloudBackupDRBackupVault(), "google_backup_dr_data_source_references": backupdr.DataSourceGoogleCloudBackupDRDataSourceReferences(), + "google_backup_dr_data_source_reference": backupdr.DataSourceGoogleCloudBackupDRDataSourceReference(), "google_beyondcorp_app_connection": beyondcorp.DataSourceGoogleBeyondcorpAppConnection(), "google_beyondcorp_app_connector": beyondcorp.DataSourceGoogleBeyondcorpAppConnector(), "google_beyondcorp_app_gateway": beyondcorp.DataSourceGoogleBeyondcorpAppGateway(), diff --git a/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_data_source_reference.go b/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_data_source_reference.go index b541fa7e44c6..a3153c008bd6 100644 --- a/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_data_source_reference.go +++ b/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_data_source_reference.go @@ -7,6 +7,7 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + "google.golang.org/api/googleapi" ) func DataSourceGoogleCloudBackupDRDataSourceReferences() *schema.Resource { @@ -165,3 +166,136 @@ func flattenDataSourceReferences(items []interface{}) ([]map[string]interface{}, } return references, nil } + +func DataSourceGoogleCloudBackupDRDataSourceReference() *schema.Resource { + return &schema.Resource{ + Read: dataSourceGoogleCloudBackupDRDataSourceReferenceRead, + Schema: map[string]*schema.Schema{ + "data_source_reference_id": { + Type: schema.TypeString, + Required: true, + Description: "The `id` of the data source reference.", + }, + "location": { + Type: schema.TypeString, + Required: true, + Description: "The location of the data source reference.", + }, + "project": { 
+ Type: schema.TypeString, + Optional: true, + Computed: true, + Description: "The ID of the project in which the resource belongs.", + }, + "name": { + Type: schema.TypeString, + Computed: true, + }, + "data_source": { + Type: schema.TypeString, + Computed: true, + Description: "The underlying data source resource.", + }, + "backup_config_state": { + Type: schema.TypeString, + Computed: true, + Description: "The state of the backup config for the data source.", + }, + "backup_count": { + Type: schema.TypeInt, + Computed: true, + Description: "The number of backups for the data source.", + }, + "last_backup_state": { + Type: schema.TypeString, + Computed: true, + Description: "The state of the last backup.", + }, + "last_successful_backup_time": { + Type: schema.TypeString, + Computed: true, + Description: "The last time a successful backup was made.", + }, + "gcp_resource_name": { + Type: schema.TypeString, + Computed: true, + Description: "The GCP resource name for the data source.", + }, + "resource_type": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} + +func dataSourceGoogleCloudBackupDRDataSourceReferenceRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + location := d.Get("location").(string) + dataSourceReferenceId := d.Get("data_source_reference_id").(string) + url := fmt.Sprintf("%sprojects/%s/locations/%s/dataSourceReferences/%s", config.BackupDRBasePath, project, location, dataSourceReferenceId) + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: project, + RawURL: url, + UserAgent: userAgent, + }) + if err != nil { + if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == 404 { + d.SetId("") // Resource not found + return nil + } + return fmt.Errorf("Error reading DataSourceReference: %s", err) + } + + if err := flattenDataSourceReference(d, res); err != nil { + return err + } + + d.SetId(res["name"].(string)) + return nil +} + +func flattenDataSourceReference(d *schema.ResourceData, data map[string]interface{}) error { + ref, err := flattenDataSourceReferenceToMap(data) + if err != nil { + return err + } + for k, v := range ref { + if err := d.Set(k, v); err != nil { + return fmt.Errorf("Error setting %s: %s", k, err) + } + } + return nil +} + +func flattenDataSourceReferenceToMap(data map[string]interface{}) (map[string]interface{}, error) { + ref := map[string]interface{}{ + "name": data["name"], + "data_source": data["dataSource"], + "backup_config_state": data["dataSourceBackupConfigState"], + } + if v, ok := data["dataSourceBackupCount"].(string); ok { + if i, err := strconv.Atoi(v); err == nil { + ref["backup_count"] = i + } + } + if configInfo, ok := data["dataSourceBackupConfigInfo"].(map[string]interface{}); ok { + ref["last_backup_state"] = configInfo["lastBackupState"] + ref["last_successful_backup_time"] = configInfo["lastSuccessfulBackupConsistencyTime"] + } + if resourceInfo, ok := data["dataSourceGcpResourceInfo"].(map[string]interface{}); ok { + ref["gcp_resource_name"] = resourceInfo["gcpResourcename"] + ref["resource_type"] = resourceInfo["type"] + } + return ref, nil +} diff --git a/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_data_source_reference_test.go 
b/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_data_source_reference_test.go index a3c4e2175b75..8dcaa8456b85 100644 --- a/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_data_source_reference_test.go +++ b/mmv1/third_party/terraform/services/backupdr/data_source_backup_dr_data_source_reference_test.go @@ -146,3 +146,129 @@ data "google_backup_dr_data_source_references" "default" { } `, context) } + +func TestAccDataSourceGoogleBackupDRDataSourceReference_basic(t *testing.T) { + t.Parallel() + + dsRefDataSourceName := "data.google_backup_dr_data_source_reference.default" + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + Steps: []resource.TestStep{ + { + // All logic is now in a single HCL block and a single step. + Config: testAccDataSourceGoogleBackupDRDataSourceReference_basic(context), + Check: resource.ComposeTestCheckFunc( + // Check that the singular data source has been populated + resource.TestCheckResourceAttrSet(dsRefDataSourceName, "name"), + resource.TestCheckResourceAttrSet(dsRefDataSourceName, "data_source"), + resource.TestCheckResourceAttrSet(dsRefDataSourceName, "backup_config_state"), + resource.TestCheckResourceAttrSet(dsRefDataSourceName, "gcp_resource_name"), + ), + }, + }, + }) +} + +func testAccDataSourceGoogleBackupDRDataSourceReference_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "project" {} + +resource "google_service_account" "default" { + account_id = "tf-test-my-custom-%{random_suffix}" + display_name = "Custom SA for VM Instance" +} + +resource "google_sql_database_instance" "instance" { + name = "default-%{random_suffix}" + database_version = "MYSQL_8_0" + region = "us-central1" + deletion_protection = false + settings { + tier = "db-f1-micro" + availability_type = "ZONAL" + activation_policy = "ALWAYS" + } +} + +resource "google_backup_dr_backup_vault" "my-backup-vault" { + location ="us-central1" + backup_vault_id = "tf-test-bv-%{random_suffix}" + description = "This is a second backup vault built by Terraform." 
+ backup_minimum_enforced_retention_duration = "100000s" + labels = { + foo = "bar1" + bar = "baz1" + } + annotations = { + annotations1 = "bar1" + annotations2 = "baz1" + } + force_update = "true" + force_delete = "true" + allow_missing = "true" +} + +resource "google_backup_dr_backup_plan" "foo" { + location = "us-central1" + backup_plan_id = "tf-test-bp-test-%{random_suffix}" + resource_type = "sqladmin.googleapis.com/Instance" + backup_vault = google_backup_dr_backup_vault.my-backup-vault.name + + backup_rules { + rule_id = "rule-1" + backup_retention_days = 2 + + standard_schedule { + recurrence_type = "HOURLY" + hourly_frequency = 6 + time_zone = "UTC" + + backup_window { + start_hour_of_day = 12 + end_hour_of_day = 18 + } + } + } +} + +resource "google_backup_dr_backup_plan_association" "bpa" { + location = "us-central1" + backup_plan_association_id = "tf-test-bpa-test-%{random_suffix}" + resource = "projects/${data.google_project.project.project_id}/instances/${google_sql_database_instance.instance.name}" + resource_type= "sqladmin.googleapis.com/Instance" + backup_plan = google_backup_dr_backup_plan.foo.name + depends_on = [ google_sql_database_instance.instance ] +} + +data "google_backup_dr_data_source_references" "all_refs" { + project = data.google_project.project.project_id + location = "us-central1" + resource_type = "sqladmin.googleapis.com/Instance" + depends_on = [google_backup_dr_backup_plan_association.bpa] +} + +locals { + // Directly get the name from the first item in the list. + ds_ref_name = data.google_backup_dr_data_source_references.all_refs.data_source_references[0].name + + // Split the name string and take the last element, which is the ID. + data_source_reference_id = element(split("/", local.ds_ref_name), 5) +} + +// Now, use the singular data source to fetch the specific reference by its ID. +data "google_backup_dr_data_source_reference" "default" { + project = data.google_project.project.project_id + location = "us-central1" + data_source_reference_id = local.data_source_reference_id +} + +`, context) +} diff --git a/mmv1/third_party/terraform/website/docs/d/backup_dr_data_source_reference.html.markdown b/mmv1/third_party/terraform/website/docs/d/backup_dr_data_source_reference.html.markdown new file mode 100644 index 000000000000..81dec39c4ee2 --- /dev/null +++ b/mmv1/third_party/terraform/website/docs/d/backup_dr_data_source_reference.html.markdown @@ -0,0 +1,64 @@ +--- +subcategory: "Backup and DR Service" +description: |- + Get information about a specific Backup and DR data source reference. +--- + +# google_backup_dr_data_source_reference + +Get information about a specific Backup and DR data source reference. + +~> **Warning:** This resource is in beta, and should be used with the terraform-provider-google-beta provider. +See [Provider Versions](https://terraform.io/docs/providers/google/guides/provider_versions.html) for more details on beta resources. + +## Example Usage + +This example shows how to get the details of a specific data source reference by its ID. The ID is often obtained from the `google_backup_dr_data_source_references` data source. 
+ +```hcl +data "google_backup_dr_data_source_references" "all_sql_references" { + location = "us-central1" + resource_type = "sqladmin.googleapis.com/Instance" +} + +data "google_backup_dr_data_source_reference" "my_reference" { + location = "us-central1" + data_source_reference_id = element(split("/", data.google_backup_dr_data_source_references.all_sql_references.data_source_references[0].name), 5) +} + +output "specific_reference_gcp_resource_name" { + value = data.google_backup_dr_data_source_reference.my_reference.gcp_resource_name +} +``` + +## Argument Reference + +The following arguments are supported: + +* `location `- (Required) The location of the data source references. + +* `data_source_reference_id` - (Required) The id of the data source reference. + +* `project` - (Optional) The ID of the project in which the resource belongs. If it is not provided, the provider project is used. + +## Attributes Reference + +In addition to the arguments listed above, the following attributes are exported: + + 1. `name`- The full name of the data source reference. + + 2. `data_source`- The underlying data source resource. + + 3. `backup_config_state`- The state of the backup config for the data source. + + 4. `backup_count`- The number of backups for the data source. + + 5. `last_backup_state`- The state of the last backup. + + 6. `last_successful_backup_time`- The last time a successful backup was made. + + 7. `gcp_resource_name`- The GCP resource name for the data source. + + 8. `resource_type`- The type of the referenced resource. + +See [google_backup_dr_data_source_reference](https://registry.terraform.io/providers/hashicorp/google/latest/docs/resources/backup_dr_data_source_reference) resource for details of the available attributes. From 3ed6187b55822136fe25b3bd8d545763ac513a45 Mon Sep 17 00:00:00 2001 From: zhihaos Date: Thu, 11 Sep 2025 14:42:15 -0400 Subject: [PATCH 175/201] Skip the acceptance test for Dialogflow CMEK (#15113) --- mmv1/products/dialogflow/EncryptionSpec.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/mmv1/products/dialogflow/EncryptionSpec.yaml b/mmv1/products/dialogflow/EncryptionSpec.yaml index cc9a8fda9cfc..b57f520a7856 100644 --- a/mmv1/products/dialogflow/EncryptionSpec.yaml +++ b/mmv1/products/dialogflow/EncryptionSpec.yaml @@ -67,6 +67,9 @@ examples: primary_resource_id: "my-encryption-spec" min_version: "beta" exclude_import_test: true + # The test resulted in an increasing number of CMEK DBs + # which we don't have ways to handle db deletion for deleted cmek projects. + exclude_test: true vars: project_id: 'my-proj' kms_keyring: 'my-keyring' From c4be68fb3a7da17d2520ef847392bab7f45152c1 Mon Sep 17 00:00:00 2001 From: "Laurenz K." 
<45950275+laurenz-k@users.noreply.github.com> Date: Thu, 11 Sep 2025 21:06:10 +0200 Subject: [PATCH 176/201] Use map instead of BAC API type as intermediate type for TGC cai2hcl conversion of networksecurity BackendAuthenticationConfig (#15117) --- .../backend_authentication_config.go | 23 ++++++++----------- .../cai2hcl/services/networksecurity/utils.go | 8 +++++++ 2 files changed, 17 insertions(+), 14 deletions(-) diff --git a/mmv1/third_party/cai2hcl/services/networksecurity/backend_authentication_config.go b/mmv1/third_party/cai2hcl/services/networksecurity/backend_authentication_config.go index 9590dbf3dd41..1ea40447ae11 100644 --- a/mmv1/third_party/cai2hcl/services/networksecurity/backend_authentication_config.go +++ b/mmv1/third_party/cai2hcl/services/networksecurity/backend_authentication_config.go @@ -6,7 +6,6 @@ import ( "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/cai2hcl/common" "github.com/GoogleCloudPlatform/terraform-google-conversion/v6/caiasset" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - netsecapi "google.golang.org/api/networksecurity/v1" ) // BackendAuthenticationConfigAssetType is the CAI asset type name. @@ -75,20 +74,16 @@ func (c *BackendAuthenticationConfigConverter) convertResourceData(asset *caiass func flattenBackendAuthenticationConfig(resource *caiasset.AssetResource) (map[string]any, error) { result := make(map[string]any) - var backendAuthenticationConfig *netsecapi.BackendAuthenticationConfig - if err := common.DecodeJSON(resource.Data, &backendAuthenticationConfig); err != nil { - return nil, err - } - - result["name"] = flattenName(backendAuthenticationConfig.Name) - result["labels"] = backendAuthenticationConfig.Labels - result["description"] = backendAuthenticationConfig.Description - result["client_certificate"] = backendAuthenticationConfig.ClientCertificate - result["trust_config"] = backendAuthenticationConfig.TrustConfig - result["well_known_roots"] = backendAuthenticationConfig.WellKnownRoots - result["project"] = flattenProjectName(backendAuthenticationConfig.Name) + resourceData := resource.Data - result["location"] = resource.Location + result["name"] = flattenName(resourceData["name"].(string)) + result["labels"] = resourceData["labels"] + result["description"] = resourceData["description"] + result["client_certificate"] = resourceData["clientCertificate"] + result["trust_config"] = resourceData["trustConfig"] + result["well_known_roots"] = resourceData["wellKnownRoots"] + result["project"] = flattenProjectName(resourceData["name"].(string)) + result["location"] = flattenLocation(resourceData["name"].(string)) return result, nil } diff --git a/mmv1/third_party/cai2hcl/services/networksecurity/utils.go b/mmv1/third_party/cai2hcl/services/networksecurity/utils.go index 97a4b53ffaed..a3255c9bb653 100644 --- a/mmv1/third_party/cai2hcl/services/networksecurity/utils.go +++ b/mmv1/third_party/cai2hcl/services/networksecurity/utils.go @@ -14,3 +14,11 @@ func flattenProjectName(name string) string { } return tokens[1] } + +func flattenLocation(name string) string { + tokens := strings.Split(name, "/") + if len(tokens) < 6 || tokens[2] != "locations" { + return "" + } + return tokens[3] +} From eb45d62271afee42110b78b3bcf14a50d8c8afe6 Mon Sep 17 00:00:00 2001 From: Riley Karson Date: Thu, 11 Sep 2025 13:24:49 -0700 Subject: [PATCH 177/201] Fix devsite link w prod link in google_memorystore_instance_desired_user_created_endpoints (#15122) Co-authored-by: Shuya Ma <87669292+shuyama1@users.noreply.github.com> --- 
.../memorystore/InstanceDesiredUserCreatedEndpoints.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mmv1/products/memorystore/InstanceDesiredUserCreatedEndpoints.yaml b/mmv1/products/memorystore/InstanceDesiredUserCreatedEndpoints.yaml index 52b526675119..256572a93d52 100644 --- a/mmv1/products/memorystore/InstanceDesiredUserCreatedEndpoints.yaml +++ b/mmv1/products/memorystore/InstanceDesiredUserCreatedEndpoints.yaml @@ -19,7 +19,7 @@ description: | docs: note: | Please ensure your connections meet the requirements outlined at - https://cloud.devsite.corp.google.com/memorystore/docs/valkey/about-multiple-vpc-networking#application_connection_requirements. + https://cloud.google.com/memorystore/docs/valkey/about-multiple-vpc-networking#application_connection_requirements. If you remove a connections item from the resource, the corresponding forwarding rule will no longer be functioning. If the corresponding forwarding rule is represented in your terraform configuration it is recommended to delete that `google_compute_forwarding_rule` resource at the same time. From 757ebf8383d4362786ccff45899fac65ed7d775a Mon Sep 17 00:00:00 2001 From: chenir0219 Date: Thu, 11 Sep 2025 20:37:04 +0000 Subject: [PATCH 178/201] make label updatable for google_compute_instance under initialize_params (#15081) --- .../compute/resource_compute_instance.go.tmpl | 61 ++++++++++++++- .../resource_compute_instance_test.go.tmpl | 74 +++++++++++++++++++ 2 files changed, 134 insertions(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl index 718a857dab8d..4f9692933dd9 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl @@ -7,6 +7,7 @@ import ( "errors" "fmt" "log" + "net/http" "strconv" "strings" "time" @@ -469,7 +470,6 @@ func ResourceComputeInstance() *schema.Resource { Optional: true, AtLeastOneOf: initializeParamsKeys, Computed: true, - ForceNew: true, Description: `A set of key/value label pairs assigned to the disk.`, }, @@ -2732,6 +2732,31 @@ func resourceComputeInstanceUpdate(d *schema.ResourceData, meta interface{}) err } } + if d.HasChange("boot_disk") { + //default behavior for the disk is to have the same name as the instance + diskName := instance.Name + if v := tpgresource.GetResourceNameFromSelfLink(d.Get("boot_disk.0.source").(string)); v != diskName { + diskName = v + } + + disk, err := config.NewComputeClient(userAgent).Disks.Get(project, zone, diskName).Do() + if err != nil { + return fmt.Errorf("Error getting boot disk: %s", err) + } + + obj := make(map[string]interface{}) + + if d.HasChange("boot_disk.0.initialize_params.0.labels") { + obj["labels"] = tpgresource.ConvertStringMap(d.Get("boot_disk.0.initialize_params.0.labels").(map[string]interface{})) + obj["labelFingerprint"] = disk.LabelFingerprint + url := "{{"{{"}}ComputeBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/zones/{{"{{"}}zone{{"}}"}}/disks/{{"{{"}}name{{"}}"}}/setLabels" + err := updateDisk(d, config, userAgent, project, url, obj) + if err != nil { + return err + } + } + } + if d.HasChange("attached_disk") { o, n := d.GetChange("attached_disk") @@ -3836,3 +3861,37 @@ func CheckForCommonAliasIp(old, new *compute.NetworkInterface) []*compute.AliasI } return resultAliasIpRanges } + +func updateDisk(d *schema.ResourceData, config *transport_tpg.Config, 
userAgent, project, patchUrl string, obj map[string]interface{}) error { + billingProject := project + url, err := tpgresource.ReplaceVars(d, config, patchUrl) + if err != nil { + return err + } + headers := make(http.Header) + if bp, err := tpgresource.GetBillingProject(d, config); err != nil { + billingProject = bp + } + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "POST", + Project: billingProject, + RawURL: url, + UserAgent: userAgent, + Body: obj, + Timeout: d.Timeout(schema.TimeoutUpdate), + Headers: headers, + }) + if err != nil { + return fmt.Errorf("Error updating Disk %q: %s", d.Id(), err) + } + err = ComputeOperationWaitTime( + config, res, project, "Updating Disk", userAgent, + d.Timeout(schema.TimeoutUpdate)) + if err != nil { + return err + } + return nil +} + diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl index b3a76dd0f26f..c0f189b2ae75 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl @@ -1230,6 +1230,51 @@ func TestAccComputeInstance_attachDisk_forceAttach(t *testing.T) { }) } +func TestAccComputeInstance_bootDiskUpdate(t *testing.T) { + t.Parallel() + + var instance compute.Instance + context1 := map[string]interface{}{ + "instance_name": fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)), + "disk_size": 10, + "labels": "bar", + } + + context2 := map[string]interface{}{ + "instance_name": context1["instance_name"].(string), + "disk_size": 10, + "labels": "baz", + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeInstance_bootDiskUpdate(context1), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeInstanceExists( + t, "google_compute_instance.foobar", &instance), + ), + }, + { + Config: testAccComputeInstance_bootDiskUpdate(context2), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + // Check that the update is done in-place + plancheck.ExpectResourceAction("google_compute_instance.foobar", plancheck.ResourceActionUpdate), + }, + }, + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeInstanceExists( + t, "google_compute_instance.foobar", &instance), + ), + }, + }, + }) +} + func TestAccComputeInstance_attachedDiskUpdate(t *testing.T) { t.Parallel() @@ -7341,6 +7386,35 @@ resource "google_compute_resource_policy" "instance_schedule2" { `, instance, schedule1, schedule2) } +func testAccComputeInstance_bootDiskUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_compute_image" "my_image" { + family = "debian-11" + project = "debian-cloud" +} + +resource "google_compute_instance" "foobar" { + name = "%{instance_name}" + machine_type = "e2-medium" + zone = "us-central1-a" + + boot_disk { + initialize_params { + image = data.google_compute_image.my_image.self_link + size = %{disk_size} + labels = { + "foo" = "%{labels}" + } + } + } + + network_interface { + network = "default" + } +} +`, context) +} + func testAccComputeInstance_attachedDisk(disk, instance string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { From 
13543acf88bc044848337e62f8517b72be115c17 Mon Sep 17 00:00:00 2001 From: Raj Anand <88097156+raazanand@users.noreply.github.com> Date: Fri, 12 Sep 2025 02:30:04 +0530 Subject: [PATCH 179/201] All squash support (#15115) --- mmv1/products/netapp/Volume.yaml | 12 + .../resource_netapp_volume_test.go.tmpl | 352 ++++++++++++++++++ 2 files changed, 364 insertions(+) diff --git a/mmv1/products/netapp/Volume.yaml b/mmv1/products/netapp/Volume.yaml index 9bd31101c5d6..b32c9aa69782 100644 --- a/mmv1/products/netapp/Volume.yaml +++ b/mmv1/products/netapp/Volume.yaml @@ -191,6 +191,18 @@ properties: type: Boolean description: | If enabled (true) the rule defines read and write access for clients matching the 'allowedClients' specification. It enables nfs clients to mount using 'privacy' kerberos security mode. The 'kerberos5pReadOnly' value is ignored if this is enabled. + - name: 'squashMode' + type: Enum + description: |- + SquashMode defines how remote user privileges are restricted when accessing an NFS export. It controls how the user identities (like root) are mapped to anonymous users to limit access and enforce security. + enum_values: + - 'NO_ROOT_SQUASH' + - 'ROOT_SQUASH' + - 'ALL_SQUASH' + - name: 'anonUid' + type: Integer + description: |- + An integer representing the anonymous user ID. Range is 0 to 4294967295. Required when `squash_mode` is `ROOT_SQUASH` or `ALL_SQUASH`. - name: 'protocols' type: Array description: | diff --git a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go.tmpl b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go.tmpl index a8d8f2b9b5d9..d94a19942fb7 100644 --- a/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go.tmpl +++ b/mmv1/third_party/terraform/services/netapp/resource_netapp_volume_test.go.tmpl @@ -970,4 +970,356 @@ data "google_compute_network" "default" { } `, context) } + +func TestAccNetappVolume_volumeExportPolicyWithSquashMode(t *testing.T) { + context := map[string]interface{}{ + "network_name": acctest.BootstrapSharedServiceNetworkingConnection(t, "gcnv-network-config-3", acctest.ServiceNetworkWithParentService("netapp.servicenetworking.goog")), + "random_suffix": acctest.RandString(t, 10), + } + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckNetappVolumeDestroyProducer(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccNetappVolume_volumeExportPolicyWithSquashMode_noRootSquash(context), + }, + { + ResourceName: "google_netapp_volume.test_volume", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"restore_parameters", "location", "name", "deletion_policy", "labels", "terraform_labels"}, + }, + { + Config: testAccNetappVolume_volumeExportPolicyWithSquashMode_rootSquash(context), + }, + { + ResourceName: "google_netapp_volume.test_volume", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"restore_parameters", "location", "name", "deletion_policy", "labels", "terraform_labels"}, + }, + { + Config: testAccNetappVolume_volumeExportPolicyWithSquashMode_allSquash(context), + }, + { + ResourceName: "google_netapp_volume.test_volume", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"restore_parameters", "location", "name", "deletion_policy", "labels", 
"terraform_labels"}, + }, + { + Config: testAccNetappVolume_volumeExportPolicyWithSquashMode_noRootSquash_ReadNoneAccessType(context), + }, + { + ResourceName: "google_netapp_volume.test_volume", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"restore_parameters", "location", "name", "deletion_policy", "labels", "terraform_labels"}, + }, + { + Config: testAccNetappVolume_volumeExportPolicyWithSquashMode_rootSquash_readOnlyAccessType(context), + }, + { + ResourceName: "google_netapp_volume.test_volume", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"restore_parameters", "location", "name", "deletion_policy", "labels", "terraform_labels"}, + }, + { + Config: testAccNetappVolume_volumeExportPolicyWithSquashMode_rootSquash_readNoneAccessType(context), + }, + { + ResourceName: "google_netapp_volume.test_volume", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"restore_parameters", "location", "name", "deletion_policy", "labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccNetappVolume_volumeExportPolicyWithSquashMode_noRootSquash(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_netapp_storage_pool" "default" { + provider = google-beta + name = "tf-test-pool%{random_suffix}" + location = "us-west2" + service_level = "PREMIUM" + capacity_gib = "2048" + network = data.google_compute_network.default.id +} +resource "time_sleep" "wait_3_minutes" { + depends_on = [google_netapp_storage_pool.default] + create_duration = "3m" +} +resource "google_netapp_volume" "test_volume" { + provider = google-beta + location = "us-west2" + name = "tf-test-test-volume%{random_suffix}" + capacity_gib = "100" + share_name = "tf-test-test-volume%{random_suffix}" + storage_pool = google_netapp_storage_pool.default.name + protocols = ["NFSV3"] + export_policy { + rules { + access_type = "READ_WRITE" + allowed_clients = "0.0.0.0/0" + has_root_access = "true" + kerberos5_read_only = false + kerberos5_read_write = false + kerberos5i_read_only = false + kerberos5i_read_write = false + kerberos5p_read_only = false + kerberos5p_read_write = false + nfsv3 = true + nfsv4 = false + squash_mode = "NO_ROOT_SQUASH" + } + } +} +data "google_compute_network" "default" { + provider = google-beta + name = "%{network_name}" +} +`, context) +} + +func testAccNetappVolume_volumeExportPolicyWithSquashMode_noRootSquash_ReadNoneAccessType(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_netapp_storage_pool" "default" { + provider = google-beta + name = "tf-test-pool%{random_suffix}" + location = "us-west2" + service_level = "PREMIUM" + capacity_gib = "2048" + network = data.google_compute_network.default.id +} +resource "time_sleep" "wait_3_minutes" { + depends_on = [google_netapp_storage_pool.default] + create_duration = "3m" +} +resource "google_netapp_volume" "test_volume" { + provider = google-beta + location = "us-west2" + name = "tf-test-test-volume%{random_suffix}" + capacity_gib = "100" + share_name = "tf-test-test-volume%{random_suffix}" + storage_pool = google_netapp_storage_pool.default.name + protocols = ["NFSV3"] + export_policy { + rules { + access_type = "READ_NONE" + allowed_clients = "0.0.0.0/0" + has_root_access = "true" + kerberos5_read_only = false + kerberos5_read_write = false + kerberos5i_read_only = false + kerberos5i_read_write = false + kerberos5p_read_only = false + kerberos5p_read_write = false + nfsv3 = true + nfsv4 = 
false + squash_mode = "NO_ROOT_SQUASH" + } + } +} +data "google_compute_network" "default" { + provider = google-beta + name = "%{network_name}" +} +`, context) +} + +func testAccNetappVolume_volumeExportPolicyWithSquashMode_allSquash(context map[string]interface{}) string { + return acctest.Nprintf(` + resource "google_netapp_storage_pool" "default" { + provider = google-beta + name = "tf-test-pool%{random_suffix}" + location = "us-west2" + service_level = "PREMIUM" + capacity_gib = "2048" + network = data.google_compute_network.default.id + } + resource "time_sleep" "wait_3_minutes" { + depends_on = [google_netapp_storage_pool.default] + create_duration = "3m" + } + resource "google_netapp_volume" "test_volume" { + provider = google-beta + location = "us-west2" + name = "tf-test-test-volume%{random_suffix}" + capacity_gib = "100" + share_name = "tf-test-test-volume%{random_suffix}" + storage_pool = google_netapp_storage_pool.default.name + protocols = ["NFSV3"] + export_policy { + rules { + access_type = "READ_NONE" + allowed_clients = "0.0.0.0/0" + has_root_access = "false" + kerberos5_read_only = false + kerberos5_read_write = false + kerberos5i_read_only = false + kerberos5i_read_write = false + kerberos5p_read_only = false + kerberos5p_read_write = false + nfsv3 = true + nfsv4 = false + squash_mode = "ALL_SQUASH" + anon_uid = 65534 + } + } + } + + data "google_compute_network" "default" { + provider = google-beta + name = "%{network_name}" + } + `, context) +} + +func testAccNetappVolume_volumeExportPolicyWithSquashMode_rootSquash(context map[string]interface{}) string { + return acctest.Nprintf(` + resource "google_netapp_storage_pool" "default" { + provider = google-beta + name = "tf-test-pool%{random_suffix}" + location = "us-west2" + service_level = "PREMIUM" + capacity_gib = "2048" + network = data.google_compute_network.default.id + } + resource "time_sleep" "wait_3_minutes" { + depends_on = [google_netapp_storage_pool.default] + create_duration = "3m" + } + resource "google_netapp_volume" "test_volume" { + provider = google-beta + location = "us-west2" + name = "tf-test-test-volume%{random_suffix}" + capacity_gib = "100" + share_name = "tf-test-test-volume%{random_suffix}" + storage_pool = google_netapp_storage_pool.default.name + protocols = ["NFSV3"] + export_policy { + rules { + access_type = "READ_WRITE" + allowed_clients = "0.0.0.0/0" + has_root_access = "false" + kerberos5_read_only = false + kerberos5_read_write = false + kerberos5i_read_only = false + kerberos5i_read_write = false + kerberos5p_read_only = false + kerberos5p_read_write = false + nfsv3 = true + nfsv4 = false + squash_mode = "ROOT_SQUASH" + } + } + } + + data "google_compute_network" "default" { + provider = google-beta + name = "%{network_name}" + } + `, context) +} + +func testAccNetappVolume_volumeExportPolicyWithSquashMode_rootSquash_readOnlyAccessType(context map[string]interface{}) string { + return acctest.Nprintf(` + resource "google_netapp_storage_pool" "default" { + provider = google-beta + name = "tf-test-pool%{random_suffix}" + location = "us-west2" + service_level = "PREMIUM" + capacity_gib = "2048" + network = data.google_compute_network.default.id + } + resource "time_sleep" "wait_3_minutes" { + depends_on = [google_netapp_storage_pool.default] + create_duration = "3m" + } + resource "google_netapp_volume" "test_volume" { + provider = google-beta + location = "us-west2" + name = "tf-test-test-volume%{random_suffix}" + capacity_gib = "100" + share_name = "tf-test-test-volume%{random_suffix}" 
+ storage_pool = google_netapp_storage_pool.default.name + protocols = ["NFSV3"] + export_policy { + rules { + access_type = "READ_ONLY" + allowed_clients = "0.0.0.0/0" + has_root_access = "false" + kerberos5_read_only = false + kerberos5_read_write = false + kerberos5i_read_only = false + kerberos5i_read_write = false + kerberos5p_read_only = false + kerberos5p_read_write = false + nfsv3 = true + nfsv4 = false + squash_mode = "ROOT_SQUASH" + } + } + } + + data "google_compute_network" "default" { + provider = google-beta + name = "%{network_name}" + } + `, context) +} + +func testAccNetappVolume_volumeExportPolicyWithSquashMode_rootSquash_readNoneAccessType(context map[string]interface{}) string { + return acctest.Nprintf(` + resource "google_netapp_storage_pool" "default" { + provider = google-beta + name = "tf-test-pool%{random_suffix}" + location = "us-west2" + service_level = "PREMIUM" + capacity_gib = "2048" + network = data.google_compute_network.default.id + } + resource "time_sleep" "wait_3_minutes" { + depends_on = [google_netapp_storage_pool.default] + create_duration = "3m" + } + resource "google_netapp_volume" "test_volume" { + provider = google-beta + location = "us-west2" + name = "tf-test-test-volume%{random_suffix}" + capacity_gib = "100" + share_name = "tf-test-test-volume%{random_suffix}" + storage_pool = google_netapp_storage_pool.default.name + protocols = ["NFSV3"] + export_policy { + rules { + access_type = "READ_NONE" + allowed_clients = "0.0.0.0/0" + has_root_access = "false" + kerberos5_read_only = false + kerberos5_read_write = false + kerberos5i_read_only = false + kerberos5i_read_write = false + kerberos5p_read_only = false + kerberos5p_read_write = false + nfsv3 = true + nfsv4 = false + squash_mode = "ROOT_SQUASH" + } + } + } + + data "google_compute_network" "default" { + provider = google-beta + name = "%{network_name}" + } + `, context) +} {{ end }} From 0d89fcf3da2d00c01a6564228af5a0b94c0ebaf2 Mon Sep 17 00:00:00 2001 From: Ramon Vermeulen Date: Thu, 11 Sep 2025 23:54:59 +0200 Subject: [PATCH 180/201] dialogflow: support non-global endpoints for `google_dialogflow_conversation_profile` (#15037) --- .../dialogflow/ConversationProfile.yaml | 6 +- ...k_as_name_set_project_and_location.go.tmpl | 35 ++++++++ .../dialogflow_set_endpoint.go.tmpl | 12 ++- ...ce_dialogflow_conversation_profile_test.go | 90 +++++++++++++++++++ 4 files changed, 139 insertions(+), 4 deletions(-) create mode 100644 mmv1/templates/terraform/custom_import/self_link_as_name_set_project_and_location.go.tmpl diff --git a/mmv1/products/dialogflow/ConversationProfile.yaml b/mmv1/products/dialogflow/ConversationProfile.yaml index 2ac0d248715e..1c3ec6d5f7ad 100644 --- a/mmv1/products/dialogflow/ConversationProfile.yaml +++ b/mmv1/products/dialogflow/ConversationProfile.yaml @@ -32,8 +32,12 @@ timeouts: update_minutes: 40 delete_minutes: 20 custom_code: + pre_create: 'templates/terraform/pre_create/dialogflow_set_endpoint.go.tmpl' + pre_read: 'templates/terraform/pre_create/dialogflow_set_endpoint.go.tmpl' + pre_update: 'templates/terraform/pre_create/dialogflow_set_endpoint.go.tmpl' + pre_delete: 'templates/terraform/pre_create/dialogflow_set_endpoint.go.tmpl' post_create: 'templates/terraform/post_create/set_computed_name.tmpl' - custom_import: 'templates/terraform/custom_import/self_link_as_name_set_project.go.tmpl' + custom_import: 'templates/terraform/custom_import/self_link_as_name_set_project_and_location.go.tmpl' exclude_sweeper: true examples: - name: 
'dialogflow_conversation_profile_basic' diff --git a/mmv1/templates/terraform/custom_import/self_link_as_name_set_project_and_location.go.tmpl b/mmv1/templates/terraform/custom_import/self_link_as_name_set_project_and_location.go.tmpl new file mode 100644 index 000000000000..502a57d01270 --- /dev/null +++ b/mmv1/templates/terraform/custom_import/self_link_as_name_set_project_and_location.go.tmpl @@ -0,0 +1,35 @@ + + config := meta.(*transport_tpg.Config) + + // current import_formats can't import fields with forward slashes in their value + if err := tpgresource.ParseImportId([]string{"(?P.+)"}, d, config); err != nil { + return nil, err + } + + stringParts := strings.Split(d.Get("name").(string), "/") + if len(stringParts) < 2 { + return nil, fmt.Errorf( + "Could not split project from name: %s", + d.Get("name"), + ) + } + + if err := d.Set("project", stringParts[1]); err != nil { + return nil, fmt.Errorf("Error setting project: %s", err) + } + + var location string + for i, part := range stringParts { + if part == "locations" && i+1 < len(stringParts) { + location = stringParts[i+1] + break + } + } + if location == "" { + return nil, fmt.Errorf("Could not extract location from name: %s", d.Get("name")) + } + if err := d.Set("location", location); err != nil { + return nil, fmt.Errorf("Error setting location: %s", err) + } + + return []*schema.ResourceData{d}, nil \ No newline at end of file diff --git a/mmv1/templates/terraform/pre_create/dialogflow_set_endpoint.go.tmpl b/mmv1/templates/terraform/pre_create/dialogflow_set_endpoint.go.tmpl index 87dc02e5c6a1..b5d0c7bfc3de 100644 --- a/mmv1/templates/terraform/pre_create/dialogflow_set_endpoint.go.tmpl +++ b/mmv1/templates/terraform/pre_create/dialogflow_set_endpoint.go.tmpl @@ -1,6 +1,12 @@ location := d.Get("location").(string) +universeDomain := config.UniverseDomain -// insert location into url for a different endpoint. 
-if strings.HasPrefix(url, "https://dialogflow.googleapis.com/v2/") { - url = strings.Replace(url, "https://dialogflow", fmt.Sprintf("https://%s-dialogflow", location), 1) +if universeDomain != "" && universeDomain != "googleapis.com" { + url = strings.Replace(url, "googleapis.com", universeDomain, 1) +} + +if strings.HasPrefix(url, "https://dialogflow") { + if location != "" && location != "global" { + url = strings.Replace(url, "https://dialogflow", fmt.Sprintf("https://%s-dialogflow", location), 1) + } } diff --git a/mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_conversation_profile_test.go b/mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_conversation_profile_test.go index f337597e4c24..06bdb40ffd2c 100644 --- a/mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_conversation_profile_test.go +++ b/mmv1/third_party/terraform/services/dialogflow/resource_dialogflow_conversation_profile_test.go @@ -63,6 +63,35 @@ func TestAccDialogflowConversationProfile_update(t *testing.T) { }) } +func TestAccDialogflowConversationProfile_regional(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "org_id": envvar.GetTestOrgFromEnv(t), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccDialogflowConversationProfile_dialogflowRegional(context), + }, + { + ResourceName: "google_dialogflow_conversation_profile.profile", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "logging_config", "logging_config.0", "logging_config.0.enable_stackdriver_logging"}, + }, + }, + }) +} + func testAccDialogflowConversationProfile_dialogflowAgentFull1(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_project" "agent_project" { @@ -246,6 +275,7 @@ func testAccDialogflowConversationProfile_dialogflowAgentFull1(context map[strin } `, context) } + func testAccDialogflowConversationProfile_dialogflowAgentFull2(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_project" "agent_project" { @@ -409,3 +439,63 @@ func testAccDialogflowConversationProfile_dialogflowAgentFull2(context map[strin } `, context) } + +func testAccDialogflowConversationProfile_dialogflowRegional(context map[string]interface{}) string { + return acctest.Nprintf(` + resource "google_project" "agent_project" { + name = "tf-test-dialogflow-%{random_suffix}" + project_id = "tf-test-dialogflow-%{random_suffix}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" + deletion_policy = "DELETE" + } + resource "google_project_service" "agent_project" { + project = "${google_project.agent_project.id}" + service = "dialogflow.googleapis.com" + disable_dependent_services = false + } + + resource "google_service_account" "dialogflow_service_account" { + account_id = "tf-test-dialogflow-%{random_suffix}" + } + + resource "google_project_iam_member" "agent_create" { + project = "${google_project.agent_project.id}" + role = "roles/dialogflow.admin" + member = "serviceAccount:${google_service_account.dialogflow_service_account.email}" + } + + resource "google_dialogflow_agent" "agent" { + display_name = "tf-test-agent-%{random_suffix}" + 
default_language_code = "en-us" + time_zone = "America/New_York" + project = google_project.agent_project.name + } + + resource "google_pubsub_topic" "topic_diff" { + name = "tf-test-topic-%{random_suffix}-diff" + project = google_project.agent_project.project_id + depends_on = [google_project.agent_project, time_sleep.wait_120_seconds] + message_retention_duration = "8000s" + } + resource "google_dialogflow_cx_security_settings" "security_setting_diff" { + display_name = "tf-test-setting-%{random_suffix}-diff" + location = "us-central1" + purge_data_types = [] + retention_window_days = 7 + project = google_project.agent_project.project_id + depends_on = [time_sleep.wait_120_seconds] + } + resource "time_sleep" "wait_120_seconds" { + create_duration = "120s" + depends_on = [google_dialogflow_agent.agent] + } + resource "google_dialogflow_conversation_profile" "profile" { + depends_on = [google_dialogflow_agent.agent, google_dialogflow_cx_security_settings.security_setting_diff, time_sleep.wait_120_seconds] + project = "${google_project.agent_project.name}" + display_name = "tf-test-conversation-profile-%{random_suffix}-new" + location = "us-central1" + language_code = "en-US" + } +`, context) +} From 84686f1e3d606c455b19f0fff2d455e0fe940eaa Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Thu, 11 Sep 2025 15:45:56 -0700 Subject: [PATCH 181/201] Fixed intelligenceconfig resource variant patterns (#15119) --- mmv1/products/storagecontrol/FolderIntelligenceConfig.yaml | 2 ++ .../products/storagecontrol/OrganizationIntelligenceConfig.yaml | 2 ++ mmv1/products/storagecontrol/ProjectIntelligenceConfig.yaml | 2 ++ 3 files changed, 6 insertions(+) diff --git a/mmv1/products/storagecontrol/FolderIntelligenceConfig.yaml b/mmv1/products/storagecontrol/FolderIntelligenceConfig.yaml index ee9cb9e043a3..f168ce0abcc3 100644 --- a/mmv1/products/storagecontrol/FolderIntelligenceConfig.yaml +++ b/mmv1/products/storagecontrol/FolderIntelligenceConfig.yaml @@ -15,6 +15,8 @@ # API resource name name: 'FolderIntelligenceConfig' api_resource_type_kind: 'IntelligenceConfig' +api_variant_patterns: + - "folders/{folder}/locations/{location}/intelligenceConfig" kind: 'storagecontrol#intelligenceconfig' # Resource description for the provider documentation. description: | diff --git a/mmv1/products/storagecontrol/OrganizationIntelligenceConfig.yaml b/mmv1/products/storagecontrol/OrganizationIntelligenceConfig.yaml index 547f726d0a54..d46dd7bae856 100644 --- a/mmv1/products/storagecontrol/OrganizationIntelligenceConfig.yaml +++ b/mmv1/products/storagecontrol/OrganizationIntelligenceConfig.yaml @@ -15,6 +15,8 @@ # API resource name name: 'OrganizationIntelligenceConfig' api_resource_type_kind: 'IntelligenceConfig' +api_variant_patterns: + - "organizations/{org}/locations/{location}/intelligenceConfig" kind: 'storagecontrol#intelligenceconfig' # Resource description for the provider documentation. 
description: | diff --git a/mmv1/products/storagecontrol/ProjectIntelligenceConfig.yaml b/mmv1/products/storagecontrol/ProjectIntelligenceConfig.yaml index 60962fef6c4d..1340629398c5 100644 --- a/mmv1/products/storagecontrol/ProjectIntelligenceConfig.yaml +++ b/mmv1/products/storagecontrol/ProjectIntelligenceConfig.yaml @@ -15,6 +15,8 @@ # API resource name name: 'ProjectIntelligenceConfig' api_resource_type_kind: 'IntelligenceConfig' +api_variant_patterns: + - "projects/{project}/locations/{location}/intelligenceConfig" kind: 'storagecontrol#intelligenceconfig' # Resource description for the provider documentation. description: | From 634129d661508334a07a089c653708e76cea0c65 Mon Sep 17 00:00:00 2001 From: martin-guillen Date: Thu, 11 Sep 2025 18:49:12 -0400 Subject: [PATCH 182/201] List and cache InstanceGroupManagers instead of getting them separately to improve node pool reconciliation time (#15086) --- .../resource_container_node_pool.go.tmpl | 110 ++++++++++++++++-- 1 file changed, 100 insertions(+), 10 deletions(-) diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl b/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl index 1356a211a42e..9ed3eed605f9 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool.go.tmpl @@ -1,6 +1,9 @@ package container import ( +{{- if ne $.TargetVersionName `ga` }} + "context" +{{- end }} "fmt" "log" "regexp" @@ -22,6 +25,7 @@ import ( {{ if eq $.TargetVersionName `ga` }} "google.golang.org/api/container/v1" {{- else }} + compute "google.golang.org/api/compute/v0.beta" container "google.golang.org/api/container/v1beta1" {{- end }} ) @@ -51,13 +55,13 @@ func (nodePoolCache *nodePoolCache) get(nodePool string) (*container.NodePool, e } func (nodePoolCache *nodePoolCache) refreshIfNeeded(d *schema.ResourceData, config *transport_tpg.Config, userAgent string, nodePoolInfo *NodePoolInformation, name string) error { + nodePoolCache.mutex.Lock() + defer nodePoolCache.mutex.Unlock() + if !nodePoolCache.needsRefresh(nodePoolInfo.fullyQualifiedName(name)) { return nil } - nodePoolCache.mutex.Lock() - defer nodePoolCache.mutex.Unlock() - parent := fmt.Sprintf("projects/%s/locations/%s/clusters/%s", nodePoolInfo.project, nodePoolInfo.location, nodePoolInfo.cluster) clusterNodePoolsListCall := config.NewContainerClient(userAgent).Projects.Locations.Clusters.NodePools.List(parent) if config.UserProjectOverride { @@ -79,8 +83,6 @@ func (nodePoolCache *nodePoolCache) refreshIfNeeded(d *schema.ResourceData, conf } func (nodePoolCache *nodePoolCache) needsRefresh(nodePool string) bool { - nodePoolCache.mutex.RLock() - defer nodePoolCache.mutex.RUnlock() np, ok := nodePoolCache.nodePools[nodePool] if !ok { return true @@ -94,10 +96,79 @@ func (nodePoolCache *nodePoolCache) remove(nodePool string) { delete(nodePoolCache.nodePools, nodePool) } -var npCache = &nodePoolCache{ - nodePools: make(map[string]*nodePoolWithUpdateTime), - ttl: 30 * time.Second, +type instanceGroupManagerWithUpdateTime struct { + instanceGroupManager *compute.InstanceGroupManager + updateTime time.Time +} + +type instanceGroupManagerCache struct { + instanceGroupManagers map[string]*instanceGroupManagerWithUpdateTime + ttl time.Duration + mutex sync.RWMutex } + +func (instanceGroupManagerCache *instanceGroupManagerCache) get(fullyQualifiedName string) (*compute.InstanceGroupManager, bool) { + 
instanceGroupManagerCache.mutex.RLock() + defer instanceGroupManagerCache.mutex.RUnlock() + igm, ok := instanceGroupManagerCache.instanceGroupManagers[fullyQualifiedName] + if !ok { + return nil, false + } + return igm.instanceGroupManager, true +} + +func (instanceGroupManagerCache *instanceGroupManagerCache) refreshIfNeeded(d *schema.ResourceData, config *transport_tpg.Config, userAgent string, npName string, igmUrl string) error { + instanceGroupManagerCache.mutex.Lock() + defer instanceGroupManagerCache.mutex.Unlock() + + matches := instanceGroupManagerURL.FindStringSubmatch(igmUrl) + if len(matches) < 4 { + return fmt.Errorf("Error reading instance group manager URL %q", igmUrl) + } + + if !instanceGroupManagerCache.needsRefresh(matches[0]) { + return nil + } + + updateTime := time.Now() + err := config.NewComputeClient(userAgent).InstanceGroupManagers.List(matches[1], matches[2]).Pages(context.Background(), instanceGroupManagerCache.processList(updateTime)) + if err != nil { + return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("InstanceGroupManagers for node pool %q", npName)) + } + return nil +} + +func (instanceGroupManagerCache *instanceGroupManagerCache) processList(updateTime time.Time) func(*compute.InstanceGroupManagerList) error { + return func(igmList *compute.InstanceGroupManagerList) error { + for _, instanceGroupManager := range igmList.Items { + fullyQualifiedName := instanceGroupManagerURL.FindString(instanceGroupManager.SelfLink) + instanceGroupManagerCache.instanceGroupManagers[fullyQualifiedName] = &instanceGroupManagerWithUpdateTime{ + instanceGroupManager: instanceGroupManager, + updateTime: updateTime, + } + } + return nil + } +} + +func (instanceGroupManagerCache *instanceGroupManagerCache) needsRefresh(fullyQualifiedName string) bool { + igm, ok := instanceGroupManagerCache.instanceGroupManagers[fullyQualifiedName] + if !ok { + return true + } + return time.Since(igm.updateTime) > instanceGroupManagerCache.ttl +} + +var ( + npCache = &nodePoolCache{ + nodePools: make(map[string]*nodePoolWithUpdateTime), + ttl: 30 * time.Second, + } + igmCache = &instanceGroupManagerCache{ + instanceGroupManagers: make(map[string]*instanceGroupManagerWithUpdateTime), + ttl: 30 * time.Second, + } +) {{- end }} func ResourceContainerNodePool() *schema.Resource { @@ -794,7 +865,9 @@ func resourceContainerNodePoolRead(d *schema.ResourceData, meta interface{}) err } {{- else }} - npCache.refreshIfNeeded(d, config, userAgent, nodePoolInfo, name) + if err := npCache.refreshIfNeeded(d, config, userAgent, nodePoolInfo, name); err != nil { + return err + } nodePool, err := npCache.get(nodePoolInfo.fullyQualifiedName(name)) if err != nil { log.Printf("[WARN] Removing %s because it's gone", fmt.Sprintf("NodePool %q from cluster %q", name, nodePoolInfo.cluster)) @@ -976,7 +1049,9 @@ func resourceContainerNodePoolExists(d *schema.ResourceData, meta interface{}) ( return true, err } {{- else }} - npCache.refreshIfNeeded(d, config, userAgent, nodePoolInfo, name) + if err := npCache.refreshIfNeeded(d, config, userAgent, nodePoolInfo, name); err != nil { + return false, err + } _, err = npCache.get(nodePoolInfo.fullyQualifiedName(name)) if err != nil { log.Printf("[WARN] Removing %s because it's gone", fmt.Sprintf("NodePool %q from cluster %q", name, nodePoolInfo.cluster)) @@ -1239,6 +1314,7 @@ func flattenNodePool(d *schema.ResourceData, config *transport_tpg.Config, np *c if len(matches) < 4 { return nil, fmt.Errorf("Error reading instance group manage URL '%q'", url) } +{{- if eq 
$.TargetVersionName `ga` }} igm, err := config.NewComputeClient(userAgent).InstanceGroupManagers.Get(matches[1], matches[2], matches[3]).Do() if transport_tpg.IsGoogleApiErrorWithCode(err, 404) { // The IGM URL in is stale; don't include it @@ -1247,6 +1323,20 @@ func flattenNodePool(d *schema.ResourceData, config *transport_tpg.Config, np *c if err != nil { return nil, fmt.Errorf("Error reading instance group manager returned as an instance group URL: %q", err) } +{{- else }} + if strings.HasPrefix("gk3", matches[3]) { + // IGM is autopilot so we know it will not be found, skip it + continue + } + if err := igmCache.refreshIfNeeded(d, config, userAgent, np.Name, url); err != nil { + return nil, err + } + igm, ok := igmCache.get(matches[0]) + if !ok { + // The IGM URL is stale; don't include it + continue + } +{{- end }} size += int(igm.TargetSize) igmUrls = append(igmUrls, url) managedIgmUrls = append(managedIgmUrls, igm.InstanceGroup) From 1a52c1d06bcef50a5e2f578aa14d2432dbe9d163 Mon Sep 17 00:00:00 2001 From: Riley Karson Date: Thu, 11 Sep 2025 16:05:59 -0700 Subject: [PATCH 183/201] Add nil check to exprOptions.RecaptchaOptions in `google_compute_security_policy` (#15125) --- .../compute/resource_compute_security_policy.go.tmpl | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_security_policy.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_security_policy.go.tmpl index 6614d068d3a5..7d46e1c493d6 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_security_policy.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_security_policy.go.tmpl @@ -1372,7 +1372,14 @@ func flattenMatchExprOptions(exprOptions *compute.SecurityPolicyRuleMatcherExprO return nil } - // We check if the API is returning a empty non-null value then we find the current value for this field in the rule config and check if its empty + // The API can return an explicit entry `exprOptions` object, causing evaluation of the `recaptcha_options` settings to fail as it's nil: https://github.com/hashicorp/terraform-provider-google/issues/24334 + // Explicitly check it's available and exit early if not. + if exprOptions.RecaptchaOptions == nil { + return nil + } + + // The API can return an explicit empty rule causing an issue: https://github.com/hashicorp/terraform-provider-google/issues/16882#issuecomment-2474528447 + // We check if the API is returning a empty non-null value then we find the current value for this field in the rule config and check if its empty. 
if (tpgresource.IsEmptyValue(reflect.ValueOf(exprOptions.RecaptchaOptions.ActionTokenSiteKeys)) && tpgresource.IsEmptyValue(reflect.ValueOf(exprOptions.RecaptchaOptions.SessionTokenSiteKeys))) && verifyRulePriorityCompareEmptyValues(d, rulePriority, "recaptcha_options") { @@ -1380,7 +1387,9 @@ func flattenMatchExprOptions(exprOptions *compute.SecurityPolicyRuleMatcherExprO } data := map[string]interface{}{ + // NOTE: when adding new entries, the recaptcha_options rule above will need to be revised "recaptcha_options": flattenMatchExprOptionsRecaptchaOptions(exprOptions.RecaptchaOptions), + // NOTE: when adding new entries, the recaptcha_options rule above will need to be revised } return []map[string]interface{}{data} From 0464191d3588ddfccf1fed8cfd1fd036b850fc5d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B6rn?= <81525627+bestefreund@users.noreply.github.com> Date: Fri, 12 Sep 2025 18:05:56 +0200 Subject: [PATCH 184/201] Add singular data source for retrieving a Maven artifact from an Artifact Registry repository (#14782) --- .../provider/provider_mmv1_resources.go.tmpl | 1 + ...source_artifact_registry_maven_artifact.go | 282 ++++++++++++++++++ ...e_artifact_registry_maven_artifact_test.go | 67 +++++ ...fact_registry_maven_artifact.html.markdown | 68 +++++ 4 files changed, 418 insertions(+) create mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_maven_artifact.go create mode 100644 mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_maven_artifact_test.go create mode 100644 mmv1/third_party/terraform/website/docs/d/artifact_registry_maven_artifact.html.markdown diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index 5bde95fa7035..ea291b00f8de 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -29,6 +29,7 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_artifact_registry_docker_image": artifactregistry.DataSourceArtifactRegistryDockerImage(), "google_artifact_registry_docker_images": artifactregistry.DataSourceArtifactRegistryDockerImages(), "google_artifact_registry_locations": artifactregistry.DataSourceGoogleArtifactRegistryLocations(), + "google_artifact_registry_maven_artifact": artifactregistry.DataSourceArtifactRegistryMavenArtifact(), "google_artifact_registry_npm_package": artifactregistry.DataSourceArtifactRegistryNpmPackage(), "google_artifact_registry_package": artifactregistry.DataSourceArtifactRegistryPackage(), "google_artifact_registry_python_package": artifactregistry.DataSourceArtifactRegistryPythonPackage(), diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_maven_artifact.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_maven_artifact.go new file mode 100644 index 000000000000..e655a614b6ae --- /dev/null +++ b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_maven_artifact.go @@ -0,0 +1,282 @@ +package artifactregistry + +import ( + "fmt" + "net/url" + "sort" + "strings" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +type MavenArtifact struct { + name string + pomUri 
string + version string + createTime string + updateTime string +} + +func DataSourceArtifactRegistryMavenArtifact() *schema.Resource { + return &schema.Resource{ + Read: DataSourceArtifactRegistryMavenArtifactRead, + + Schema: map[string]*schema.Schema{ + "project": { + Type: schema.TypeString, + Optional: true, + }, + "location": { + Type: schema.TypeString, + Required: true, + }, + "repository_id": { + Type: schema.TypeString, + Required: true, + }, + "group_id": { + Type: schema.TypeString, + Required: true, + }, + "artifact_id": { + Type: schema.TypeString, + Required: true, + }, + "name": { + Type: schema.TypeString, + Computed: true, + }, + "pom_uri": { + Type: schema.TypeString, + Computed: true, + }, + "version": { + Type: schema.TypeString, + Computed: true, + }, + "create_time": { + Type: schema.TypeString, + Computed: true, + }, + "update_time": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} + +func DataSourceArtifactRegistryMavenArtifactRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + + var res MavenArtifact + + artifactId, version := parseMavenArtifact(d.Get("artifact_id").(string)) + + groupId := d.Get("group_id").(string) + + packageName := fmt.Sprintf("%s:%s", groupId, artifactId) + + if version != "" { + // fetch package by version + // https://cloud.google.com/artifact-registry/docs/reference/rest/v1/projects.locations.repositories.mavenArtifacts/get + packageUrlSafe := url.QueryEscape(packageName) + urlRequest, err := tpgresource.ReplaceVars(d, config, fmt.Sprintf("{{ArtifactRegistryBasePath}}projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/mavenArtifacts/%s:%s", packageUrlSafe, version)) + if err != nil { + return fmt.Errorf("Error setting api endpoint") + } + + resGet, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + RawURL: urlRequest, + UserAgent: userAgent, + }) + if err != nil { + return err + } + + res = convertMavenArtifactResponseToStruct(resGet) + } else { + // fetch the list of packages, ordered by update time + // https://cloud.google.com/artifact-registry/docs/reference/rest/v1/projects.locations.repositories.mavenArtifacts/list + urlRequest, err := tpgresource.ReplaceVars(d, config, "{{ArtifactRegistryBasePath}}projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/mavenArtifacts") + if err != nil { + return fmt.Errorf("Error setting api endpoint") + } + + // to reduce the number of pages we need to fetch, we set the pageSize to 1000(max) + urlRequest, err = transport_tpg.AddQueryParams(urlRequest, map[string]string{"pageSize": "1000"}) + if err != nil { + return err + } + + res, err = retrieveAndFilterMavenArtifacts(d, config, urlRequest, userAgent, groupId, artifactId, version) + if err != nil { + return err + } + } + + // Set Terraform schema fields + if err := d.Set("project", project); err != nil { + return err + } + if err := d.Set("name", res.name); err != nil { + return err + } + if err := d.Set("pom_uri", res.pomUri); err != nil { + return err + } + if err := d.Set("version", res.version); err != nil { + return err + } + if err := d.Set("create_time", res.createTime); err != nil { + return err + } + if err := d.Set("update_time", res.updateTime); err != nil { + return err + } + + 
d.SetId(res.name) + + return nil +} + +func parseMavenArtifact(pkg string) (artifactId string, version string) { + splitByColon := strings.Split(pkg, ":") + + if len(splitByColon) == 2 { + artifactId = splitByColon[0] + version = splitByColon[1] + } else { + artifactId = pkg + } + + return artifactId, version +} + +func retrieveAndFilterMavenArtifacts(d *schema.ResourceData, config *transport_tpg.Config, urlRequest string, userAgent string, groupId string, artifactId string, version string) (MavenArtifact, error) { + // Paging through the list method until either: + // if a version was provided, the matching package name and version pair + // otherwise, return the first matching package name + + var allPackages []MavenArtifact + + for { + resListMavenArtifacts, token, err := retrieveListOfMavenArtifacts(config, urlRequest, userAgent) + if err != nil { + return MavenArtifact{}, err + } + + for _, pkg := range resListMavenArtifacts { + if strings.Contains(pkg.name, "/"+url.QueryEscape(groupId)+":"+url.QueryEscape(artifactId)+":") { + allPackages = append(allPackages, pkg) + } + } + + if token == "" { + break + } + + urlRequest, err = transport_tpg.AddQueryParams(urlRequest, map[string]string{"pageToken": token}) + if err != nil { + return MavenArtifact{}, err + } + } + + if len(allPackages) == 0 { + return MavenArtifact{}, fmt.Errorf("Requested Maven package was not found.") + } + + // Client-side sort by updateTime descending (latest first) + sort.Slice(allPackages, func(i, j int) bool { + // Parse RFC3339 timestamps, fallback to string compare if parse fails + ti, err1 := time.Parse(time.RFC3339, allPackages[i].updateTime) + tj, err2 := time.Parse(time.RFC3339, allPackages[j].updateTime) + if err1 == nil && err2 == nil { + return ti.After(tj) + } + return allPackages[i].updateTime > allPackages[j].updateTime + }) + + if version != "" { + for _, pkg := range allPackages { + if pkg.version == version { + return pkg, nil + } + } + return MavenArtifact{}, fmt.Errorf("Requested version was not found.") + } + + // Return the latest package if no version specified + return allPackages[0], nil +} + +func retrieveListOfMavenArtifacts(config *transport_tpg.Config, urlRequest string, userAgent string) ([]MavenArtifact, string, error) { + resList, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + RawURL: urlRequest, + UserAgent: userAgent, + }) + if err != nil { + return make([]MavenArtifact, 0), "", err + } + + if nextPageToken, ok := resList["nextPageToken"].(string); ok { + return flattenMavenArtifactDataSourceListResponse(resList), nextPageToken, nil + } else { + return flattenMavenArtifactDataSourceListResponse(resList), "", nil + } +} + +func flattenMavenArtifactDataSourceListResponse(res map[string]interface{}) []MavenArtifact { + var mavenArtifacts []MavenArtifact + + resMavenArtifacts, _ := res["mavenArtifacts"].([]interface{}) + + for _, resPackage := range resMavenArtifacts { + pkg, _ := resPackage.(map[string]interface{}) + mavenArtifacts = append(mavenArtifacts, convertMavenArtifactResponseToStruct(pkg)) + } + + return mavenArtifacts +} + +func convertMavenArtifactResponseToStruct(res map[string]interface{}) MavenArtifact { + var mavenArtifact MavenArtifact + + if name, ok := res["name"].(string); ok { + mavenArtifact.name = name + } + + if pomUri, ok := res["pomUri"].(string); ok { + mavenArtifact.pomUri = pomUri + } + + if version, ok := res["version"].(string); ok { + mavenArtifact.version = version + } + + if createTime, ok := 
res["createTime"].(string); ok { + mavenArtifact.createTime = createTime + } + + if updateTime, ok := res["updateTime"].(string); ok { + mavenArtifact.updateTime = updateTime + } + + return mavenArtifact +} diff --git a/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_maven_artifact_test.go b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_maven_artifact_test.go new file mode 100644 index 000000000000..cf76fb7e82d2 --- /dev/null +++ b/mmv1/third_party/terraform/services/artifactregistry/data_source_artifact_registry_maven_artifact_test.go @@ -0,0 +1,67 @@ +package artifactregistry_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccDataSourceArtifactRegistryMavenArtifact_basic(t *testing.T) { + acctest.SkipIfVcr(t) + t.Parallel() + + // At the moment there are no public Maven artifacts available in Artifact Registry. + // This test is skipped to avoid unnecessary failures. + // As soon as there are public artifacts available, this test can be enabled by removing the skip and adjusting the configuration accordingly. + t.Skip("No public Maven artifacts available in Artifact Registry") + + resourceName := "data.google_artifact_registry_maven_artifact.test" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceArtifactRegistryMavenArtifactConfig, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet(resourceName, "project"), + resource.TestCheckResourceAttrSet(resourceName, "location"), + resource.TestCheckResourceAttrSet(resourceName, "repository_id"), + resource.TestCheckResourceAttrSet(resourceName, "artifact_id"), + resource.TestCheckResourceAttrSet(resourceName, "name"), + validateMavenArtifactTimestamps(resourceName), + ), + }, + }, + }) +} + +const testAccDataSourceArtifactRegistryMavenArtifactConfig = ` +data "google_artifact_registry_maven_artifact" "test" { + project = "example-project" + location = "us" + repository_id = "example-repo" + group_id = "com.example" + artifact_id = "example-artifact" +} +` + +func validateMavenArtifactTimestamps(dataSourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + res, ok := s.RootModule().Resources[dataSourceName] + if !ok { + return fmt.Errorf("can't find %s in state", dataSourceName) + } + + for _, attr := range []string{"create_time", "update_time"} { + if ts, ok := res.Primary.Attributes[attr]; !ok || !isRFC3339(ts) { + return fmt.Errorf("%s is not RFC3339: %s", attr, ts) + } + } + + return nil + } +} diff --git a/mmv1/third_party/terraform/website/docs/d/artifact_registry_maven_artifact.html.markdown b/mmv1/third_party/terraform/website/docs/d/artifact_registry_maven_artifact.html.markdown new file mode 100644 index 000000000000..1c3dd2ae2449 --- /dev/null +++ b/mmv1/third_party/terraform/website/docs/d/artifact_registry_maven_artifact.html.markdown @@ -0,0 +1,68 @@ +--- +subcategory: "Artifact Registry" +description: |- + Get information about a Maven artifact within a Google Artifact Registry Repository. 
+---
+
+# google_artifact_registry_maven_artifact
+
+This data source fetches information about a Maven artifact from a provided Artifact Registry repository. By default the latest version of the artifact is returned; a specific version can optionally be requested.
+
+## Example Usage
+
+```hcl
+resource "google_artifact_registry_repository" "maven_repo" {
+  location      = "us-central1"
+  repository_id = "my-maven-repo"
+  format        = "MAVEN"
+}
+
+data "google_artifact_registry_maven_artifact" "latest" {
+  location      = google_artifact_registry_repository.maven_repo.location
+  repository_id = google_artifact_registry_repository.maven_repo.repository_id
+  group_id      = "com.example"
+  artifact_id   = "my-artifact"
+}
+
+data "google_artifact_registry_maven_artifact" "with_version" {
+  location      = google_artifact_registry_repository.maven_repo.location
+  repository_id = google_artifact_registry_repository.maven_repo.repository_id
+  group_id      = "com.example"
+  artifact_id   = "my-artifact:1.0.0"
+}
+```
+
+## Argument Reference
+
+The following arguments are supported:
+
+* `location` - (Required) The location of the Artifact Registry repository.
+
+* `repository_id` - (Required) The ID of the repository containing the Maven artifact.
+
+* `group_id` - (Required) Group ID for the artifact. Example: `com.google.guava`
+
+* `artifact_id` - (Required) The name of the artifact to fetch. Can optionally include a specific version (e.g., `my_artifact:1.2.3`). If no version is provided, the latest version is used.
+
+* `project` - (Optional) The ID of the project that owns the repository. If not provided, the provider-level project is used.
+
+## Attributes Reference
+
+The following computed attributes are exported:
+
+* `id` - The fully qualified name of the fetched artifact. Format:
+  ```
+  projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/mavenArtifacts/{{group_id}}:{{artifact_id}}:{{version}}
+  ```
+
+* `name` - The fully qualified name of the fetched artifact. Format:
+  ```
+  projects/{{project}}/locations/{{location}}/repositories/{{repository_id}}/mavenArtifacts/{{group_id}}:{{artifact_id}}:{{version}}
+  ```
+
+* `pom_uri` - URL to access the pom file of the artifact. Example: `us-west4-maven.pkg.dev/test-project/test-repo/com/google/guava/guava/31.0/guava-31.0.pom`
+
+* `version` - The version of the Maven artifact.
+
+* `create_time` - The time the artifact was created.
+
+* `update_time` - The time the artifact was last updated.
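+
+As a minimal usage sketch (assuming the `latest` data source from the example above; the output names are arbitrary), the exported `version` and `pom_uri` attributes can be surfaced as outputs:
+
+```hcl
+# Version that the data source resolved (the latest version, since none was requested).
+output "resolved_version" {
+  value = data.google_artifact_registry_maven_artifact.latest.version
+}
+
+# Location of the artifact's pom file.
+output "pom_uri" {
+  value = data.google_artifact_registry_maven_artifact.latest.pom_uri
+}
+```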
From 6506c38f7f686aa502cd36e373c8487b79825e63 Mon Sep 17 00:00:00 2001 From: Nick Elliot Date: Fri, 12 Sep 2025 09:42:03 -0700 Subject: [PATCH 185/201] add default provider value plan modifiers/small utils upgrades to fwprovider templates (#15011) --- mmv1/templates/terraform/resource_fw.go.tmpl | 32 ++- .../terraform/fwresource/field_helpers.go | 2 +- .../framework_common_plan_modifiers.go | 84 +++++++ .../framework_common_plan_modifiers_test.go | 232 ++++++++++++++++++ .../terraform/fwtransport/framework_utils.go | 12 +- mmv1/third_party/terraform/go.mod | 1 + mmv1/third_party/terraform/go.sum | 4 +- ..._apigee_keystores_aliases_key_cert_file.go | 4 +- .../fw_resource_pubsub_lite_reservation.go | 8 +- 9 files changed, 358 insertions(+), 21 deletions(-) create mode 100644 mmv1/third_party/terraform/fwresource/framework_common_plan_modifiers.go create mode 100644 mmv1/third_party/terraform/fwresource/framework_common_plan_modifiers_test.go diff --git a/mmv1/templates/terraform/resource_fw.go.tmpl b/mmv1/templates/terraform/resource_fw.go.tmpl index c73454a0e4b3..e76c10c326e4 100644 --- a/mmv1/templates/terraform/resource_fw.go.tmpl +++ b/mmv1/templates/terraform/resource_fw.go.tmpl @@ -74,8 +74,11 @@ import ( {{- end}} var ( - _ resource.Resource = &{{$.ResourceName}}FWResource{} - _ resource.ResourceWithConfigure = &{{$.ResourceName}}FWResource{} + _ resource.Resource = &{{$.ResourceName}}FWResource{} + _ resource.ResourceWithConfigure = &{{$.ResourceName}}FWResource{} +{{- if or (and (or $.HasProject $.HasRegion $.HasZone) (not $.ExcludeDefaultCdiff)) $.CustomDiff }} + _ resource.ResourceWithModifyPlan = &{{$.ResourceName}}FWResource{} +{{- end}} ) func New{{$.ResourceName}}FWResource() resource.Resource { @@ -128,6 +131,23 @@ func (r *{{$.ResourceName}}FWResource) Configure(ctx context.Context, req resour r.providerConfig = p } +{{- if or (and (or $.HasProject $.HasRegion $.HasZone) (not $.ExcludeDefaultCdiff)) $.CustomDiff }} +func (r *{{$.ResourceName}}FWResource) ModifyPlan(ctx context.Context, req resource.ModifyPlanRequest, resp *resource.ModifyPlanResponse) { +{{- if and ($.HasProject) (not $.ExcludeDefaultCdiff) }} + fwresource.DefaultProjectModify(ctx, req, resp, r.providerConfig.Project) +{{- end -}} +{{if and ($.HasRegion) (not $.ExcludeDefaultCdiff) }} + fwresource.DefaultRegionModify(ctx, req, resp, r.providerConfig.Region) +{{- end -}} +{{if and ($.HasZone) (not $.ExcludeDefaultCdiff) }} + fwresource.DefaultZoneModify(ctx, req, resp, r.providerConfig.Zone) +{{- end }} + if resp.Diagnostics.HasError() { + return + } +} +{{- end}} + func (d *{{$.ResourceName}}FWResource) Schema(ctx context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { resp.Schema = schema.Schema{ MarkdownDescription: "A resource to represent a SQL User object.", @@ -263,7 +283,7 @@ func (r *{{$.ResourceName}}FWResource) Create(ctx context.Context, req resource. {{- if $.CustomCode.PreCreate }} {{ $.CustomTemplate $.CustomCode.PreCreate false -}} {{- end}} - res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ + res, err := fwtransport.SendRequest(fwtransport.SendRequestOptions{ Config: r.providerConfig, Method: "{{ upper $.CreateVerb -}}", Project: billingProject.ValueString(), @@ -483,7 +503,7 @@ func (r *{{$.ResourceName}}FWResource) Update(ctx context.Context, req resource. 
{{- if $.CustomCode.PreUpdate }} {{ $.CustomTemplate $.CustomCode.PreUpdate false -}} {{- end}} - res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ + res, err := fwtransport.SendRequest(fwtransport.SendRequestOptions{ Config: r.providerConfig, Method: "{{ upper $.UpdateVerb -}}", Project: billingProject.ValueString(), @@ -622,7 +642,7 @@ func (r *{{$.ResourceName}}FWResource) Delete(ctx context.Context, req resource. {{- end }} log.Printf("[DEBUG] Deleting {{ $.Name }} %q", data.Id.ValueString()) - res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ + _, _ := fwtransport.SendRequest(fwtransport.SendRequestOptions{ Config: r.providerConfig, Method: "{{ upper $.DeleteVerb -}}", Project: billingProject.ValueString(), @@ -727,7 +747,7 @@ func (r *{{$.ResourceName}}FWResource) {{$.ResourceName}}FWRefresh(ctx context.C {{- if $.CustomCode.PreRead }} {{ $.CustomTemplate $.CustomCode.PreRead false -}} {{- end}} - res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ + res, err := fwtransport.SendRequest(fwtransport.SendRequestOptions{ Config: r.providerConfig, Method: "{{ upper $.ReadVerb -}}", Project: billingProject.ValueString(), diff --git a/mmv1/third_party/terraform/fwresource/field_helpers.go b/mmv1/third_party/terraform/fwresource/field_helpers.go index 40c170a85935..116dcf954cfe 100644 --- a/mmv1/third_party/terraform/fwresource/field_helpers.go +++ b/mmv1/third_party/terraform/fwresource/field_helpers.go @@ -37,7 +37,7 @@ func getProviderDefaultFromFrameworkSchema(schemaField string, rVal, pVal types. return pVal } - diags.AddError("required field is not set", fmt.Sprintf("%s is not set", schemaField)) + diags.AddError("required field is not set", fmt.Sprintf("%s must be set in at least one of Terraform resource configuration, Terraform provider configuration, or environment variables.", schemaField)) return types.String{} } diff --git a/mmv1/third_party/terraform/fwresource/framework_common_plan_modifiers.go b/mmv1/third_party/terraform/fwresource/framework_common_plan_modifiers.go new file mode 100644 index 000000000000..38f21619fb57 --- /dev/null +++ b/mmv1/third_party/terraform/fwresource/framework_common_plan_modifiers.go @@ -0,0 +1,84 @@ +package fwresource + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/types" +) + +func DefaultProjectModify(ctx context.Context, req resource.ModifyPlanRequest, resp *resource.ModifyPlanResponse, providerConfigProject string) { + var old types.String + diags := req.State.GetAttribute(ctx, path.Root("project"), &old) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + var new types.String + diags = req.Plan.GetAttribute(ctx, path.Root("project"), &new) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + if (old.IsUnknown() || old.IsNull()) && new.IsUnknown() { + project := GetProjectFramework(new, types.StringValue(providerConfigProject), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + resp.Plan.SetAttribute(ctx, path.Root("project"), project) + } + return +} + +func DefaultRegionModify(ctx context.Context, req resource.ModifyPlanRequest, resp *resource.ModifyPlanResponse, providerConfigRegion string) { + var old types.String + diags := req.State.GetAttribute(ctx, path.Root("region"), &old) + resp.Diagnostics.Append(diags...) 
+ if resp.Diagnostics.HasError() { + return + } + + var new types.String + diags = req.Plan.GetAttribute(ctx, path.Root("region"), &new) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + if (old.IsUnknown() || old.IsNull()) && new.IsUnknown() { + region := GetRegionFramework(new, types.StringValue(providerConfigRegion), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + resp.Plan.SetAttribute(ctx, path.Root("region"), region) + } + return +} + +func DefaultZoneModify(ctx context.Context, req resource.ModifyPlanRequest, resp *resource.ModifyPlanResponse, providerConfigZone string) { + var old types.String + diags := req.State.GetAttribute(ctx, path.Root("zone"), &old) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + var new types.String + diags = req.Plan.GetAttribute(ctx, path.Root("zone"), &new) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + if (old.IsUnknown() || old.IsNull()) && new.IsUnknown() { + zone := GetZoneFramework(new, types.StringValue(providerConfigZone), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + resp.Plan.SetAttribute(ctx, path.Root("zone"), zone) + } + return +} diff --git a/mmv1/third_party/terraform/fwresource/framework_common_plan_modifiers_test.go b/mmv1/third_party/terraform/fwresource/framework_common_plan_modifiers_test.go new file mode 100644 index 000000000000..6dfb1e8994bb --- /dev/null +++ b/mmv1/third_party/terraform/fwresource/framework_common_plan_modifiers_test.go @@ -0,0 +1,232 @@ +package fwresource + +import ( + "context" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-go/tftypes" + + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/tfsdk" + "github.com/hashicorp/terraform-plugin-framework/types" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func TestDefaultProjectModify(t *testing.T) { + testSchema := schema.Schema{ + Attributes: map[string]schema.Attribute{ + "project": schema.StringAttribute{ + Optional: true, + Computed: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.UseStateForUnknown(), + }, + }, + }, + } + + cases := map[string]struct { + resourceSchema schema.Schema + providerConfig *transport_tpg.Config + expectedAttribute types.String + expectError bool + errorContains string + req resource.ModifyPlanRequest + resp *resource.ModifyPlanResponse + }{ + "Prioritizes set config value": { + providerConfig: &transport_tpg.Config{ + Project: "default-provider-project", + }, + expectedAttribute: types.StringValue("plan-project"), + req: resource.ModifyPlanRequest{ + Config: tfsdk.Config{ + Raw: tftypes.NewValue(tftypes.Object{ + AttributeTypes: map[string]tftypes.Type{ + "project": tftypes.String, + }, + }, map[string]tftypes.Value{ + "project": tftypes.NewValue(tftypes.String, "plan-project"), + }), + Schema: testSchema, + }, + Plan: tfsdk.Plan{ + Raw: tftypes.NewValue(tftypes.Object{ + AttributeTypes: map[string]tftypes.Type{ + "project": tftypes.String, + }, + }, map[string]tftypes.Value{ + "project": tftypes.NewValue(tftypes.String, "plan-project"), + }), + 
Schema: testSchema, + }, + State: tfsdk.State{ + Raw: tftypes.NewValue(tftypes.Object{ + AttributeTypes: map[string]tftypes.Type{ + "project": tftypes.String, + }, + }, map[string]tftypes.Value{ + "project": tftypes.NewValue(tftypes.String, nil), + }), + Schema: testSchema, + }, + }, + + resp: &resource.ModifyPlanResponse{ + Plan: tfsdk.Plan{ + Raw: tftypes.NewValue(tftypes.Object{ + AttributeTypes: map[string]tftypes.Type{ + "project": tftypes.String, + }, + }, map[string]tftypes.Value{ + "project": tftypes.NewValue(tftypes.String, "plan-project"), + }), + Schema: testSchema, + }, + }, + }, + "Falls back on provider default": { + providerConfig: &transport_tpg.Config{ + Project: "default-provider-project", + }, + expectedAttribute: types.StringValue("default-provider-project"), + req: resource.ModifyPlanRequest{ + Config: tfsdk.Config{ + Raw: tftypes.NewValue(tftypes.Object{ + AttributeTypes: map[string]tftypes.Type{ + "project": tftypes.String, + }, + }, map[string]tftypes.Value{ + "project": tftypes.NewValue(tftypes.String, nil), + }), + Schema: testSchema, + }, + Plan: tfsdk.Plan{ + Raw: tftypes.NewValue(tftypes.Object{ + AttributeTypes: map[string]tftypes.Type{ + "project": tftypes.String, + }, + }, map[string]tftypes.Value{ + "project": tftypes.NewValue(tftypes.String, tftypes.UnknownValue), + }), + Schema: testSchema, + }, + State: tfsdk.State{ + Raw: tftypes.NewValue(tftypes.Object{ + AttributeTypes: map[string]tftypes.Type{ + "project": tftypes.String, + }, + }, map[string]tftypes.Value{ + "project": tftypes.NewValue(tftypes.String, nil), + }), + Schema: testSchema, + }, + }, + + resp: &resource.ModifyPlanResponse{ + Plan: tfsdk.Plan{ + Raw: tftypes.NewValue(tftypes.Object{ + AttributeTypes: map[string]tftypes.Type{ + "project": tftypes.String, + }, + }, map[string]tftypes.Value{ + "project": tftypes.NewValue(tftypes.String, tftypes.UnknownValue), + }), + Schema: testSchema, + }, + }, + }, + "Errors if there is no config value or provider default": { + providerConfig: &transport_tpg.Config{}, + req: resource.ModifyPlanRequest{ + Config: tfsdk.Config{ + Raw: tftypes.NewValue(tftypes.Object{ + AttributeTypes: map[string]tftypes.Type{ + "project": tftypes.String, + }, + }, map[string]tftypes.Value{ + "project": tftypes.NewValue(tftypes.String, nil), + }), + Schema: testSchema, + }, + Plan: tfsdk.Plan{ + Raw: tftypes.NewValue(tftypes.Object{ + AttributeTypes: map[string]tftypes.Type{ + "project": tftypes.String, + }, + }, map[string]tftypes.Value{ + "project": tftypes.NewValue(tftypes.String, tftypes.UnknownValue), + }), + Schema: testSchema, + }, + State: tfsdk.State{ + Raw: tftypes.NewValue(tftypes.Object{ + AttributeTypes: map[string]tftypes.Type{ + "project": tftypes.String, + }, + }, map[string]tftypes.Value{ + "project": tftypes.NewValue(tftypes.String, nil), + }), + Schema: testSchema, + }, + }, + + resp: &resource.ModifyPlanResponse{ + Plan: tfsdk.Plan{ + Raw: tftypes.NewValue(tftypes.Object{ + AttributeTypes: map[string]tftypes.Type{ + "project": tftypes.String, + }, + }, map[string]tftypes.Value{ + "project": tftypes.NewValue(tftypes.String, tftypes.UnknownValue), + }), + Schema: testSchema, + }, + }, + expectError: true, + errorContains: "must be set", + }, + } + + for name, tc := range cases { + t.Run(name, func(t *testing.T) { + ctx := context.Background() + DefaultProjectModify(ctx, tc.req, tc.resp, tc.providerConfig.Project) + if tc.resp.Diagnostics.HasError() { + if tc.expectError { + // Check if the error message contains the expected substring. 
+ if tc.errorContains != "" { + found := false + for _, d := range tc.resp.Diagnostics.Errors() { + if strings.Contains(d.Detail(), tc.errorContains) { + found = true + break + } + } + if !found { + t.Fatalf("expected error to contain %q, but it did not. Got: %v", tc.errorContains, tc.resp.Diagnostics.Errors()) + } + } + // Correctly handled an expected error. + return + } + t.Fatalf("unexpected error: %v", tc.resp.Diagnostics) + } + + if tc.expectError { + t.Fatal("expected an error, but got none") + } + + var finalAttribute types.String + tc.resp.Plan.GetAttribute(ctx, path.Root("project"), &finalAttribute) + if !finalAttribute.Equal(tc.expectedAttribute) { + t.Fatalf("incorrect attributes parsed.\n- got: %v\n- want: %v", finalAttribute, tc.expectedAttribute) + } + }) + } +} diff --git a/mmv1/third_party/terraform/fwtransport/framework_utils.go b/mmv1/third_party/terraform/fwtransport/framework_utils.go index 238670b1b581..7aad1ec9e24d 100644 --- a/mmv1/third_party/terraform/fwtransport/framework_utils.go +++ b/mmv1/third_party/terraform/fwtransport/framework_utils.go @@ -75,7 +75,7 @@ type SendRequestOptions struct { ErrorAbortPredicates []transport_tpg.RetryErrorPredicateFunc } -func SendRequest(opt SendRequestOptions, diags *diag.Diagnostics) map[string]interface{} { +func SendRequest(opt SendRequestOptions, diags *diag.Diagnostics) (map[string]interface{}, error) { reqHeaders := opt.Headers if reqHeaders == nil { reqHeaders = make(http.Header) @@ -138,12 +138,12 @@ func SendRequest(opt SendRequestOptions, diags *diag.Diagnostics) map[string]int }) if err != nil { diags.AddError("Error when sending HTTP request: ", err.Error()) - return nil + return nil, err } if res == nil { diags.AddError("Unable to parse server response. This is most likely a terraform problem, please file a bug at https://github.com/hashicorp/terraform-provider-google/issues.", "") - return nil + return nil, fmt.Errorf("Unable to parse server response. This is most likely a terraform problem, please file a bug at https://github.com/hashicorp/terraform-provider-google/issues.") } // The defer call must be made outside of the retryFunc otherwise it's closed too soon. @@ -152,15 +152,15 @@ func SendRequest(opt SendRequestOptions, diags *diag.Diagnostics) map[string]int // 204 responses will have no body, so we're going to error with "EOF" if we // try to parse it. Instead, we can just return nil. 
if res.StatusCode == 204 { - return nil + return nil, nil } result := make(map[string]interface{}) if err := json.NewDecoder(res.Body).Decode(&result); err != nil { diags.AddError("Error when sending HTTP request: ", err.Error()) - return nil + return nil, err } - return result + return result, nil } type DefaultVars struct { diff --git a/mmv1/third_party/terraform/go.mod b/mmv1/third_party/terraform/go.mod index ab0546334c5d..ea513976e8a8 100644 --- a/mmv1/third_party/terraform/go.mod +++ b/mmv1/third_party/terraform/go.mod @@ -21,6 +21,7 @@ require ( github.com/hashicorp/terraform-json v0.25.0 github.com/hashicorp/terraform-plugin-framework v1.15.0 github.com/hashicorp/terraform-plugin-framework-timeouts v0.5.0 + github.com/hashicorp/terraform-plugin-framework-jsontypes v0.2.0 github.com/hashicorp/terraform-plugin-framework-validators v0.9.0 github.com/hashicorp/terraform-plugin-go v0.28.0 github.com/hashicorp/terraform-plugin-log v0.9.0 diff --git a/mmv1/third_party/terraform/go.sum b/mmv1/third_party/terraform/go.sum index b4176222aeaf..dc5b5b7cc593 100644 --- a/mmv1/third_party/terraform/go.sum +++ b/mmv1/third_party/terraform/go.sum @@ -170,10 +170,10 @@ github.com/hashicorp/terraform-json v0.24.0 h1:rUiyF+x1kYawXeRth6fKFm/MdfBS6+lW4 github.com/hashicorp/terraform-json v0.24.0/go.mod h1:Nfj5ubo9xbu9uiAoZVBsNOjvNKB66Oyrvtit74kC7ow= github.com/hashicorp/terraform-json v0.25.0 h1:rmNqc/CIfcWawGiwXmRuiXJKEiJu1ntGoxseG1hLhoQ= github.com/hashicorp/terraform-json v0.25.0/go.mod h1:sMKS8fiRDX4rVlR6EJUMudg1WcanxCMoWwTLkgZP/vc= -github.com/hashicorp/terraform-plugin-framework v1.13.0 h1:8OTG4+oZUfKgnfTdPTJwZ532Bh2BobF4H+yBiYJ/scw= -github.com/hashicorp/terraform-plugin-framework v1.13.0/go.mod h1:j64rwMGpgM3NYXTKuxrCnyubQb/4VKldEKlcG8cvmjU= github.com/hashicorp/terraform-plugin-framework v1.15.0 h1:LQ2rsOfmDLxcn5EeIwdXFtr03FVsNktbbBci8cOKdb4= github.com/hashicorp/terraform-plugin-framework v1.15.0/go.mod h1:hxrNI/GY32KPISpWqlCoTLM9JZsGH3CyYlir09bD/fI= +github.com/hashicorp/terraform-plugin-framework-jsontypes v0.2.0 h1:SJXL5FfJJm17554Kpt9jFXngdM6fXbnUnZ6iT2IeiYA= +github.com/hashicorp/terraform-plugin-framework-jsontypes v0.2.0/go.mod h1:p0phD0IYhsu9bR4+6OetVvvH59I6LwjXGnTVEr8ox6E= github.com/hashicorp/terraform-plugin-framework-timeouts v0.5.0 h1:I/N0g/eLZ1ZkLZXUQ0oRSXa8YG/EF0CEuQP1wXdrzKw= github.com/hashicorp/terraform-plugin-framework-timeouts v0.5.0/go.mod h1:t339KhmxnaF4SzdpxmqW8HnQBHVGYazwtfxU0qCs4eE= github.com/hashicorp/terraform-plugin-framework-validators v0.9.0 h1:LYz4bXh3t7bTEydXOmPDPupRRnA480B/9+jV8yZvxBA= diff --git a/mmv1/third_party/terraform/services/apigee/fw_resource_apigee_keystores_aliases_key_cert_file.go b/mmv1/third_party/terraform/services/apigee/fw_resource_apigee_keystores_aliases_key_cert_file.go index 787f84425f65..661d6d7b331e 100644 --- a/mmv1/third_party/terraform/services/apigee/fw_resource_apigee_keystores_aliases_key_cert_file.go +++ b/mmv1/third_party/terraform/services/apigee/fw_resource_apigee_keystores_aliases_key_cert_file.go @@ -342,7 +342,7 @@ func (r *ApigeeKeystoresAliasesKeyCertFileResource) Delete(ctx context.Context, tflog.Trace(ctx, "Deleting Apigee Keystore Alias", map[string]interface{}{"url": url}) - _ = fwtransport.SendRequest(fwtransport.SendRequestOptions{ + _, _ = fwtransport.SendRequest(fwtransport.SendRequestOptions{ Config: r.providerConfig, Method: "DELETE", Project: data.OrgId.ValueString(), @@ -373,7 +373,7 @@ func (r *ApigeeKeystoresAliasesKeyCertFileResource) refresh(ctx context.Context, tflog.Trace(ctx, "Refreshing Apigee 
Keystore Alias", map[string]interface{}{"url": url}) - res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ + res, _ := fwtransport.SendRequest(fwtransport.SendRequestOptions{ Config: r.providerConfig, Method: "GET", Project: data.OrgId.ValueString(), diff --git a/mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation.go b/mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation.go index 685428eca2de..0502c2b42a60 100644 --- a/mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation.go +++ b/mmv1/third_party/terraform/services/pubsublite/fw_resource_pubsub_lite_reservation.go @@ -147,7 +147,7 @@ func (d *GooglePubsubLiteReservationFWResource) Create(ctx context.Context, req tflog.Trace(ctx, fmt.Sprintf("[DEBUG] Creating new Reservation: %#v", obj)) headers := make(http.Header) - res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ + res, _ := fwtransport.SendRequest(fwtransport.SendRequestOptions{ Config: d.providerConfig, Method: "POST", Project: billingProject.ValueString(), @@ -212,7 +212,7 @@ func (d *GooglePubsubLiteReservationFWResource) Read(ctx context.Context, req re } headers := make(http.Header) - res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ + res, _ := fwtransport.SendRequest(fwtransport.SendRequestOptions{ Config: d.providerConfig, Method: "GET", Project: billingProject.ValueString(), @@ -301,7 +301,7 @@ func (d *GooglePubsubLiteReservationFWResource) Update(ctx context.Context, req return } - res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ + res, _ := fwtransport.SendRequest(fwtransport.SendRequestOptions{ Config: d.providerConfig, Method: "PATCH", Project: billingProject.ValueString(), @@ -366,7 +366,7 @@ func (d *GooglePubsubLiteReservationFWResource) Delete(ctx context.Context, req tflog.Trace(ctx, fmt.Sprintf("[DEBUG] Deleting Reservation: %#v", obj)) headers := make(http.Header) - res := fwtransport.SendRequest(fwtransport.SendRequestOptions{ + res, _ := fwtransport.SendRequest(fwtransport.SendRequestOptions{ Config: d.providerConfig, Method: "DELETE", Project: billingProject.ValueString(), From 3a1254edec054be88cfb2b34326f1eb5c6ebd67a Mon Sep 17 00:00:00 2001 From: Seitaro Fujigaki <51070449+seitarof@users.noreply.github.com> Date: Sat, 13 Sep 2025 03:22:23 +0900 Subject: [PATCH 186/201] Add nil check and fix type assertion to flattenCloudSchedulerJobHttpTargetHeaders in `google_cloud_scheduler_job` (#15128) --- .../custom_flatten/http_headers.tmpl | 30 ++++++++++++------- 1 file changed, 20 insertions(+), 10 deletions(-) diff --git a/mmv1/templates/terraform/custom_flatten/http_headers.tmpl b/mmv1/templates/terraform/custom_flatten/http_headers.tmpl index 7ca9a567feb9..99ebacfad368 100644 --- a/mmv1/templates/terraform/custom_flatten/http_headers.tmpl +++ b/mmv1/templates/terraform/custom_flatten/http_headers.tmpl @@ -11,23 +11,33 @@ limitations under the License. 
*/ -}} func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) (interface{}) { - var headers = v.(map[string]interface{}) + if v == nil { + return nil + } + headers, ok := v.(map[string]interface{}) + if !ok { + return nil + } if v, ok := headers["User-Agent"]; ok { - if v.(string) == "AppEngine-Google; (+http://code.google.com/appengine)" { - delete(headers, "User-Agent") - } else if v.(string) == "Google-Cloud-Scheduler" { - delete(headers, "User-Agent") - } else { - headers["User-Agent"] = strings.TrimSpace(strings.Replace(v.(string), "AppEngine-Google; (+http://code.google.com/appengine)","", -1)) + if userAgent, ok := v.(string); ok { + if userAgent == "AppEngine-Google; (+http://code.google.com/appengine)" { + delete(headers, "User-Agent") + } else if userAgent == "Google-Cloud-Scheduler" { + delete(headers, "User-Agent") + } else { + headers["User-Agent"] = strings.TrimSpace(strings.Replace(userAgent, "AppEngine-Google; (+http://code.google.com/appengine)","", -1)) + } } } if v, ok := headers["Content-Type"]; ok { - if v.(string) == "application/octet-stream" { - delete(headers, "Content-Type") + if contentType, ok := v.(string); ok { + if contentType == "application/octet-stream" { + delete(headers, "Content-Type") + } } } r := regexp.MustCompile(`(X-Google-|X-AppEngine-|Content-Length).*`) - for key := range headers { + for key := range headers { if r.MatchString(key) { delete(headers, key) } From 15470c49782155700b933a6b55b515af28dabb22 Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Fri, 12 Sep 2025 12:22:28 -0700 Subject: [PATCH 187/201] Made acceptance tests set empty string for compatibility with pre-7.X providers (#15134) --- .../builds/build_configuration_per_package.kt | 10 +++++----- .../components/builds/build_configuration_sweepers.kt | 2 +- .../builds/build_configuration_vcr_recording.kt | 2 +- .../.teamcity/components/builds/build_parameters.kt | 9 +++++++-- .../components/projects/reused/weekly_diff_tests.kt | 2 +- 5 files changed, 15 insertions(+), 10 deletions(-) diff --git a/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_per_package.kt b/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_per_package.kt index 8346d819818a..1d5dbc5f9bd7 100644 --- a/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_per_package.kt +++ b/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_per_package.kt @@ -20,7 +20,7 @@ import replaceCharsId // BuildConfigurationsForPackages accepts a map containing details of multiple packages in a provider and returns a list of build configurations for them all. // Intended to be used in projects where we're testing all packages, e.g. 
the nightly test projects -fun BuildConfigurationsForPackages(packages: Map>, providerName: String, parentProjectName: String, vcsRoot: GitVcsRoot, sharedResources: List, environmentVariables: AccTestConfiguration, testPrefix: String = "TestAcc", releaseDiffTest: String = "false"): List { +fun BuildConfigurationsForPackages(packages: Map>, providerName: String, parentProjectName: String, vcsRoot: GitVcsRoot, sharedResources: List, environmentVariables: AccTestConfiguration, testPrefix: String = "TestAcc", releaseDiffTest: Boolean = false): List { val list = ArrayList() // Create build configurations for all packages, except sweeper @@ -38,16 +38,16 @@ fun BuildConfigurationsForPackages(packages: Map>, p // BuildConfigurationForSinglePackage accepts details of a single package in a provider and returns a build configuration for it // Intended to be used in short-lived projects where we're testing specific packages, e.g. feature branch testing -fun BuildConfigurationForSinglePackage(packageName: String, packagePath: String, packageDisplayName: String, providerName: String, parentProjectName: String, vcsRoot: GitVcsRoot, sharedResources: List, environmentVariables: AccTestConfiguration, testPrefix: String = "TestAcc", releaseDiffTest: String = "false"): BuildType{ +fun BuildConfigurationForSinglePackage(packageName: String, packagePath: String, packageDisplayName: String, providerName: String, parentProjectName: String, vcsRoot: GitVcsRoot, sharedResources: List, environmentVariables: AccTestConfiguration, testPrefix: String = "TestAcc", releaseDiffTest: Boolean = false): BuildType{ val pkg = PackageDetails(packageName, packageDisplayName, providerName, parentProjectName, releaseDiffTest = releaseDiffTest) return pkg.buildConfiguration(packagePath, vcsRoot, sharedResources, environmentVariables, testPrefix = testPrefix, releaseDiffTest = releaseDiffTest) } -class PackageDetails(private val packageName: String, private val displayName: String, private val providerName: String, private val parentProjectName: String, private val releaseDiffTest: String) { +class PackageDetails(private val packageName: String, private val displayName: String, private val providerName: String, private val parentProjectName: String, private val releaseDiffTest: Boolean) { // buildConfiguration returns a BuildType for a service package // For BuildType docs, see https://teamcity.jetbrains.com/app/dsl-documentation/root/build-type/index.html - fun buildConfiguration(path: String, vcsRoot: GitVcsRoot, sharedResources: List, environmentVariables: AccTestConfiguration, buildTimeout: Int = DefaultBuildTimeoutDuration, testPrefix: String, releaseDiffTest: String): BuildType { + fun buildConfiguration(path: String, vcsRoot: GitVcsRoot, sharedResources: List, environmentVariables: AccTestConfiguration, buildTimeout: Int = DefaultBuildTimeoutDuration, testPrefix: String, releaseDiffTest: Boolean): BuildType { val testPrefix = "TestAcc" val testTimeout = "12" @@ -72,7 +72,7 @@ class PackageDetails(private val packageName: String, private val displayName: S tagBuildToIndicateTriggerMethod() configureGoEnv() downloadTerraformBinary() - if (releaseDiffTest.toBoolean()) { + if (releaseDiffTest) { runDiffTests() } else { runAcceptanceTests() diff --git a/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_sweepers.kt b/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_sweepers.kt index 0c93a9183c94..b58acef2b719 100644 --- 
a/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_sweepers.kt +++ b/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_sweepers.kt @@ -61,7 +61,7 @@ class SweeperDetails(private val sweeperName: String, private val parentProjectN // These hardcoded values affect the sweeper CLI command's behaviour val testPrefix = "TestAcc" val testTimeout = "12" - val releaseDiffTest = "false" + val releaseDiffTest = false return BuildType { diff --git a/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_vcr_recording.kt b/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_vcr_recording.kt index b1531bfefd28..a0543cbebf5f 100644 --- a/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_vcr_recording.kt +++ b/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_vcr_recording.kt @@ -27,7 +27,7 @@ class VcrDetails(private val providerName: String, private val buildId: String, val testTimeout = "12" val parallelism = DefaultParallelism val buildTimeout: Int = DefaultBuildTimeoutDuration - val releaseDiffTest = "false" + val releaseDiffTest = false // Path is just ./google(-beta) here, whereas nightly test builds use paths like ./google/something/specific // This helps VCR testing builds to run tests across multiple packages diff --git a/mmv1/third_party/terraform/.teamcity/components/builds/build_parameters.kt b/mmv1/third_party/terraform/.teamcity/components/builds/build_parameters.kt index 393493f525df..8902e900caa1 100644 --- a/mmv1/third_party/terraform/.teamcity/components/builds/build_parameters.kt +++ b/mmv1/third_party/terraform/.teamcity/components/builds/build_parameters.kt @@ -205,12 +205,17 @@ fun ParametrizedWithType.configureGoogleSpecificTestParameters(config: AccTestCo // ParametrizedWithType.acceptanceTestBuildParams sets build params that affect how commands to run // acceptance tests are templated -fun ParametrizedWithType.acceptanceTestBuildParams(parallelism: Int, prefix: String, timeout: String, releaseDiffTest: String) { +fun ParametrizedWithType.acceptanceTestBuildParams(parallelism: Int, prefix: String, timeout: String, releaseDiffTest: Boolean) { hiddenVariable("env.TF_ACC", "1", "Set to a value to run the Acceptance Tests") text("PARALLELISM", "%d".format(parallelism)) text("TEST_PREFIX", prefix) text("TIMEOUT", timeout) - text("env.RELEASE_DIFF", releaseDiffTest) + if (releaseDiffTest) { + text("env.RELEASE_DIFF", "true") + } else { + // Use an empty string for backwards-compatibility with pre-7.X release diff behavior. 
+ text("env.RELEASE_DIFF", "") + } } // ParametrizedWithType.sweeperParameters sets build parameters that affect how sweepers are run diff --git a/mmv1/third_party/terraform/.teamcity/components/projects/reused/weekly_diff_tests.kt b/mmv1/third_party/terraform/.teamcity/components/projects/reused/weekly_diff_tests.kt index ab3119eeb340..f2a049912243 100644 --- a/mmv1/third_party/terraform/.teamcity/components/projects/reused/weekly_diff_tests.kt +++ b/mmv1/third_party/terraform/.teamcity/components/projects/reused/weekly_diff_tests.kt @@ -37,7 +37,7 @@ fun weeklyDiffTests(parentProject:String, providerName: String, vcsRoot: GitVcsR // Create build configs to run acceptance tests for each package defined in packages.kt and services.kt files // and add cron trigger to them all val allPackages = getAllPackageInProviderVersion(providerName) - val packageBuildConfigs = BuildConfigurationsForPackages(allPackages, providerName, projectId, vcsRoot, sharedResources, config, releaseDiffTest = "true") + val packageBuildConfigs = BuildConfigurationsForPackages(allPackages, providerName, projectId, vcsRoot, sharedResources, config, releaseDiffTest = true) packageBuildConfigs.forEach { buildConfiguration -> buildConfiguration.addTrigger(cron) } From fb180df88f0f32f57ece3087fbd6934d0414fc4d Mon Sep 17 00:00:00 2001 From: cherriford <71843124+cherriford@users.noreply.github.com> Date: Fri, 12 Sep 2025 15:36:14 -0400 Subject: [PATCH 188/201] Update IAM template docs to cover WIF principals (#15131) Co-authored-by: Riley Karson --- mmv1/templates/terraform/resource_iam.html.markdown.tmpl | 1 + 1 file changed, 1 insertion(+) diff --git a/mmv1/templates/terraform/resource_iam.html.markdown.tmpl b/mmv1/templates/terraform/resource_iam.html.markdown.tmpl index f4a61aa9c5a0..3667863cb129 100644 --- a/mmv1/templates/terraform/resource_iam.html.markdown.tmpl +++ b/mmv1/templates/terraform/resource_iam.html.markdown.tmpl @@ -230,6 +230,7 @@ The following arguments are supported: * **projectOwner:projectid**: Owners of the given project. For example, "projectOwner:my-example-project" * **projectEditor:projectid**: Editors of the given project. For example, "projectEditor:my-example-project" * **projectViewer:projectid**: Viewers of the given project. For example, "projectViewer:my-example-project" + * **Federated identities**: One or more federated identities in a workload or workforce identity pool, workload running on GKE, etc. Refer to the [Principal identifiers documentation](https://cloud.google.com/iam/docs/principal-identifiers#allow) for examples of targets and valid configuration. For example, "principal://iam.googleapis.com/locations/global/workforcePools/example-contractors/subject/joe@example.com" * `role` - (Required) The role that should be applied. Only one `{{ $.IamTerraformName }}_binding` can be used per role. 
Note that custom roles must be of the format From 4c6257388aa8f5aff7b619043d809a7e47c63ebc Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Fri, 12 Sep 2025 13:01:26 -0700 Subject: [PATCH 189/201] tgc-revival: add networksecurity resources (#15133) --- .../products/networksecurity/GatewaySecurityPolicyRule.yaml | 3 +++ mmv1/products/networksecurity/UrlLists.yaml | 1 + mmv1/templates/terraform/flatten_property_method.go.tmpl | 6 ++++++ 3 files changed, 10 insertions(+) diff --git a/mmv1/products/networksecurity/GatewaySecurityPolicyRule.yaml b/mmv1/products/networksecurity/GatewaySecurityPolicyRule.yaml index c30a183387b1..92eb381c081f 100644 --- a/mmv1/products/networksecurity/GatewaySecurityPolicyRule.yaml +++ b/mmv1/products/networksecurity/GatewaySecurityPolicyRule.yaml @@ -31,6 +31,7 @@ timeouts: insert_minutes: 30 update_minutes: 30 delete_minutes: 30 +include_in_tgc_next_DO_NOT_USE: true autogen_async: true async: actions: ['create', 'delete', 'update'] @@ -55,6 +56,8 @@ examples: vars: gateway_security_policy_id: 'my-gateway-security-policy' resource_name: 'my-gateway-security-policy-rule' + tgc_test_ignore_extra: + - 'tls_inspection_enabled' # Ignore its false value in Terraform configuration parameters: - name: 'name' type: String diff --git a/mmv1/products/networksecurity/UrlLists.yaml b/mmv1/products/networksecurity/UrlLists.yaml index 47c3c4a9adcf..f3503e173786 100644 --- a/mmv1/products/networksecurity/UrlLists.yaml +++ b/mmv1/products/networksecurity/UrlLists.yaml @@ -45,6 +45,7 @@ async: result: resource_inside_response: false custom_code: +include_in_tgc_next_DO_NOT_USE: true examples: - name: 'network_security_url_lists_basic' primary_resource_id: 'default' diff --git a/mmv1/templates/terraform/flatten_property_method.go.tmpl b/mmv1/templates/terraform/flatten_property_method.go.tmpl index e36259baebb5..814ea0f30627 100644 --- a/mmv1/templates/terraform/flatten_property_method.go.tmpl +++ b/mmv1/templates/terraform/flatten_property_method.go.tmpl @@ -130,6 +130,12 @@ func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.Reso return intVal } + {{- if and $.ResourceMetadata.IsTgcCompiler $.Required }} + if v == nil { + return 0 + } + {{- end }} + return v // let terraform core handle it otherwise {{- else if and ($.IsA "Array") ($.ItemType.IsA "ResourceRef")}} if v == nil { From 137e0b979c5003e1d69b3f575e64ef5bfd2bcfe1 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Fri, 12 Sep 2025 16:18:43 -0700 Subject: [PATCH 190/201] Use us-west2 for one google_cloud_scheduler_job test (#15135) --- .../cloudscheduler/resource_cloud_scheduler_job_test.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/mmv1/third_party/terraform/services/cloudscheduler/resource_cloud_scheduler_job_test.go b/mmv1/third_party/terraform/services/cloudscheduler/resource_cloud_scheduler_job_test.go index 72d52f264057..63b4778de33e 100644 --- a/mmv1/third_party/terraform/services/cloudscheduler/resource_cloud_scheduler_job_test.go +++ b/mmv1/third_party/terraform/services/cloudscheduler/resource_cloud_scheduler_job_test.go @@ -86,6 +86,7 @@ resource "google_cloud_scheduler_job" "job" { schedule = "*/8 * * * *" time_zone = "America/New_York" attempt_deadline = "320s" + region = "us-west2" retry_config { retry_count = 1 @@ -109,6 +110,7 @@ resource "google_cloud_scheduler_job" "job" { schedule = "*/8 * * * *" time_zone = "America/New_York" attempt_deadline = "320s" + region = "us-west2" retry_config { retry_count = 1 From 211119cc09dfc4ac7a0e83baf674d5177e166fce Mon Sep 17 00:00:00 2001 
From: Serhii Tatsiak Date: Mon, 15 Sep 2025 18:36:12 +0200 Subject: [PATCH 191/201] Container warnings (#15118) --- .../compute/resource_compute_instance.go.tmpl | 13 +++ ...resource_compute_instance_template.go.tmpl | 1 + ...rce_compute_instance_template_test.go.tmpl | 73 ++++++++++++++ .../resource_compute_instance_test.go.tmpl | 99 +++++++++++++++++++ ...e_compute_region_instance_template.go.tmpl | 1 + ...pute_region_instance_template_test.go.tmpl | 74 ++++++++++++++ 6 files changed, 261 insertions(+) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl index 4f9692933dd9..d595931425db 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl @@ -206,6 +206,18 @@ func ComputeInstanceMinCpuPlatformEmptyOrAutomaticDiffSuppress(k, old, new strin return (old == "" && new == defaultVal) || (new == "" && old == defaultVal) } +func ValidateInstanceMetadata(i interface{}, k string) ([]string, []error) { + metadata, ok := i.(map[string]interface{}) + if !ok { + return nil, []error{fmt.Errorf("expected %q to be a map, got %T", k, i)} + } + var warnings []string + if _, ok := metadata["gce-container-declaration"]; ok { + warnings = append(warnings, "The option to deploy a container during VM creation using the container startup agent is deprecated. Use alternative services to run containers on your VMs. Learn more at https://cloud.google.com/compute/docs/containers/migrate-containers.") + } + return warnings, nil +} + func ResourceComputeInstance() *schema.Resource { return &schema.Resource{ Create: resourceComputeInstanceCreate, @@ -995,6 +1007,7 @@ func ResourceComputeInstance() *schema.Resource { Optional: true, Elem: &schema.Schema{Type: schema.TypeString}, Description: `Metadata key/value pairs made available within the instance.`, + ValidateFunc: ValidateInstanceMetadata, }, {{ if ne $.TargetVersionName `ga` -}} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.tmpl index ba87f811432b..59e2daea62a3 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.tmpl @@ -439,6 +439,7 @@ Google Cloud KMS. 
Only one of kms_key_self_link, rsa_encrypted_key and raw_key m Optional: true, ForceNew: true, Description: `Metadata key/value pairs to make available from within instances created from this template.`, + ValidateFunc: ValidateInstanceMetadata, }, "metadata_startup_script": { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_test.go.tmpl index 778e88ba42a3..5255c41b1ee0 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_test.go.tmpl @@ -99,6 +99,35 @@ func TestAccComputeInstanceTemplate_imageShorthand(t *testing.T) { }) } +func TestAccComputeInstanceTemplate_metadataGceContainerDeclaration(t *testing.T) { + t.Parallel() + + var instanceTemplate compute.InstanceTemplate + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeInstanceTemplateDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeInstanceTemplate_metadataGceContainerDeclaration(acctest.RandString(t, 10)), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeInstanceTemplateExists( + t, "google_compute_instance_template.foobar", &instanceTemplate), + testAccCheckComputeInstanceTemplateMetadata(&instanceTemplate, "foo", "bar"), + testAccCheckComputeInstanceTemplateMetadata(&instanceTemplate, "gce-container-declaration", "spec:\n containers:\n - name: test\n image: gcr.io/google-containers/busybox\n"), + ), + }, + { + ResourceName: "google_compute_instance_template.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels", "metadata.foo", "metadata.gce-container-declaration"}, + }, + }, + }) +} + func TestAccComputeInstanceTemplate_preemptible(t *testing.T) { t.Parallel() @@ -2759,6 +2788,50 @@ resource "google_compute_instance_template" "foobar" { `, context) } +func testAccComputeInstanceTemplate_metadataGceContainerDeclaration(suffix string) string { + return fmt.Sprintf(` +data "google_compute_image" "my_image" { + family = "debian-11" + project = "debian-cloud" +} + +resource "google_compute_instance_template" "foobar" { + name = "tf-test-instance-template-%s" + machine_type = "e2-medium" + can_ip_forward = false + tags = ["foo", "bar"] + + disk { + source_image = data.google_compute_image.my_image.self_link + auto_delete = true + boot = true + } + + network_interface { + network = "default" + } + + scheduling { + preemptible = false + automatic_restart = true + } + + metadata = { + foo = "bar" + gce-container-declaration = "spec:\n containers:\n - name: test\n image: gcr.io/google-containers/busybox\n" + } + + service_account { + scopes = ["userinfo-email", "compute-ro", "storage-ro"] + } + + labels = { + my_label = "foobar" + } +} +`, suffix) +} + func testAccComputeInstanceTemplate_preemptible(suffix string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl index c0f189b2ae75..d8dc45260475 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl +++ 
b/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl @@ -145,6 +145,48 @@ func TestDisksForceAttachDiffSuppress(t *testing.T) { } } +func TestValidateInstanceMetadata(t *testing.T) { + cases := map[string]struct { + Metadata map[string]interface{} + ExpectWarning string + }{ + "with gce-container-declaration": { + Metadata: map[string]interface{}{ + "gce-container-declaration": "some-value", + }, + ExpectWarning: "The option to deploy a container during VM creation using the container startup agent is deprecated. Use alternative services to run containers on your VMs. Learn more at https://cloud.google.com/compute/docs/containers/migrate-containers.", + }, + "without gce-container-declaration": { + Metadata: map[string]interface{}{ + "foo": "bar", + }, + ExpectWarning: "", + }, + "with empty metadata": { + Metadata: map[string]interface{}{}, + ExpectWarning: "", + }, + } + + for tn, tc := range cases { + warnings, errs := tpgcompute.ValidateInstanceMetadata(tc.Metadata, "metadata") + if len(errs) > 0 { + t.Errorf("%s: Unexpected errors: %v", tn, errs) + } + if tc.ExpectWarning == "" { + if len(warnings) > 0 { + t.Errorf("%s: Expected no warning, got: %v", tn, warnings) + } + } else { + if len(warnings) == 0 { + t.Errorf("%s: Expected warning %q, got none", tn, tc.ExpectWarning) + } else if warnings[0] != tc.ExpectWarning { + t.Errorf("%s: Expected warning %q, got %q", tn, tc.ExpectWarning, warnings[0]) + } + } + } +} + func TestCheckForCommonAliasIp(t *testing.T) { type testCase struct { old, new []*compute.AliasIpRange @@ -350,6 +392,31 @@ func TestAccComputeInstance_basic5(t *testing.T) { }) } +func TestAccComputeInstance_metadataGceContainerDeclaration(t *testing.T) { + t.Parallel() + + var instance compute.Instance + var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeInstance_metadataGceContainerDeclaration(instanceName), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeInstanceExists( + t, "google_compute_instance.foobar", &instance), + testAccCheckComputeInstanceMetadata(&instance, "foo", "bar"), + testAccCheckComputeInstanceMetadata(&instance, "gce-container-declaration", "spec:\n containers:\n - name: test\n image: gcr.io/google-containers/busybox\n"), + ), + }, + computeInstanceImportStep("us-central1-a", instanceName, []string{"metadata.foo", "metadata.gce-container-declaration", "desired_status"}), + }, + }) +} + func TestAccComputeInstance_resourceManagerTags(t *testing.T) { t.Parallel() @@ -6062,6 +6129,38 @@ resource "google_compute_instance" "foobar" { `, instance) } +func testAccComputeInstance_metadataGceContainerDeclaration(instance string) string { + return fmt.Sprintf(` +data "google_compute_image" "my_image" { + family = "debian-11" + project = "debian-cloud" +} + +resource "google_compute_instance" "foobar" { + name = "%s" + machine_type = "e2-medium" + zone = "us-central1-a" + tags = ["foo", "bar"] + desired_status = "RUNNING" + + boot_disk { + initialize_params { + image = data.google_compute_image.my_image.self_link + } + } + + network_interface { + network = "default" + } + + metadata = { + foo = "bar" + gce-container-declaration = "spec:\n containers:\n - name: test\n image: gcr.io/google-containers/busybox\n" + } +} +`, 
instance) +} + func testAccComputeInstance_machineType(instance string, machineType string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template.go.tmpl index a8b5bd424eb7..19de85a26ed2 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template.go.tmpl @@ -411,6 +411,7 @@ Google Cloud KMS. Only one of kms_key_self_link, rsa_encrypted_key and raw_key m Optional: true, ForceNew: true, Description: `Metadata key/value pairs to make available from within instances created from this template.`, + ValidateFunc: ValidateInstanceMetadata, }, {{ if ne $.TargetVersionName `ga` -}} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_test.go.tmpl index eb7f8681125d..2dfe68ef2088 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_test.go.tmpl @@ -94,6 +94,35 @@ func TestAccComputeRegionInstanceTemplate_imageShorthand(t *testing.T) { }) } +func TestAccComputeRegionInstanceTemplate_metadataGceContainerDeclaration(t *testing.T) { + t.Parallel() + + var instanceTemplate compute.InstanceTemplate + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeRegionInstanceTemplateDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeRegionInstanceTemplate_metadataGceContainerDeclaration(acctest.RandString(t, 10)), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeRegionInstanceTemplateExists( + t, "google_compute_region_instance_template.foobar", &instanceTemplate), + testAccCheckComputeRegionInstanceTemplateMetadata(&instanceTemplate, "foo", "bar"), + testAccCheckComputeRegionInstanceTemplateMetadata(&instanceTemplate, "gce-container-declaration", "spec:\n containers:\n - name: test\n image: gcr.io/google-containers/busybox\n"), + ), + }, + { + ResourceName: "google_compute_region_instance_template.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels", "metadata.foo", "metadata.gce-container-declaration"}, + }, + }, + }) +} + func TestAccComputeRegionInstanceTemplate_preemptible(t *testing.T) { t.Parallel() @@ -2299,6 +2328,51 @@ resource "google_compute_region_instance_template" "foobar" { `, context) } +func testAccComputeRegionInstanceTemplate_metadataGceContainerDeclaration(suffix string) string { + return fmt.Sprintf(` +data "google_compute_image" "my_image" { + family = "debian-11" + project = "debian-cloud" +} + +resource "google_compute_region_instance_template" "foobar" { + name = "tf-test-instance-template-%s" + region = "us-central1" + machine_type = "e2-medium" + can_ip_forward = false + tags = ["foo", "bar"] + + disk { + source_image = data.google_compute_image.my_image.self_link + auto_delete = true + boot = true + } + + network_interface { + network = "default" + } + + scheduling { + preemptible = false + automatic_restart 
= true + } + + metadata = { + foo = "bar" + gce-container-declaration = "spec:\n containers:\n - name: test\n image: gcr.io/google-containers/busybox\n" + } + + service_account { + scopes = ["userinfo-email", "compute-ro", "storage-ro"] + } + + labels = { + my_label = "foobar" + } +} +`, suffix) +} + func testAccComputeRegionInstanceTemplate_preemptible(suffix string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { From ad70e38c215b03f7082f98235988119a8b39040d Mon Sep 17 00:00:00 2001 From: gurusai-voleti Date: Mon, 15 Sep 2025 23:12:00 +0530 Subject: [PATCH 192/201] feat: (storage) added timeouts for bucket_iam resources (#15062) --- .../provider/provider_mmv1_resources.go.tmpl | 6 ++--- .../storage/iam_storage_bucket_test.go | 9 +++++++ .../terraform/tpgiamresource/iam.go.tmpl | 7 ++++++ .../tpgiamresource/resource_iam_binding.go | 10 +++++++- .../tpgiamresource/resource_iam_member.go | 12 ++++++++- .../tpgiamresource/resource_iam_policy.go | 13 +++++++--- .../docs/r/storage_bucket_iam.html.markdown | 25 +++++++++++++++++++ 7 files changed, 74 insertions(+), 8 deletions(-) diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index ea291b00f8de..d53a82fd1d36 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -507,9 +507,9 @@ var handwrittenIAMResources = map[string]*schema.Resource{ "google_storage_managed_folder_iam_binding": tpgiamresource.ResourceIamBinding(storage.StorageManagedFolderIamSchema, storage.StorageManagedFolderIamUpdaterProducer, storage.StorageManagedFolderIdParseFunc), "google_storage_managed_folder_iam_member": tpgiamresource.ResourceIamMember(storage.StorageManagedFolderIamSchema, storage.StorageManagedFolderIamUpdaterProducer, storage.StorageManagedFolderIdParseFunc), "google_storage_managed_folder_iam_policy": tpgiamresource.ResourceIamPolicy(storage.StorageManagedFolderIamSchema, storage.StorageManagedFolderIamUpdaterProducer, storage.StorageManagedFolderIdParseFunc), - "google_storage_bucket_iam_binding": tpgiamresource.ResourceIamBinding(storage.StorageBucketIamSchema, storage.StorageBucketIamUpdaterProducer, storage.StorageBucketIdParseFunc), - "google_storage_bucket_iam_member": tpgiamresource.ResourceIamMember(storage.StorageBucketIamSchema, storage.StorageBucketIamUpdaterProducer, storage.StorageBucketIdParseFunc), - "google_storage_bucket_iam_policy": tpgiamresource.ResourceIamPolicy(storage.StorageBucketIamSchema, storage.StorageBucketIamUpdaterProducer, storage.StorageBucketIdParseFunc), + "google_storage_bucket_iam_binding": tpgiamresource.ResourceIamBinding(storage.StorageBucketIamSchema, storage.StorageBucketIamUpdaterProducer, storage.StorageBucketIdParseFunc, tpgiamresource.IamCreateTimeOut(20)), + "google_storage_bucket_iam_member": tpgiamresource.ResourceIamMember(storage.StorageBucketIamSchema, storage.StorageBucketIamUpdaterProducer, storage.StorageBucketIdParseFunc, tpgiamresource.IamCreateTimeOut(20)), + "google_storage_bucket_iam_policy": tpgiamresource.ResourceIamPolicy(storage.StorageBucketIamSchema, storage.StorageBucketIamUpdaterProducer, storage.StorageBucketIdParseFunc, tpgiamresource.IamCreateTimeOut(20)), "google_organization_iam_binding": tpgiamresource.ResourceIamBinding(resourcemanager.IamOrganizationSchema, resourcemanager.NewOrganizationIamUpdater, resourcemanager.OrgIdParseFunc), 
"google_organization_iam_member": tpgiamresource.ResourceIamMember(resourcemanager.IamOrganizationSchema, resourcemanager.NewOrganizationIamUpdater, resourcemanager.OrgIdParseFunc), "google_organization_iam_policy": tpgiamresource.ResourceIamPolicy(resourcemanager.IamOrganizationSchema, resourcemanager.NewOrganizationIamUpdater, resourcemanager.OrgIdParseFunc), diff --git a/mmv1/third_party/terraform/services/storage/iam_storage_bucket_test.go b/mmv1/third_party/terraform/services/storage/iam_storage_bucket_test.go index 9b72d3e2f99e..5036a047e6d9 100644 --- a/mmv1/third_party/terraform/services/storage/iam_storage_bucket_test.go +++ b/mmv1/third_party/terraform/services/storage/iam_storage_bucket_test.go @@ -357,6 +357,9 @@ resource "google_storage_bucket_iam_member" "foo" { bucket = google_storage_bucket.default.name role = "%{role}" member = "user:admin@hashicorptest.com" + timeouts { + create = "5m" + } } `, context) } @@ -383,6 +386,9 @@ data "google_iam_policy" "foo" { resource "google_storage_bucket_iam_policy" "foo" { bucket = google_storage_bucket.default.name policy_data = data.google_iam_policy.foo.policy_data + timeouts { + create = "5m" + } } data "google_storage_bucket_iam_policy" "foo" { @@ -424,6 +430,9 @@ resource "google_storage_bucket_iam_binding" "foo" { bucket = google_storage_bucket.default.name role = "%{role}" members = ["user:admin@hashicorptest.com"] + timeouts { + create = "5m" + } } `, context) } diff --git a/mmv1/third_party/terraform/tpgiamresource/iam.go.tmpl b/mmv1/third_party/terraform/tpgiamresource/iam.go.tmpl index 6abe5a12745d..c42bcf27765a 100644 --- a/mmv1/third_party/terraform/tpgiamresource/iam.go.tmpl +++ b/mmv1/third_party/terraform/tpgiamresource/iam.go.tmpl @@ -469,6 +469,7 @@ type IamSettings struct { EnableBatching bool StateUpgraders []schema.StateUpgrader SchemaVersion int + CreateTimeOut int64 } func NewIamSettings(options ...func(*IamSettings)) *IamSettings { @@ -509,6 +510,12 @@ func IamWithSchemaVersion(version int) func(*IamSettings) { } } +func IamCreateTimeOut(createTimeout int64) func(s *IamSettings) { + return func(s *IamSettings) { + s.CreateTimeOut = createTimeout + } +} + // Util to deref and print auditConfigs func DebugPrintAuditConfigs(bs []*cloudresourcemanager.AuditConfig) string { v, _ := json.MarshalIndent(bs, "", "\t") diff --git a/mmv1/third_party/terraform/tpgiamresource/resource_iam_binding.go b/mmv1/third_party/terraform/tpgiamresource/resource_iam_binding.go index cb814c8fe902..f3e9f8380379 100644 --- a/mmv1/third_party/terraform/tpgiamresource/resource_iam_binding.go +++ b/mmv1/third_party/terraform/tpgiamresource/resource_iam_binding.go @@ -5,6 +5,7 @@ import ( "fmt" "log" "strings" + "time" "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" @@ -65,8 +66,9 @@ var iamBindingSchema = map[string]*schema.Schema{ func ResourceIamBinding(parentSpecificSchema map[string]*schema.Schema, newUpdaterFunc NewResourceIamUpdaterFunc, resourceIdParser ResourceIdParserFunc, options ...func(*IamSettings)) *schema.Resource { settings := NewIamSettings(options...) 
+ createTimeOut := time.Duration(settings.CreateTimeOut) * time.Minute - return &schema.Resource{ + resource := &schema.Resource{ Create: resourceIamBindingCreateUpdate(newUpdaterFunc, settings.EnableBatching), Read: resourceIamBindingRead(newUpdaterFunc), Update: resourceIamBindingCreateUpdate(newUpdaterFunc, settings.EnableBatching), @@ -83,6 +85,12 @@ func ResourceIamBinding(parentSpecificSchema map[string]*schema.Schema, newUpdat }, UseJSONNumber: true, } + if createTimeOut > 0 { + resource.Timeouts = &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(createTimeOut), + } + } + return resource } func resourceIamBindingCreateUpdate(newUpdaterFunc NewResourceIamUpdaterFunc, enableBatching bool) func(*schema.ResourceData, interface{}) error { diff --git a/mmv1/third_party/terraform/tpgiamresource/resource_iam_member.go b/mmv1/third_party/terraform/tpgiamresource/resource_iam_member.go index 6a7b83d03ff2..a81d4309103f 100644 --- a/mmv1/third_party/terraform/tpgiamresource/resource_iam_member.go +++ b/mmv1/third_party/terraform/tpgiamresource/resource_iam_member.go @@ -6,6 +6,7 @@ import ( "log" "regexp" "strings" + "time" "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" @@ -172,7 +173,9 @@ func iamMemberImport(newUpdaterFunc NewResourceIamUpdaterFunc, resourceIdParser func ResourceIamMember(parentSpecificSchema map[string]*schema.Schema, newUpdaterFunc NewResourceIamUpdaterFunc, resourceIdParser ResourceIdParserFunc, options ...func(*IamSettings)) *schema.Resource { settings := NewIamSettings(options...) - return &schema.Resource{ + createTimeOut := time.Duration(settings.CreateTimeOut) * time.Minute + + resourceSchema := &schema.Resource{ Create: resourceIamMemberCreate(newUpdaterFunc, settings.EnableBatching), Read: resourceIamMemberRead(newUpdaterFunc), Delete: resourceIamMemberDelete(newUpdaterFunc, settings.EnableBatching), @@ -189,6 +192,13 @@ func ResourceIamMember(parentSpecificSchema map[string]*schema.Schema, newUpdate }, UseJSONNumber: true, } + + if createTimeOut > 0 { + resourceSchema.Timeouts = &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(createTimeOut), + } + } + return resourceSchema } func getResourceIamMember(d *schema.ResourceData) *cloudresourcemanager.Binding { diff --git a/mmv1/third_party/terraform/tpgiamresource/resource_iam_policy.go b/mmv1/third_party/terraform/tpgiamresource/resource_iam_policy.go index ae02c2eb1373..94b728528e79 100644 --- a/mmv1/third_party/terraform/tpgiamresource/resource_iam_policy.go +++ b/mmv1/third_party/terraform/tpgiamresource/resource_iam_policy.go @@ -1,12 +1,12 @@ package tpgiamresource import ( - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "encoding/json" "errors" "fmt" + "time" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-provider-google/google/tpgresource" transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" @@ -42,8 +42,9 @@ func iamPolicyImport(resourceIdParser ResourceIdParserFunc) schema.StateFunc { func ResourceIamPolicy(parentSpecificSchema map[string]*schema.Schema, newUpdaterFunc NewResourceIamUpdaterFunc, resourceIdParser ResourceIdParserFunc, options ...func(*IamSettings)) *schema.Resource { settings := NewIamSettings(options...) 
+ createTimeOut := time.Duration(settings.CreateTimeOut) * time.Minute - return &schema.Resource{ + resourceSchema := &schema.Resource{ Create: ResourceIamPolicyCreate(newUpdaterFunc), Read: ResourceIamPolicyRead(newUpdaterFunc), Update: ResourceIamPolicyUpdate(newUpdaterFunc), @@ -61,6 +62,12 @@ func ResourceIamPolicy(parentSpecificSchema map[string]*schema.Schema, newUpdate }, UseJSONNumber: true, } + if createTimeOut > 0 { + resourceSchema.Timeouts = &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(createTimeOut), + } + } + return resourceSchema } func ResourceIamPolicyCreate(newUpdaterFunc NewResourceIamUpdaterFunc) schema.CreateFunc { diff --git a/mmv1/third_party/terraform/website/docs/r/storage_bucket_iam.html.markdown b/mmv1/third_party/terraform/website/docs/r/storage_bucket_iam.html.markdown index a86ae68df360..ac6cfe271161 100644 --- a/mmv1/third_party/terraform/website/docs/r/storage_bucket_iam.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/storage_bucket_iam.html.markdown @@ -37,6 +37,9 @@ data "google_iam_policy" "admin" { resource "google_storage_bucket_iam_policy" "policy" { bucket = google_storage_bucket.default.name policy_data = data.google_iam_policy.admin.policy_data + timeouts { + create = "5m" + } } ``` @@ -61,6 +64,9 @@ data "google_iam_policy" "admin" { resource "google_storage_bucket_iam_policy" "policy" { bucket = google_storage_bucket.default.name policy_data = data.google_iam_policy.admin.policy_data + timeouts { + create = "5m" + } } ``` ## google_storage_bucket_iam_binding @@ -72,6 +78,9 @@ resource "google_storage_bucket_iam_binding" "binding" { members = [ "user:jane@example.com", ] + timeouts { + create = "5m" + } } ``` @@ -90,6 +99,9 @@ resource "google_storage_bucket_iam_binding" "binding" { description = "Expiring at midnight of 2019-12-31" expression = "request.time < timestamp(\"2020-01-01T00:00:00Z\")" } + timeouts { + create = "5m" + } } ``` ## google_storage_bucket_iam_member @@ -99,6 +111,9 @@ resource "google_storage_bucket_iam_member" "member" { bucket = google_storage_bucket.default.name role = "roles/storage.admin" member = "user:jane@example.com" + timeouts { + create = "5m" + } } ``` @@ -115,6 +130,9 @@ resource "google_storage_bucket_iam_member" "member" { description = "Expiring at midnight of 2019-12-31" expression = "request.time < timestamp(\"2020-01-01T00:00:00Z\")" } + timeouts { + create = "5m" + } } ``` @@ -166,6 +184,13 @@ exported: * `etag` - (Computed) The etag of the IAM policy. +## Timeouts + +This resource provides the following +[Timeouts](https://developer.hashicorp.com/terraform/plugin/sdkv2/resources/retries-and-customizable-timeouts) configuration options: + +- `create` - Default is 20 minutes. 
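As a usage note beyond the examples above, the new `create` timeout can also be raised past its default. This is a minimal, illustrative sketch only — the bucket reference, role, member, and the `30m` value are placeholders chosen for the example, not values taken from this patch:

```hcl
resource "google_storage_bucket_iam_member" "slow_propagation" {
  bucket = google_storage_bucket.default.name
  role   = "roles/storage.objectViewer"
  member = "user:jane@example.com"

  timeouts {
    # Raise the create timeout above the 20-minute default for environments
    # where IAM propagation on the bucket is known to be slow.
    create = "30m"
  }
}
```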
+ ## Import For all import syntaxes, the "resource in question" can take any of the following forms: From 958af83dc34c3f8b48e0b43729211aa1803a3e57 Mon Sep 17 00:00:00 2001 From: Nandini Agrawal Date: Mon, 15 Sep 2025 23:48:26 +0530 Subject: [PATCH 193/201] Add Data Source Interconnect Location (#15065) --- .../provider/provider_mmv1_resources.go.tmpl | 2 + ...ce_google_compute_interconnect_location.go | 166 ++++++++++++++++++ ...ogle_compute_interconnect_location_test.go | 90 ++++++++++ ...e_google_compute_interconnect_locations.go | 131 ++++++++++++++ ...gle_compute_interconnect_locations_test.go | 29 +++ ...ompute_interconnect_location.html.markdown | 58 ++++++ ...mpute_interconnect_locations.html.markdown | 55 ++++++ 7 files changed, 531 insertions(+) create mode 100644 mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_location.go create mode 100644 mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_location_test.go create mode 100644 mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_locations.go create mode 100644 mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_locations_test.go create mode 100644 mmv1/third_party/terraform/website/docs/d/compute_interconnect_location.html.markdown create mode 100644 mmv1/third_party/terraform/website/docs/d/compute_interconnect_locations.html.markdown diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl index d53a82fd1d36..29cd46da86ac 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.tmpl @@ -106,6 +106,8 @@ var handwrittenDatasources = map[string]*schema.Resource{ "google_compute_instance_serial_port": compute.DataSourceGoogleComputeInstanceSerialPort(), "google_compute_instance_template": compute.DataSourceGoogleComputeInstanceTemplate(), "google_compute_instance_guest_attributes": compute.DataSourceGoogleComputeInstanceGuestAttributes(), + "google_compute_interconnect_location": compute.DataSourceGoogleComputeInterconnectLocation(), + "google_compute_interconnect_locations": compute.DataSourceGoogleComputeInterconnectLocations(), "google_compute_lb_ip_ranges": compute.DataSourceGoogleComputeLbIpRanges(), "google_compute_machine_types": compute.DataSourceGoogleComputeMachineTypes(), "google_compute_network": compute.DataSourceGoogleComputeNetwork(), diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_location.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_location.go new file mode 100644 index 000000000000..9d08bf1d4e33 --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_location.go @@ -0,0 +1,166 @@ +package compute + +import ( + "fmt" + "regexp" + "strings" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +var ( + computeInterconnectLocationIdTemplate = "projects/%s/global/interconnectlocations/%s" + computeInterconnectLocationLinkRegex = regexp.MustCompile(`projects/(.+)/global/interconnectlocations/(.+)$`) +) + +type ComputeInterconnectLocationId struct { + Project string + Name string +} + +func (s 
ComputeInterconnectLocationId) CanonicalId() string { + return fmt.Sprintf(computeInterconnectLocationIdTemplate, s.Project, s.Name) +} + +// ParseComputeInterconnectLocationId parses IDs of the form: +// - projects/{project}/global/interconnectlocations/{name} +// - {project}/{name} +// - {name} (requires config.Project) +func ParseComputeInterconnectLocationId(id string, config *transport_tpg.Config) (*ComputeInterconnectLocationId, error) { + var parts []string + if computeInterconnectLocationLinkRegex.MatchString(id) { + parts = computeInterconnectLocationLinkRegex.FindStringSubmatch(id) + return &ComputeInterconnectLocationId{ + Project: parts[1], + Name: parts[2], + }, nil + } else { + parts = strings.Split(id, "/") + } + if len(parts) == 2 { + return &ComputeInterconnectLocationId{ + Project: parts[0], + Name: parts[1], + }, nil + } else if len(parts) == 1 { + if config.Project == "" { + return nil, fmt.Errorf("The default project for the provider must be set when using the `{name}` id format.") + } + return &ComputeInterconnectLocationId{ + Project: config.Project, + Name: parts[0], + }, nil + } + return nil, fmt.Errorf("Invalid interconnect location id. Expecting resource link, `{project}/{name}` or `{name}` format.") +} +func DataSourceGoogleComputeInterconnectLocation() *schema.Resource { + return &schema.Resource{ + Read: dataSourceGoogleComputeInterconnectLocationRead, + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + }, + "project": { + Type: schema.TypeString, + Optional: true, + Computed: true, + }, + "self_link": { + Type: schema.TypeString, + Computed: true, + }, + "description": { + Type: schema.TypeString, + Computed: true, + }, + "peeringdb_facility_id": { + Type: schema.TypeString, + Computed: true, + }, + "address": { + Type: schema.TypeString, + Computed: true, + }, + "facility_provider": { + Type: schema.TypeString, + Computed: true, + }, + "facility_provider_facility_id": { + Type: schema.TypeString, + Computed: true, + }, + "continent": { + Type: schema.TypeString, + Computed: true, + }, + "city": { + Type: schema.TypeString, + Computed: true, + }, + "availability_zone": { + Type: schema.TypeString, + Computed: true, + }, + "status": { + Type: schema.TypeString, + Computed: true, + }, + }, + } +} +func dataSourceGoogleComputeInterconnectLocationRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + name := d.Get("name").(string) + id := fmt.Sprintf("projects/%s/global/interconnectlocations/%s", project, name) + location, err := config.NewComputeClient(userAgent).InterconnectLocations.Get(project, name).Do() + if err != nil { + return transport_tpg.HandleDataSourceNotFoundError(err, d, fmt.Sprintf("InterconnectLocation Not Found : %s", name), id) + } + d.SetId(location.Name) + if err := d.Set("project", project); err != nil { + return fmt.Errorf("Error setting project: %s", err) + } + if err := d.Set("self_link", location.SelfLink); err != nil { + return fmt.Errorf("Error setting self_link: %s", err) + } + if err := d.Set("description", location.Description); err != nil { + return fmt.Errorf("Error setting description: %s", err) + } + if err := d.Set("peeringdb_facility_id", location.PeeringdbFacilityId); err != nil { + return fmt.Errorf("Error setting peeringdb_facility_id: %s", 
err) + } + if err := d.Set("address", location.Address); err != nil { + return fmt.Errorf("Error setting address: %s", err) + } + if err := d.Set("facility_provider", location.FacilityProvider); err != nil { + return fmt.Errorf("Error setting facility_provider: %s", err) + } + if err := d.Set("facility_provider_facility_id", location.FacilityProviderFacilityId); err != nil { + return fmt.Errorf("Error setting facility_provider_facility_id: %s", err) + } + if err := d.Set("continent", location.Continent); err != nil { + return fmt.Errorf("Error setting continent: %s", err) + } + if err := d.Set("city", location.City); err != nil { + return fmt.Errorf("Error setting city: %s", err) + } + if err := d.Set("availability_zone", location.AvailabilityZone); err != nil { + return fmt.Errorf("Error setting availability_zone: %s", err) + } + if err := d.Set("status", location.Status); err != nil { + return fmt.Errorf("Error setting status: %s", err) + } + d.SetId(id) + return nil +} diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_location_test.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_location_test.go new file mode 100644 index 000000000000..d78e8a82c2b7 --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_location_test.go @@ -0,0 +1,90 @@ +package compute_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/services/compute" + "github.com/hashicorp/terraform-provider-google/google/transport" +) + +var interconnectLoc = "z2z-us-west8-zone2-ncphxk-z" + +func testAccDataSourceCheckInterconnectLocation() func(s *terraform.State) error { + return func(s *terraform.State) error { + data_source_name := "data.google_compute_interconnect_location.my_location" + ds, ok := s.RootModule().Resources[data_source_name] + if !ok { + return fmt.Errorf("root module has no resource called %s", data_source_name) + } + ds_attr := ds.Primary.Attributes + expected := map[string]string{ + "name": interconnectLoc, + "description": "Zakim-to-Zakim location", + "facility_provider": "Google", + } + for attr, expect_value := range expected { + if ds_attr[attr] != expect_value { + return fmt.Errorf("%s is %s; want %s", attr, ds_attr[attr], expect_value) + } + } + if ds_attr["self_link"] == "" { + return fmt.Errorf("self_link is not set") + } + return nil + } +} +func TestAccDataSourceGoogleComputeInterconnectLocation_basic(t *testing.T) { + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceGoogleComputeInterconnectLocationConfig(interconnectLoc), + Check: resource.ComposeTestCheckFunc( + testAccDataSourceCheckInterconnectLocation(), + ), + }, + }, + }) +} +func testAccDataSourceGoogleComputeInterconnectLocationConfig(locationName string) string { + return fmt.Sprintf(` +data "google_compute_interconnect_location" "my_location" { + name = "%s" +} +`, locationName) +} +func TestParseComputeInterconnectLocationId(t *testing.T) { + config := &transport.Config{Project: "my-project"} + cases := []struct { + id string + wantProj string + wantName string + wantErr bool + }{ + 
{"projects/my-project/global/interconnectlocations/z2z-us-west8-zone2-ncphxk-z", "my-project", interconnectLoc, false}, + {"my-project/z2z-us-west8-zone2-ncphxk-z", "my-project", interconnectLoc, false}, + {interconnectLoc, "my-project", interconnectLoc, false}, + {"invalid/format/extra", "", "", true}, + } + for _, tc := range cases { + got, err := compute.ParseComputeInterconnectLocationId(tc.id, config) + if tc.wantErr { + if err == nil { + t.Errorf("ParseComputeInterconnectLocationId(%q) expected error, got nil", tc.id) + } + continue + } + if err != nil { + t.Errorf("ParseComputeInterconnectLocationId(%q) unexpected error: %v", tc.id, err) + continue + } + if got.Project != tc.wantProj || got.Name != tc.wantName { + t.Errorf("ParseComputeInterconnectLocationId(%q) = (%q, %q), want (%q, %q)", tc.id, got.Project, got.Name, tc.wantProj, tc.wantName) + } + } +} diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_locations.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_locations.go new file mode 100644 index 000000000000..8356473c7db0 --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_locations.go @@ -0,0 +1,131 @@ +package compute + +import ( + "fmt" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func DataSourceGoogleComputeInterconnectLocations() *schema.Resource { + return &schema.Resource{ + Read: dataSourceGoogleComputeInterconnectLocationsRead, + Schema: map[string]*schema.Schema{ + "project": { + Type: schema.TypeString, + Optional: true, + Computed: true, + }, + "locations": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Computed: true, + }, + "description": { + Type: schema.TypeString, + Computed: true, + }, + "self_link": { + Type: schema.TypeString, + Computed: true, + }, + "peeringdb_facility_id": { + Type: schema.TypeString, + Computed: true, + }, + "address": { + Type: schema.TypeString, + Computed: true, + }, + "facility_provider": { + Type: schema.TypeString, + Computed: true, + }, + "facility_provider_facility_id": { + Type: schema.TypeString, + Computed: true, + }, + "status": { + Type: schema.TypeString, + Computed: true, + }, + "continent": { + Type: schema.TypeString, + Computed: true, + }, + "city": { + Type: schema.TypeString, + Computed: true, + }, + "availability_zone": { + Type: schema.TypeString, + Computed: true, + }, + "supports_pzs": { + Type: schema.TypeBool, + Computed: true, + }, + "available_features": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "available_link_types": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + }, + }, + }, + }, + } +} +func dataSourceGoogleComputeInterconnectLocationsRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + d.SetId(fmt.Sprintf("projects/%s", project)) + list, err := config.NewComputeClient(userAgent).InterconnectLocations.List(project).Do() + if err 
!= nil { + return fmt.Errorf("Error retrieving list of interconnect locations: %s", err) + } + var locations []map[string]interface{} + for _, location := range list.Items { + locations = append(locations, map[string]interface{}{ + "name": location.Name, + "description": location.Description, + "self_link": location.SelfLink, + "peeringdb_facility_id": location.PeeringdbFacilityId, + "address": location.Address, + "facility_provider": location.FacilityProvider, + "facility_provider_facility_id": location.FacilityProviderFacilityId, + "status": location.Status, + "continent": location.Continent, + "city": location.City, + "availability_zone": location.AvailabilityZone, + "supports_pzs": location.SupportsPzs, + "available_features": location.AvailableFeatures, + "available_link_types": location.AvailableLinkTypes, + }) + } + if err := d.Set("locations", locations); err != nil { + return fmt.Errorf("Error setting locations: %s", err) + } + return nil +} diff --git a/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_locations_test.go b/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_locations_test.go new file mode 100644 index 000000000000..5833738b1a3f --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/data_source_google_compute_interconnect_locations_test.go @@ -0,0 +1,29 @@ +package compute_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccDataSourceGoogleComputeInterconnectLocations_basic(t *testing.T) { + t.Parallel() + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceGoogleComputeInterconnectLocations_basic(), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("data.google_compute_interconnect_locations.all", "locations.0.self_link"), + ), + }, + }, + }) +} +func testAccDataSourceGoogleComputeInterconnectLocations_basic() string { + return ` +data "google_compute_interconnect_locations" "all" {} +` +} diff --git a/mmv1/third_party/terraform/website/docs/d/compute_interconnect_location.html.markdown b/mmv1/third_party/terraform/website/docs/d/compute_interconnect_location.html.markdown new file mode 100644 index 000000000000..4d582258c5d1 --- /dev/null +++ b/mmv1/third_party/terraform/website/docs/d/compute_interconnect_location.html.markdown @@ -0,0 +1,58 @@ +--- +subcategory: "Compute Engine" +description: |- + Get the details of a single interconnect location. +--- + +# google_compute_interconnect_location +Get the details of a single interconnect location. For more information see +the official [API](https://cloud.google.com/compute/docs/reference/rest/v1/interconnectLocations/get) documentation. + +## Example Usage +```tf +data "google_compute_interconnect_location" "this" { + name = "iad-zone1-1" +} +resource "google_compute_interconnect" "this" { + project = data.google_compute_interconnect_location.this.project + location = data.google_compute_interconnect_location.this.self_link + name = "my-dedicated-connection-1" +} +``` + +## Argument Reference +The following arguments are supported: +* `name` - (Required) The name of the interconnect location. +--- +* `project` - (Optional) The project in which the resource belongs. If it + is not provided, the provider project is used. 
+ +## Attributes Reference + +In addition to the arguments listed above, the following computed attributes are +exported: +* `self_link` - The URI of the created resource. + +* `description` - A textual description of the resource. + +* `peeringdb_facility_id` - The PeeringDB facility ID for this facility. + +* `address` - The postal address of the Point of Presence. + +* `facility_provider` - The name of the provider for this facility. + +* `facility_provider_facility_id` - A provider-assigned Identifier for this facility. + +* `status` - The status of this InterconnectLocation. + +* `continent` - The continent for this location. + +* `city` - The city for this location. + +* `availability_zone` - The availability zone for this InterconnectLocation. + +* `supports_pzs` - Reserved for future use. + +* `available_features` - A list of features available at this InterconnectLocation. + +* `available_link_types` - A list of link types available at this InterconnectLocation. diff --git a/mmv1/third_party/terraform/website/docs/d/compute_interconnect_locations.html.markdown b/mmv1/third_party/terraform/website/docs/d/compute_interconnect_locations.html.markdown new file mode 100644 index 000000000000..a4742755a670 --- /dev/null +++ b/mmv1/third_party/terraform/website/docs/d/compute_interconnect_locations.html.markdown @@ -0,0 +1,55 @@ +--- +subcategory: "Compute Engine" +description: |- + Get a list of interconnect locations. +--- + +# google_compute_interconnect_locations +Get a list of interconnect locations. For more information see +the official [API](https://cloud.google.com/compute/docs/reference/rest/v1/interconnectLocations/list) documentation. + +## Example Usage +```tf +data "google_compute_interconnect_locations" "all" {} +output "interconnect_locations" { + value = data.google_compute_interconnect_locations.all.locations +} +``` + +## Argument Reference + +The following arguments are supported: +* `project` - (Optional) The project in which the resource belongs. If it + is not provided, the provider project is used. + +## Attributes Reference + +In addition to the arguments listed above, the following computed attributes are +exported: +* `locations` - A list of interconnect locations. Each location will have the following attributes: + + * `self_link` - The URI of the created resource. + + * `description` - A textual description of the resource. + + * `peeringdb_facility_id` - The PeeringDB facility ID for this facility. + + * `address` - The postal address of the Point of Presence. + + * `facility_provider` - The name of the provider for this facility. + + * `facility_provider_facility_id` - A provider-assigned Identifier for this facility. + + * `status` - The status of this InterconnectLocation. + + * `continent` - The continent for this location. + + * `city` - The city for this location. + + * `availability_zone` - The availability zone for this InterconnectLocation. + + * `supports_pzs` - Reserved for future use. + + * `available_features` - A list of features available at this InterconnectLocation. + + * `available_link_types` - A list of link types available at this InterconnectLocation. 
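A brief, hedged sketch of how the two data sources added in this change could be combined. The continent value shown is only an example of what the API may return, and the assumption that at least one matching location exists is the author's own, not something stated in the patch:

```tf
data "google_compute_interconnect_locations" "all" {}

locals {
  # Client-side filtering: the list call itself takes no filter argument here.
  north_america = [
    for loc in data.google_compute_interconnect_locations.all.locations :
    loc.name if loc.continent == "C_NORTH_AMERICA"
  ]
}

data "google_compute_interconnect_location" "first_na" {
  # Assumes at least one matching location exists.
  name = local.north_america[0]
}

output "first_na_address" {
  value = data.google_compute_interconnect_location.first_na.address
}
```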
From d8b6962586883a749ea4739ccfc5a110e7621644 Mon Sep 17 00:00:00 2001 From: Zhenhua Li Date: Mon, 15 Sep 2025 11:42:08 -0700 Subject: [PATCH 194/201] tgc-revival: decide the cai identifier automatically (#15085) --- mmv1/api/resource.go | 61 +++++++++++++++++-- mmv1/products/cloudasset/FolderFeed.yaml | 1 - .../products/cloudasset/OrganizationFeed.yaml | 1 - mmv1/products/cloudasset/ProjectFeed.yaml | 1 - 4 files changed, 55 insertions(+), 9 deletions(-) diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index 4d88776422c8..6189ca449878 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -387,9 +387,8 @@ type TGCResource struct { TGCIgnoreTerraformEncoder bool `yaml:"tgc_ignore_terraform_encoder,omitempty"` // [Optional] The parameter that uniquely identifies the resource. - // Generally, it's safe to leave empty, in which case it defaults to `name`. - // Other values are normally useful in cases where an object has a parent - // and is identified by some non-name value, such as an ip+port pair. + // Generally, it shouldn't be set when the identity can be decided. + // Otherswise, it should be set. CaiIdentity string `yaml:"cai_identity,omitempty"` } @@ -1910,22 +1909,72 @@ func (r Resource) DefineAssetTypeForResourceInProduct() bool { // For example: //monitoring.googleapis.com/v3/projects/{{project}}/services/{{service_id}} func (r Resource) rawCaiAssetNameTemplate(productBackendName string) string { caiBaseUrl := "" - caiId := "name" + caiId := "" if r.CaiIdentity != "" { caiId = r.CaiIdentity + } else { + caiId = r.getCaiId() } + caiIdTemplate := fmt.Sprintf("{{%s}}", caiId) if r.CaiBaseUrl != "" { - caiBaseUrl = fmt.Sprintf("%s/{{%s}}", r.CaiBaseUrl, caiId) + if caiId == "" || strings.Contains(r.CaiBaseUrl, caiIdTemplate) { + caiBaseUrl = r.CaiBaseUrl + } else { + caiBaseUrl = fmt.Sprintf("%s/%s", r.CaiBaseUrl, caiIdTemplate) + } } if caiBaseUrl == "" { caiBaseUrl = r.SelfLink } if caiBaseUrl == "" { - caiBaseUrl = fmt.Sprintf("%s/{{%s}}", r.BaseUrl, caiId) + if caiId == "" || strings.Contains(r.BaseUrl, caiIdTemplate) { + caiBaseUrl = r.BaseUrl + } else { + caiBaseUrl = fmt.Sprintf("%s/%s", r.BaseUrl, caiIdTemplate) + } } return fmt.Sprintf("//%s.googleapis.com/%s", productBackendName, caiBaseUrl) } +// Guesses the identifier of the resource, as "name" is not always the identifier +// For example, the cai identifier is feed_id in google_cloud_asset_folder_feed +func (r Resource) getCaiId() string { + for _, p := range r.AllUserProperties() { + if p.Name == "name" && !p.Output { + return "name" + } + } + + // Get the last identifier extracted from selfLink + id := r.getCandidateCaiId(r.SelfLink) + if id != "" { + return id + } + + // Get the last identifier extracted from createUrl + id = r.getCandidateCaiId(r.CreateUrl) + if id != "" { + return id + } + + return "" +} + +// Extracts the last identifier from the url, if it is not computed, +// then it is the candidate identifier +func (r Resource) getCandidateCaiId(url string) string { + identifiers := r.ExtractIdentifiers(url) + if len(identifiers) > 0 { + id := identifiers[len(identifiers)-1] + for _, p := range r.AllUserProperties() { + if google.Underscore(p.Name) == id && !p.Output { + return id + } + } + } + return "" +} + // Gets the Cai asset name template, which doesn't include version // For example: //monitoring.googleapis.com/projects/{{project}}/services/{{service_id}} func (r Resource) CaiAssetNameTemplate(productBackendName string) string { diff --git a/mmv1/products/cloudasset/FolderFeed.yaml 
b/mmv1/products/cloudasset/FolderFeed.yaml index 36a8de774956..1291acf6e195 100644 --- a/mmv1/products/cloudasset/FolderFeed.yaml +++ b/mmv1/products/cloudasset/FolderFeed.yaml @@ -42,7 +42,6 @@ custom_code: supports_indirect_user_project_override: true include_in_tgc_next_DO_NOT_USE: true cai_base_url: 'folders/{{folder}}/feeds' -cai_identity: 'feed_id' tgc_ignore_terraform_encoder: true examples: - name: 'cloud_asset_folder_feed' diff --git a/mmv1/products/cloudasset/OrganizationFeed.yaml b/mmv1/products/cloudasset/OrganizationFeed.yaml index 0c2082471491..ab36431c93d5 100644 --- a/mmv1/products/cloudasset/OrganizationFeed.yaml +++ b/mmv1/products/cloudasset/OrganizationFeed.yaml @@ -42,7 +42,6 @@ custom_code: supports_indirect_user_project_override: true include_in_tgc_next_DO_NOT_USE: true cai_base_url: 'organizations/{{org_id}}/feeds' -cai_identity: 'feed_id' tgc_ignore_terraform_encoder: true examples: - name: 'cloud_asset_organization_feed' diff --git a/mmv1/products/cloudasset/ProjectFeed.yaml b/mmv1/products/cloudasset/ProjectFeed.yaml index 4132e44ff685..7b9cf2e33195 100644 --- a/mmv1/products/cloudasset/ProjectFeed.yaml +++ b/mmv1/products/cloudasset/ProjectFeed.yaml @@ -40,7 +40,6 @@ custom_code: custom_import: 'templates/terraform/custom_import/cloud_asset_feed.go.tmpl' include_in_tgc_next_DO_NOT_USE: true cai_base_url: 'projects/{{project}}/feeds' -cai_identity: 'feed_id' tgc_ignore_terraform_encoder: true examples: - name: 'cloud_asset_project_feed' From dd02040c77394204720e33d45a70c816977fd102 Mon Sep 17 00:00:00 2001 From: shivangd <30335782+Gorlami96@users.noreply.github.com> Date: Tue, 16 Sep 2025 01:31:52 +0530 Subject: [PATCH 195/201] Promote cipher fields to GA in `google_compute_vpn_tunnel` resource (#15138) --- mmv1/products/compute/VpnTunnel.yaml | 11 ----------- .../examples/vpn_tunnel_cipher_suite.tf.tmpl | 8 -------- 2 files changed, 19 deletions(-) diff --git a/mmv1/products/compute/VpnTunnel.yaml b/mmv1/products/compute/VpnTunnel.yaml index 6aa59f9b177a..d4913fa156e9 100644 --- a/mmv1/products/compute/VpnTunnel.yaml +++ b/mmv1/products/compute/VpnTunnel.yaml @@ -53,7 +53,6 @@ examples: udp4500_forwarding_rule_name: 'fr-udp4500' route_name: 'route1' - name: 'vpn_tunnel_cipher_suite' - min_version: 'beta' primary_resource_id: 'tunnel1' vars: vpn_tunnel_name: 'tunnel-cipher' @@ -233,66 +232,56 @@ properties: output: true - name: 'cipherSuite' type: NestedObject - min_version: 'beta' description: | User specified list of ciphers to use for the phase 1 and phase 2 of the IKE protocol. properties: - name: 'phase1' type: NestedObject description: 'Cipher configuration for phase 1 of the IKE protocol.' - min_version: 'beta' properties: - name: 'encryption' type: Array description: 'Encryption algorithms.' is_set: true - min_version: 'beta' item_type: type: String - name: 'integrity' type: Array description: 'Integrity algorithms.' is_set: true - min_version: 'beta' item_type: type: String - name: 'prf' type: Array description: 'Pseudo-random functions.' is_set: true - min_version: 'beta' item_type: type: String - name: 'dh' type: Array description: 'Diffie-Hellman groups.' is_set: true - min_version: 'beta' item_type: type: String - name: 'phase2' type: NestedObject description: 'Cipher configuration for phase 2 of the IKE protocol.' - min_version: 'beta' properties: - name: 'encryption' type: Array description: 'Encryption algorithms.' is_set: true - min_version: 'beta' item_type: type: String - name: 'integrity' type: Array description: 'Integrity algorithms.' 
is_set: true - min_version: 'beta' item_type: type: String - name: 'pfs' type: Array description: 'Perfect forward secrecy groups.' is_set: true - min_version: 'beta' item_type: type: String diff --git a/mmv1/templates/terraform/examples/vpn_tunnel_cipher_suite.tf.tmpl b/mmv1/templates/terraform/examples/vpn_tunnel_cipher_suite.tf.tmpl index 2e1052da5374..8fd4b8846795 100644 --- a/mmv1/templates/terraform/examples/vpn_tunnel_cipher_suite.tf.tmpl +++ b/mmv1/templates/terraform/examples/vpn_tunnel_cipher_suite.tf.tmpl @@ -1,5 +1,4 @@ resource "google_compute_vpn_tunnel" "tunnel1" { - provider = google-beta name = "{{index $.Vars "vpn_tunnel_name"}}" peer_ip = "15.0.0.120" shared_secret = "a secret message" @@ -32,23 +31,19 @@ resource "google_compute_vpn_tunnel" "tunnel1" { } resource "google_compute_vpn_gateway" "target_gateway" { - provider = google-beta name = "{{index $.Vars "target_vpn_gateway_name"}}" network = google_compute_network.network1.id } resource "google_compute_network" "network1" { - provider = google-beta name = "{{index $.Vars "network_name"}}" } resource "google_compute_address" "vpn_static_ip" { - provider = google-beta name = "{{index $.Vars "address_name"}}" } resource "google_compute_forwarding_rule" "fr_esp" { - provider = google-beta name = "{{index $.Vars "esp_forwarding_rule_name"}}" ip_protocol = "ESP" ip_address = google_compute_address.vpn_static_ip.address @@ -56,7 +51,6 @@ resource "google_compute_forwarding_rule" "fr_esp" { } resource "google_compute_forwarding_rule" "fr_udp500" { - provider = google-beta name = "{{index $.Vars "udp500_forwarding_rule_name"}}" ip_protocol = "UDP" port_range = "500" @@ -65,7 +59,6 @@ resource "google_compute_forwarding_rule" "fr_udp500" { } resource "google_compute_forwarding_rule" "fr_udp4500" { - provider = google-beta name = "{{index $.Vars "udp4500_forwarding_rule_name"}}" ip_protocol = "UDP" port_range = "4500" @@ -74,7 +67,6 @@ resource "google_compute_forwarding_rule" "fr_udp4500" { } resource "google_compute_route" "route1" { - provider = google-beta name = "{{index $.Vars "route_name"}}" network = google_compute_network.network1.name dest_range = "15.0.0.0/24" From 85e5a0485eb469483c18623206d78076fdb81ef0 Mon Sep 17 00:00:00 2001 From: Thomas Rodgers Date: Mon, 15 Sep 2025 16:55:54 -0700 Subject: [PATCH 196/201] Add vacation for trodge (#15146) --- .ci/magician/github/membership_data.go | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.ci/magician/github/membership_data.go b/.ci/magician/github/membership_data.go index 1dedcf7dcc19..ab649216f2cc 100644 --- a/.ci/magician/github/membership_data.go +++ b/.ci/magician/github/membership_data.go @@ -140,6 +140,10 @@ var ( startDate: newDate(2025, 8, 7), endDate: newDate(2025, 8, 10), }, + { + startDate: newDate(2025, 9, 18), + endDate: newDate(2025, 9, 28), + }, }, }, "zli82016": { From 1182f2f28415ff3f61bdaddcc135fd0fb50539ce Mon Sep 17 00:00:00 2001 From: Cezary Sobczak <57288981+Cezarus27@users.noreply.github.com> Date: Tue, 16 Sep 2025 16:53:23 +0200 Subject: [PATCH 197/201] Feature gap: Add skip_guest_os_shutdown for scheduling in instance (#15041) Signed-off-by: Cezary Sobczak --- .../compute/compute_instance_helpers.go.tmpl | 23 +++-- .../compute/resource_compute_instance.go.tmpl | 7 ++ ...resource_compute_instance_template.go.tmpl | 7 ++ ...rce_compute_instance_template_test.go.tmpl | 76 ++++++++++++++++ .../resource_compute_instance_test.go.tmpl | 89 +++++++++++++++++++ ...e_compute_region_instance_template.go.tmpl | 6 ++ 
...pute_region_instance_template_test.go.tmpl | 63 +++++++++++++ .../docs/r/compute_instance.html.markdown | 2 + .../r/compute_instance_template.html.markdown | 2 + ...ute_region_instance_template.html.markdown | 2 + 10 files changed, 271 insertions(+), 6 deletions(-) diff --git a/mmv1/third_party/terraform/services/compute/compute_instance_helpers.go.tmpl b/mmv1/third_party/terraform/services/compute/compute_instance_helpers.go.tmpl index 026a810e8278..740e1c8c465d 100644 --- a/mmv1/third_party/terraform/services/compute/compute_instance_helpers.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/compute_instance_helpers.go.tmpl @@ -190,6 +190,11 @@ func expandScheduling(v interface{}) (*compute.Scheduling, error) { scheduling.GracefulShutdown = transformedGracefulShutdown scheduling.ForceSendFields = append(scheduling.ForceSendFields, "GracefulShutdown") } + + if v, ok := original["skip_guest_os_shutdown"]; ok { + scheduling.SkipGuestOsShutdown = v.(bool) + scheduling.ForceSendFields = append(scheduling.ForceSendFields, "SkipGuestOsShutdown") + } {{- end }} if v, ok := original["local_ssd_recovery_timeout"]; ok { transformedLocalSsdRecoveryTimeout, err := expandComputeLocalSsdRecoveryTimeout(v) @@ -341,13 +346,13 @@ func expandGracefulShutdownMaxDuration(v interface{}) (*compute.Duration, error) func flattenScheduling(resp *compute.Scheduling) []map[string]interface{} { schedulingMap := map[string]interface{}{ - "on_host_maintenance": resp.OnHostMaintenance, - "preemptible": resp.Preemptible, - "min_node_cpus": resp.MinNodeCpus, - "provisioning_model": resp.ProvisioningModel, + "on_host_maintenance": resp.OnHostMaintenance, + "preemptible": resp.Preemptible, + "min_node_cpus": resp.MinNodeCpus, + "provisioning_model": resp.ProvisioningModel, "instance_termination_action": resp.InstanceTerminationAction, - "availability_domain": resp.AvailabilityDomain, - "termination_time": resp.TerminationTime, + "availability_domain": resp.AvailabilityDomain, + "termination_time": resp.TerminationTime, } if resp.AutomaticRestart != nil { @@ -363,6 +368,8 @@ func flattenScheduling(resp *compute.Scheduling) []map[string]interface{} { } {{ if ne $.TargetVersionName `ga` -}} + schedulingMap["skip_guest_os_shutdown"] = resp.SkipGuestOsShutdown + if resp.HostErrorTimeoutSeconds != 0 { schedulingMap["host_error_timeout_seconds"] = resp.HostErrorTimeoutSeconds } @@ -852,6 +859,10 @@ func schedulingHasChangeWithoutReboot(d *schema.ResourceData) bool { return true } + if oScheduling["skip_guest_os_shutdown"] != newScheduling["skip_guest_os_shutdown"] { + return true + } + if hasGracefulShutdownChanged(oScheduling, newScheduling) { return true } diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl index d595931425db..619dd04c1598 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.tmpl @@ -122,6 +122,7 @@ var ( "scheduling.0.maintenance_interval", "scheduling.0.host_error_timeout_seconds", "scheduling.0.graceful_shutdown", + "scheduling.0.skip_guest_os_shutdown", {{- end }} "scheduling.0.local_ssd_recovery_timeout", } @@ -1256,6 +1257,12 @@ be from 0 to 999,999,999 inclusive.`, }, }, }, + "skip_guest_os_shutdown": { + Type: schema.TypeBool, + Optional: true, + Default: false, + Description: `Default is false and there will be 120 seconds between GCE ACPI G2 Soft Off and ACPI 
G3 Mechanical Off for Standard VMs and 30 seconds for Spot VMs.`, + }, {{- end }} }, }, diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.tmpl index 59e2daea62a3..c66460b83266 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template.go.tmpl @@ -43,6 +43,7 @@ var ( "scheduling.0.maintenance_interval", "scheduling.0.host_error_timeout_seconds", "scheduling.0.graceful_shutdown", + "scheduling.0.skip_guest_os_shutdown", {{- end }} "scheduling.0.local_ssd_recovery_timeout", } @@ -940,6 +941,12 @@ be from 0 to 999,999,999 inclusive.`, }, }, }, + "skip_guest_os_shutdown": { + Type: schema.TypeBool, + Optional: true, + Default: false, + Description: `Default is false and there will be 120 seconds between GCE ACPI G2 Soft Off and ACPI G3 Mechanical Off for Standard VMs and 30 seconds for Spot VMs.`, + }, {{- end }} }, }, diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_test.go.tmpl index 5255c41b1ee0..932b155f1d7f 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_template_test.go.tmpl @@ -2027,6 +2027,40 @@ func TestAccComputeInstanceTemplate_gracefulShutdown(t *testing.T) { }, }) } + +func TestAccComputeInstanceTemplate_schedulingSkipGuestOSShutdown(t *testing.T) { + t.Parallel() + + var instanceTemplate compute.InstanceTemplate + instanceName := fmt.Sprintf("tf-test-instance-%s", acctest.RandString(t, 10)) + + variant_1 := map[string]interface{}{ + "instance_name": instanceName, + "skip_guest_os_shutdown": true, + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeInstanceTemplateDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeInstanceTemplate_schedulingSkipGuestOSShutdown(variant_1), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeInstanceTemplateExists( + t, "google_compute_instance_template.foobar", &instanceTemplate), + resource.TestCheckResourceAttr("google_compute_instance_template.foobar", "scheduling.0.skip_guest_os_shutdown", "true"), + ), + }, + { + ResourceName: "google_compute_instance_template.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + }, + }) +} {{- end }} func TestUnitComputeInstanceTemplate_IpCidrRangeDiffSuppress(t *testing.T) { @@ -5296,6 +5330,48 @@ resource "google_compute_instance_template" "foobar" { `, context) } +func testAccComputeInstanceTemplate_schedulingSkipGuestOSShutdown(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_compute_image" "my_image" { + family = "debian-11" + project = "debian-cloud" +} + +resource "google_compute_instance_template" "foobar" { + name = "%{instance_name}" + machine_type = "e2-medium" + can_ip_forward = false + tags = ["foo", "bar"] + + disk { + source_image = data.google_compute_image.my_image.self_link + auto_delete = true + boot = true + } + + network_interface { + network = "default" + } + + scheduling { + 
skip_guest_os_shutdown = %{skip_guest_os_shutdown} + } + + metadata = { + foo = "bar" + } + + service_account { + scopes = ["userinfo-email", "compute-ro", "storage-ro"] + } + + labels = { + my_label = "foobar" + } +} +`, context) +} + {{- end }} func testAccComputeInstanceTemplate_keyRevocationActionType(context map[string]interface{}) string { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl index d8dc45260475..bfcc7dbf27f0 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.tmpl @@ -4590,6 +4590,37 @@ func TestAccComputeInstance_GracefulShutdownWithoutResetUpdate(t *testing.T) { }) } +func TestAccComputeInstance_schedulingSkipGuestOSShutdown(t *testing.T) { + t.Parallel() + + var instance compute.Instance + var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeInstance_schedulingSkipGuestOSShutdown(instanceName), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeInstanceExists( + t, "google_compute_instance.foobar", &instance), + ), + }, + computeInstanceImportStep("us-central1-a", instanceName, []string{}), + { + Config: testAccComputeInstance_schedulingSkipGuestOSShutdownUpdated(instanceName), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeInstanceExists( + t, "google_compute_instance.foobar", &instance), + ), + }, + computeInstanceImportStep("us-central1-a", instanceName, []string{}), + }, + }) +} + func testAccComputeInstance_nic_securityPolicyCreateWithTwoAccessConfigs(t *testing.T) { var instance compute.Instance var instanceName = fmt.Sprintf("tf-test-instance-%s", acctest.RandString(t, 10)) @@ -4924,6 +4955,64 @@ resource "google_compute_instance" "foobar" { `, context) } +func testAccComputeInstance_schedulingSkipGuestOSShutdown(instance string) string { + return fmt.Sprintf(` +data "google_compute_image" "my_image" { + family = "debian-11" + project = "debian-cloud" +} + +resource "google_compute_instance" "foobar" { + name = "%s" + machine_type = "e2-medium" + zone = "us-central1-a" + + boot_disk { + initialize_params { + image = data.google_compute_image.my_image.self_link + } + } + + network_interface { + network = "default" + } + + scheduling { + skip_guest_os_shutdown = true + } +} +`, instance) +} + +func testAccComputeInstance_schedulingSkipGuestOSShutdownUpdated(instance string) string { + return fmt.Sprintf(` +data "google_compute_image" "my_image" { + family = "debian-11" + project = "debian-cloud" +} + +resource "google_compute_instance" "foobar" { + name = "%s" + machine_type = "e2-medium" + zone = "us-central1-a" + + boot_disk { + initialize_params { + image = data.google_compute_image.my_image.self_link + } + } + + network_interface { + network = "default" + } + + scheduling { + skip_guest_os_shutdown = false + } +} +`, instance) +} + {{ end }} func testAccCheckComputeInstanceUpdateMachineType(t *testing.T, n string) resource.TestCheckFunc { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template.go.tmpl 
b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template.go.tmpl index 19de85a26ed2..ab85f253b8c1 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template.go.tmpl @@ -895,6 +895,12 @@ be from 0 to 999,999,999 inclusive.`, }, }, }, + "skip_guest_os_shutdown": { + Type: schema.TypeBool, + Optional: true, + Default: false, + Description: `Default is false and there will be 120 seconds between GCE ACPI G2 Soft Off and ACPI G3 Mechanical Off for Standard VMs and 30 seconds for Spot VMs.`, + }, {{- end }} }, }, diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_test.go.tmpl index 2dfe68ef2088..06834314fc04 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_template_test.go.tmpl @@ -1670,6 +1670,40 @@ func TestAccComputeRegionInstanceTemplate_gracefulShutdown(t *testing.T) { }, }) } + +func TestAccComputeRegionInstanceTemplate_schedulingSkipGuestOSShutdown(t *testing.T) { + t.Parallel() + + var instanceTemplate compute.InstanceTemplate + instanceName := fmt.Sprintf("tf-test-instance-%s", acctest.RandString(t, 10)) + + variant_1 := map[string]interface{}{ + "instance_name": instanceName, + "skip_guest_os_shutdown": true, + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeInstanceTemplateDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeRegionInstanceTemplate_schedulingSkipGuestOSShutdown(variant_1), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeRegionInstanceTemplateExists( + t, "google_compute_region_instance_template.foobar", &instanceTemplate), + resource.TestCheckResourceAttr("google_compute_region_instance_template.foobar", "scheduling.0.skip_guest_os_shutdown", "true"), + ), + }, + { + ResourceName: "google_compute_region_instance_template.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + }, + }) +} {{- end }} func TestAccComputeRegionInstanceTemplate_GuestOsFeatures(t *testing.T) { @@ -4763,4 +4797,33 @@ resource "google_compute_region_instance_template" "foobar" { } `, context) } + +func testAccComputeRegionInstanceTemplate_schedulingSkipGuestOSShutdown(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_compute_image" "my_image" { + family = "debian-11" + project = "debian-cloud" +} + +resource "google_compute_region_instance_template" "foobar" { + name = "%{instance_name}" + machine_type = "e2-medium" + region = "us-central1" + + disk { + source_image = data.google_compute_image.my_image.self_link + auto_delete = true + boot = true + } + + network_interface { + network = "default" + } + + scheduling { + skip_guest_os_shutdown = %{skip_guest_os_shutdown} + } +} +`, context) +} {{- end }} diff --git a/mmv1/third_party/terraform/website/docs/r/compute_instance.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_instance.html.markdown index dca0f3569ded..d07f25158f2f 100644 --- 
a/mmv1/third_party/terraform/website/docs/r/compute_instance.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_instance.html.markdown @@ -586,6 +586,8 @@ specified, then this instance will have no external IPv6 Internet access. Struct * `graceful_shutdown` - (Optional) [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html) Settings for the instance to perform a graceful shutdown. Structure is [documented below](#nested_graceful_shutdown). +* `skip_guest_os_shutdown` - (Optional) [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html) Boolean parameter. Default is false and there will be 120 seconds between GCE ACPI G2 Soft Off and ACPI G3 Mechanical Off for Standard VMs and 30 seconds for Spot VMs. + The `graceful_shutdown` block supports: * `enabled` - (Required) Opts-in for graceful shutdown. diff --git a/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown index f72b252840f8..8d5b7254ee07 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_instance_template.html.markdown @@ -687,6 +687,8 @@ specified, then this instance will have no external IPv6 Internet access. Struct * `graceful_shutdown` - (Optional) [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html) Settings for the instance to perform a graceful shutdown. Structure is [documented below](#nested_graceful_shutdown). +* `skip_guest_os_shutdown` - (Optional) [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html) Boolean parameter. Default is false and there will be 120 seconds between GCE ACPI G2 Soft Off and ACPI G3 Mechanical Off for Standard VMs and 30 seconds for Spot VMs. + The `graceful_shutdown` block supports: * `enabled` - (Required) Opts-in for graceful shutdown. diff --git a/mmv1/third_party/terraform/website/docs/r/compute_region_instance_template.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_region_instance_template.html.markdown index afd836162e1a..ebfe146c183a 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_region_instance_template.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_region_instance_template.html.markdown @@ -669,6 +669,8 @@ specified, then this instance will have no external IPv6 Internet access. Struct * `graceful_shutdown` - (Optional) [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html) Settings for the instance to perform a graceful shutdown. Structure is [documented below](#nested_graceful_shutdown). +* `skip_guest_os_shutdown` - (Optional) [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html) Boolean parameter. Default is false and there will be 120 seconds between GCE ACPI G2 Soft Off and ACPI G3 Mechanical Off for Standard VMs and 30 seconds for Spot VMs. + The `graceful_shutdown` block supports: * `enabled` - (Required) Opts-in for graceful shutdown. 
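To round out the documentation above, here is a minimal sketch of the new field on a Spot VM, where skipping the 30-second ACPI wait is most relevant. The machine type, zone, image, and resource name are placeholders for illustration rather than values from this patch:

```hcl
resource "google_compute_instance" "spot_worker" {
  provider     = google-beta # the field is documented as beta in this change
  name         = "spot-worker"
  machine_type = "e2-medium"
  zone         = "us-central1-a"

  boot_disk {
    initialize_params {
      image = "debian-cloud/debian-11"
    }
  }

  network_interface {
    network = "default"
  }

  scheduling {
    preemptible                 = true
    automatic_restart           = false
    provisioning_model          = "SPOT"
    instance_termination_action = "STOP"

    # Power off immediately instead of waiting out the guest OS shutdown window.
    skip_guest_os_shutdown = true
  }
}
```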
From 8059e0b433b96f9d1184c31a4bb4e19ad61ac0a1 Mon Sep 17 00:00:00 2001 From: Dawid212 Date: Tue, 16 Sep 2025 17:42:26 +0200 Subject: [PATCH 198/201] Add Feature Service attachment tunneling config (#14602) --- mmv1/products/compute/ServiceAttachment.yaml | 42 ++++++ ...ervice_attachment_tunneling_config.tf.tmpl | 82 +++++++++++ ..._service_attachment_tunneling_test.go.tmpl | 130 ++++++++++++++++++ 3 files changed, 254 insertions(+) create mode 100644 mmv1/templates/terraform/examples/service_attachment_tunneling_config.tf.tmpl create mode 100644 mmv1/third_party/terraform/services/compute/resource_compute_service_attachment_tunneling_test.go.tmpl diff --git a/mmv1/products/compute/ServiceAttachment.yaml b/mmv1/products/compute/ServiceAttachment.yaml index f89c9250f4ee..0323bfb3ff72 100644 --- a/mmv1/products/compute/ServiceAttachment.yaml +++ b/mmv1/products/compute/ServiceAttachment.yaml @@ -95,6 +95,17 @@ examples: producer_forwarding_rule_name: 'producer-forwarding-rule' consumer_address_name: 'psc-ilb-consumer-address' consumer_forwarding_rule_name: 'psc-ilb-consumer-forwarding-rule' + - name: 'service_attachment_tunneling_config' + primary_resource_id: 'psc_ilb_service_attachment' + min_version: beta + vars: + service_attachment_name: 'my-psc-ilb' + network_name: 'psc-ilb-network' + nat_subnetwork_name: 'psc-ilb-nat' + producer_subnetwork_name: 'psc-ilb-producer-subnetwork' + producer_health_check_name: 'producer-service-health-check' + producer_service_name: 'producer-service' + producer_forwarding_rule_name: 'producer-forwarding-rule' - name: 'service_attachment_cross_region_ilb' primary_resource_id: 'psc_ilb_service_attachment' vars: @@ -133,6 +144,22 @@ properties: Fingerprint of this resource. This field is used internally during updates of this resource. output: true + - name: 'pscServiceAttachmentId' + type: NestedObject + description: | + An 128-bit global unique ID of the PSC service attachment. + output: true + properties: + - name: 'high' + type: String + description: | + The high 64 bits of the PSC service attachment ID. + output: true + - name: 'low' + type: String + description: | + The low 64 bits of the PSC service attachment ID. + output: true - name: 'connectionPreference' type: String description: | @@ -213,6 +240,21 @@ properties: immutable: true item_type: type: String + - name: 'tunnelingConfig' + type: NestedObject + description: | + Tunneling configuration for this service attachment. + min_version: beta + ignore_read: true + properties: + - name: 'routingMode' + type: String + description: | + The routing mode for tunneling traffic. + - name: 'encapsulationProfile' + type: String + description: | + The encapsulation profile for tunneling traffic. 
- name: 'consumerRejectLists' type: Array description: | diff --git a/mmv1/templates/terraform/examples/service_attachment_tunneling_config.tf.tmpl b/mmv1/templates/terraform/examples/service_attachment_tunneling_config.tf.tmpl new file mode 100644 index 000000000000..2b9ee778ee7f --- /dev/null +++ b/mmv1/templates/terraform/examples/service_attachment_tunneling_config.tf.tmpl @@ -0,0 +1,82 @@ +provider "google-beta" { +} + +resource "google_compute_service_attachment" "{{$.PrimaryResourceId}}" { + provider = google-beta + + name = "{{index $.Vars "service_attachment_name"}}" + region = "us-west2" + description = "A service attachment configured with tunneling" + + enable_proxy_protocol = false + connection_preference = "ACCEPT_AUTOMATIC" + nat_subnets = [google_compute_subnetwork.psc_ilb_nat.id] + target_service = google_compute_forwarding_rule.psc_ilb_target_service.id + + tunneling_config { + routing_mode = "REGIONAL" + encapsulation_profile = "IPV4" + } +} + +resource "google_compute_forwarding_rule" "psc_ilb_target_service" { + provider = google-beta + + name = "{{index $.Vars "producer_forwarding_rule_name"}}" + region = "us-west2" + + load_balancing_scheme = "INTERNAL" + backend_service = google_compute_region_backend_service.producer_service_backend.id + all_ports = true + network = google_compute_network.psc_ilb_network.name + subnetwork = google_compute_subnetwork.psc_ilb_producer_subnetwork.name +} + +resource "google_compute_region_backend_service" "producer_service_backend" { + provider = google-beta + + name = "{{index $.Vars "producer_service_name"}}" + region = "us-west2" + + health_checks = [google_compute_health_check.producer_service_health_check.id] +} + +resource "google_compute_health_check" "producer_service_health_check" { + provider = google-beta + + name = "{{index $.Vars "producer_health_check_name"}}" + + check_interval_sec = 1 + timeout_sec = 1 + tcp_health_check { + port = "80" + } +} + +resource "google_compute_network" "psc_ilb_network" { + provider = google-beta + + name = "{{index $.Vars "network_name"}}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "psc_ilb_producer_subnetwork" { + provider = google-beta + + name = "{{index $.Vars "producer_subnetwork_name"}}" + region = "us-west2" + + network = google_compute_network.psc_ilb_network.id + ip_cidr_range = "10.0.0.0/16" +} + +resource "google_compute_subnetwork" "psc_ilb_nat" { + provider = google-beta + + name = "{{index $.Vars "nat_subnetwork_name"}}" + region = "us-west2" + + network = google_compute_network.psc_ilb_network.id + purpose = "PRIVATE_SERVICE_CONNECT" + ip_cidr_range = "10.1.0.0/16" +} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_service_attachment_tunneling_test.go.tmpl b/mmv1/third_party/terraform/services/compute/resource_compute_service_attachment_tunneling_test.go.tmpl new file mode 100644 index 000000000000..fd8e148922b9 --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/resource_compute_service_attachment_tunneling_test.go.tmpl @@ -0,0 +1,130 @@ +package compute_test +{{ if ne $.TargetVersionName `ga` -}} +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccComputeServiceAttachment_tunnelingConfigUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { 
acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckComputeServiceAttachmentDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeServiceAttachment_tunnelingConfig(context, "REGIONAL", "IPV4"), + }, + { + ResourceName: "google_compute_service_attachment.psc_ilb_service_attachment", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"target_service", "region", "tunneling_config"}, + }, + { + Config: testAccComputeServiceAttachment_tunnelingConfig(context, "GLOBAL", "IPV6"), + }, + { + ResourceName: "google_compute_service_attachment.psc_ilb_service_attachment", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"target_service", "region", "tunneling_config"}, + }, + }, + }) +} + +func testAccComputeServiceAttachment_tunnelingConfig(context map[string]interface{}, routingMode, encapsulationProfile string) string { + context["routing_mode"] = routingMode + context["encapsulation_profile"] = encapsulationProfile + + return acctest.Nprintf(` +resource "google_compute_service_attachment" "psc_ilb_service_attachment" { + provider = google-beta + + name = "tf-test-my-psc-ilb%{random_suffix}" + region = "us-west2" + description = "A service attachment configured with tunneling" + + enable_proxy_protocol = false + connection_preference = "ACCEPT_AUTOMATIC" + nat_subnets = [google_compute_subnetwork.psc_ilb_nat.id] + target_service = google_compute_forwarding_rule.psc_ilb_target_service.id + + tunneling_config { + routing_mode = "%{routing_mode}" + encapsulation_profile = "%{encapsulation_profile}" + } +} + +resource "google_compute_forwarding_rule" "psc_ilb_target_service" { + provider = google-beta + + name = "tf-test-producer-forwarding-rule%{random_suffix}" + region = "us-west2" + + load_balancing_scheme = "INTERNAL" + backend_service = google_compute_region_backend_service.producer_service_backend.id + all_ports = true + network = google_compute_network.psc_ilb_network.name + subnetwork = google_compute_subnetwork.psc_ilb_producer_subnetwork.name +} + +resource "google_compute_region_backend_service" "producer_service_backend" { + provider = google-beta + + name = "tf-test-producer-service%{random_suffix}" + region = "us-west2" + + health_checks = [google_compute_health_check.producer_service_health_check.id] +} + +resource "google_compute_health_check" "producer_service_health_check" { + provider = google-beta + + name = "tf-test-producer-service-health-check%{random_suffix}" + + check_interval_sec = 1 + timeout_sec = 1 + tcp_health_check { + port = "80" + } +} + +resource "google_compute_network" "psc_ilb_network" { + provider = google-beta + + name = "tf-test-psc-ilb-network%{random_suffix}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "psc_ilb_producer_subnetwork" { + provider = google-beta + + name = "tf-test-psc-ilb-producer-subnetwork%{random_suffix}" + region = "us-west2" + + network = google_compute_network.psc_ilb_network.id + ip_cidr_range = "10.0.0.0/16" +} + +resource "google_compute_subnetwork" "psc_ilb_nat" { + provider = google-beta + + name = "tf-test-psc-ilb-nat%{random_suffix}" + region = "us-west2" + + network = google_compute_network.psc_ilb_network.id + purpose = "PRIVATE_SERVICE_CONNECT" + ip_cidr_range = "10.1.0.0/16" +} +`, context) +} +{{- end }} From c879c95b0a50355495e7461b15ea0d748cfd379c Mon Sep 17 00:00:00 2001 From: "Stephen Lewis (Burrows)" Date: Tue, 16 Sep 2025 
10:41:39 -0700 Subject: [PATCH 199/201] Update membership_data.go (#15153) --- .ci/magician/github/membership_data.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.ci/magician/github/membership_data.go b/.ci/magician/github/membership_data.go index ab649216f2cc..e34942374a4e 100644 --- a/.ci/magician/github/membership_data.go +++ b/.ci/magician/github/membership_data.go @@ -83,8 +83,8 @@ var ( "melinath": { vacations: []Vacation{ { - startDate: newDate(2025, 6, 26), - endDate: newDate(2025, 7, 7), + startDate: newDate(2025, 9, 17), + endDate: newDate(2025, 9, 22), }, }, }, From 3e114d4d6cbee1d3502d52cc892d8bf616c01778 Mon Sep 17 00:00:00 2001 From: Cameron Thornton Date: Tue, 16 Sep 2025 16:59:17 -0500 Subject: [PATCH 200/201] PF: import function and timeouts (#15145) --- mmv1/templates/terraform/resource_fw.go.tmpl | 39 ++++++++++++++++---- 1 file changed, 32 insertions(+), 7 deletions(-) diff --git a/mmv1/templates/terraform/resource_fw.go.tmpl b/mmv1/templates/terraform/resource_fw.go.tmpl index e76c10c326e4..b47bf8dd87af 100644 --- a/mmv1/templates/terraform/resource_fw.go.tmpl +++ b/mmv1/templates/terraform/resource_fw.go.tmpl @@ -189,6 +189,13 @@ func (d *{{$.ResourceName}}FWResource) Schema(ctx context.Context, _ resource.Sc stringplanmodifier.UseStateForUnknown(), }, }, + "timeouts": timeouts.Block(ctx, timeouts.Opts{ + Create: true, + {{- if or $.Updatable $.RootLabels }} + Update: true, + {{- end }} + Delete: true, + }), }, } } @@ -262,9 +269,7 @@ func (r *{{$.ResourceName}}FWResource) Create(ctx context.Context, req resource. obj["{{ $prop.ApiName -}}"] = {{ $prop.ApiName -}}Prop {{- end }} - - {{/* TODO default timeouts */}} - createTimeout, diags := data.Timeouts.Create(ctx, 20*time.Minute) + createTimeout, diags := data.Timeouts.Create(ctx, {{ $.Timeouts.InsertMinutes }}*time.Minute) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return @@ -480,8 +485,7 @@ func (r *{{$.ResourceName}}FWResource) Update(ctx context.Context, req resource. } {{- end }} - {{/* TODO default timeouts */}} - updateTimeout, diags := data.Timeouts.Update(ctx, 20*time.Minute) + updateTimeout, diags := data.Timeouts.Update(ctx, {{ $.Timeouts.UpdateMinutes }}*time.Minute) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return @@ -622,7 +626,7 @@ func (r *{{$.ResourceName}}FWResource) Delete(ctx context.Context, req resource. obj := make(map[string]interface{}) - deleteTimeout, diags := data.Timeouts.Delete(ctx, 20*time.Minute) + deleteTimeout, diags := data.Timeouts.Delete(ctx, {{ $.Timeouts.DeleteMinutes }}*time.Minute) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return @@ -697,7 +701,28 @@ func (r *{{$.ResourceName}}FWResource) Delete(ctx context.Context, req resource. } func (r *{{$.ResourceName}}FWResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { - resource.ImportStatePassthroughID(ctx, path.Root("id"), req, resp) + patterns := []string{ + {{- range $id := $.ImportIdFormatsFromResource }} + "^{{ format2regex $id }}$", + {{- end }} + } + + var resourceSchemaResp resource.SchemaResponse + r.Schema(ctx, resource.SchemaRequest{}, &resourceSchemaResp) + if resourceSchemaResp.Diagnostics.HasError() { + resp.Diagnostics.Append(resourceSchemaResp.Diagnostics...) + return + } + + parsed, diags := fwresource.ParseImportId(ctx, req, resourceSchemaResp.Schema, r.providerConfig, patterns) + resp.Diagnostics.Append(diags...) 
+ if resp.Diagnostics.HasError() { + return + } + + for name, value := range parsed { + resp.Diagnostics.Append(resp.State.SetAttribute(ctx, path.Root(name), value)...) + } } func (r *{{$.ResourceName}}FWResource) {{$.ResourceName}}FWRefresh(ctx context.Context, data *{{$.ResourceName}}FWModel, state *tfsdk.State, req interface{}, diag *diag.Diagnostics) { From ac9adbe0917034e2437fcf6303492bbbf8d4c691 Mon Sep 17 00:00:00 2001 From: abhishek kumar tiwari Date: Wed, 17 Sep 2025 15:27:42 +0000 Subject: [PATCH 201/201] Add support for `google_cloud_run_v2_service` to TGC (#15132) --- .../tgc/resource_converters.go.tmpl | 1 + .../data/example_cloud_run_v2_service.json | 44 +++++++++++++ .../data/example_cloud_run_v2_service.tf | 62 +++++++++++++++++++ 3 files changed, 107 insertions(+) create mode 100644 mmv1/third_party/tgc/tests/data/example_cloud_run_v2_service.json create mode 100644 mmv1/third_party/tgc/tests/data/example_cloud_run_v2_service.tf diff --git a/mmv1/third_party/tgc/resource_converters.go.tmpl b/mmv1/third_party/tgc/resource_converters.go.tmpl index e3f240ae5137..1f6a876f1c51 100644 --- a/mmv1/third_party/tgc/resource_converters.go.tmpl +++ b/mmv1/third_party/tgc/resource_converters.go.tmpl @@ -146,6 +146,7 @@ func ResourceConverters() map[string][]cai.ResourceConverter { "google_access_context_manager_service_perimeter": {accesscontextmanager.ResourceConverterAccessContextManagerServicePerimeter()}, "google_access_context_manager_access_policy": {accesscontextmanager.ResourceConverterAccessContextManagerAccessPolicy()}, "google_cloud_run_service": {cloudrun.ResourceConverterCloudRunService()}, + "google_cloud_run_v2_service": {cloudrunv2.ResourceConverterCloudRunV2Service()}, "google_cloud_run_domain_mapping": {cloudrun.ResourceConverterCloudRunDomainMapping()}, "google_cloud_run_v2_job": {cloudrunv2.ResourceConverterCloudRunV2Job()}, "google_cloudfunctions_function": {cloudfunctions.ResourceConverterCloudFunctionsCloudFunction()}, diff --git a/mmv1/third_party/tgc/tests/data/example_cloud_run_v2_service.json b/mmv1/third_party/tgc/tests/data/example_cloud_run_v2_service.json new file mode 100644 index 000000000000..f7c09a734e3c --- /dev/null +++ b/mmv1/third_party/tgc/tests/data/example_cloud_run_v2_service.json @@ -0,0 +1,44 @@ +[ + { + "name": "//run.googleapis.com/projects/{{.Provider.project}}/locations/us-central1/services/cloudrunv2-to-get-cai", + "asset_type": "run.googleapis.com/Service", + "ancestry_path": "{{.Ancestry}}/project/{{.Provider.project}}", + "resource": { + "version": "v2", + "discovery_document_uri": "https://www.googleapis.com/discovery/v1/apis/run/v2/rest", + "discovery_name": "Service", + "parent": "//cloudresourcemanager.googleapis.com/projects/{{.Provider.project}}", + "data": { + "annotations": { + "generated-by": "magic-modules" + }, + "labels": { + "goog-terraform-provisioned": "true" + }, + "template": { + "containers": [ + { + "args": [ + "arrgs" + ], + "image": "gcr.io/cloudrun/hello", + "ports": [ + { + "containerPort": 8080 + } + ] + } + ], + "maxInstanceRequestConcurrency": 10, + "timeout": "600s" + }, + "traffic": [ + { + "percent": 100, + "type": "TRAFFIC_TARGET_ALLOCATION_TYPE_LATEST" + } + ] + } + } + } +] \ No newline at end of file diff --git a/mmv1/third_party/tgc/tests/data/example_cloud_run_v2_service.tf b/mmv1/third_party/tgc/tests/data/example_cloud_run_v2_service.tf new file mode 100644 index 000000000000..7a66fbc0b5d4 --- /dev/null +++ b/mmv1/third_party/tgc/tests/data/example_cloud_run_v2_service.tf @@ -0,0 +1,62 @@ +/** + * 
Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +terraform { + required_providers { + google = { + source = "hashicorp/google-beta" + version = "~> {{.Provider.version}}" + } + } +} + +provider "google" { + {{if .Provider.credentials }}credentials = "{{.Provider.credentials}}"{{end}} +} + +resource "google_cloud_run_v2_service" "default" { + name = "cloudrunv2-to-get-cai" + location = "us-central1" + project = "{{.Provider.project}}" + + annotations = { + "generated-by" = "magic-modules" + } + + template { + max_instance_request_concurrency = 10 + timeout = "600s" + + containers { + image = "gcr.io/cloudrun/hello" + args = ["arrgs"] + ports { + container_port = 8080 + } + } + } + + traffic { + percent = 100 + type = "TRAFFIC_TARGET_ALLOCATION_TYPE_LATEST" + } + + lifecycle { + ignore_changes = [ + annotations, + ] + } +}
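
Returning to the plugin-framework template change in PATCH 200/201: one user-visible effect of wiring in `timeouts.Block` with the resource's generated defaults is that framework-based resources accept a standard `timeouts` block. A hypothetical configuration sketch follows; the resource type is a placeholder and the durations simply override the generated create/delete defaults:

```hcl
resource "google_example_fw_resource" "example" {
  name = "example"

  timeouts {
    create = "30m"
    delete = "10m"
  }
}
```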