diff --git a/Makefile b/Makefile index e912db20..f5caf694 100644 --- a/Makefile +++ b/Makefile @@ -18,7 +18,7 @@ # Make will use bash instead of sh SHELL := /usr/bin/env bash -DOCKER_TAG_VERSION_DEVELOPER_TOOLS := 1.4 +DOCKER_TAG_VERSION_DEVELOPER_TOOLS := 1.10 DOCKER_IMAGE_DEVELOPER_TOOLS := cft/developer-tools REGISTRY_URL := gcr.io/cloud-foundation-cicd diff --git a/build/int.cloudbuild.yaml b/build/int.cloudbuild.yaml index 1a881854..be31f8f4 100644 --- a/build/int.cloudbuild.yaml +++ b/build/int.cloudbuild.yaml @@ -89,4 +89,4 @@ tags: - 'integration' substitutions: _DOCKER_IMAGE_DEVELOPER_TOOLS: 'cft/developer-tools' - _DOCKER_TAG_VERSION_DEVELOPER_TOOLS: '1.4' + _DOCKER_TAG_VERSION_DEVELOPER_TOOLS: '1.10' diff --git a/build/lint.cloudbuild.yaml b/build/lint.cloudbuild.yaml index 7437fd5a..d15fd36b 100644 --- a/build/lint.cloudbuild.yaml +++ b/build/lint.cloudbuild.yaml @@ -21,4 +21,4 @@ tags: - 'lint' substitutions: _DOCKER_IMAGE_DEVELOPER_TOOLS: 'cft/developer-tools' - _DOCKER_TAG_VERSION_DEVELOPER_TOOLS: '1.4' + _DOCKER_TAG_VERSION_DEVELOPER_TOOLS: '1.10' diff --git a/metadata.yaml b/metadata.yaml new file mode 100644 index 00000000..bbcd12c9 --- /dev/null +++ b/metadata.yaml @@ -0,0 +1,181 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +apiVersion: blueprints.cloud.google.com/v1alpha1 +kind: BlueprintMetadata +metadata: + name: terraform-google-log-export + annotations: + config.kubernetes.io/local-config: "true" +spec: + title: Terraform Log Export Module + source: + repo: https://github.com/terraform-google-modules/terraform-google-log-export.git + sourceType: git + version: 7.4.2 + actuationTool: + type: Terraform + version: '>= 0.13' + subBlueprints: + - name: bigquery + location: modules/bigquery + - name: bq-log-alerting + location: modules/bq-log-alerting + - name: logbucket + location: modules/logbucket + - name: pubsub + location: modules/pubsub + - name: storage + location: modules/storage + examples: + - name: billing_account + location: examples/bigquery/billing_account + - name: billing_account + location: examples/pubsub/billing_account + - name: billing_account + location: examples/storage/billing_account + - name: bq-log-alerting + location: examples/bq-log-alerting + - name: datadog-sink + location: examples/datadog-sink + - name: folder + location: examples/bigquery/folder + - name: folder + location: examples/logbucket/folder + - name: folder + location: examples/pubsub/folder + - name: folder + location: examples/storage/folder + - name: organization + location: examples/bigquery/organization + - name: organization + location: examples/logbucket/organization + - name: organization + location: examples/pubsub/organization + - name: organization + location: examples/storage/organization + - name: project + location: examples/bigquery/project + - name: project + location: examples/logbucket/project + - name: project + location: examples/pubsub/project + - name: project + location: examples/storage/project + - name: splunk-sink + location: examples/splunk-sink + variables: + - name: bigquery_options + description: (Optional) Options that affect sinks exporting data to BigQuery. use_partitioned_tables - (Required) Whether to use BigQuery's partition tables. 
+ type: |- + object({ + use_partitioned_tables = bool + }) + required: false + - name: destination_uri + description: The self_link URI of the destination resource (This is available as an output coming from one of the destination submodules) + type: string + required: true + - name: exclusions + description: (Optional) A list of sink exclusion filters. + type: |- + list(object({ + name = string, + description = string, + filter = string, + disabled = bool + })) + default: [] + required: false + - name: filter + description: The filter to apply when exporting logs. Only log entries that match the filter are exported. Default is '' which exports all logs. + type: string + default: "" + required: false + - name: include_children + description: Only valid if 'organization' or 'folder' is chosen as var.parent_resource.type. Determines whether or not to include children organizations/folders in the sink export. If true, logs associated with child projects are also exported; otherwise only logs relating to the provided organization/folder are included. + type: bool + default: false + required: false + - name: log_sink_name + description: The name of the log sink to be created. + type: string + required: true + - name: parent_resource_id + description: The ID of the GCP resource in which you create the log sink. If var.parent_resource_type is set to 'project', then this is the Project ID (and etc). + type: string + required: true + - name: parent_resource_type + description: 'The GCP resource in which you create the log sink. The value must not be computed, and must be one of the following: ''project'', ''folder'', ''billing_account'', or ''organization''.' + type: string + default: project + required: false + - name: unique_writer_identity + description: Whether or not to create a unique identity associated with this sink. If false (the default), then the writer_identity used is serviceAccount:cloud-logs@system.gserviceaccount.com. 
If true, then a unique service account is created and used for the logging sink. + type: bool + default: false + required: false + outputs: + - name: filter + description: The filter to be applied when exporting logs. + - name: log_sink_resource_id + description: The resource ID of the log sink that was created. + - name: log_sink_resource_name + description: The resource name of the log sink that was created. + - name: parent_resource_id + description: The ID of the GCP resource in which you create the log sink. + - name: writer_identity + description: The service account that logging uses to write log entries to the destination. + roles: + - level: Project + roles: + - roles/resourcemanager.projectCreator + - roles/logging.configWriter + - level: Project + roles: + - roles/iam.serviceAccountAdmin + - roles/storage.admin + - roles/pubsub.admin + - roles/bigquery.dataOwner + - roles/serviceusage.serviceUsageAdmin + - roles/resourcemanager.projectIamAdmin + - roles/logging.configWriter + - roles/cloudfunctions.developer + - roles/iam.serviceAccountUser + - roles/cloudscheduler.admin + - roles/appengine.appCreator + - roles/appengine.appAdmin + - level: Project + roles: + - roles/billing.user + - level: Project + roles: + - roles/logging.configWriter + - roles/billing.projectManager + - roles/securitycenter.sourcesEditor + - roles/resourcemanager.organizationAdmin + services: + - cloudapis.googleapis.com + - cloudbuild.googleapis.com + - cloudfunctions.googleapis.com + - cloudscheduler.googleapis.com + - securitycenter.googleapis.com + - cloudresourcemanager.googleapis.com + - oslogin.googleapis.com + - compute.googleapis.com + - pubsub.googleapis.com + - storage-component.googleapis.com + - storage-api.googleapis.com + - iam.googleapis.com + - cloudbilling.googleapis.com diff --git a/modules/bigquery/metadata.yaml b/modules/bigquery/metadata.yaml new file mode 100644 index 00000000..5cdd80e0 --- /dev/null +++ b/modules/bigquery/metadata.yaml @@ -0,0 +1,162 @@ +# 
Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: blueprints.cloud.google.com/v1alpha1 +kind: BlueprintMetadata +metadata: + name: terraform-google-log-export + annotations: + config.kubernetes.io/local-config: "true" +spec: + title: 'Log Export: BigQuery destination submodule' + source: + repo: https://github.com/terraform-google-modules/terraform-google-log-export.git + sourceType: git + version: 7.4.2 + actuationTool: + type: Terraform + version: '>= 0.13' + examples: + - name: billing_account + location: examples/bigquery/billing_account + - name: billing_account + location: examples/pubsub/billing_account + - name: billing_account + location: examples/storage/billing_account + - name: bq-log-alerting + location: examples/bq-log-alerting + - name: datadog-sink + location: examples/datadog-sink + - name: folder + location: examples/bigquery/folder + - name: folder + location: examples/logbucket/folder + - name: folder + location: examples/pubsub/folder + - name: folder + location: examples/storage/folder + - name: organization + location: examples/bigquery/organization + - name: organization + location: examples/logbucket/organization + - name: organization + location: examples/pubsub/organization + - name: organization + location: examples/storage/organization + - name: project + location: examples/bigquery/project + - name: project + location: examples/logbucket/project + - name: project + location: examples/pubsub/project + 
- name: project + location: examples/storage/project + - name: splunk-sink + location: examples/splunk-sink + variables: + - name: dataset_name + description: The name of the bigquery dataset to be created and used for log entries matching the filter. + type: string + required: true + - name: delete_contents_on_destroy + description: (Optional) If set to true, delete all the tables in the dataset when destroying the resource; otherwise, destroying the resource will fail if tables are present. + type: bool + default: false + required: false + - name: description + description: A use-friendly description of the dataset + type: string + default: Log export dataset + required: false + - name: expiration_days + description: Table expiration time. If unset logs will never be deleted. + type: number + required: false + - name: kms_key_name + description: ID of a Cloud KMS key that will be used to encrypt destination BigQuery table. The BigQuery Service Account associated with your project requires access to this encryption key. + type: string + required: false + - name: labels + description: Dataset labels + type: map(string) + default: {} + required: false + - name: location + description: The location of the storage bucket. + type: string + default: US + required: false + - name: log_sink_writer_identity + description: The service account that logging uses to write log entries to the destination. (This is available as an output coming from the root module). + type: string + required: true + - name: project_id + description: The ID of the project in which the bigquery dataset will be created. + type: string + required: true + outputs: + - name: console_link + description: The console link to the destination bigquery dataset + - name: destination_uri + description: The destination URI for the bigquery dataset. + - name: project + description: The project in which the bigquery dataset was created. 
+ - name: resource_id + description: The resource id for the destination bigquery dataset + - name: resource_name + description: The resource name for the destination bigquery dataset + - name: self_link + description: The self_link URI for the destination bigquery dataset + roles: + - level: Project + roles: + - roles/iam.serviceAccountAdmin + - roles/storage.admin + - roles/pubsub.admin + - roles/bigquery.dataOwner + - roles/serviceusage.serviceUsageAdmin + - roles/resourcemanager.projectIamAdmin + - roles/logging.configWriter + - roles/cloudfunctions.developer + - roles/iam.serviceAccountUser + - roles/cloudscheduler.admin + - roles/appengine.appCreator + - roles/appengine.appAdmin + - level: Project + roles: + - roles/billing.user + - level: Project + roles: + - roles/logging.configWriter + - roles/billing.projectManager + - roles/securitycenter.sourcesEditor + - roles/resourcemanager.organizationAdmin + - level: Project + roles: + - roles/resourcemanager.projectCreator + - roles/logging.configWriter + services: + - cloudapis.googleapis.com + - cloudbuild.googleapis.com + - cloudfunctions.googleapis.com + - cloudscheduler.googleapis.com + - securitycenter.googleapis.com + - cloudresourcemanager.googleapis.com + - oslogin.googleapis.com + - compute.googleapis.com + - pubsub.googleapis.com + - storage-component.googleapis.com + - storage-api.googleapis.com + - iam.googleapis.com + - cloudbilling.googleapis.com diff --git a/modules/bq-log-alerting/metadata.yaml b/modules/bq-log-alerting/metadata.yaml new file mode 100644 index 00000000..f097deaa --- /dev/null +++ b/modules/bq-log-alerting/metadata.yaml @@ -0,0 +1,173 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: blueprints.cloud.google.com/v1alpha1 +kind: BlueprintMetadata +metadata: + name: terraform-google-log-export + annotations: + config.kubernetes.io/local-config: "true" +spec: + title: 'Log Export: BigQuery Log Alerting' + source: + repo: https://github.com/terraform-google-modules/terraform-google-log-export.git + sourceType: git + actuationTool: + type: Terraform + version: '>= 0.13' + examples: + - name: billing_account + location: examples/bigquery/billing_account + - name: billing_account + location: examples/pubsub/billing_account + - name: billing_account + location: examples/storage/billing_account + - name: bq-log-alerting + location: examples/bq-log-alerting + - name: datadog-sink + location: examples/datadog-sink + - name: folder + location: examples/bigquery/folder + - name: folder + location: examples/logbucket/folder + - name: folder + location: examples/pubsub/folder + - name: folder + location: examples/storage/folder + - name: organization + location: examples/bigquery/organization + - name: organization + location: examples/logbucket/organization + - name: organization + location: examples/pubsub/organization + - name: organization + location: examples/storage/organization + - name: project + location: examples/bigquery/project + - name: project + location: examples/logbucket/project + - name: project + location: examples/pubsub/project + - name: project + location: examples/storage/project + - name: splunk-sink + location: examples/splunk-sink + variables: + - name: bigquery_location + description: Location for BigQuery 
resources. See https://cloud.google.com/bigquery/docs/locations for valid values. + type: string + default: US + required: false + - name: dry_run + description: Enable dry_run execution of the Cloud Function. If is true it will just print the object the would be converted as a finding + type: bool + default: false + required: false + - name: function_memory + description: The amount of memory in megabytes allotted for the Cloud function to use. + type: number + default: "256" + required: false + - name: function_region + description: Region for the Cloud function resources. See https://cloud.google.com/functions/docs/locations for valid values. + type: string + required: true + - name: function_timeout + description: The amount of time in seconds allotted for the execution of the function. + type: number + default: "540" + required: false + - name: job_schedule + description: The schedule on which the job will be executed in the unix-cron string format (https://cloud.google.com/scheduler/docs/configuring/cron-job-schedules#defining_the_job_schedule). Defaults to 15 minutes. + type: string + default: '*/15 * * * *' + required: false + - name: logging_project + description: The project to deploy the tool. + type: string + required: true + - name: org_id + description: The organization ID for the associated services + type: string + required: true + - name: source_name + description: The Security Command Center Source name for the "BQ Log Alerts" Source if the source had been created before. The format is `organizations//sources/` + type: string + default: "" + required: false + - name: time_window_quantity + description: The time window quantity used in the query in the view in BigQuery. + type: string + default: "20" + required: false + - name: time_window_unit + description: The time window unit used in the query in the view in BigQuery. 
Valid values are 'MICROSECOND', 'MILLISECOND', 'SECOND', 'MINUTE', 'HOUR' + type: string + default: MINUTE + required: false + outputs: + - name: bq_views_dataset_id + description: The ID of the BigQuery Views dataset + - name: cloud_function_service_account_email + description: The email of the service account created to be used by the Cloud Function + - name: cloud_scheduler_job + description: The Cloud Scheduler job instance + - name: cloud_scheduler_job_name + description: The name of the Cloud Scheduler job created + - name: pubsub_topic_name + description: Pub/Sub topic name + - name: source_name + description: The Security Command Center Source name for the "BQ Log Alerts" Source + roles: + - level: Project + roles: + - roles/resourcemanager.projectCreator + - roles/logging.configWriter + - level: Project + roles: + - roles/iam.serviceAccountAdmin + - roles/storage.admin + - roles/pubsub.admin + - roles/bigquery.dataOwner + - roles/serviceusage.serviceUsageAdmin + - roles/resourcemanager.projectIamAdmin + - roles/logging.configWriter + - roles/cloudfunctions.developer + - roles/iam.serviceAccountUser + - roles/cloudscheduler.admin + - roles/appengine.appCreator + - roles/appengine.appAdmin + - level: Project + roles: + - roles/billing.user + - level: Project + roles: + - roles/logging.configWriter + - roles/billing.projectManager + - roles/securitycenter.sourcesEditor + - roles/resourcemanager.organizationAdmin + services: + - cloudapis.googleapis.com + - cloudbuild.googleapis.com + - cloudfunctions.googleapis.com + - cloudscheduler.googleapis.com + - securitycenter.googleapis.com + - cloudresourcemanager.googleapis.com + - oslogin.googleapis.com + - compute.googleapis.com + - pubsub.googleapis.com + - storage-component.googleapis.com + - storage-api.googleapis.com + - iam.googleapis.com + - cloudbilling.googleapis.com diff --git a/modules/bq-log-alerting/versions.tf b/modules/bq-log-alerting/versions.tf index 00dd9b12..fa49d133 100644 --- 
a/modules/bq-log-alerting/versions.tf +++ b/modules/bq-log-alerting/versions.tf @@ -22,5 +22,9 @@ terraform { source = "hashicorp/google" version = ">= 3.53, < 5.0" } + random = { + source = "hashicorp/random" + version = "~> 3.2" + } } } diff --git a/modules/logbucket/metadata.yaml b/modules/logbucket/metadata.yaml new file mode 100644 index 00000000..f1c965a8 --- /dev/null +++ b/modules/logbucket/metadata.yaml @@ -0,0 +1,145 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +apiVersion: blueprints.cloud.google.com/v1alpha1 +kind: BlueprintMetadata +metadata: + name: terraform-google-log-export + annotations: + config.kubernetes.io/local-config: "true" +spec: + title: 'Log Export: Log Bucket destination submodule' + source: + repo: https://github.com/terraform-google-modules/terraform-google-log-export.git + sourceType: git + version: 7.4.2 + actuationTool: + type: Terraform + version: '>= 0.13' + examples: + - name: billing_account + location: examples/bigquery/billing_account + - name: billing_account + location: examples/pubsub/billing_account + - name: billing_account + location: examples/storage/billing_account + - name: bq-log-alerting + location: examples/bq-log-alerting + - name: datadog-sink + location: examples/datadog-sink + - name: folder + location: examples/bigquery/folder + - name: folder + location: examples/logbucket/folder + - name: folder + location: examples/pubsub/folder + - name: folder + location: examples/storage/folder + - name: organization + location: examples/bigquery/organization + - name: organization + location: examples/logbucket/organization + - name: organization + location: examples/pubsub/organization + - name: organization + location: examples/storage/organization + - name: project + location: examples/bigquery/project + - name: project + location: examples/logbucket/project + - name: project + location: examples/pubsub/project + - name: project + location: examples/storage/project + - name: splunk-sink + location: examples/splunk-sink + variables: + - name: grant_write_permission_on_bkt + description: (Optional) Indicates whether the module is responsible for granting write permission on the logbucket. This permission will be given by default, but if the user wants, this module can skip this step. This is the case when the sink route logs to a log bucket in the same Cloud project, no new service account will be created and this module will need to bypass granting permissions. 
+ type: bool + default: true + required: false + - name: location + description: The location of the log bucket. + type: string + default: global + required: false + - name: log_sink_writer_identity + description: The service account that logging uses to write log entries to the destination. (This is available as an output coming from the root module). + type: string + required: true + - name: name + description: The name of the log bucket to be created and used for log entries matching the filter. + type: string + required: true + - name: project_id + description: The ID of the project in which the log bucket will be created. + type: string + required: true + - name: retention_days + description: The number of days data should be retained for the log bucket. + type: number + default: 30 + required: false + outputs: + - name: console_link + description: The console link to the destination log buckets + - name: destination_uri + description: The destination URI for the log bucket. + - name: project + description: The project in which the log bucket was created. 
+ - name: resource_name + description: The resource name for the destination log bucket + roles: + - level: Project + roles: + - roles/logging.configWriter + - roles/billing.projectManager + - roles/securitycenter.sourcesEditor + - roles/resourcemanager.organizationAdmin + - level: Project + roles: + - roles/resourcemanager.projectCreator + - roles/logging.configWriter + - level: Project + roles: + - roles/iam.serviceAccountAdmin + - roles/storage.admin + - roles/pubsub.admin + - roles/bigquery.dataOwner + - roles/serviceusage.serviceUsageAdmin + - roles/resourcemanager.projectIamAdmin + - roles/logging.configWriter + - roles/cloudfunctions.developer + - roles/iam.serviceAccountUser + - roles/cloudscheduler.admin + - roles/appengine.appCreator + - roles/appengine.appAdmin + - level: Project + roles: + - roles/billing.user + services: + - cloudapis.googleapis.com + - cloudbuild.googleapis.com + - cloudfunctions.googleapis.com + - cloudscheduler.googleapis.com + - securitycenter.googleapis.com + - cloudresourcemanager.googleapis.com + - oslogin.googleapis.com + - compute.googleapis.com + - pubsub.googleapis.com + - storage-component.googleapis.com + - storage-api.googleapis.com + - iam.googleapis.com + - cloudbilling.googleapis.com diff --git a/modules/pubsub/metadata.yaml b/modules/pubsub/metadata.yaml new file mode 100644 index 00000000..952220ac --- /dev/null +++ b/modules/pubsub/metadata.yaml @@ -0,0 +1,172 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: blueprints.cloud.google.com/v1alpha1 +kind: BlueprintMetadata +metadata: + name: terraform-google-log-export + annotations: + config.kubernetes.io/local-config: "true" +spec: + title: 'Log Export: PubSub destination submodule' + source: + repo: https://github.com/terraform-google-modules/terraform-google-log-export.git + sourceType: git + version: 7.4.2 + actuationTool: + type: Terraform + version: '>= 0.13' + examples: + - name: billing_account + location: examples/bigquery/billing_account + - name: billing_account + location: examples/pubsub/billing_account + - name: billing_account + location: examples/storage/billing_account + - name: bq-log-alerting + location: examples/bq-log-alerting + - name: datadog-sink + location: examples/datadog-sink + - name: folder + location: examples/bigquery/folder + - name: folder + location: examples/logbucket/folder + - name: folder + location: examples/pubsub/folder + - name: folder + location: examples/storage/folder + - name: organization + location: examples/bigquery/organization + - name: organization + location: examples/logbucket/organization + - name: organization + location: examples/pubsub/organization + - name: organization + location: examples/storage/organization + - name: project + location: examples/bigquery/project + - name: project + location: examples/logbucket/project + - name: project + location: examples/pubsub/project + - name: project + location: examples/storage/project + - name: splunk-sink + location: examples/splunk-sink + variables: + - name: create_push_subscriber + description: Whether to add a push configuration to the subcription. If 'true', a push subscription is created along with a service account that is granted roles/pubsub.subscriber and roles/pubsub.viewer to the topic. 
+ type: bool + default: false + required: false + - name: create_subscriber + description: Whether to create a subscription to the topic that was created and used for log entries matching the filter. If 'true', a pull subscription is created along with a service account that is granted roles/pubsub.subscriber and roles/pubsub.viewer to the topic. + type: bool + default: false + required: false + - name: kms_key_name + description: ID of a Cloud KMS CryptoKey to be used to protect access to messages published on this topic. Your project's PubSub service account requires access to this encryption key. + type: string + required: false + - name: log_sink_writer_identity + description: The service account that logging uses to write log entries to the destination. (This is available as an output coming from the root module). + type: string + required: true + - name: project_id + description: The ID of the project in which the pubsub topic will be created. + type: string + required: true + - name: push_endpoint + description: The URL locating the endpoint to which messages should be pushed. + type: string + default: "" + required: false + - name: subscriber_id + description: The ID to give the pubsub pull subscriber service account (optional). + type: string + default: "" + required: false + - name: subscription_labels + description: A set of key/value label pairs to assign to the pubsub subscription. + type: map(string) + default: {} + required: false + - name: topic_labels + description: A set of key/value label pairs to assign to the pubsub topic. + type: map(string) + default: {} + required: false + - name: topic_name + description: The name of the pubsub topic to be created and used for log entries matching the filter. + type: string + required: true + outputs: + - name: console_link + description: The console link to the destination storage bucket + - name: destination_uri + description: The destination URI for the topic. 
+ - name: project + description: The project in which the topic was created. + - name: pubsub_push_subscription + description: Pub/Sub push subscription id (if any) + - name: pubsub_subscriber + description: Pub/Sub subscriber email (if any) + - name: pubsub_subscription + description: Pub/Sub subscription id (if any) + - name: resource_id + description: The resource id for the destination topic + - name: resource_name + description: The resource name for the destination topic + roles: + - level: Project + roles: + - roles/resourcemanager.projectCreator + - roles/logging.configWriter + - level: Project + roles: + - roles/iam.serviceAccountAdmin + - roles/storage.admin + - roles/pubsub.admin + - roles/bigquery.dataOwner + - roles/serviceusage.serviceUsageAdmin + - roles/resourcemanager.projectIamAdmin + - roles/logging.configWriter + - roles/cloudfunctions.developer + - roles/iam.serviceAccountUser + - roles/cloudscheduler.admin + - roles/appengine.appCreator + - roles/appengine.appAdmin + - level: Project + roles: + - roles/billing.user + - level: Project + roles: + - roles/logging.configWriter + - roles/billing.projectManager + - roles/securitycenter.sourcesEditor + - roles/resourcemanager.organizationAdmin + services: + - cloudapis.googleapis.com + - cloudbuild.googleapis.com + - cloudfunctions.googleapis.com + - cloudscheduler.googleapis.com + - securitycenter.googleapis.com + - cloudresourcemanager.googleapis.com + - oslogin.googleapis.com + - compute.googleapis.com + - pubsub.googleapis.com + - storage-component.googleapis.com + - storage-api.googleapis.com + - iam.googleapis.com + - cloudbilling.googleapis.com diff --git a/modules/storage/metadata.yaml b/modules/storage/metadata.yaml new file mode 100644 index 00000000..e9448885 --- /dev/null +++ b/modules/storage/metadata.yaml @@ -0,0 +1,196 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: blueprints.cloud.google.com/v1alpha1 +kind: BlueprintMetadata +metadata: + name: terraform-google-log-export + annotations: + config.kubernetes.io/local-config: "true" +spec: + title: 'Log Export: Storage destination submodule' + source: + repo: https://github.com/terraform-google-modules/terraform-google-log-export.git + sourceType: git + version: 7.4.2 + actuationTool: + type: Terraform + version: '>= 0.13' + examples: + - name: billing_account + location: examples/bigquery/billing_account + - name: billing_account + location: examples/pubsub/billing_account + - name: billing_account + location: examples/storage/billing_account + - name: bq-log-alerting + location: examples/bq-log-alerting + - name: datadog-sink + location: examples/datadog-sink + - name: folder + location: examples/bigquery/folder + - name: folder + location: examples/logbucket/folder + - name: folder + location: examples/pubsub/folder + - name: folder + location: examples/storage/folder + - name: organization + location: examples/bigquery/organization + - name: organization + location: examples/logbucket/organization + - name: organization + location: examples/pubsub/organization + - name: organization + location: examples/storage/organization + - name: project + location: examples/bigquery/project + - name: project + location: examples/logbucket/project + - name: project + location: examples/pubsub/project + - name: project + location: examples/storage/project + - name: splunk-sink + location: examples/splunk-sink + variables: + - name: force_destroy + description: When 
deleting a bucket, this boolean option will delete all contained objects. + type: bool + default: false + required: false + - name: kms_key_name + description: ID of a Cloud KMS key that will be used to encrypt objects inserted into this bucket. Automatic Google Cloud Storage service account for the bucket's project requires access to this encryption key. + type: string + required: false + - name: lifecycle_rules + description: List of lifecycle rules to configure. Format is the same as described in provider documentation https://www.terraform.io/docs/providers/google/r/storage_bucket.html#lifecycle_rule except condition.matches_storage_class should be a comma delimited string. + type: |- + set(object({ + # Object with keys: + # - type - The type of the action of this Lifecycle Rule. Supported values: Delete and SetStorageClass. + # - storage_class - (Required if action type is SetStorageClass) The target Storage Class of objects affected by this Lifecycle Rule. + action = map(string) + + # Object with keys: + # - age - (Optional) Minimum age of an object in days to satisfy this condition. + # - created_before - (Optional) Creation date of an object in RFC 3339 (e.g. 2017-06-13) to satisfy this condition. + # - with_state - (Optional) Match to live and/or archived objects. Supported values include: "LIVE", "ARCHIVED", "ANY". + # - matches_storage_class - (Optional) Comma delimited string for storage class of objects to satisfy this condition. Supported values include: MULTI_REGIONAL, REGIONAL, NEARLINE, COLDLINE, STANDARD, DURABLE_REDUCED_AVAILABILITY. + # - num_newer_versions - (Optional) Relevant only for versioned objects. The number of newer versions of an object to satisfy this condition. + # - days_since_custom_time - (Optional) The number of days from the Custom-Time metadata attribute after which this condition becomes true. + condition = map(string) + })) + default: [] + required: false + - name: location + description: The location of the storage bucket. 
+ type: string + default: US + required: false + - name: log_sink_writer_identity + description: The service account that logging uses to write log entries to the destination. (This is available as an output coming from the root module). + type: string + required: true + - name: project_id + description: The ID of the project in which the storage bucket will be created. + type: string + required: true + - name: retention_policy + description: Configuration of the bucket's data retention policy for how long objects in the bucket should be retained. + type: |- + object({ + is_locked = bool + retention_period_days = number + }) + required: false + - name: storage_bucket_labels + description: Labels to apply to the storage bucket. + type: map(string) + default: {} + required: false + - name: storage_bucket_name + description: The name of the storage bucket to be created and used for log entries matching the filter. + type: string + required: true + - name: storage_class + description: The storage class of the storage bucket. + type: string + default: STANDARD + required: false + - name: uniform_bucket_level_access + description: Enables Uniform bucket-level access to a bucket. + type: bool + default: true + required: false + - name: versioning + description: Toggles bucket versioning, ability to retain a non-current object version when the live object version gets replaced or deleted. + type: bool + default: false + required: false + outputs: + - name: console_link + description: The console link to the destination storage bucket + - name: destination_uri + description: The destination URI for the storage bucket. + - name: project + description: The project in which the storage bucket was created. 
+ - name: resource_id
+ description: The resource ID for the destination storage bucket.
+ - name: resource_name
+ description: The resource name for the destination storage bucket.
+ - name: self_link
+ description: The self_link URI for the destination storage bucket.
+ roles:
+ - level: Project
+ roles:
+ - roles/resourcemanager.projectCreator
+ - roles/logging.configWriter
+ - level: Project
+ roles:
+ - roles/iam.serviceAccountAdmin
+ - roles/storage.admin
+ - roles/pubsub.admin
+ - roles/bigquery.dataOwner
+ - roles/serviceusage.serviceUsageAdmin
+ - roles/resourcemanager.projectIamAdmin
+ - roles/logging.configWriter
+ - roles/cloudfunctions.developer
+ - roles/iam.serviceAccountUser
+ - roles/cloudscheduler.admin
+ - roles/appengine.appCreator
+ - roles/appengine.appAdmin
+ - level: Project
+ roles:
+ - roles/billing.user
+ - level: Project
+ roles:
+ - roles/logging.configWriter
+ - roles/billing.projectManager
+ - roles/securitycenter.sourcesEditor
+ - roles/resourcemanager.organizationAdmin
+ services:
+ - cloudapis.googleapis.com
+ - cloudbuild.googleapis.com
+ - cloudfunctions.googleapis.com
+ - cloudscheduler.googleapis.com
+ - securitycenter.googleapis.com
+ - cloudresourcemanager.googleapis.com
+ - oslogin.googleapis.com
+ - compute.googleapis.com
+ - pubsub.googleapis.com
+ - storage-component.googleapis.com
+ - storage-api.googleapis.com
+ - iam.googleapis.com
+ - cloudbilling.googleapis.com