chore: Update GCP release notes dataset to Airflow 2 (#436)
adlersantos committed Aug 8, 2022
1 parent b619b71 commit b3b82b3
Showing 9 changed files with 254 additions and 37 deletions.
@@ -1,5 +1,5 @@
/**
- * Copyright 2021 Google LLC
+ * Copyright 2022 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
2 changes: 1 addition & 1 deletion datasets/google_cloud_release_notes/infra/provider.tf
@@ -1,5 +1,5 @@
/**
- * Copyright 2021 Google LLC
+ * Copyright 2022 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -1,5 +1,5 @@
/**
- * Copyright 2021 Google LLC
+ * Copyright 2022 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -16,15 +16,10 @@


resource "google_bigquery_table" "google_cloud_release_notes_release_notes" {
-  project    = var.project_id
-  dataset_id = "google_cloud_release_notes"
-  table_id   = "release_notes"
+  project     = var.project_id
+  dataset_id  = "google_cloud_release_notes"
+  table_id    = "release_notes"
+  description = "This table contains release notes for the majority of generally available Google Cloud products found on cloud.google.com. You can use this BigQuery public dataset to consume release notes programmatically across all products. HTML versions of release notes are available within each product's documentation and also in a filterable format at https://console.cloud.google.com/release-notes."

   depends_on = [
     google_bigquery_dataset.google_cloud_release_notes
   ]
5 changes: 4 additions & 1 deletion datasets/google_cloud_release_notes/infra/variables.tf
@@ -1,5 +1,5 @@
/**
- * Copyright 2021 Google LLC
+ * Copyright 2022 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -20,4 +20,7 @@ variable "bucket_name_prefix" {}
variable "impersonating_acct" {}
variable "region" {}
variable "env" {}
variable "iam_policies" {
default = {}
}

@@ -0,0 +1,21 @@
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

FROM python:3.8
ENV PYTHONUNBUFFERED True
COPY requirements.txt ./
RUN python3 -m pip install --no-cache-dir -r requirements.txt
WORKDIR /custom
COPY ./script.py .
CMD ["python3", "script.py"]
@@ -0,0 +1,4 @@
google-api-core
google-cloud-bigquery
google-cloud-bigquery-datatransfer
protobuf
@@ -0,0 +1,180 @@
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import operator
import os
import time

from google.api_core.exceptions import ResourceExhausted
from google.cloud import bigquery_datatransfer_v1
from google.protobuf.timestamp_pb2 import Timestamp

RETRY_DELAY = 10


class TimeoutError(Exception):
    """Raised when the BQ transfer jobs haven't all finished within the allotted time"""

    pass


def main(
    source_project_id: str,
    source_bq_dataset: str,
    target_project_id: str,
    target_bq_dataset: str,
    service_account: str,
    timeout: int,
):
    client = bigquery_datatransfer_v1.DataTransferServiceClient()
    transfer_config_name = f"{source_project_id}-{source_bq_dataset}-copy"
    existing_config = find_existing_config(
        client, target_project_id, transfer_config_name
    )

    if not existing_config:
        existing_config = create_transfer_config(
            client,
            source_project_id,
            source_bq_dataset,
            target_project_id,
            target_bq_dataset,
            transfer_config_name,
            service_account,
        )

    trigger_config(client, existing_config)
    wait_for_completion(client, existing_config, timeout)


def find_existing_config(
    client: bigquery_datatransfer_v1.DataTransferServiceClient,
    gcp_project: str,
    transfer_config_name: str,
) -> bigquery_datatransfer_v1.types.TransferConfig:
    all_transfer_configs = client.list_transfer_configs(
        request=bigquery_datatransfer_v1.types.ListTransferConfigsRequest(
            parent=f"projects/{gcp_project}"
        )
    )

    return next(
        (
            config
            for config in all_transfer_configs
            if config.display_name == transfer_config_name
        ),
        None,
    )


def wait_for_completion(
    client: bigquery_datatransfer_v1.DataTransferServiceClient,
    running_config: bigquery_datatransfer_v1.types.TransferConfig,
    timeout: int,
) -> None:
    _start = int(time.time())

    while True:
        latest_runs = []
        latest_runs.append(latest_transfer_run(client, running_config))

        logging.info(f"States: {[str(run.state) for run in latest_runs]}")

        # Mark as complete when all runs have succeeded
        if all([str(run.state) == "TransferState.SUCCEEDED" for run in latest_runs]):
            return

        # Stop the process when it's longer than the allotted time
        if int(time.time()) - _start > timeout:
            raise TimeoutError

        time.sleep(RETRY_DELAY)


def latest_transfer_run(
    client: bigquery_datatransfer_v1.DataTransferServiceClient,
    config: bigquery_datatransfer_v1.types.TransferConfig,
) -> bigquery_datatransfer_v1.types.TransferRun:
    transfer_runs = client.list_transfer_runs(parent=config.name)
    return max(transfer_runs, key=operator.attrgetter("run_time"))


def create_transfer_config(
    client: bigquery_datatransfer_v1.DataTransferServiceClient,
    source_project_id: str,
    source_dataset_id: str,
    target_project_id: str,
    target_dataset_id: str,
    display_name: str,
    service_account: str,
) -> bigquery_datatransfer_v1.types.TransferConfig:
    transfer_config = bigquery_datatransfer_v1.TransferConfig(
        destination_dataset_id=target_dataset_id,
        display_name=display_name,
        data_source_id="cross_region_copy",
        dataset_region="US",
        params={
            "overwrite_destination_table": True,
            "source_project_id": source_project_id,
            "source_dataset_id": source_dataset_id,
        },
        schedule_options=bigquery_datatransfer_v1.ScheduleOptions(
            disable_auto_scheduling=True
        ),
    )

    request = bigquery_datatransfer_v1.types.CreateTransferConfigRequest(
        parent=client.common_project_path(target_project_id),
        transfer_config=transfer_config,
        service_account_name=service_account,
    )

    return client.create_transfer_config(request=request)


def trigger_config(
    client: bigquery_datatransfer_v1.DataTransferServiceClient,
    config: bigquery_datatransfer_v1.types.TransferConfig,
) -> None:
    now = time.time()
    seconds = int(now)
    nanos = int((now - seconds) * pow(10, 9))

    try:
        client.start_manual_transfer_runs(
            request=bigquery_datatransfer_v1.types.StartManualTransferRunsRequest(
                parent=config.name,
                requested_run_time=Timestamp(seconds=seconds, nanos=nanos),
            )
        )
    except ResourceExhausted:
        logging.info(
            f"Transfer job is currently running for config ({config.display_name}) {config.name}."
        )
        return


if __name__ == "__main__":
    logging.getLogger().setLevel(logging.INFO)

    main(
        source_project_id=os.environ["SOURCE_PROJECT_ID"],
        source_bq_dataset=os.environ["SOURCE_BQ_DATASET"],
        target_project_id=os.environ["TARGET_PROJECT_ID"],
        target_bq_dataset=os.environ["TARGET_BQ_DATASET"],
        service_account=os.environ["SERVICE_ACCOUNT"],
        timeout=int(os.getenv("TIMEOUT", 1200)),
    )
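
For local testing, the copy script above can be driven end-to-end with a small wrapper along these lines; the project IDs and service account shown are placeholders, not values from this commit:

# Hypothetical local-run sketch for the copy script above; the project IDs
# and service account are placeholders, and script.py is assumed to sit in
# the current directory (mirroring the Dockerfile's COPY step).
import os
import subprocess

env = dict(
    os.environ,
    SOURCE_PROJECT_ID="example-source-project",  # placeholder
    SOURCE_BQ_DATASET="google_cloud_release_notes",
    TARGET_PROJECT_ID="example-target-project",  # placeholder
    TARGET_BQ_DATASET="google_cloud_release_notes",
    SERVICE_ACCOUNT="copier@example-target-project.iam.gserviceaccount.com",  # placeholder
    TIMEOUT="1200",
)
subprocess.run(["python3", "script.py"], env=env, check=True)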
@@ -1,4 +1,4 @@
-# Copyright 2021 Google LLC
+# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -19,7 +19,7 @@ resources:
description: "This table contains release notes for the majority of generally available Google Cloud products found on cloud.google.com. You can use this BigQuery public dataset to consume release notes programmatically across all products. HTML versions of release notes are available within each product's documentation and also in a filterable format at https://console.cloud.google.com/release-notes."

dag:
airflow_version: 1
airflow_version: 2
initialize:
dag_id: release_notes
default_args:
@@ -32,16 +32,24 @@
     default_view: graph

   tasks:
-    - operator: "BigQueryToBigQueryOperator"
-      description: "Task to run a BQ to BQ operator"
-
+    - operator: "KubernetesPodOperator"
+      description: "Copy GCP release notes dataset"
       args:
-        task_id: "google_cloud_release_notes"
-        source_project_dataset_tables: ["{{ var.json.google_cloud_release_notes.release_notes.source_project_dataset_table }}"]
-        destination_project_dataset_table: "{{ var.json.google_cloud_release_notes.release_notes.destination_project_dataset_table }}"
-        impersonation_chain: "{{ var.json.google_cloud_release_notes.service_account }}"
-        write_disposition: "WRITE_TRUNCATE"
-        gcp_conn_id: "google_cloud_release_notes_conn"
+        task_id: "copy_bq_dataset"
+        name: "copy_bq_dataset"
+        namespace: "composer"
+        service_account_name: "datasets"
+        image_pull_policy: "Always"
+        image: "{{ var.json.google_cloud_release_notes.container_registry.copy_bq_dataset }}"
+        env_vars:
+          SOURCE_PROJECT_ID: "{{ var.json.google_cloud_release_notes.source_project_id }}"
+          SOURCE_BQ_DATASET: "{{ var.json.google_cloud_release_notes.source_bq_dataset }}"
+          TARGET_PROJECT_ID: "{{ var.value.gcp_project }}"
+          TARGET_BQ_DATASET: google_cloud_release_notes
+          SERVICE_ACCOUNT: "{{ var.json.google_cloud_release_notes.service_account }}"
+        resources:
+          request_memory: "128M"
+          request_cpu: "200m"

   graph_paths:
-    - "google_cloud_release_notes"
+    - "copy_bq_dataset"
@@ -1,4 +1,4 @@
-# Copyright 2021 Google LLC
+# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -14,7 +14,7 @@


from airflow import DAG
-from airflow.contrib.operators import bigquery_to_bigquery
+from airflow.providers.cncf.kubernetes.operators import kubernetes_pod

default_args = {
"owner": "Google",
@@ -32,16 +32,22 @@
default_view="graph",
) as dag:

# Task to run a BQ to BQ operator
google_cloud_release_notes = bigquery_to_bigquery.BigQueryToBigQueryOperator(
task_id="google_cloud_release_notes",
source_project_dataset_tables=[
"{{ var.json.google_cloud_release_notes.release_notes.source_project_dataset_table }}"
],
destination_project_dataset_table="{{ var.json.google_cloud_release_notes.release_notes.destination_project_dataset_table }}",
impersonation_chain="{{ var.json.google_cloud_release_notes.service_account }}",
write_disposition="WRITE_TRUNCATE",
gcp_conn_id="google_cloud_release_notes_conn",
# Copy GCP release notes dataset
copy_bq_dataset = kubernetes_pod.KubernetesPodOperator(
task_id="copy_bq_dataset",
name="copy_bq_dataset",
namespace="composer",
service_account_name="datasets",
image_pull_policy="Always",
image="{{ var.json.google_cloud_release_notes.container_registry.copy_bq_dataset }}",
env_vars={
"SOURCE_PROJECT_ID": "{{ var.json.google_cloud_release_notes.source_project_id }}",
"SOURCE_BQ_DATASET": "{{ var.json.google_cloud_release_notes.source_bq_dataset }}",
"TARGET_PROJECT_ID": "{{ var.value.gcp_project }}",
"TARGET_BQ_DATASET": "google_cloud_release_notes",
"SERVICE_ACCOUNT": "{{ var.json.google_cloud_release_notes.service_account }}",
},
resources={"request_memory": "128M", "request_cpu": "200m"},
)

google_cloud_release_notes
copy_bq_dataset
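
A quick way to confirm that the Airflow 2 import path used above resolves in a given Composer or local environment is a sketch along these lines, assuming the apache-airflow-providers-cncf-kubernetes package is installed:

# Hypothetical sanity check: verifies the Airflow 2 provider module used by
# the regenerated DAG is importable in the current environment.
from airflow.providers.cncf.kubernetes.operators import kubernetes_pod

print(kubernetes_pod.KubernetesPodOperator.__name__)  # expect: KubernetesPodOperator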
