Feat: Onboard EBI ChEMBL Previous Data dataset #470

Merged
37 changes: 37 additions & 0 deletions datasets/ebi_chembl/pipelines/_images/bq_data_transfer/Dockerfile
@@ -0,0 +1,37 @@
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# The base image for this build
FROM python:3.8

# Allow statements and log messages to appear in Cloud logs
ENV PYTHONUNBUFFERED True

# Copy the requirements file into the image
COPY requirements.txt ./

# Install the packages specified in the requirements file
RUN python3 -m pip install --no-cache-dir -r requirements.txt

# The WORKDIR instruction sets the working directory for any RUN, CMD,
# ENTRYPOINT, COPY and ADD instructions that follow it in the Dockerfile.
# If the WORKDIR doesn't exist, it will be created even if it's not used in
# any subsequent Dockerfile instruction.
WORKDIR /custom

# Copy the data processing script(s) into the image under /custom/
COPY ./script.py .

# Command to run the data processing script when the container is run
CMD ["python3", "script.py"]
4 changes: 4 additions & 0 deletions datasets/ebi_chembl/pipelines/_images/bq_data_transfer/requirements.txt
@@ -0,0 +1,4 @@
google-api-core
google-cloud-bigquery
google-cloud-bigquery-datatransfer
protobuf
165 changes: 165 additions & 0 deletions datasets/ebi_chembl/pipelines/_images/bq_data_transfer/script.py
@@ -0,0 +1,165 @@
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
import operator
import os
import time

from google.api_core.exceptions import ResourceExhausted
from google.cloud import bigquery_datatransfer_v1
from google.protobuf.timestamp_pb2 import Timestamp

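# Seconds to wait between successive polls of the transfer run state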
RETRY_DELAY = 10


class TimeoutError(Exception):
"""Raised when the BQ transfer jobs haven't all finished within the allotted time"""

pass


def main(
source_project_id: str,
source_bq_dataset: str,
target_project_id: str,
target_bq_dataset: str,
timeout: int,
):
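    # Reuse an existing transfer config when one with the expected display name
    # already exists in the target project; otherwise create it. Then trigger a
    # manual run and block until it succeeds or the timeout elapses.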
client = bigquery_datatransfer_v1.DataTransferServiceClient()
transfer_config_name = f"{source_project_id}-{source_bq_dataset}-copy"
existing_config = find_existing_config(
client, target_project_id, transfer_config_name
)
if not existing_config:
existing_config = create_transfer_config(
client,
source_project_id,
source_bq_dataset,
target_project_id,
target_bq_dataset,
transfer_config_name,
)

trigger_config(client, existing_config)
wait_for_completion(client, existing_config, timeout)


def find_existing_config(
client: bigquery_datatransfer_v1.DataTransferServiceClient,
gcp_project: str,
transfer_config_name: str,
) -> bigquery_datatransfer_v1.types.TransferConfig:
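    # Return the transfer config in the project whose display name matches
    # transfer_config_name, or None if no such config exists.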
all_transfer_configs = client.list_transfer_configs(
request=bigquery_datatransfer_v1.types.ListTransferConfigsRequest(
parent=f"projects/{gcp_project}"
)
)
return next(
(
config
for config in all_transfer_configs
if config.display_name == transfer_config_name
),
None,
)


def wait_for_completion(
client: bigquery_datatransfer_v1.DataTransferServiceClient,
running_config: bigquery_datatransfer_v1.types.TransferConfig,
timeout: int,
) -> None:
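    # Poll the latest run of the config every RETRY_DELAY seconds until it
    # reports SUCCEEDED, raising TimeoutError once `timeout` seconds have passed.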
_start = int(time.time())
while True:
latest_runs = []
latest_runs.append(latest_transfer_run(client, running_config))
logging.info(f"States: {[str(run.state) for run in latest_runs]}")
# Mark as complete when all runs have succeeded
if all([str(run.state) == "TransferState.SUCCEEDED" for run in latest_runs]):
return
# Stop the process when it's longer than the allotted time
if int(time.time()) - _start > timeout:
raise TimeoutError
time.sleep(RETRY_DELAY)


def latest_transfer_run(
client: bigquery_datatransfer_v1.DataTransferServiceClient,
config: bigquery_datatransfer_v1.types.TransferConfig,
) -> bigquery_datatransfer_v1.types.TransferRun:
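    # Return the config's most recent transfer run, ordered by run_time.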
transfer_runs = client.list_transfer_runs(parent=config.name)
return max(transfer_runs, key=operator.attrgetter("run_time"))


def create_transfer_config(
client: bigquery_datatransfer_v1.DataTransferServiceClient,
source_project_id: str,
source_dataset_id: str,
target_project_id: str,
target_dataset_id: str,
display_name: str,
) -> bigquery_datatransfer_v1.types.TransferConfig:
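    # "cross_region_copy" is the Data Transfer Service source for copying a
    # BigQuery dataset between projects. Auto-scheduling is disabled because
    # the DAG triggers runs explicitly via start_manual_transfer_runs.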
transfer_config = bigquery_datatransfer_v1.TransferConfig(
destination_dataset_id=target_dataset_id,
display_name=display_name,
data_source_id="cross_region_copy",
dataset_region="US",
params={
"overwrite_destination_table": True,
"source_project_id": source_project_id,
"source_dataset_id": source_dataset_id,
},
schedule_options=bigquery_datatransfer_v1.ScheduleOptions(
disable_auto_scheduling=True
),
)
request = bigquery_datatransfer_v1.types.CreateTransferConfigRequest(
parent=client.common_project_path(target_project_id),
transfer_config=transfer_config,
)
return client.create_transfer_config(request=request)


def trigger_config(
client: bigquery_datatransfer_v1.DataTransferServiceClient,
config: bigquery_datatransfer_v1.types.TransferConfig,
) -> None:
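    # Request an immediate run. The API expects a protobuf Timestamp, so the
    # current epoch time is split into whole seconds plus nanoseconds.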
now = time.time()
seconds = int(now)
nanos = int((now - seconds) * pow(10, 9))
try:
client.start_manual_transfer_runs(
request=bigquery_datatransfer_v1.types.StartManualTransferRunsRequest(
parent=config.name,
requested_run_time=Timestamp(seconds=seconds, nanos=nanos),
)
)
except ResourceExhausted:
logging.info(
f"Transfer job is currently running for config ({config.display_name}) {config.name}."
)
return


if __name__ == "__main__":
logging.getLogger().setLevel(logging.INFO)

main(
source_project_id=os.environ["SOURCE_PROJECT_ID"],
source_bq_dataset=os.environ["SOURCE_BQ_DATASET"],
target_project_id=os.environ["TARGET_PROJECT_ID"],
target_bq_dataset=os.environ["TARGET_BQ_DATASET"],
timeout=int(os.getenv("TIMEOUT", 12000)),
)
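
For local testing outside the container, the same entry point can be driven directly — a minimal sketch, assuming the requirements above are installed, Application Default Credentials are available, and the BigQuery Data Transfer API is enabled. Only patents-public-data.ebi_chembl is named in this PR; the target project and dataset below are placeholders.

# Hypothetical local invocation of the script added above; values other than
# the source project/dataset are placeholders.
import script  # the script.py module from this image

script.main(
    source_project_id="patents-public-data",
    source_bq_dataset="ebi_chembl",
    target_project_id="my-gcp-project",   # placeholder
    target_bq_dataset="ebi_chembl_copy",  # placeholder
    timeout=600,
)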
@@ -0,0 +1,52 @@
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


from airflow import DAG
from airflow.providers.cncf.kubernetes.operators import kubernetes_pod

default_args = {
"owner": "Google",
"depends_on_past": False,
"start_date": "2022-07-01",
}


with DAG(
dag_id="ebi_chembl.ebi_chembl_old_data",
default_args=default_args,
max_active_runs=1,
schedule_interval="@weekly",
catchup=False,
default_view="graph",
) as dag:

# Copy patents-public-data.ebi_chembl dataset
copy_bq_datasets = kubernetes_pod.KubernetesPodOperator(
task_id="copy_bq_datasets",
name="copy_bq_datasets",
namespace="composer",
service_account_name="datasets",
image_pull_policy="Always",
image="{{ var.json.ebi_chembl_old_data.container_registry.bq_data_transfer }}",
env_vars={
"SOURCE_PROJECT_ID": "{{ var.json.ebi_chembl_old_data.source_project_id }}",
"SOURCE_BQ_DATASET": "{{ var.json.ebi_chembl_old_data.source_bq_dataset }}",
"TARGET_PROJECT_ID": "{{ var.value.gcp_project }}",
"TARGET_BQ_DATASET": "{{ var.json.ebi_chembl_old_data.target_bq_dataset }}",
},
resources={"request_memory": "128M", "request_cpu": "200m"},
)

copy_bq_datasets
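
The templated fields above resolve against an Airflow variable named ebi_chembl_old_data (plus the built-in gcp_project variable). A sketch of the shape that variable is assumed to take — only the source project and dataset are stated in this PR; the image path and target dataset are placeholders:

# Hypothetical payload for the "ebi_chembl_old_data" Airflow variable read by
# the templated fields above; placeholder values are marked.
import json

from airflow.models import Variable

Variable.set(
    "ebi_chembl_old_data",
    json.dumps(
        {
            "container_registry": {
                "bq_data_transfer": "gcr.io/my-gcp-project/bq_data_transfer"  # placeholder
            },
            "source_project_id": "patents-public-data",
            "source_bq_dataset": "ebi_chembl",
            "target_bq_dataset": "ebi_chembl_copy",  # placeholder
        }
    ),
)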
51 changes: 51 additions & 0 deletions datasets/ebi_chembl/pipelines/ebi_chembl_old_data/pipeline.yaml
@@ -0,0 +1,51 @@
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

---
resources: ~

dag:
airflow_version: 2
initialize:
dag_id: ebi_chembl_old_data
default_args:
owner: "Google"
depends_on_past: False
start_date: '2022-07-01'
max_active_runs: 1
schedule_interval: "@weekly"
catchup: False
default_view: graph

tasks:
- operator: "KubernetesPodOperator"
description: "Copy patents-public-data.ebi_chembl dataset"
args:
task_id: "copy_bq_datasets"
name: "copy_bq_datasets"
namespace: "composer"
service_account_name: "datasets"
image_pull_policy: "Always"
image: "{{ var.json.ebi_chembl_old_data.container_registry.bq_data_transfer }}"
env_vars:
SOURCE_PROJECT_ID: "{{ var.json.ebi_chembl_old_data.source_project_id }}"
SOURCE_BQ_DATASET: "{{ var.json.ebi_chembl_old_data.source_bq_dataset }}"
TARGET_PROJECT_ID: "{{ var.value.gcp_project }}"
TARGET_BQ_DATASET: "{{ var.json.ebi_chembl_old_data.target_bq_dataset }}"
resources:
request_memory: "128M"
request_cpu: "200m"

graph_paths:
- "copy_bq_datasets"