Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Update sample dag and doc for RDS #23651

Merged
merged 3 commits into from
May 22, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
149 changes: 0 additions & 149 deletions airflow/providers/amazon/aws/example_dags/example_rds.py

This file was deleted.

58 changes: 58 additions & 0 deletions airflow/providers/amazon/aws/example_dags/example_rds_event.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

from datetime import datetime
from os import getenv

from airflow import DAG
from airflow.models.baseoperator import chain
from airflow.providers.amazon.aws.operators.rds import (
RdsCreateEventSubscriptionOperator,
RdsDeleteEventSubscriptionOperator,
)

# Demo configuration — read from the environment, falling back to placeholders.
SUBSCRIPTION_NAME = getenv("SUBSCRIPTION_NAME", "subscription-name")
SNS_TOPIC_ARN = getenv("SNS_TOPIC_ARN", "arn:aws:sns:<region>:<account number>:MyTopic")
RDS_DB_IDENTIFIER = getenv("RDS_DB_IDENTIFIER", "database-identifier")

# Example DAG: create an RDS event subscription that forwards 'availability'
# events for one DB instance to an SNS topic, then tear the subscription down.
with DAG(
    dag_id='example_rds_event',
    schedule_interval=None,
    start_date=datetime(2021, 1, 1),
    tags=['example'],
    catchup=False,
) as dag:
    # [START howto_operator_rds_create_event_subscription]
    create_subscription = RdsCreateEventSubscriptionOperator(
        task_id='create_subscription',
        subscription_name=SUBSCRIPTION_NAME,
        sns_topic_arn=SNS_TOPIC_ARN,
        source_type='db-instance',
        source_ids=[RDS_DB_IDENTIFIER],
        event_categories=['availability'],
    )
    # [END howto_operator_rds_create_event_subscription]

    # [START howto_operator_rds_delete_event_subscription]
    delete_subscription = RdsDeleteEventSubscriptionOperator(
        task_id='delete_subscription',
        subscription_name=SUBSCRIPTION_NAME,
    )
    # [END howto_operator_rds_delete_event_subscription]

    # Create first, then clean up.
    create_subscription >> delete_subscription
71 changes: 71 additions & 0 deletions airflow/providers/amazon/aws/example_dags/example_rds_export.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

from datetime import datetime
from os import getenv

from airflow import DAG
from airflow.models.baseoperator import chain
from airflow.providers.amazon.aws.operators.rds import RdsCancelExportTaskOperator, RdsStartExportTaskOperator
from airflow.providers.amazon.aws.sensors.rds import RdsExportTaskExistenceSensor

# Demo configuration — read from the environment, falling back to placeholders.
RDS_EXPORT_TASK_IDENTIFIER = getenv("RDS_EXPORT_TASK_IDENTIFIER", "export-task-identifier")
RDS_EXPORT_SOURCE_ARN = getenv(
    "RDS_EXPORT_SOURCE_ARN", "arn:aws:rds:<region>:<account number>:snapshot:snap-id"
)
BUCKET_NAME = getenv("BUCKET_NAME", "bucket-name")
BUCKET_PREFIX = getenv("BUCKET_PREFIX", "bucket-prefix")
ROLE_ARN = getenv("ROLE_ARN", "arn:aws:iam::<account number>:role/Role")
KMS_KEY_ID = getenv("KMS_KEY_ID", "arn:aws:kms:<region>:<account number>:key/key-id")


# Example DAG: start an RDS snapshot export to S3, cancel it, then use a
# sensor to wait until the export task reports the 'canceled' status.
with DAG(
    dag_id='example_rds_export',
    schedule_interval=None,
    start_date=datetime(2021, 1, 1),
    tags=['example'],
    catchup=False,
) as dag:
    # [START howto_operator_rds_start_export_task]
    start_export = RdsStartExportTaskOperator(
        task_id='start_export',
        export_task_identifier=RDS_EXPORT_TASK_IDENTIFIER,
        source_arn=RDS_EXPORT_SOURCE_ARN,
        s3_bucket_name=BUCKET_NAME,
        s3_prefix=BUCKET_PREFIX,
        iam_role_arn=ROLE_ARN,
        kms_key_id=KMS_KEY_ID,
    )
    # [END howto_operator_rds_start_export_task]

    # [START howto_operator_rds_cancel_export]
    cancel_export = RdsCancelExportTaskOperator(
        task_id='cancel_export',
        export_task_identifier=RDS_EXPORT_TASK_IDENTIFIER,
    )
    # [END howto_operator_rds_cancel_export]

    # [START howto_sensor_rds_export_task_existence]
    export_sensor = RdsExportTaskExistenceSensor(
        task_id='export_sensor',
        export_task_identifier=RDS_EXPORT_TASK_IDENTIFIER,
        target_statuses=['canceled'],
    )
    # [END howto_sensor_rds_export_task_existence]

    # Start, cancel, then confirm the cancellation took effect.
    start_export >> cancel_export >> export_sensor
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

from datetime import datetime
from os import getenv

from airflow import DAG
from airflow.models.baseoperator import chain
from airflow.providers.amazon.aws.operators.rds import (
RdsCopyDbSnapshotOperator,
RdsCreateDbSnapshotOperator,
RdsDeleteDbSnapshotOperator,
)
from airflow.providers.amazon.aws.sensors.rds import RdsSnapshotExistenceSensor

# Demo configuration — read from the environment, falling back to placeholders.
RDS_DB_IDENTIFIER = getenv("RDS_DB_IDENTIFIER", "database-identifier")
RDS_DB_SNAPSHOT_IDENTIFIER = getenv("RDS_DB_SNAPSHOT_IDENTIFIER", "database-1-snap")

# Example DAG: snapshot an RDS instance, wait for the snapshot to become
# available, copy it, then delete the original snapshot.
with DAG(
    dag_id='example_rds_snapshot',
    schedule_interval=None,
    start_date=datetime(2021, 1, 1),
    tags=['example'],
    catchup=False,
) as dag:
    # [START howto_operator_rds_create_db_snapshot]
    create_snapshot = RdsCreateDbSnapshotOperator(
        task_id='create_snapshot',
        db_type='instance',
        db_identifier=RDS_DB_IDENTIFIER,
        db_snapshot_identifier=RDS_DB_SNAPSHOT_IDENTIFIER,
    )
    # [END howto_operator_rds_create_db_snapshot]

    # Wait on the snapshot created above, so the identifier must match the
    # one passed to create_snapshot (previously this wrongly used the DB
    # identifier, which names the instance rather than the snapshot).
    # [START howto_sensor_rds_snapshot_existence]
    snapshot_sensor = RdsSnapshotExistenceSensor(
        task_id='snapshot_sensor',
        db_type='instance',
        db_snapshot_identifier=RDS_DB_SNAPSHOT_IDENTIFIER,
        target_statuses=['available'],
    )
    # [END howto_sensor_rds_snapshot_existence]

    # [START howto_operator_rds_copy_snapshot]
    copy_snapshot = RdsCopyDbSnapshotOperator(
        task_id='copy_snapshot',
        db_type='instance',
        source_db_snapshot_identifier=RDS_DB_SNAPSHOT_IDENTIFIER,
        target_db_snapshot_identifier=f'{RDS_DB_SNAPSHOT_IDENTIFIER}-copy',
    )
    # [END howto_operator_rds_copy_snapshot]

    # Delete the original snapshot (the copy is left in place).
    # [START howto_operator_rds_delete_snapshot]
    delete_snapshot = RdsDeleteDbSnapshotOperator(
        task_id='delete_snapshot',
        db_type='instance',
        db_snapshot_identifier=RDS_DB_SNAPSHOT_IDENTIFIER,
    )
    # [END howto_operator_rds_delete_snapshot]

    chain(create_snapshot, snapshot_sensor, copy_snapshot, delete_snapshot)
4 changes: 2 additions & 2 deletions airflow/providers/amazon/aws/sensors/rds.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ class RdsSnapshotExistenceSensor(RdsBaseSensor):

.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:RdsSnapshotExistenceSensor`
:ref:`howto/sensor:RdsSnapshotExistenceSensor`

:param db_type: Type of the DB - either "instance" or "cluster"
:param db_snapshot_identifier: The identifier for the DB snapshot
Expand Down Expand Up @@ -113,7 +113,7 @@ class RdsExportTaskExistenceSensor(RdsBaseSensor):

.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:RdsExportTaskExistenceSensor`
:ref:`howto/sensor:RdsExportTaskExistenceSensor`

:param export_task_identifier: A unique identifier for the snapshot export task.
:param target_statuses: Target status of export task
Expand Down
4 changes: 2 additions & 2 deletions docs/apache-airflow-providers-amazon/operators/ec2.rst
Original file line number Diff line number Diff line change
Expand Up @@ -27,11 +27,11 @@ Prerequisite Tasks

.. include:: _partials/prerequisite_tasks.rst

.. _howto/operator:EC2StartInstanceOperator:

Operators
---------

.. _howto/operator:EC2StartInstanceOperator:

Start an Amazon EC2 instance
============================

Expand Down
Loading