Description
Looks like there is a bug in how Airflow renders templated operator arguments when any argument string ends with `.json`. Following is my DAG - please note the `"--files", "s3://dummy/spark/application.json"` entry in the `STEPS` variable.
```python
from datetime import timedelta

from airflow import DAG
from airflow.providers.amazon.aws.operators.emr_add_steps import EmrAddStepsOperator
from airflow.providers.amazon.aws.operators.emr_create_job_flow import EmrCreateJobFlowOperator
from airflow.providers.amazon.aws.operators.emr_terminate_job_flow import EmrTerminateJobFlowOperator
from airflow.providers.amazon.aws.sensors.emr_job_flow import EmrJobFlowSensor
from airflow.utils.dates import days_ago

DEFAULT_ARGS = {
    'owner': 'Commscope',
    'depends_on_past': False,
    'email': ['smishra@commscope.com'],
    'email_on_failure': False,
    'email_on_retry': False,
}

JOB_FLOW_OVERRIDES = {
    'Name': 'PiCalc',
    'ReleaseLabel': 'emr-5.29.0',
    'Instances': {
        'InstanceGroups': [
            {
                'Name': 'Master node',
                'Market': 'SPOT',
                'InstanceRole': 'MASTER',
                'InstanceType': 'm1.medium',
                'InstanceCount': 1,
            }
        ],
        'KeepJobFlowAliveWhenNoSteps': True,
        'TerminationProtected': False,
    },
    'JobFlowRole': 'EMR_EC2_DefaultRole',
    'ServiceRole': 'EMR_DefaultRole',
}

STEPS = [{
    "Name": "Process data",
    "ActionOnFailure": "CONTINUE",
    "HadoopJarStep": {
        "Jar": "command-runner.jar",
        "Args": [
            "--class", "com.dummy.Application",
            # the ".json" suffix on the next value is what triggers the error
            "--files", "s3://dummy/spark/application.json",
            "--driver-java-options",
            "-Dlog4j.configuration=log4j.properties",
            "--driver-java-options",
            "-Dconfig.resource=application.json",
            "--driver-java-options",
            "s3://dummy/spark/app-jar-with-dependencies.jar",
            "application.json",
        ],
    },
}]

with DAG(
    dag_id='data_processing',
    default_args=DEFAULT_ARGS,
    dagrun_timeout=timedelta(hours=2),
    start_date=days_ago(2),
    schedule_interval='0 3 * * *',
    tags=['inquire', 'bronze'],
) as dag:
    job_flow_creator = EmrCreateJobFlowOperator(
        task_id='launch_emr_cluster',
        job_flow_overrides=JOB_FLOW_OVERRIDES,
        aws_conn_id='aws_default',
        emr_conn_id='emr_default',
    )
    job_flow_sensor = EmrJobFlowSensor(
        task_id='check_cluster',
        job_flow_id="{{ task_instance.xcom_pull(task_ids='launch_emr_cluster', key='return_value') }}",
        target_states=['RUNNING', 'WAITING'],
        aws_conn_id='aws_default',
    )
    proc_step = EmrAddStepsOperator(
        task_id='process_data',
        job_flow_id="{{ task_instance.xcom_pull(task_ids='launch_emr_cluster', key='return_value') }}",
        aws_conn_id='aws_default',
        steps=STEPS,
    )
    job_flow_terminator = EmrTerminateJobFlowOperator(
        task_id='terminate_emr_cluster',
        job_flow_id="{{ task_instance.xcom_pull(task_ids='launch_emr_cluster', key='return_value') }}",
        aws_conn_id='aws_default',
        trigger_rule='all_done',
    )

    job_flow_creator >> job_flow_sensor >> proc_step >> job_flow_terminator
```
The cluster launches successfully, but the Airflow task fails with the following error:
```
[2020-08-21 15:06:42,307] {taskinstance.py:1145} ERROR - s3://dummy/spark/application.json
Traceback (most recent call last):
  File "/usr/local/lib/python3.7/site-packages/airflow/models/taskinstance.py", line 964, in _run_raw_task
    self.render_templates(context=context)
  ...
  ...
  File "/usr/local/lib/python3.7/site-packages/jinja2/loaders.py", line 187, in get_source
    raise TemplateNotFound(template)
jinja2.exceptions.TemplateNotFound: s3://dummy/spark/application.json
```
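For what it's worth, the failure appears to come from Airflow's template rendering rather than from EMR itself: `steps` is a templated field on `EmrAddStepsOperator`, and the operator declares `template_ext = ('.json',)`, so during `render_templates` any string value ending in `.json` is treated as the path of a Jinja template file and handed to the Jinja loader, which cannot find `s3://dummy/spark/application.json` in the DAG folder and raises `TemplateNotFound`. A minimal sketch of a workaround, assuming subclassing the operator is acceptable (the class name below is hypothetical, not part of the provider):

```python
from airflow.providers.amazon.aws.operators.emr_add_steps import EmrAddStepsOperator


class EmrAddStepsOperatorNoTemplateExt(EmrAddStepsOperator):
    """Sketch of a workaround: disable extension-based template file loading.

    With template_ext empty, strings inside `steps` that end in ".json"
    are rendered as inline Jinja strings instead of being looked up as
    template files, so "s3://dummy/spark/application.json" passes through
    unchanged. (Hypothetical subclass, not an official fix.)
    """

    template_ext = ()
```

With that in place, `proc_step` would be built from `EmrAddStepsOperatorNoTemplateExt` instead of `EmrAddStepsOperator`; the rest of the DAG stays the same.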