#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import collections
import copy
import itertools
import json
import logging
import math
import re
import sys
import traceback
import warnings
from bisect import insort_left
from collections import defaultdict
from datetime import datetime, timedelta
from functools import wraps
from json import JSONDecodeError
from operator import itemgetter
from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union
from urllib.parse import parse_qsl, unquote, urlencode, urlparse
import lazy_object_proxy
import markupsafe
import nvd3
import sqlalchemy as sqla
from croniter import croniter
from flask import (
Response,
abort,
before_render_template,
flash,
g,
jsonify,
make_response,
redirect,
render_template,
request,
send_from_directory,
session as flask_session,
url_for,
)
from flask_appbuilder import BaseView, ModelView, expose
from flask_appbuilder.actions import action
from flask_appbuilder.fieldwidgets import Select2Widget
from flask_appbuilder.models.sqla.filters import BaseFilter
from flask_appbuilder.security.decorators import has_access
from flask_appbuilder.security.views import (
PermissionModelView,
PermissionViewModelView,
ResetMyPasswordView,
ResetPasswordView,
RoleModelView,
UserDBModelView,
UserInfoEditView,
UserLDAPModelView,
UserOAuthModelView,
UserOIDModelView,
UserRemoteUserModelView,
UserStatsChartView,
ViewMenuModelView,
)
from flask_appbuilder.urltools import get_order_args, get_page_args, get_page_size_args
from flask_appbuilder.widgets import FormWidget
from flask_babel import lazy_gettext
from jinja2.utils import htmlsafe_json_dumps, pformat  # type: ignore
from markupsafe import Markup, escape
from pendulum.datetime import DateTime
from pendulum.parsing.exceptions import ParserError
from pygments import highlight, lexers
from pygments.formatters import HtmlFormatter
from sqlalchemy import Date, and_, desc, func, inspect, union_all
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import Session, joinedload
from wtforms import SelectField, validators
from wtforms.validators import InputRequired
import airflow
from airflow import models, plugins_manager, settings
from airflow.api.common.mark_tasks import (
set_dag_run_state_to_failed,
set_dag_run_state_to_queued,
set_dag_run_state_to_success,
set_state,
)
from airflow.compat.functools import cached_property
from airflow.configuration import AIRFLOW_CONFIG, conf
from airflow.exceptions import AirflowException, ParamValidationError
from airflow.executors.executor_loader import ExecutorLoader
from airflow.jobs.base_job import BaseJob
from airflow.jobs.scheduler_job import SchedulerJob
from airflow.jobs.triggerer_job import TriggererJob
from airflow.models import DAG, Connection, DagModel, DagTag, Log, SlaMiss, TaskFail, XCom, errors
from airflow.models.abstractoperator import AbstractOperator
from airflow.models.dagcode import DagCode
from airflow.models.dagrun import DagRun, DagRunType
from airflow.models.operator import Operator
from airflow.models.serialized_dag import SerializedDagModel
from airflow.models.taskinstance import TaskInstance
from airflow.providers_manager import ProvidersManager
from airflow.security import permissions
from airflow.ti_deps.dep_context import DepContext
from airflow.ti_deps.dependencies_deps import RUNNING_DEPS, SCHEDULER_QUEUED_DEPS
from airflow.timetables.base import DataInterval, TimeRestriction
from airflow.timetables.interval import CronDataIntervalTimetable
from airflow.utils import json as utils_json, timezone, yaml
from airflow.utils.airflow_flask_app import get_airflow_app
from airflow.utils.dates import infer_time_unit, scale_time_units
from airflow.utils.docs import get_doc_url_for_provider, get_docs_url
from airflow.utils.helpers import alchemy_to_dict
from airflow.utils.log import secrets_masker
from airflow.utils.log.log_reader import TaskLogReader
from airflow.utils.net import get_hostname
from airflow.utils.session import NEW_SESSION, create_session, provide_session
from airflow.utils.state import State, TaskInstanceState
from airflow.utils.strings import to_boolean
from airflow.utils.timezone import td_format, utcnow
from airflow.version import version
from airflow.www import auth, utils as wwwutils
from airflow.www.decorators import action_logging, gzipped
from airflow.www.forms import (
ConnectionForm,
DagRunEditForm,
DateTimeForm,
DateTimeWithNumRunsForm,
DateTimeWithNumRunsWithDagRunsForm,
TaskInstanceEditForm,
)
from airflow.www.widgets import AirflowModelListWidget, AirflowVariableShowWidget
PAGE_SIZE = conf.getint('webserver', 'page_size')
FILTER_TAGS_COOKIE = 'tags_filter'
FILTER_STATUS_COOKIE = 'dag_status_filter'
LINECHART_X_AXIS_TICKFORMAT = (
"function (d, i) { let xLabel;"
"if (i === undefined) {xLabel = d3.time.format('%H:%M, %d %b %Y')(new Date(parseInt(d)));"
"} else {xLabel = d3.time.format('%H:%M, %d %b')(new Date(parseInt(d)));} return xLabel;}"
)
def truncate_task_duration(task_duration): | |
""" | |
Cast the task_duration to an int if it exceeds 10 seconds, as an optimization
for large/huge dags; otherwise keep it as a float with 3 decimal places.
""" | |
return int(task_duration) if task_duration > 10.0 else round(task_duration, 3) | |
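# Illustrative doctest-style sketch (values hypothetical): durations above the
# 10-second cutoff are truncated to an int, shorter ones keep three decimals.
#
#   >>> truncate_task_duration(42.8567)
#   42
#   >>> truncate_task_duration(3.14159)
#   3.142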
def get_safe_url(url): | |
"""Given a user-supplied URL, ensure it points to our web server""" | |
valid_schemes = ['http', 'https', ''] | |
valid_netlocs = [request.host, ''] | |
if not url: | |
return url_for('Airflow.index') | |
parsed = urlparse(url) | |
# If the url contains semicolon, redirect it to homepage to avoid | |
# potential XSS. (Similar to https://github.com/python/cpython/pull/24297/files (bpo-42967)) | |
if ';' in unquote(url): | |
return url_for('Airflow.index') | |
query = parse_qsl(parsed.query, keep_blank_values=True) | |
url = parsed._replace(query=urlencode(query)).geturl() | |
if parsed.scheme in valid_schemes and parsed.netloc in valid_netlocs: | |
return url | |
return url_for('Airflow.index') | |
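# Illustrative sketch, assuming a Flask request context where request.host is
# "localhost:8080" (values hypothetical): relative URLs pass through, while
# foreign hosts and semicolon-bearing URLs fall back to the index view.
#
#   >>> get_safe_url('/home?status=active')
#   '/home?status=active'
#   >>> get_safe_url('https://evil.example.com/')   # foreign netloc
#   '/home'
#   >>> get_safe_url('/home;jsessionid=x')          # semicolon, XSS guard
#   '/home'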
def get_date_time_num_runs_dag_runs_form_data(www_request, session, dag): | |
"""Get Execution Data, Base Date & Number of runs from a Request""" | |
date_time = www_request.args.get('execution_date') | |
if date_time: | |
date_time = _safe_parse_datetime(date_time) | |
else: | |
date_time = dag.get_latest_execution_date(session=session) or timezone.utcnow() | |
base_date = www_request.args.get('base_date') | |
if base_date: | |
base_date = _safe_parse_datetime(base_date) | |
else: | |
# The DateTimeField widget truncates milliseconds and would lose
# the first dag run. Round to the next second.
base_date = (date_time + timedelta(seconds=1)).replace(microsecond=0) | |
default_dag_run = conf.getint('webserver', 'default_dag_run_display_number') | |
num_runs = www_request.args.get('num_runs', default=default_dag_run, type=int) | |
# When base_date has been rounded up because of the DateTimeField widget, we want | |
# to use the execution_date as the starting point for our query just to ensure a | |
# link targeting a specific dag run actually loads that dag run. If there are | |
# more than num_runs dag runs in the "rounded period" then those dagruns would get | |
# loaded and the actual requested run would be excluded by the limit(). Once | |
# the user has changed base date to be anything else we want to use that instead. | |
query_date = base_date | |
if date_time < base_date and date_time + timedelta(seconds=1) >= base_date: | |
query_date = date_time | |
drs = ( | |
session.query(DagRun) | |
.filter(DagRun.dag_id == dag.dag_id, DagRun.execution_date <= query_date) | |
.order_by(desc(DagRun.execution_date)) | |
.limit(num_runs) | |
.all() | |
) | |
dr_choices = [] | |
dr_state = None | |
for dr in drs: | |
dr_choices.append((dr.execution_date.isoformat(), dr.run_id)) | |
if date_time == dr.execution_date: | |
dr_state = dr.state | |
# Happens if base_date was changed and the selected dag run is not in result | |
if not dr_state and drs: | |
dr = drs[0] | |
date_time = dr.execution_date | |
dr_state = dr.state | |
return { | |
'dttm': date_time, | |
'base_date': base_date, | |
'num_runs': num_runs, | |
'execution_date': date_time.isoformat(), | |
'dr_choices': dr_choices, | |
'dr_state': dr_state, | |
} | |
def _safe_parse_datetime(v): | |
"""Parse datetime and return error message for invalid dates""" | |
try: | |
return timezone.parse(v) | |
except (TypeError, ParserError): | |
abort(400, f"Invalid datetime: {v!r}") | |
def dag_to_grid(dag, dag_runs, session): | |
""" | |
Create a nested dict representation of the DAG's TaskGroup and its children | |
used to construct the Graph and Grid views. | |
""" | |
query = ( | |
session.query( | |
TaskInstance.task_id, | |
TaskInstance.run_id, | |
TaskInstance.state, | |
sqla.func.count(sqla.func.coalesce(TaskInstance.state, sqla.literal('no_status'))).label( | |
'state_count' | |
), | |
sqla.func.min(TaskInstance.start_date).label('start_date'), | |
sqla.func.max(TaskInstance.end_date).label('end_date'), | |
sqla.func.max(TaskInstance._try_number).label('_try_number'), | |
) | |
.filter( | |
TaskInstance.dag_id == dag.dag_id, | |
TaskInstance.run_id.in_([dag_run.run_id for dag_run in dag_runs]), | |
) | |
.group_by(TaskInstance.task_id, TaskInstance.run_id, TaskInstance.state) | |
.order_by(TaskInstance.task_id, TaskInstance.run_id) | |
) | |
grouped_tis = {task_id: list(tis) for task_id, tis in itertools.groupby(query, key=lambda ti: ti.task_id)} | |
def task_group_to_grid(item, dag_runs, grouped_tis): | |
if isinstance(item, AbstractOperator): | |
def _get_summary(task_instance): | |
try_count = ( | |
task_instance._try_number | |
if task_instance._try_number != 0 or task_instance.state in State.running | |
else task_instance._try_number + 1 | |
) | |
return { | |
'task_id': task_instance.task_id, | |
'run_id': task_instance.run_id, | |
'state': task_instance.state, | |
'start_date': task_instance.start_date, | |
'end_date': task_instance.end_date, | |
'try_number': try_count, | |
} | |
def _mapped_summary(ti_summaries): | |
run_id = None | |
record = None | |
def set_overall_state(record): | |
for state in wwwutils.priority: | |
if state in record['mapped_states']: | |
record['state'] = state | |
break | |
if None in record['mapped_states']: | |
# When turning the dict into JSON we can't have None as a key, so use the
# string that the UI does
record['mapped_states']['no_status'] = record['mapped_states'].pop(None) | |
for ti_summary in ti_summaries: | |
if ti_summary.state is None: | |
ti_summary.state = 'no_status'  # normalize None to the UI's string key
if run_id != ti_summary.run_id: | |
run_id = ti_summary.run_id | |
if record: | |
set_overall_state(record) | |
yield record | |
record = { | |
'task_id': ti_summary.task_id, | |
'run_id': run_id, | |
'start_date': ti_summary.start_date, | |
'end_date': ti_summary.end_date, | |
'mapped_states': {ti_summary.state: ti_summary.state_count}, | |
'state': None, # We change this before yielding | |
} | |
continue | |
record['start_date'] = min( | |
filter(None, [record['start_date'], ti_summary.start_date]), default=None | |
) | |
record['end_date'] = max( | |
filter(None, [record['end_date'], ti_summary.end_date]), default=None | |
) | |
record['mapped_states'][ti_summary.state] = ti_summary.state_count | |
if record: | |
set_overall_state(record) | |
yield record | |
if item.is_mapped: | |
instances = list(_mapped_summary(grouped_tis.get(item.task_id, []))) | |
else: | |
instances = list(map(_get_summary, grouped_tis.get(item.task_id, []))) | |
return { | |
'id': item.task_id, | |
'instances': instances, | |
'label': item.label, | |
'extra_links': item.extra_links, | |
'is_mapped': item.is_mapped, | |
} | |
# Task Group | |
task_group = item | |
children = [ | |
task_group_to_grid(child, dag_runs, grouped_tis) for child in task_group.topological_sort() | |
] | |
def get_summary(dag_run, children): | |
child_instances = [child['instances'] for child in children if 'instances' in child] | |
child_instances = [ | |
item for sublist in child_instances for item in sublist if item['run_id'] == dag_run.run_id | |
] | |
children_start_dates = (item['start_date'] for item in child_instances if item) | |
children_end_dates = (item['end_date'] for item in child_instances if item) | |
children_states = {item['state'] for item in child_instances if item} | |
group_state = None | |
for state in wwwutils.priority: | |
if state in children_states: | |
group_state = state | |
break | |
group_start_date = min(filter(None, children_start_dates), default=None) | |
group_end_date = max(filter(None, children_end_dates), default=None) | |
return { | |
'task_id': task_group.group_id, | |
'run_id': dag_run.run_id, | |
'state': group_state, | |
'start_date': group_start_date, | |
'end_date': group_end_date, | |
} | |
# We don't need to calculate summaries for the root | |
if task_group.group_id is None: | |
return { | |
'id': task_group.group_id, | |
'label': task_group.label, | |
'children': children, | |
'instances': [], | |
} | |
group_summaries = [get_summary(dr, children) for dr in dag_runs] | |
return { | |
'id': task_group.group_id, | |
'label': task_group.label, | |
'children': children, | |
'tooltip': task_group.tooltip, | |
'instances': group_summaries, | |
} | |
return task_group_to_grid(dag.task_group, dag_runs, grouped_tis) | |
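# Shape sketch of the nested dict built above, for a hypothetical one-task
# DAG (trimmed; field values illustrative only):
#
#   {
#       'id': None,          # the root TaskGroup
#       'label': None,
#       'children': [
#           {'id': 'extract', 'label': 'extract', 'is_mapped': False,
#            'extra_links': [],
#            'instances': [{'task_id': 'extract', 'run_id': 'manual__...',
#                           'state': 'success', 'try_number': 1, ...}]},
#       ],
#       'instances': [],
#   }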
def task_group_to_dict(task_item_or_group): | |
""" | |
Create a nested dict representation of this TaskGroup and its children used to construct | |
the Graph. | |
""" | |
if isinstance(task_item_or_group, AbstractOperator): | |
return { | |
'id': task_item_or_group.task_id, | |
'value': { | |
'label': task_item_or_group.label, | |
'labelStyle': f"fill:{task_item_or_group.ui_fgcolor};", | |
'style': f"fill:{task_item_or_group.ui_color};", | |
'rx': 5, | |
'ry': 5, | |
}, | |
} | |
task_group = task_item_or_group | |
children = [ | |
task_group_to_dict(child) for child in sorted(task_group.children.values(), key=lambda t: t.label) | |
] | |
if task_group.upstream_group_ids or task_group.upstream_task_ids: | |
children.append( | |
{ | |
'id': task_group.upstream_join_id, | |
'value': { | |
'label': '', | |
'labelStyle': f"fill:{task_group.ui_fgcolor};", | |
'style': f"fill:{task_group.ui_color};", | |
'shape': 'circle', | |
}, | |
} | |
) | |
if task_group.downstream_group_ids or task_group.downstream_task_ids: | |
# This is the join node used to reduce the number of edges between two TaskGroups.
children.append( | |
{ | |
'id': task_group.downstream_join_id, | |
'value': { | |
'label': '', | |
'labelStyle': f"fill:{task_group.ui_fgcolor};", | |
'style': f"fill:{task_group.ui_color};", | |
'shape': 'circle', | |
}, | |
} | |
) | |
return { | |
"id": task_group.group_id, | |
'value': { | |
'label': task_group.label, | |
'labelStyle': f"fill:{task_group.ui_fgcolor};", | |
'style': f"fill:{task_group.ui_color}", | |
'rx': 5, | |
'ry': 5, | |
'clusterLabelPos': 'top', | |
'tooltip': task_group.tooltip, | |
}, | |
'children': children, | |
} | |
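# Shape sketch for a single operator node (hypothetical colors):
#
#   task_group_to_dict(my_task)
#   # -> {'id': 'my_task',
#   #     'value': {'label': 'my_task', 'labelStyle': 'fill:#000;',
#   #               'style': 'fill:#fff;', 'rx': 5, 'ry': 5}}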
def get_key_paths(input_dict): | |
"""Return a list of dot-separated dictionary paths""" | |
for key, value in input_dict.items(): | |
if isinstance(value, dict): | |
for sub_key in get_key_paths(value): | |
yield '.'.join((key, sub_key)) | |
else: | |
yield key | |
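# Doctest-style sketch (hypothetical input):
#
#   >>> list(get_key_paths({'a': 1, 'b': {'c': 2}}))
#   ['a', 'b.c']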
def get_value_from_path(key_path, content): | |
"""Return the value from a dictionary based on dot-separated path of keys""" | |
elem = content | |
for x in key_path.strip(".").split("."): | |
try: | |
x = int(x) | |
elem = elem[x] | |
except ValueError: | |
elem = elem.get(x) | |
return elem | |
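# Companion sketch to get_key_paths (hypothetical input); note that purely
# numeric segments index into sequences rather than look up dict keys.
#
#   >>> get_value_from_path('b.c', {'a': 1, 'b': {'c': 2}})
#   2
#   >>> get_value_from_path('1', ['zero', 'one'])
#   'one'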
def dag_edges(dag): | |
""" | |
Create the list of edges needed to construct the Graph view. | |
A special case is made if a TaskGroup is immediately upstream/downstream of another | |
TaskGroup or task. Two dummy nodes named upstream_join_id and downstream_join_id are | |
created for the TaskGroup. Instead of drawing an edge onto every task in the TaskGroup, | |
all edges are directed onto the dummy nodes. This is to cut down the number of edges on | |
the graph. | |
For example: A DAG with TaskGroups group1 and group2: | |
group1: task1, task2, task3 | |
group2: task4, task5, task6 | |
group2 is downstream of group1: | |
group1 >> group2 | |
Edges to add (This avoids having to create edges between every task in group1 and group2): | |
task1 >> downstream_join_id | |
task2 >> downstream_join_id | |
task3 >> downstream_join_id | |
downstream_join_id >> upstream_join_id | |
upstream_join_id >> task4 | |
upstream_join_id >> task5 | |
upstream_join_id >> task6 | |
""" | |
# Edges to add between TaskGroup | |
edges_to_add = set() | |
# Edges to remove between individual tasks that are replaced by edges_to_add. | |
edges_to_skip = set() | |
task_group_map = dag.task_group.get_task_group_dict() | |
def collect_edges(task_group): | |
"""Update edges_to_add and edges_to_skip according to TaskGroups.""" | |
if isinstance(task_group, AbstractOperator): | |
return | |
for target_id in task_group.downstream_group_ids: | |
# For every TaskGroup immediately downstream, add edges between downstream_join_id | |
# and upstream_join_id. Skip edges between individual tasks of the TaskGroups. | |
target_group = task_group_map[target_id] | |
edges_to_add.add((task_group.downstream_join_id, target_group.upstream_join_id)) | |
for child in task_group.get_leaves(): | |
edges_to_add.add((child.task_id, task_group.downstream_join_id)) | |
for target in target_group.get_roots(): | |
edges_to_skip.add((child.task_id, target.task_id)) | |
edges_to_skip.add((child.task_id, target_group.upstream_join_id)) | |
for child in target_group.get_roots(): | |
edges_to_add.add((target_group.upstream_join_id, child.task_id)) | |
edges_to_skip.add((task_group.downstream_join_id, child.task_id)) | |
# For every individual task immediately downstream, add edges between downstream_join_id and | |
# the downstream task. Skip edges between individual tasks of the TaskGroup and the | |
# downstream task. | |
for target_id in task_group.downstream_task_ids: | |
edges_to_add.add((task_group.downstream_join_id, target_id)) | |
for child in task_group.get_leaves(): | |
edges_to_add.add((child.task_id, task_group.downstream_join_id)) | |
edges_to_skip.add((child.task_id, target_id)) | |
# For every individual task immediately upstream, add edges between the upstream task | |
# and upstream_join_id. Skip edges between the upstream task and individual tasks | |
# of the TaskGroup. | |
for source_id in task_group.upstream_task_ids: | |
edges_to_add.add((source_id, task_group.upstream_join_id)) | |
for child in task_group.get_roots(): | |
edges_to_add.add((task_group.upstream_join_id, child.task_id)) | |
edges_to_skip.add((source_id, child.task_id)) | |
for child in task_group.children.values(): | |
collect_edges(child) | |
collect_edges(dag.task_group) | |
# Collect all the edges between individual tasks | |
edges = set() | |
def get_downstream(task): | |
for child in task.downstream_list: | |
edge = (task.task_id, child.task_id) | |
if edge not in edges: | |
edges.add(edge) | |
get_downstream(child) | |
for root in dag.roots: | |
get_downstream(root) | |
result = [] | |
# Build result dicts with the two ends of the edge, plus any extra metadata | |
# if we have it. | |
for source_id, target_id in sorted(edges.union(edges_to_add) - edges_to_skip): | |
record = {"source_id": source_id, "target_id": target_id} | |
label = dag.get_edge_info(source_id, target_id).get("label") | |
if label: | |
record["label"] = label | |
result.append(record) | |
return result | |
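# Result shape sketch, reusing the group1 >> group2 example from the
# docstring above (the 'label' key appears only when edge metadata sets it):
#
#   [{'source_id': 'group1.downstream_join_id',
#     'target_id': 'group2.upstream_join_id'},
#    {'source_id': 'group1.task1',
#     'target_id': 'group1.downstream_join_id'},
#    ...]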
def get_task_stats_from_query(qry): | |
""" | |
Return a dict of task counts, grouped by dag id and task status.
:param qry: The data in the format (<dag id>, <task state>, <is dag running>, <task count>), | |
ordered by <dag id> and <is dag running> | |
""" | |
data = {} | |
last_dag_id = None | |
has_running_dags = False | |
for dag_id, state, is_dag_running, count in qry: | |
if last_dag_id != dag_id: | |
last_dag_id = dag_id | |
has_running_dags = False | |
elif not is_dag_running and has_running_dags: | |
continue | |
if is_dag_running: | |
has_running_dags = True | |
if dag_id not in data: | |
data[dag_id] = {} | |
data[dag_id][state] = count | |
return data | |
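# Illustrative sketch (hypothetical rows, already ordered by dag id and
# is-dag-running DESC as the docstring requires): once a running run has been
# seen for a dag, rows from its completed runs are skipped.
#
#   >>> rows = [
#   ...     ('example_dag', 'running', True, 2),
#   ...     ('example_dag', 'success', False, 3),
#   ... ]
#   >>> get_task_stats_from_query(rows)
#   {'example_dag': {'running': 2}}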
def redirect_or_json(origin, msg, status="", status_code=200): | |
""" | |
Some endpoints are called by JavaScript; returning JSON allows us to
handle side-effects in-page more elegantly.
""" | |
if request.headers.get('Accept') == 'application/json': | |
if status == 'error' and status_code == 200: | |
status_code = 500 | |
return Response(response=msg, status=status_code, mimetype="application/json") | |
else: | |
if status: | |
flash(msg, status) | |
else: | |
flash(msg) | |
return redirect(origin) | |
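# Usage sketch (hypothetical call site): an XHR that sends
# "Accept: application/json" receives a JSON Response (an 'error' status
# bumps 200 to 500), while a plain browser request gets a flash message and
# a redirect back to the originating page.
#
#   return redirect_or_json(origin=url_for('Airflow.index'),
#                           msg='Marked success', status='success')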
###################################################################################### | |
# Error handlers | |
###################################################################################### | |
def not_found(error): | |
"""Show Not Found on screen for any error in the Webserver""" | |
return ( | |
render_template( | |
'airflow/not_found.html', | |
hostname=get_hostname() | |
if conf.getboolean('webserver', 'EXPOSE_HOSTNAME', fallback=True) | |
else 'redact', | |
), | |
404, | |
) | |
def show_traceback(error): | |
"""Show Traceback for a given error""" | |
return ( | |
render_template( | |
'airflow/traceback.html', | |
python_version=sys.version.split(" ")[0], | |
airflow_version=version, | |
hostname=get_hostname() | |
if conf.getboolean('webserver', 'EXPOSE_HOSTNAME', fallback=True) | |
else 'redact', | |
info=traceback.format_exc() | |
if conf.getboolean('webserver', 'EXPOSE_STACKTRACE', fallback=True) | |
else 'Error! Please contact server admin.', | |
), | |
500, | |
) | |
###################################################################################### | |
# BaseViews | |
###################################################################################### | |
class AirflowBaseView(BaseView): | |
"""Base View to set Airflow related properties""" | |
from airflow import macros | |
route_base = '' | |
extra_args = { | |
# Make our macros available to our UI templates too. | |
'macros': macros, | |
'get_docs_url': get_docs_url, | |
} | |
if not conf.getboolean('core', 'unit_test_mode'): | |
extra_args['sqlite_warning'] = settings.engine.dialect.name == 'sqlite' | |
extra_args['sequential_executor_warning'] = conf.get('core', 'executor') == 'SequentialExecutor' | |
line_chart_attr = { | |
'legend.maxKeyLength': 200, | |
} | |
def render_template(self, *args, **kwargs): | |
# Add triggerer_job only if we need it | |
if TriggererJob.is_needed(): | |
kwargs["triggerer_job"] = lazy_object_proxy.Proxy(TriggererJob.most_recent_job) | |
return super().render_template( | |
*args, | |
# Cache this at most once per request, not for the lifetime of the view instance | |
scheduler_job=lazy_object_proxy.Proxy(SchedulerJob.most_recent_job), | |
**kwargs, | |
) | |
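# Note on the idiom above: lazy_object_proxy.Proxy defers the wrapped call
# until the template actually touches the variable, so pages that never
# render scheduler_job/triggerer_job skip those database queries entirely.
# A minimal sketch of the same idiom (hypothetical function):
#
#   expensive = lazy_object_proxy.Proxy(load_expensive_value)
#   # load_expensive_value() runs only when `expensive` is first used.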
def add_user_permissions_to_dag(sender, template, context, **extra): | |
""" | |
Adds `.can_edit`, `.can_trigger`, and `.can_delete` properties | |
to DAG based on current user's permissions. | |
Located in `views.py` rather than the DAG model to keep | |
permissions logic out of the Airflow core. | |
""" | |
if 'dag' in context: | |
dag = context['dag'] | |
can_create_dag_run = get_airflow_app().appbuilder.sm.has_access( | |
permissions.ACTION_CAN_CREATE, permissions.RESOURCE_DAG_RUN | |
) | |
dag.can_edit = get_airflow_app().appbuilder.sm.can_edit_dag(dag.dag_id) | |
dag.can_trigger = dag.can_edit and can_create_dag_run | |
dag.can_delete = get_airflow_app().appbuilder.sm.can_delete_dag(dag.dag_id) | |
context['dag'] = dag | |
before_render_template.connect(add_user_permissions_to_dag) | |
class Airflow(AirflowBaseView): | |
"""Main Airflow application.""" | |
@expose('/health') | |
def health(self): | |
""" | |
An endpoint for checking the health status of the Airflow instance,
including the metadatabase and scheduler.
""" | |
payload = {'metadatabase': {'status': 'unhealthy'}} | |
latest_scheduler_heartbeat = None | |
scheduler_status = 'unhealthy' | |
payload['metadatabase'] = {'status': 'healthy'} | |
try: | |
scheduler_job = SchedulerJob.most_recent_job() | |
if scheduler_job: | |
latest_scheduler_heartbeat = scheduler_job.latest_heartbeat.isoformat() | |
if scheduler_job.is_alive(): | |
scheduler_status = 'healthy' | |
except Exception: | |
payload['metadatabase']['status'] = 'unhealthy' | |
payload['scheduler'] = { | |
'status': scheduler_status, | |
'latest_scheduler_heartbeat': latest_scheduler_heartbeat, | |
} | |
return wwwutils.json_response(payload) | |
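# Example /health payload (hypothetical heartbeat timestamp):
#
#   {"metadatabase": {"status": "healthy"},
#    "scheduler": {"status": "healthy",
#                  "latest_scheduler_heartbeat": "2022-06-01T00:00:00+00:00"}}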
@expose('/home') | |
@auth.has_access( | |
[ | |
(permissions.ACTION_CAN_READ, permissions.RESOURCE_WEBSITE), | |
] | |
) | |
def index(self): | |
"""Home view.""" | |
hide_paused_dags_by_default = conf.getboolean('webserver', 'hide_paused_dags_by_default') | |
default_dag_run = conf.getint('webserver', 'default_dag_run_display_number') | |
num_runs = request.args.get('num_runs', default=default_dag_run, type=int) | |
current_page = request.args.get('page', default=0, type=int) | |
arg_search_query = request.args.get('search') | |
arg_tags_filter = request.args.getlist('tags') | |
arg_status_filter = request.args.get('status') | |
arg_sorting_key = request.args.get('sorting_key', 'dag_id') | |
arg_sorting_direction = request.args.get('sorting_direction', default='asc') | |
if request.args.get('reset_tags') is not None: | |
flask_session[FILTER_TAGS_COOKIE] = None | |
# Remove the reset_tags=reset from the URL | |
return redirect(url_for('Airflow.index')) | |
cookie_val = flask_session.get(FILTER_TAGS_COOKIE) | |
if arg_tags_filter: | |
flask_session[FILTER_TAGS_COOKIE] = ','.join(arg_tags_filter) | |
elif cookie_val: | |
# If tags exist in cookie, but not URL, add them to the URL | |
return redirect(url_for('Airflow.index', tags=cookie_val.split(','))) | |
if arg_status_filter is None: | |
cookie_val = flask_session.get(FILTER_STATUS_COOKIE) | |
if cookie_val: | |
arg_status_filter = cookie_val | |
else: | |
arg_status_filter = 'active' if hide_paused_dags_by_default else 'all' | |
flask_session[FILTER_STATUS_COOKIE] = arg_status_filter | |
else: | |
status = arg_status_filter.strip().lower() | |
flask_session[FILTER_STATUS_COOKIE] = status | |
arg_status_filter = status | |
dags_per_page = PAGE_SIZE | |
start = current_page * dags_per_page | |
end = start + dags_per_page | |
# Get all the dag ids the user can access
filter_dag_ids = get_airflow_app().appbuilder.sm.get_accessible_dag_ids(g.user) | |
with create_session() as session: | |
# read orm_dags from the db | |
dags_query = session.query(DagModel).filter(~DagModel.is_subdag, DagModel.is_active) | |
if arg_search_query: | |
dags_query = dags_query.filter( | |
DagModel.dag_id.ilike('%' + arg_search_query + '%') | |
| DagModel.owners.ilike('%' + arg_search_query + '%') | |
) | |
if arg_tags_filter: | |
dags_query = dags_query.filter(DagModel.tags.any(DagTag.name.in_(arg_tags_filter))) | |
dags_query = dags_query.filter(DagModel.dag_id.in_(filter_dag_ids)) | |
all_dags = dags_query | |
active_dags = dags_query.filter(~DagModel.is_paused) | |
paused_dags = dags_query.filter(DagModel.is_paused) | |
is_paused_count = dict( | |
all_dags.with_entities(DagModel.is_paused, func.count(DagModel.dag_id)) | |
.group_by(DagModel.is_paused) | |
.all() | |
) | |
status_count_active = is_paused_count.get(False, 0) | |
status_count_paused = is_paused_count.get(True, 0) | |
all_dags_count = status_count_active + status_count_paused | |
if arg_status_filter == 'active': | |
current_dags = active_dags | |
num_of_all_dags = status_count_active | |
elif arg_status_filter == 'paused': | |
current_dags = paused_dags | |
num_of_all_dags = status_count_paused | |
else: | |
current_dags = all_dags | |
num_of_all_dags = all_dags_count | |
sort_column = DagModel.__table__.c.get(arg_sorting_key) | |
if sort_column is not None: | |
if arg_sorting_direction == 'desc': | |
sort_column = sort_column.desc() | |
current_dags = current_dags.order_by(sort_column) | |
dags = current_dags.options(joinedload(DagModel.tags)).offset(start).limit(dags_per_page).all() | |
user_permissions = g.user.perms | |
all_dags_editable = (permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG) in user_permissions | |
can_create_dag_run = ( | |
permissions.ACTION_CAN_CREATE, | |
permissions.RESOURCE_DAG_RUN, | |
) in user_permissions | |
all_dags_deletable = ( | |
permissions.ACTION_CAN_DELETE, | |
permissions.RESOURCE_DAG, | |
) in user_permissions | |
for dag in dags: | |
dag_resource_name = permissions.RESOURCE_DAG_PREFIX + dag.dag_id | |
if all_dags_editable: | |
dag.can_edit = True | |
else: | |
dag.can_edit = (permissions.ACTION_CAN_EDIT, dag_resource_name) in user_permissions | |
dag.can_trigger = dag.can_edit and can_create_dag_run | |
if all_dags_deletable: | |
dag.can_delete = True | |
else: | |
dag.can_delete = (permissions.ACTION_CAN_DELETE, dag_resource_name) in user_permissions | |
dagtags = session.query(DagTag.name).distinct(DagTag.name).all() | |
tags = [ | |
{"name": name, "selected": bool(arg_tags_filter and name in arg_tags_filter)} | |
for name, in dagtags | |
] | |
import_errors = session.query(errors.ImportError).order_by(errors.ImportError.id) | |
if (permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG) not in user_permissions: | |
# if the user doesn't have access to all DAGs, only display errors from visible DAGs | |
import_errors = import_errors.join( | |
DagModel, DagModel.fileloc == errors.ImportError.filename | |
).filter(DagModel.dag_id.in_(filter_dag_ids)) | |
for import_error in import_errors: | |
flash( | |
"Broken DAG: [{ie.filename}] {ie.stacktrace}".format(ie=import_error), | |
"dag_import_error", | |
) | |
from airflow.plugins_manager import import_errors as plugin_import_errors | |
for filename, stacktrace in plugin_import_errors.items(): | |
flash( | |
f"Broken plugin: [{filename}] {stacktrace}", | |
"error", | |
) | |
num_of_pages = int(math.ceil(num_of_all_dags / float(dags_per_page))) | |
state_color_mapping = State.state_color.copy() | |
state_color_mapping["null"] = state_color_mapping.pop(None) | |
page_title = conf.get(section="webserver", key="instance_name", fallback="DAGs") | |
page_title_has_markup = conf.getboolean( | |
section="webserver", key="instance_name_has_markup", fallback=False | |
) | |
dashboard_alerts = [ | |
fm for fm in settings.DASHBOARD_UIALERTS if fm.should_show(get_airflow_app().appbuilder.sm) | |
] | |
def _iter_parsed_moved_data_table_names(): | |
for table_name in inspect(session.get_bind()).get_table_names(): | |
segments = table_name.split("__", 3) | |
if len(segments) < 3: | |
continue | |
if segments[0] != settings.AIRFLOW_MOVED_TABLE_PREFIX: | |
continue | |
# Second segment is a version marker that we don't need to show. | |
yield segments[-1], table_name | |
if ( | |
permissions.ACTION_CAN_ACCESS_MENU, | |
permissions.RESOURCE_ADMIN_MENU, | |
) in user_permissions and conf.getboolean("webserver", "warn_deployment_exposure"): | |
robots_file_access_count = ( | |
session.query(Log) | |
.filter(Log.event == "robots") | |
.filter(Log.dttm > (utcnow() - timedelta(days=7))) | |
.count() | |
) | |
if robots_file_access_count > 0: | |
flash( | |
Markup( | |
'Recent requests have been made to /robots.txt. ' | |
'This indicates that this deployment may be accessible to the public internet. ' | |
'This warning can be disabled by setting webserver.warn_deployment_exposure=False in ' | |
'airflow.cfg. Read more about web deployment security <a href=' | |
f'"{get_docs_url("security/webserver.html")}">' | |
'here</a>' | |
), | |
"warning", | |
) | |
return self.render_template( | |
'airflow/dags.html', | |
dags=dags, | |
dashboard_alerts=dashboard_alerts, | |
migration_moved_data_alerts=sorted(set(_iter_parsed_moved_data_table_names())), | |
current_page=current_page, | |
search_query=arg_search_query if arg_search_query else '', | |
page_title=Markup(page_title) if page_title_has_markup else page_title, | |
page_size=dags_per_page, | |
num_of_pages=num_of_pages, | |
num_dag_from=min(start + 1, num_of_all_dags), | |
num_dag_to=min(end, num_of_all_dags), | |
num_of_all_dags=num_of_all_dags, | |
paging=wwwutils.generate_pages( | |
current_page, | |
num_of_pages, | |
search=escape(arg_search_query) if arg_search_query else None, | |
status=arg_status_filter if arg_status_filter else None, | |
tags=arg_tags_filter if arg_tags_filter else None, | |
), | |
num_runs=num_runs, | |
tags=tags, | |
state_color=state_color_mapping, | |
status_filter=arg_status_filter, | |
status_count_all=all_dags_count, | |
status_count_active=status_count_active, | |
status_count_paused=status_count_paused, | |
tags_filter=arg_tags_filter, | |
sorting_key=arg_sorting_key, | |
sorting_direction=arg_sorting_direction, | |
auto_refresh_interval=conf.getint('webserver', 'auto_refresh_interval'), | |
) | |
@expose('/dag_stats', methods=['POST']) | |
@auth.has_access( | |
[ | |
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG), | |
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_RUN), | |
] | |
) | |
@provide_session | |
def dag_stats(self, session=None): | |
"""Dag statistics.""" | |
dr = models.DagRun | |
allowed_dag_ids = get_airflow_app().appbuilder.sm.get_accessible_dag_ids(g.user) | |
dag_state_stats = session.query(dr.dag_id, dr.state, sqla.func.count(dr.state)).group_by( | |
dr.dag_id, dr.state | |
) | |
# Filter by post parameters | |
selected_dag_ids = {unquote(dag_id) for dag_id in request.form.getlist('dag_ids') if dag_id} | |
if selected_dag_ids: | |
filter_dag_ids = selected_dag_ids.intersection(allowed_dag_ids) | |
else: | |
filter_dag_ids = allowed_dag_ids | |
if not filter_dag_ids: | |
return wwwutils.json_response({}) | |
payload = {} | |
dag_state_stats = dag_state_stats.filter(dr.dag_id.in_(filter_dag_ids)) | |
data = {} | |
for dag_id, state, count in dag_state_stats: | |
if dag_id not in data: | |
data[dag_id] = {} | |
data[dag_id][state] = count | |
for dag_id in filter_dag_ids: | |
payload[dag_id] = [] | |
for state in State.dag_states: | |
count = data.get(dag_id, {}).get(state, 0) | |
payload[dag_id].append({'state': state, 'count': count}) | |
return wwwutils.json_response(payload) | |
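# Example /dag_stats payload entry (hypothetical dag and counts); one
# {state, count} record is emitted per state in State.dag_states:
#
#   {"example_dag": [{"state": "running", "count": 1},
#                    {"state": "success", "count": 7}, ...]}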
@expose('/task_stats', methods=['POST']) | |
@auth.has_access( | |
[ | |
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG), | |
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_RUN), | |
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE), | |
] | |
) | |
@provide_session | |
def task_stats(self, session=None): | |
"""Task Statistics""" | |
allowed_dag_ids = get_airflow_app().appbuilder.sm.get_accessible_dag_ids(g.user) | |
if not allowed_dag_ids: | |
return wwwutils.json_response({}) | |
# Filter by post parameters | |
selected_dag_ids = {unquote(dag_id) for dag_id in request.form.getlist('dag_ids') if dag_id} | |
if selected_dag_ids: | |
filter_dag_ids = selected_dag_ids.intersection(allowed_dag_ids) | |
else: | |
filter_dag_ids = allowed_dag_ids | |
running_dag_run_query_result = ( | |
session.query(DagRun.dag_id, DagRun.run_id) | |
.join(DagModel, DagModel.dag_id == DagRun.dag_id) | |
.filter(DagRun.state == State.RUNNING, DagModel.is_active) | |
) | |
running_dag_run_query_result = running_dag_run_query_result.filter(DagRun.dag_id.in_(filter_dag_ids)) | |
running_dag_run_query_result = running_dag_run_query_result.subquery('running_dag_run') | |
# Select all task_instances from active dag_runs. | |
running_task_instance_query_result = session.query( | |
TaskInstance.dag_id.label('dag_id'), | |
TaskInstance.state.label('state'), | |
sqla.literal(True).label('is_dag_running'), | |
).join( | |
running_dag_run_query_result, | |
and_( | |
running_dag_run_query_result.c.dag_id == TaskInstance.dag_id, | |
running_dag_run_query_result.c.run_id == TaskInstance.run_id, | |
), | |
) | |
if conf.getboolean('webserver', 'SHOW_RECENT_STATS_FOR_COMPLETED_RUNS', fallback=True): | |
last_dag_run = ( | |
session.query(DagRun.dag_id, sqla.func.max(DagRun.execution_date).label('execution_date')) | |
.join(DagModel, DagModel.dag_id == DagRun.dag_id) | |
.filter(DagRun.state != State.RUNNING, DagModel.is_active) | |
.group_by(DagRun.dag_id) | |
) | |
last_dag_run = last_dag_run.filter(DagRun.dag_id.in_(filter_dag_ids)) | |
last_dag_run = last_dag_run.subquery('last_dag_run') | |
# Select all task_instances from active dag_runs. | |
# If no dag_run is active, return task instances from most recent dag_run. | |
last_task_instance_query_result = ( | |
session.query( | |
TaskInstance.dag_id.label('dag_id'), | |
TaskInstance.state.label('state'), | |
sqla.literal(False).label('is_dag_running'), | |
) | |
.join(TaskInstance.dag_run) | |
.join( | |
last_dag_run, | |
and_( | |
last_dag_run.c.dag_id == TaskInstance.dag_id, | |
last_dag_run.c.execution_date == DagRun.execution_date, | |
), | |
) | |
) | |
final_task_instance_query_result = union_all( | |
last_task_instance_query_result, running_task_instance_query_result | |
).alias('final_ti') | |
else: | |
final_task_instance_query_result = running_task_instance_query_result.subquery('final_ti') | |
qry = ( | |
session.query( | |
final_task_instance_query_result.c.dag_id, | |
final_task_instance_query_result.c.state, | |
final_task_instance_query_result.c.is_dag_running, | |
sqla.func.count(), | |
) | |
.group_by( | |
final_task_instance_query_result.c.dag_id, | |
final_task_instance_query_result.c.state, | |
final_task_instance_query_result.c.is_dag_running, | |
) | |
.order_by( | |
final_task_instance_query_result.c.dag_id, | |
final_task_instance_query_result.c.is_dag_running.desc(), | |
) | |
) | |
data = get_task_stats_from_query(qry) | |
payload = {} | |
for dag_id in filter_dag_ids: | |
payload[dag_id] = [] | |
for state in State.task_states: | |
count = data.get(dag_id, {}).get(state, 0) | |
payload[dag_id].append({'state': state, 'count': count}) | |
return wwwutils.json_response(payload) | |
@expose('/last_dagruns', methods=['POST']) | |
@auth.has_access( | |
[ | |
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG), | |
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_RUN), | |
] | |
) | |
@provide_session | |
def last_dagruns(self, session=None): | |
"""Last DAG runs""" | |
allowed_dag_ids = get_airflow_app().appbuilder.sm.get_accessible_dag_ids(g.user) | |
# Filter by post parameters | |
selected_dag_ids = {unquote(dag_id) for dag_id in request.form.getlist('dag_ids') if dag_id} | |
if selected_dag_ids: | |
filter_dag_ids = selected_dag_ids.intersection(allowed_dag_ids) | |
else: | |
filter_dag_ids = allowed_dag_ids | |
if not filter_dag_ids: | |
return wwwutils.json_response({}) | |
last_runs_subquery = ( | |
session.query( | |
DagRun.dag_id, | |
sqla.func.max(DagRun.execution_date).label("max_execution_date"), | |
) | |
.group_by(DagRun.dag_id) | |
.filter(DagRun.dag_id.in_(filter_dag_ids)) # Only include accessible/selected DAGs. | |
.subquery("last_runs") | |
) | |
query = session.query( | |
DagRun.dag_id, | |
DagRun.start_date, | |
DagRun.end_date, | |
DagRun.state, | |
DagRun.execution_date, | |
DagRun.data_interval_start, | |
DagRun.data_interval_end, | |
).join( | |
last_runs_subquery, | |
and_( | |
last_runs_subquery.c.dag_id == DagRun.dag_id, | |
last_runs_subquery.c.max_execution_date == DagRun.execution_date, | |
), | |
) | |
resp = { | |
r.dag_id.replace('.', '__dot__'): { | |
"dag_id": r.dag_id, | |
"state": r.state, | |
"execution_date": wwwutils.datetime_to_string(r.execution_date), | |
"start_date": wwwutils.datetime_to_string(r.start_date), | |
"end_date": wwwutils.datetime_to_string(r.end_date), | |
"data_interval_start": wwwutils.datetime_to_string(r.data_interval_start), | |
"data_interval_end": wwwutils.datetime_to_string(r.data_interval_end), | |
} | |
for r in query | |
} | |
return wwwutils.json_response(resp) | |
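# Response keys replace '.' so dotted (subdag) dag_ids stay safe to use as
# JavaScript object keys (hypothetical dag_id):
#
#   'parent.child'.replace('.', '__dot__')   # -> 'parent__dot__child'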
@expose('/code') | |
@auth.has_access( | |
[ | |
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG), | |
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_CODE), | |
] | |
) | |
def legacy_code(self): | |
"""Redirect from url param.""" | |
return redirect(url_for('Airflow.code', **request.args)) | |
@expose('/dags/<string:dag_id>/code') | |
@auth.has_access( | |
[ | |
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG), | |
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_CODE), | |
] | |
) | |
@provide_session | |
def code(self, dag_id, session=None): | |
"""Dag Code.""" | |
all_errors = "" | |
dag_orm = None | |
try: | |
dag_orm = DagModel.get_dagmodel(dag_id, session=session) | |
code = DagCode.get_code_by_fileloc(dag_orm.fileloc) | |
html_code = Markup(highlight(code, lexers.PythonLexer(), HtmlFormatter(linenos=True))) | |
except Exception as e: | |
all_errors += ( | |
"Exception encountered during " | |
f"dag_id retrieval/dag retrieval fallback/code highlighting:\n\n{e}\n" | |
) | |
html_code = Markup('<p>Failed to load DAG file Code.</p><p>Details: {}</p>').format( | |
escape(all_errors) | |
) | |
wwwutils.check_import_errors(dag_orm.fileloc, session) | |
wwwutils.check_dag_warnings(dag_orm.dag_id, session) | |
return self.render_template( | |
'airflow/dag_code.html', | |
html_code=html_code, | |
dag=dag_orm, | |
dag_model=dag_orm, | |
title=dag_id, | |
root=request.args.get('root'), | |
wrapped=conf.getboolean('webserver', 'default_wrap'), | |
) | |
@expose('/dag_details') | |
@auth.has_access( | |
[ | |
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG), | |
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_RUN), | |
] | |
) | |
def legacy_dag_details(self): | |
"""Redirect from url param.""" | |
return redirect(url_for('Airflow.dag_details', **request.args)) | |
@expose('/dags/<string:dag_id>/details') | |
@auth.has_access( | |
[ | |
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG), | |
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG_RUN), | |
] | |
) | |
@provide_session | |
def dag_details(self, dag_id, session=None): | |
"""Get Dag details.""" | |
dag = get_airflow_app().dag_bag.get_dag(dag_id) | |
dag_model = DagModel.get_dagmodel(dag_id, session=session) | |
title = "DAG Details" | |
root = request.args.get('root', '') | |
wwwutils.check_import_errors(dag.fileloc, session) | |
wwwutils.check_dag_warnings(dag.dag_id, session) | |
states = ( | |
session.query(TaskInstance.state, sqla.func.count(TaskInstance.dag_id)) | |
.filter(TaskInstance.dag_id == dag_id) | |
.group_by(TaskInstance.state) | |
.all() | |
) | |
active_runs = models.DagRun.find(dag_id=dag_id, state=State.RUNNING, external_trigger=False) | |
tags = session.query(models.DagTag).filter(models.DagTag.dag_id == dag_id).all() | |
attrs_to_avoid = [ | |
"NUM_DAGS_PER_DAGRUN_QUERY", | |
"serialized_dag", | |
"tags", | |
"default_view", | |
"relative_fileloc", | |
"dag_id", | |
"description", | |
"max_active_runs", | |
"max_active_tasks", | |
"schedule_interval", | |
"owners", | |
"is_paused", | |
] | |
attrs_to_avoid.extend(wwwutils.get_attr_renderer().keys()) | |
dag_model_attrs: List[Tuple[str, Any]] = [ | |
(attr_name, attr) | |
for attr_name, attr in ( | |
(attr_name, getattr(dag_model, attr_name)) | |
for attr_name in dir(dag_model) | |
if not attr_name.startswith("_") and attr_name not in attrs_to_avoid | |
) | |
if not callable(attr) | |
] | |
return self.render_template( | |
'airflow/dag_details.html', | |
dag=dag, | |
title=title, | |
root=root, | |
states=states, | |
State=State, | |
active_runs=active_runs, | |
tags=tags, | |
dag_model_attrs=dag_model_attrs, | |
) | |
@expose('/rendered-templates') | |
@auth.has_access( | |
[ | |
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG), | |
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE), | |
] | |
) | |
@action_logging | |
@provide_session | |
def rendered_templates(self, session): | |
"""Get rendered Dag.""" | |
dag_id = request.args.get('dag_id') | |
task_id = request.args.get('task_id') | |
map_index = request.args.get('map_index', -1, type=int) | |
execution_date = request.args.get('execution_date') | |
dttm = _safe_parse_datetime(execution_date) | |
form = DateTimeForm(data={'execution_date': dttm}) | |
root = request.args.get('root', '') | |
logging.info("Retrieving rendered templates.") | |
dag: DAG = get_airflow_app().dag_bag.get_dag(dag_id) | |
dag_run = dag.get_dagrun(execution_date=dttm, session=session) | |
raw_task = dag.get_task(task_id).prepare_for_execution() | |
ti: TaskInstance | |
if dag_run is None: | |
# No DAG run matching given logical date. This usually means this | |
# DAG has never been run. Task instance rendering does not really | |
# make sense in this situation, but "works" prior to AIP-39. This | |
# "fakes" a temporary DagRun-TaskInstance association (not saved to | |
# database) for presentation only. | |
ti = TaskInstance(raw_task, map_index=map_index) | |
ti.dag_run = DagRun(dag_id=dag_id, execution_date=dttm) | |
else: | |
ti = dag_run.get_task_instance(task_id=task_id, map_index=map_index, session=session) | |
ti.refresh_from_task(raw_task) | |
try: | |
ti.get_rendered_template_fields(session=session) | |
except AirflowException as e: | |
msg = "Error rendering template: " + escape(e) | |
if e.__cause__: | |
msg += Markup("<br><br>OriginalError: ") + escape(e.__cause__) | |
flash(msg, "error") | |
except Exception as e: | |
flash("Error rendering template: " + str(e), "error") | |
# Ensure we are rendering the unmapped operator. Unmapping should be | |
# done automatically if template fields are rendered successfully; this | |
# only matters if get_rendered_template_fields() raised an exception. | |
# The following rendering won't show useful values in this case anyway, | |
# but we'll display some quasi-meaningful field names.
task = ti.task.unmap() | |
title = "Rendered Template" | |
html_dict = {} | |
renderers = wwwutils.get_attr_renderer() | |
for template_field in task.template_fields: | |
content = getattr(task, template_field) | |
renderer = task.template_fields_renderers.get(template_field, template_field) | |
if renderer in renderers: | |
if isinstance(content, (dict, list)): | |
json_content = json.dumps(content, sort_keys=True, indent=4) | |
html_dict[template_field] = renderers[renderer](json_content) | |
else: | |
html_dict[template_field] = renderers[renderer](content) | |
else: | |
html_dict[template_field] = Markup("<pre><code>{}</pre></code>").format(pformat(content)) | |
if isinstance(content, dict): | |
if template_field == 'op_kwargs': | |
for key, value in content.items(): | |
renderer = task.template_fields_renderers.get(key, key) | |
if renderer in renderers: | |
html_dict['.'.join([template_field, key])] = renderers[renderer](value) | |
else: | |
html_dict['.'.join([template_field, key])] = Markup( | |
"<pre><code>{}</pre></code>" | |
).format(pformat(value)) | |
else: | |
for dict_keys in get_key_paths(content): | |
template_path = '.'.join((template_field, dict_keys)) | |
renderer = task.template_fields_renderers.get(template_path, template_path) | |
if renderer in renderers: | |
content_value = get_value_from_path(dict_keys, content) | |
html_dict[template_path] = renderers[renderer](content_value) | |
return self.render_template( | |
'airflow/ti_code.html', | |
html_dict=html_dict, | |
dag=dag, | |
task_id=task_id, | |
execution_date=execution_date, | |
map_index=map_index, | |
form=form, | |
root=root, | |
title=title, | |
) | |
@expose('/rendered-k8s') | |
@auth.has_access( | |
[ | |
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG), | |
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE), | |
] | |
) | |
@action_logging | |
@provide_session | |
def rendered_k8s(self, session: Session = NEW_SESSION): | |
"""Get rendered k8s yaml.""" | |
if not settings.IS_K8S_OR_K8SCELERY_EXECUTOR: | |
abort(404) | |
dag_id = request.args.get('dag_id') | |
task_id = request.args.get('task_id') | |
if task_id is None: | |
logging.warning("Task id not passed in the request") | |
abort(400) | |
execution_date = request.args.get('execution_date') | |
dttm = _safe_parse_datetime(execution_date) | |
form = DateTimeForm(data={'execution_date': dttm}) | |
root = request.args.get('root', '') | |
map_index = request.args.get('map_index', -1, type=int) | |
logging.info("Retrieving rendered templates.") | |
dag: DAG = get_airflow_app().dag_bag.get_dag(dag_id) | |
task = dag.get_task(task_id) | |
dag_run = dag.get_dagrun(execution_date=dttm, session=session) | |
ti = dag_run.get_task_instance(task_id=task.task_id, map_index=map_index, session=session) | |
pod_spec = None | |
try: | |
pod_spec = ti.get_rendered_k8s_spec(session=session) | |
except AirflowException as e: | |
msg = "Error rendering Kubernetes POD Spec: " + escape(e) | |
if e.__cause__: | |
msg += Markup("<br><br>OriginalError: ") + escape(e.__cause__) | |
flash(msg, "error") | |
except Exception as e: | |
flash("Error rendering Kubernetes Pod Spec: " + str(e), "error") | |
title = "Rendered K8s Pod Spec" | |
html_dict = {} | |
renderers = wwwutils.get_attr_renderer() | |
if pod_spec: | |
content = yaml.dump(pod_spec) | |
content = renderers["yaml"](content) | |
else: | |
content = Markup("<pre><code>Error rendering Kubernetes POD Spec</pre></code>") | |
html_dict['k8s'] = content | |
return self.render_template( | |
'airflow/ti_code.html', | |
html_dict=html_dict, | |
dag=dag, | |
task_id=task_id, | |
execution_date=execution_date, | |
map_index=map_index, | |
form=form, | |
root=root, | |
title=title, | |
) | |
@expose('/get_logs_with_metadata') | |
@auth.has_access( | |
[ | |
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG), | |
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE), | |
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_LOG), | |
] | |
) | |
@action_logging | |
@provide_session | |
def get_logs_with_metadata(self, session=None): | |
"""Retrieve logs including metadata.""" | |
dag_id = request.args.get('dag_id') | |
task_id = request.args.get('task_id') | |
execution_date = request.args.get('execution_date') | |
map_index = request.args.get('map_index', -1, type=int) | |
try_number = request.args.get('try_number', type=int) | |
metadata = request.args.get('metadata', '{}') | |
response_format = request.args.get('format', 'json') | |
# Validate JSON metadata | |
try: | |
metadata = json.loads(metadata) | |
# metadata may be null | |
if not metadata: | |
metadata = {} | |
except json.decoder.JSONDecodeError: | |
error_message = "Invalid JSON metadata" | |
response = jsonify({"error": error_message}) | |
response.status_code = 400 | |
return response | |
# Convert string datetime into actual datetime | |
try: | |
execution_date = timezone.parse(execution_date) | |
except ValueError: | |
error_message = ( | |
f'Given execution date, {execution_date}, could not be identified as a date. ' | |
'Example date format: 2015-11-16T14:34:15+00:00' | |
) | |
response = jsonify({'error': error_message}) | |
response.status_code = 400 | |
return response | |
task_log_reader = TaskLogReader() | |
if not task_log_reader.supports_read: | |
return jsonify( | |
message="Task log handler does not support read logs.", | |
error=True, | |
metadata={"end_of_log": True}, | |
) | |
ti = ( | |
session.query(models.TaskInstance) | |
.filter_by(dag_id=dag_id, task_id=task_id, execution_date=execution_date, map_index=map_index) | |
.first() | |
) | |
if ti is None: | |
return jsonify( | |
message="*** Task instance did not exist in the DB\n", | |
error=True, | |
metadata={"end_of_log": True}, | |
) | |
try: | |
dag = get_airflow_app().dag_bag.get_dag(dag_id) | |
if dag: | |
ti.task = dag.get_task(ti.task_id) | |
if response_format == 'json': | |
logs, metadata = task_log_reader.read_log_chunks(ti, try_number, metadata) | |
message = logs[0] if try_number is not None else logs | |
return jsonify(message=message, metadata=metadata) | |
metadata['download_logs'] = True | |
attachment_filename = task_log_reader.render_log_filename(ti, try_number, session=session) | |
log_stream = task_log_reader.read_log_stream(ti, try_number, metadata) | |
return Response( | |
response=log_stream, | |
mimetype="text/plain", | |
headers={"Content-Disposition": f"attachment; filename={attachment_filename}"}, | |
) | |
except AttributeError as e: | |
error_message = [f"Task log handler does not support read logs.\n{str(e)}\n"] | |
metadata['end_of_log'] = True | |
return jsonify(message=error_message, error=True, metadata=metadata) | |
@expose('/log') | |
@auth.has_access( | |
[ | |
(permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG), | |
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE), | |
(permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_LOG), | |
] | |
) | |
@action_logging | |
@provide_session | |
def log(self, session=None): | |
"""Retrieve log.""" | |
dag_id = request.args.get('dag_id') | |
task_id = request.args.get('task_id') | |
map_index = request.args.get('map_index', -1, type=int) | |
execution_date = request.args.get('execution_date') | |
if execution_date: | |
dttm = _safe_parse_datetime(execution_date) | |
else: | |
dttm = None | |
form = DateTimeForm(data={'execution_date': dttm}) | |
dag_model = DagModel.get_dagmodel(dag_id) | |
ti = ( | |
session.query(models.TaskInstance) | |
.filter_by(dag_id=dag_id, task_id=task_id, execution_date=dttm, map_index=map_index) | |
.first() | |
) | |
num_logs = 0 | |
if ti is not None: | |
num_logs = ti.next_try_number - 1 | |
if ti.state in (State.UP_FOR_RESCHEDULE, State.DEFERRED): | |
# Tasks in the reschedule or deferred state decremented the try number
num_logs += 1 | |
logs = [''] * num_logs | |
root = request.args.get('root', '') | |
return self.render_template( | |
'airflow/ti_log.html', | |
logs=logs, | |
dag=dag_model, | |
title="Log by attempts", | |
dag_id=dag_id, | |
task_id=task_id, | |
execution_date=execution_date, | |
map_index=map_index, | |
form=form, | |
root=root, | |
wrapped=conf.getboolean('webserver', 'default_wrap'), | |
) | |
    @expose('/redirect_to_external_log')
    @auth.has_access(
        [
            (permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
            (permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
            (permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_LOG),
        ]
    )
    @action_logging
    @provide_session
    def redirect_to_external_log(self, session=None):
        """Redirect to an external log service."""
        dag_id = request.args.get('dag_id')
        task_id = request.args.get('task_id')
        execution_date = request.args.get('execution_date')
        dttm = _safe_parse_datetime(execution_date)
        map_index = request.args.get('map_index', -1, type=int)
        # Coerce try_number to int so the log handler receives a number
        # rather than the raw query-string value.
        try_number = request.args.get('try_number', 1, type=int)

        ti = (
            session.query(models.TaskInstance)
            .filter_by(dag_id=dag_id, task_id=task_id, execution_date=dttm, map_index=map_index)
            .first()
        )

        if not ti:
            flash(f"Task [{dag_id}.{task_id}] does not exist", "error")
            return redirect(url_for('Airflow.index'))

        task_log_reader = TaskLogReader()
        if not task_log_reader.supports_external_link:
            flash("Task log handler does not support external links", "error")
            return redirect(url_for('Airflow.index'))

        handler = task_log_reader.log_handler
        url = handler.get_external_log_url(ti, try_number)
        return redirect(url)
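
    # Hedged note: supports_external_link is only true for log handlers that
    # implement get_external_log_url(); the Elasticsearch task handler is one
    # example, linking out to its configured log frontend (e.g. Kibana).
    # Which handlers qualify depends on the installed provider and its
    # configuration.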
    @expose('/task')
    @auth.has_access(
        [
            (permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
            (permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
        ]
    )
    @action_logging
    @provide_session
    def task(self, session):
        """Retrieve task instance details."""
        dag_id = request.args.get('dag_id')
        task_id = request.args.get('task_id')
        execution_date = request.args.get('execution_date')
        dttm = _safe_parse_datetime(execution_date)
        map_index = request.args.get('map_index', -1, type=int)
        form = DateTimeForm(data={'execution_date': dttm})
        root = request.args.get('root', '')
        dag = get_airflow_app().dag_bag.get_dag(dag_id)

        if not dag or task_id not in dag.task_ids:
            flash(f"Task [{dag_id}.{task_id}] doesn't seem to exist at the moment", "error")
            return redirect(url_for('Airflow.index'))

        task = copy.copy(dag.get_task(task_id))
        task.resolve_template_files()

        ti: Optional[TaskInstance] = (
            session.query(TaskInstance)
            .options(
                # HACK: Eager-load relationships. This is needed because
                # multiple properties misuse provide_session(), which destroys
                # the session object that ti is bound to.
                joinedload(TaskInstance.queued_by_job, innerjoin=False),
                joinedload(TaskInstance.trigger, innerjoin=False),
            )
            .filter_by(execution_date=dttm, dag_id=dag_id, task_id=task_id, map_index=map_index)
            .one_or_none()
        )
        if ti is None:
            ti_attrs: Optional[List[Tuple[str, Any]]] = None
        else:
            ti.refresh_from_task(task)
            ti_attrs_to_skip = [
                'dag_id',
                'key',
                'mark_success_url',
                'log',
                'log_url',
                'task',
            ]
            # Some fields on TI are deprecated, but we don't want those warnings here.
            with warnings.catch_warnings():
                warnings.simplefilter("ignore", DeprecationWarning)
                all_ti_attrs = (
                    (name, getattr(ti, name))
                    for name in dir(ti)
                    if not name.startswith("_") and name not in ti_attrs_to_skip
                )
                ti_attrs = sorted((name, attr) for name, attr in all_ti_attrs if not callable(attr))
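
        # Hedged illustration of the shape produced above: dir()-based
        # introspection yields a sorted name/value pair for every public,
        # non-callable attribute, so ti_attrs ends up looking roughly like
        # [('duration', 12.3), ('hostname', 'worker-1'), ('state', 'success'), ...]
        # (the values shown here are hypothetical).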
        attr_renderers = wwwutils.get_attr_renderer()

        attrs_to_skip = getattr(task, 'HIDE_ATTRS_FROM_UI', set())

        def include_task_attrs(attr_name):
            return not (
                attr_name == 'HIDE_ATTRS_FROM_UI'
                or attr_name.startswith("_")
                or attr_name in attr_renderers
                or attr_name in attrs_to_skip
            )

        task_attrs = [
            (attr_name, attr)
            for attr_name, attr in (
                (attr_name, getattr(task, attr_name)) for attr_name in filter(include_task_attrs, dir(task))
            )
            if not callable(attr)
        ]
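
        # Hedged sketch of the HIDE_ATTRS_FROM_UI hook honored above: an
        # operator author can keep noisy internals off this page by declaring
        # something like (operator and attribute names are hypothetical):
        #
        #   class MyOperator(BaseOperator):
        #       HIDE_ATTRS_FROM_UI = frozenset({'my_large_internal_cache'})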
        # Color-code the special attributes that are code
        special_attrs_rendered = {
            attr_name: renderer(getattr(task, attr_name))
            for attr_name, renderer in attr_renderers.items()
            if hasattr(task, attr_name)
        }

        no_failed_deps_result = [
            (
                "Unknown",
                "All dependencies are met but the task instance is not running. In most "
                "cases this just means that the task will probably be scheduled soon "
                "unless:<br>\n- The scheduler is down or under heavy load<br>\n{}\n"
                "<br>\nIf this task instance does not start soon please contact your "
                "Airflow administrator for assistance.".format(
                    "- This task instance already ran and had its state changed manually "
                    "(e.g. cleared in the UI)<br>"
                    if ti and ti.state == State.NONE
                    else ""
                ),
            )
        ]

        # Use the scheduler's context to figure out which dependencies are not met
        if ti is None:
            failed_dep_reasons: List[Tuple[str, str]] = []
        else:
            dep_context = DepContext(SCHEDULER_QUEUED_DEPS)
            failed_dep_reasons = [
                (dep.dep_name, dep.reason) for dep in ti.get_failed_dep_statuses(dep_context=dep_context)
            ]
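
        # Hedged example of what the list built above can contain; the exact
        # wording comes from the individual dep classes, so these strings are
        # hypothetical:
        #
        #   [('Task Instance State', "Task is in the 'success' state."),
        #    ('Trigger Rule', "Task's trigger rule 'all_success' requires all
        #                      upstream tasks to have succeeded.")]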
        title = "Task Instance Details"
        return self.render_template(
            'airflow/task.html',
            task_attrs=task_attrs,
            ti_attrs=ti_attrs,
            failed_dep_reasons=failed_dep_reasons or no_failed_deps_result,
            task_id=task_id,
            execution_date=execution_date,
            map_index=map_index,
            special_attrs_rendered=special_attrs_rendered,
            form=form,
            root=root,
            dag=dag,
            title=title,
        )
    @expose('/xcom')
    @auth.has_access(
        [
            (permissions.ACTION_CAN_READ, permissions.RESOURCE_DAG),
            (permissions.ACTION_CAN_READ, permissions.RESOURCE_TASK_INSTANCE),
            (permissions.ACTION_CAN_READ, permissions.RESOURCE_XCOM),
        ]
    )
    @action_logging
    @provide_session
    def xcom(self, session=None):
        """Retrieve XCom."""
        dag_id = request.args.get('dag_id')
        task_id = request.args.get('task_id')
        map_index = request.args.get('map_index', -1, type=int)
        # execution_date is carried through both to re-render the form and to
        # select the XCom entries for this specific task instance below.
        execution_date = request.args.get('execution_date')
        dttm = _safe_parse_datetime(execution_date)

        form = DateTimeForm(data={'execution_date': dttm})
        root = request.args.get('root', '')
        dag = DagModel.get_dagmodel(dag_id)
        # Existence check only: any task instance for this (dag_id, task_id)
        # pair will do.
        ti = session.query(TaskInstance).filter_by(dag_id=dag_id, task_id=task_id).first()

        if not ti:
            flash(f"Task [{dag_id}.{task_id}] doesn't seem to exist at the moment", "error")
            return redirect(url_for('Airflow.index'))

        xcomlist = (
            session.query(XCom)
            .filter_by(dag_id=dag_id, task_id=task_id, execution_date=dttm, map_index=map_index)
            .all()
        )

        attributes = []
        for xcom in xcomlist:
            if not xcom.key.startswith('_'):
                attributes.append((xcom.key, xcom.value))

        title = "XCom"
        return self.render_template(
            'airflow/xcom.html',
            attributes=attributes,
            task_id=task_id,
            execution_date=execution_date,
            map_index=map_index,
            form=form,
            root=root,
            dag=dag,
            title=title,
        )
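
    # Hedged sketch of where the rows rendered above come from: tasks create
    # XCom entries at runtime, e.g. from inside an operator's execute()
    # (the key and value here are hypothetical):
    #
    #   context['ti'].xcom_push(key='row_count', value=42)
    #
    # Keys with a leading underscore are treated as internal and filtered out
    # of the page.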
    @expose('/run', methods=['POST'])
    @auth.has_access(
        [
            (permissions.ACTION_CAN_EDIT, permissions.RESOURCE_DAG),
            (permissions.ACTION_CAN_CREATE, permissions.RESOURCE_TASK_INSTANCE),
        ]
    )
    @action_logging
    @provide_session
    def run(self, session=None):
        """Run a task instance."""
        dag_id = request.form.get('dag_id')
        task_id = request.form.get('task_id')
        dag_run_id = request.form.get('dag_run_id')
        map_index = request.args.get('map_index', -1, type=int)
        origin = get_safe_url(request.form.get('origin'))
        dag: DAG = get_airflow_app().dag_bag.get_dag(dag_id)
        task = dag.get_task(task_id)

        ignore_all_deps = request.form.get('ignore_all_deps') == "true"
        ignore_task_deps = request.form.get('ignore_task_deps') == "true"
        ignore_ti_state = request.form.get('ignore_ti_state') == "true"

        executor = ExecutorLoader.get_default_executor()

        if not getattr(executor, "supports_ad_hoc_ti_run", False):
            msg = "Only works with the Celery, CeleryKubernetes or Kubernetes executors"
            return redirect_or_json(origin, msg, "error", 400)
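
        # Hedged note on the capability check above: executors opt in to
        # ad-hoc task runs by setting a supports_ad_hoc_ti_run class
        # attribute (the executors named in the message do); anything else,
        # such as the LocalExecutor, falls through the getattr() default of
        # False and is rejected with HTTP 400.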
        dag_run = dag.get_dagrun(run_id=dag_run_id)
        ti = dag_run.get_task_instance(task_id=task.task_id, map_index=map_index)
        if not ti:
            msg = "Could not queue task instance for execution, task instance is missing"
            return redirect_or_json(origin, msg, "error", 400)

        ti.refresh_from_task(task)

        # Make sure the task instance can be run
        dep_context = DepContext(
            deps=RUNNING_DEPS,
            ignore_all_deps=ignore_all_deps,
            ignore_task_deps=ignore_task_deps,
            ignore_ti_state=ignore_ti_state,
        )
        failed_deps = list(ti.get_failed_dep_statuses(dep_context=dep_context))
        if failed_deps:
failed_deps_str = ", ".join(f"{dep.dep_name}: {dep.reason}" for dep in |