Remove references from the code to Jira Issues (#37807)
* Remove references from the code to Jira Issues

* slightly change _parse_netloc_to_hostname description
Taragolis committed Mar 1, 2024
1 parent 119f1e8 commit b610c58
Showing 13 changed files with 25 additions and 32 deletions.
7 changes: 3 additions & 4 deletions airflow/executors/base_executor.py
@@ -164,10 +164,6 @@ def queue_task_instance(
"""Queues task instance."""
pool = pool or task_instance.pool

- # TODO (edgarRd): AIRFLOW-1985:
- # cfg_path is needed to propagate the config values if using impersonation
- # (run_as_user), given that there are different code paths running tasks.
- # For a long term solution we need to address AIRFLOW-1986
command_list_to_run = task_instance.command_as_list(
local=True,
mark_success=mark_success,
@@ -178,6 +174,9 @@ def queue_task_instance(
ignore_ti_state=ignore_ti_state,
pool=pool,
pickle_id=pickle_id,
+ # cfg_path is needed to propagate the config values if using impersonation
+ # (run_as_user), given that there are different code paths running tasks.
+ # https://github.com/apache/airflow/pull/2991
cfg_path=cfg_path,
)
self.log.debug("created command %s", command_list_to_run)
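For context on the relocated cfg_path comment: with impersonation (run_as_user), the task is re-launched through the CLI as another user, so the in-memory configuration must be written to a temporary file and handed to the child process. A minimal stdlib-only sketch of that pattern (the helper name is illustrative; Airflow's real plumbing lives in command_as_list and the `airflow tasks run --cfg-path` code path):

    import configparser
    import subprocess
    import tempfile

    def run_with_propagated_config(cmd: list[str], cfg: configparser.ConfigParser) -> None:
        # Serialize the in-memory config so the child process, possibly
        # running as a different user, reads exactly the same settings.
        with tempfile.NamedTemporaryFile(mode="w", suffix=".cfg", delete=False) as f:
            cfg.write(f)
            cfg_path = f.name
        subprocess.run([*cmd, "--cfg-path", cfg_path], check=True)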
@@ -37,8 +37,8 @@

def upgrade():
"""
- Increase column size from 50 to 256 characters, closing AIRFLOW-4737 caused
- by broker backends that might use unusually large queue names.
+ Increase column size from 50 to 256 characters, to accommodate
+ broker backends that might use unusually large queue names.
"""
# use batch_alter_table to support SQLite workaround
with op.batch_alter_table("task_instance") as batch_op:
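The batch_alter_table wrapper matters because SQLite cannot ALTER an existing column in place; in batch mode Alembic recreates the table with the new schema and copies the rows over. A hedged sketch of what such a migration body typically looks like (the column name "queue" is an assumption inferred from the docstring, not copied from the elided hunk body):

    import sqlalchemy as sa
    from alembic import op

    def upgrade():
        # Batch mode makes Alembic rebuild the table on SQLite,
        # which has no native ALTER COLUMN support.
        with op.batch_alter_table("task_instance") as batch_op:
            batch_op.alter_column("queue", type_=sa.String(256))  # assumed column name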
11 changes: 8 additions & 3 deletions airflow/models/connection.py
@@ -79,10 +79,15 @@ def sanitize_conn_id(conn_id: str | None, max_length=CONN_ID_MAX_LEN) -> str | None
return res.group(0)


- # Python automatically converts all letters to lowercase in hostname
- # See: https://issues.apache.org/jira/browse/AIRFLOW-3615
def _parse_netloc_to_hostname(uri_parts):
"""Parse a URI string to get the correct Hostname."""
"""
Parse a URI string to get the correct Hostname.
``urlparse(...).hostname`` or ``urlsplit(...).hostname`` returns value into the lowercase in most cases,
there are some exclusion exists for specific cases such as https://bugs.python.org/issue32323
In case if expected to get a path as part of hostname path,
then default behavior ``urlparse``/``urlsplit`` is unexpected.
"""
hostname = unquote(uri_parts.hostname or "")
if "/" in hostname:
hostname = uri_parts.netloc
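Both behaviors the new docstring describes are easy to demonstrate with the stdlib (URIs below are illustrative):

    from urllib.parse import unquote, urlparse

    # 1. Hostnames come back lowercased:
    print(urlparse("scheme://MixedCase.Example.ORG/schema").hostname)
    # -> 'mixedcase.example.org'

    # 2. A percent-encoded slash survives unquoting, which is how
    #    _parse_netloc_to_hostname detects a path smuggled into the host part:
    print(unquote(urlparse("scheme://somehost%2Ftmp/schema").hostname or ""))
    # -> 'somehost/tmp'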
1 change: 0 additions & 1 deletion airflow/models/taskinstance.py
@@ -1479,7 +1479,6 @@ def prev_attempted_tries(self) -> int:
Expose this for the Task Tries and Gantt graph views.
Using `try_number` throws off the counts for non-running tasks.
Also useful in error logging contexts to get the try number for the last try that was attempted.
- https://issues.apache.org/jira/browse/AIRFLOW-2143
"""
return self._try_number

7 changes: 2 additions & 5 deletions airflow/providers/apache/hive/hooks/hive.py
@@ -478,8 +478,7 @@ def load_file(
pvals = ", ".join(f"{k}='{v}'" for k, v in partition.items())
hql += f"PARTITION ({pvals})"

- # As a workaround for HIVE-10541, add a newline character
- # at the end of hql (AIRFLOW-2412).
+ # Add a newline character at the end of hql as a workaround for
+ # https://issues.apache.org/jira/browse/HIVE-10541.
hql += ";\n"

self.log.info(hql)
@@ -907,10 +906,8 @@ def _get_results(
with contextlib.closing(self.get_conn(schema)) as conn, contextlib.closing(conn.cursor()) as cur:
cur.arraysize = fetch_size or 1000

- # not all query services (e.g. impala AIRFLOW-4434) support the set command
-
db = self.get_connection(self.hiveserver2_conn_id) # type: ignore
-
+ # Not all query services (e.g. impala) support the set command
if db.extra_dejson.get("run_set_variable_statements", True):
env_context = get_context_from_env_var()
if hive_conf:
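The run_set_variable_statements check above means a connection can opt out of SET commands entirely, which is what backends such as Impala need. A sketch of such a connection (conn_id, host, and port are illustrative, not defaults):

    from airflow.models import Connection

    conn = Connection(
        conn_id="impala_hs2",               # illustrative id
        conn_type="hiveserver2",
        host="impala.example.com",          # illustrative host
        port=21050,                         # Impala's HiveServer2-compatible port
        extra='{"run_set_variable_statements": false}',
    )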
2 changes: 1 addition & 1 deletion airflow/timetables/simple.py
@@ -103,7 +103,7 @@ def next_dagrun_info(
return None
# "@once" always schedule to the start_date determined by the DAG and
# tasks, regardless of catchup or not. This has been the case since 1.10
- # and we're inheriting it. See AIRFLOW-1928.
+ # and we're inheriting it.
run_after = restriction.earliest
if restriction.latest is not None and run_after > restriction.latest:
return None
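In DAG terms, the rule in this hunk means an "@once" DAG gets exactly one run, stamped with start_date, however catchup is set. A minimal sketch (dag id and dates are illustrative):

    from datetime import datetime

    from airflow import DAG
    from airflow.operators.empty import EmptyOperator

    with DAG(
        dag_id="run_exactly_once",
        start_date=datetime(2024, 1, 1),
        schedule="@once",  # single run, pinned to start_date since 1.10
    ) as dag:
        EmptyOperator(task_id="only_task")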
5 changes: 2 additions & 3 deletions airflow/utils/sqlalchemy.py
@@ -72,9 +72,8 @@ def process_bind_param(self, value, dialect):
elif value.tzinfo is None:
raise ValueError("naive datetime is disallowed")
elif dialect.name == "mysql":
- # For mysql we should store timestamps as naive values
- # In MySQL 5.7 inserting timezone value fails with 'invalid-date'
- # See https://issues.apache.org/jira/browse/AIRFLOW-7001
+ # For MySQL versions prior to 8.0.19 we should send timestamps as naive values in UTC.
+ # See: https://dev.mysql.com/doc/refman/8.0/en/date-and-time-literals.html
return make_naive(value, timezone=utc)
return value.astimezone(utc)

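make_naive(value, timezone=utc) amounts to converting the datetime to UTC and then dropping tzinfo, which yields a literal that pre-8.0.19 MySQL accepts. A stdlib-only sketch of the same conversion:

    from datetime import datetime, timedelta, timezone

    aware = datetime(2024, 3, 1, 12, 0, tzinfo=timezone(timedelta(hours=5)))
    naive_utc = aware.astimezone(timezone.utc).replace(tzinfo=None)
    print(naive_utc)  # 2024-03-01 07:00:00 -- same instant, no tzinfo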
1 change: 0 additions & 1 deletion airflow/www/utils.py
@@ -655,7 +655,6 @@ def get_chart_height(dag):
Without this the charts are tiny and unreadable when DAGs have a large number of tasks).
Ideally nvd3 should allow for dynamic-height charts, that is charts that take up space
based on the size of the components within.
- TODO(aoen): See [AIRFLOW-1263].
"""
return 600 + len(dag.tasks) * 10

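Worked example of the formula above: a DAG with 40 tasks gets a chart height of 600 + 40 * 10 = 1000 pixels.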
2 changes: 1 addition & 1 deletion docs/apache-airflow/img/airflow_erd.sha256
@@ -1 +1 @@
- ed425ca6e30d06e1a1f64b9fd30043db7fb3e3a0196c57a77a4f1edc6a1d8ada
+ 8cf665c41c065c9368adf2e96450e8cc111dc0653bfabdee977fd6e4964f5646
4 changes: 2 additions & 2 deletions docs/apache-airflow/img/airflow_erd.svg
(SVG image diff not rendered.)
2 changes: 1 addition & 1 deletion tests/dags_corrupted/test_impersonation_custom.py
@@ -19,7 +19,7 @@

from datetime import datetime

- # AIRFLOW-1893 - Originally, impersonation tests were incomplete missing the use case when
+ # Originally, impersonation tests were incomplete, missing the use case when
# DAGs access custom packages usually made available through the PYTHONPATH environment
# variable. This file includes a DAG that imports a custom package made available and if
# run via the previous implementation of impersonation, will fail by not being able to
7 changes: 2 additions & 5 deletions tests/models/test_taskinstance.py
@@ -1612,12 +1612,9 @@ def test_xcom_pull_after_success(self, create_task_instance):
ti.xcom_push(key=key, value=value)
assert ti.xcom_pull(task_ids="test_xcom", key=key) == value
ti.run()
- # The second run and assert is to handle AIRFLOW-131 (don't clear on
- # prior success)
+ # Check that we do not clear Xcom until the task is certain to execute
assert ti.xcom_pull(task_ids="test_xcom", key=key) == value

- # Test AIRFLOW-703: Xcom shouldn't be cleared if the task doesn't
- # execute, even if dependencies are ignored
+ # Xcom shouldn't be cleared if the task doesn't execute, even if dependencies are ignored
ti.run(ignore_all_deps=True, mark_success=True)
assert ti.xcom_pull(task_ids="test_xcom", key=key) == value
# Xcom IS finally cleared once task has executed
4 changes: 1 addition & 3 deletions tests/www/views/test_views_tasks.py
@@ -599,9 +599,7 @@ def new_id_example_bash_operator():


def test_delete_dag_button_for_dag_on_scheduler_only(admin_client, new_id_example_bash_operator):
- # Test for JIRA AIRFLOW-3233 (PR 4069):
- # The delete-dag URL should be generated correctly for DAGs
- # that exist on the scheduler (DB) but not the webserver DagBag
+ # The delete-dag URL should be generated correctly for DAGs
+ # that exist in the DB but not in the webserver's DagBag
test_dag_id = new_id_example_bash_operator
resp = admin_client.get("/", follow_redirects=True)
check_content_in_response(f"/delete?dag_id={test_dag_id}", resp)