From 5d61580c572118ed97b9ff32d7e3684be1fcb755 Mon Sep 17 00:00:00 2001 From: Kaxil Naik Date: Sun, 21 Jun 2020 15:44:27 +0100 Subject: [PATCH] Enable 'Public function Missing Docstrings' PyDocStyle Check (#9463) --- .pre-commit-config.yaml | 2 +- airflow/configuration.py | 23 +++++++++++-------- airflow/example_dags/libs/helper.py | 2 +- airflow/lineage/entities.py | 2 +- airflow/logging_config.py | 2 ++ .../versions/03bc53e68815_add_sm_dag_index.py | 4 ++-- .../versions/05f30312d566_merge_heads.py | 4 ++-- .../0a2a5b66e19d_add_task_reschedule_table.py | 10 ++++---- .../0e2a74e0fc9f_add_time_zone_awareness.py | 4 ++-- ...add_dag_id_state_index_on_dag_run_table.py | 4 ++-- .../13eb55f81627_for_compatibility.py | 4 ++-- .../1507a7289a2f_create_is_encrypted.py | 4 ++-- ...e3_add_is_encrypted_column_to_variable_.py | 4 ++-- .../versions/1b38cef5b76e_add_dagrun.py | 4 ++-- .../211e584da130_add_ti_state_index.py | 4 ++-- ...24_add_executor_config_to_task_instance.py | 4 ++-- .../versions/2e541a1dcfed_task_duration.py | 4 ++-- .../2e82aab8ef20_rename_user_table.py | 4 ++-- ...0f54d61_more_logging_into_task_isntance.py | 4 ++-- ...4_add_kubernetes_resource_checkpointing.py | 4 ++-- .../versions/40e67319e3a9_dagrun_config.py | 4 ++-- .../41f5f12752f8_add_superuser_field.py | 4 ++-- .../versions/4446e08588_dagrun_start_end.py | 4 ++-- ..._add_fractional_seconds_to_mysql_tables.py | 4 ++-- .../versions/4ebbffe0a39a_merge_heads.py | 4 ++-- .../502898887f84_adding_extra_to_log.py | 4 ++-- .../versions/52d714495f0_job_id_indices.py | 4 ++-- ...61833c1c74b_add_password_column_to_user.py | 4 ++-- ...d17757c7a_add_pid_field_to_taskinstance.py | 2 ++ ...de9cddf6c9_add_task_fails_journal_table.py | 4 ++-- .../8504051e801b_xcom_dag_task_indices.py | 2 ++ .../856955da8476_fix_sqlite_foreign_key.py | 3 ++- ...5c0_add_kubernetes_scheduler_uniqueness.py | 4 ++-- ...c8_task_reschedule_fk_on_cascade_delete.py | 4 ++-- .../947454bf1dff_add_ti_job_id_index.py | 4 ++-- 
.../952da73b5eff_add_dag_code_table.py | 1 + .../versions/9635ae0956e7_index_faskfail.py | 4 ++-- ...b_add_pool_slots_field_to_task_instance.py | 4 ++-- .../a56c9515abdc_remove_dag_stat_table.py | 2 ++ .../versions/b0125267960b_merge_heads.py | 4 ++-- ...6_add_a_column_to_track_the_encryption_.py | 4 ++-- ...dd_notification_sent_column_to_sla_miss.py | 4 ++-- ...6_make_xcom_value_column_a_large_binary.py | 4 ++-- .../bf00311e1990_add_index_to_taskinstance.py | 4 ++-- .../c8ffec048a3b_add_fields_to_dag.py | 4 ++-- ...7_add_max_tries_column_to_task_instance.py | 4 ++-- .../cf5dc11e79ad_drop_user_and_chart.py | 4 ++-- ...ae31099d61_increase_text_size_for_mysql.py | 4 ++-- .../versions/dd25f486b8ea_add_idx_log_dag.py | 4 ++-- ...4ecb8fbee3_add_schedule_interval_to_dag.py | 4 ++-- .../versions/e3a246e0dc1_current_schema.py | 4 ++-- ...433877c24_fix_mysql_not_null_constraint.py | 4 ++-- .../f2ca10b85618_add_dag_stats_table.py | 4 ++-- airflow/models/connection.py | 1 + .../example_dags/example_docker_copy_data.py | 1 + airflow/stats.py | 7 ++++++ airflow/www/utils.py | 22 +++++++++++++----- airflow/www/views.py | 3 +++ 58 files changed, 144 insertions(+), 109 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 231eb6318779d..293577aea12a0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -182,7 +182,7 @@ metastore_browser/templates/.*\\.html$|.*\\.jinja2" name: Run pydocstyle args: - --convention=pep257 - - --add-ignore=D100,D102,D103,D104,D105,D106,D107,D200,D202,D204,D205,D207,D208,D210,D400,D401,D403 + - --add-ignore=D100,D102,D104,D105,D106,D107,D200,D202,D204,D205,D207,D208,D210,D400,D401,D403 exclude: ^tests/.*\.py$|^scripts/.*\.py$|^dev|^backport_packages|^kubernetes_tests - repo: local hooks: diff --git a/airflow/configuration.py b/airflow/configuration.py index dcf4e1fb0c17b..d933cdd9deb37 100644 --- a/airflow/configuration.py +++ b/airflow/configuration.py @@ -590,10 +590,12 @@ def _warn_deprecate(section, key, 
deprecated_section, deprecated_name): def get_airflow_home(): + """Get path to Airflow Home""" return expand_env_var(os.environ.get('AIRFLOW_HOME', '~/airflow')) def get_airflow_config(airflow_home): + """Get Path to airflow.cfg path""" if 'AIRFLOW_CONFIG' not in os.environ: return os.path.join(airflow_home, 'airflow.cfg') return expand_env_var(os.environ['AIRFLOW_CONFIG']) @@ -641,6 +643,7 @@ def parameterized_config(template): def get_airflow_test_config(airflow_home): + """Get path to unittests.cfg""" if 'AIRFLOW_TEST_CONFIG' not in os.environ: return os.path.join(airflow_home, 'unittests.cfg') return expand_env_var(os.environ['AIRFLOW_TEST_CONFIG']) @@ -714,7 +717,7 @@ def get_airflow_test_config(airflow_home): # Historical convenience functions to access config entries -def load_test_config(): +def load_test_config(): # noqa: D103 warnings.warn( "Accessing configuration method 'load_test_config' directly from the configuration module is " "deprecated. Please access the configuration from the 'configuration.conf' object via " @@ -725,7 +728,7 @@ def load_test_config(): conf.load_test_config() -def get(*args, **kwargs): +def get(*args, **kwargs): # noqa: D103 warnings.warn( "Accessing configuration method 'get' directly from the configuration module is " "deprecated. Please access the configuration from the 'configuration.conf' object via " @@ -736,7 +739,7 @@ def get(*args, **kwargs): return conf.get(*args, **kwargs) -def getboolean(*args, **kwargs): +def getboolean(*args, **kwargs): # noqa: D103 warnings.warn( "Accessing configuration method 'getboolean' directly from the configuration module is " "deprecated. 
Please access the configuration from the 'configuration.conf' object via " @@ -747,7 +750,7 @@ def getboolean(*args, **kwargs): return conf.getboolean(*args, **kwargs) -def getfloat(*args, **kwargs): +def getfloat(*args, **kwargs): # noqa: D103 warnings.warn( "Accessing configuration method 'getfloat' directly from the configuration module is " "deprecated. Please access the configuration from the 'configuration.conf' object via " @@ -758,7 +761,7 @@ def getfloat(*args, **kwargs): return conf.getfloat(*args, **kwargs) -def getint(*args, **kwargs): +def getint(*args, **kwargs): # noqa: D103 warnings.warn( "Accessing configuration method 'getint' directly from the configuration module is " "deprecated. Please access the configuration from the 'configuration.conf' object via " @@ -769,7 +772,7 @@ def getint(*args, **kwargs): return conf.getint(*args, **kwargs) -def getsection(*args, **kwargs): +def getsection(*args, **kwargs): # noqa: D103 warnings.warn( "Accessing configuration method 'getsection' directly from the configuration module is " "deprecated. Please access the configuration from the 'configuration.conf' object via " @@ -780,7 +783,7 @@ def getsection(*args, **kwargs): return conf.getint(*args, **kwargs) -def has_option(*args, **kwargs): +def has_option(*args, **kwargs): # noqa: D103 warnings.warn( "Accessing configuration method 'has_option' directly from the configuration module is " "deprecated. Please access the configuration from the 'configuration.conf' object via " @@ -791,7 +794,7 @@ def has_option(*args, **kwargs): return conf.has_option(*args, **kwargs) -def remove_option(*args, **kwargs): +def remove_option(*args, **kwargs): # noqa: D103 warnings.warn( "Accessing configuration method 'remove_option' directly from the configuration module is " "deprecated. 
Please access the configuration from the 'configuration.conf' object via " @@ -802,7 +805,7 @@ def remove_option(*args, **kwargs): return conf.remove_option(*args, **kwargs) -def as_dict(*args, **kwargs): +def as_dict(*args, **kwargs): # noqa: D103 warnings.warn( "Accessing configuration method 'as_dict' directly from the configuration module is " "deprecated. Please access the configuration from the 'configuration.conf' object via " @@ -813,7 +816,7 @@ def as_dict(*args, **kwargs): return conf.as_dict(*args, **kwargs) -def set(*args, **kwargs): +def set(*args, **kwargs): # noqa: D103 warnings.warn( "Accessing configuration method 'set' directly from the configuration module is " "deprecated. Please access the configuration from the 'configuration.conf' object via " diff --git a/airflow/example_dags/libs/helper.py b/airflow/example_dags/libs/helper.py index e4ba8c06b261d..f80194e056920 100644 --- a/airflow/example_dags/libs/helper.py +++ b/airflow/example_dags/libs/helper.py @@ -18,5 +18,5 @@ # pylint: disable=missing-docstring -def print_stuff(): +def print_stuff(): # noqa: D103 print("annotated!") diff --git a/airflow/lineage/entities.py b/airflow/lineage/entities.py index 5575ad328e314..96a8507b1022f 100644 --- a/airflow/lineage/entities.py +++ b/airflow/lineage/entities.py @@ -83,7 +83,7 @@ class Column: # https://github.com/python/mypy/issues/6136 is resolved, use # `attr.converters.default_if_none(default=False)` # pylint: disable=missing-docstring -def default_if_none(arg: Optional[bool]) -> bool: +def default_if_none(arg: Optional[bool]) -> bool: # noqa: D103 return arg or False diff --git a/airflow/logging_config.py b/airflow/logging_config.py index 5af7c4abd14de..c49cb5ff287e5 100644 --- a/airflow/logging_config.py +++ b/airflow/logging_config.py @@ -28,6 +28,7 @@ def configure_logging(): + """Configure & Validate Airflow Logging""" logging_class_path = '' try: logging_class_path = conf.get('logging', 'logging_config_class') @@ -72,6 +73,7 @@ def 
configure_logging(): def validate_logging_config(logging_config): + """Validate the provided Logging Config""" # Now lets validate the other logging-related settings task_log_reader = conf.get('logging', 'task_log_reader') diff --git a/airflow/migrations/versions/03bc53e68815_add_sm_dag_index.py b/airflow/migrations/versions/03bc53e68815_add_sm_dag_index.py index fc8468155c235..f3e7330ac11e3 100644 --- a/airflow/migrations/versions/03bc53e68815_add_sm_dag_index.py +++ b/airflow/migrations/versions/03bc53e68815_add_sm_dag_index.py @@ -32,9 +32,9 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 op.create_index('sm_dag', 'sla_miss', ['dag_id'], unique=False) -def downgrade(): +def downgrade(): # noqa: D103 op.drop_index('sm_dag', table_name='sla_miss') diff --git a/airflow/migrations/versions/05f30312d566_merge_heads.py b/airflow/migrations/versions/05f30312d566_merge_heads.py index 36940c67d8605..68e7dbd3a0943 100644 --- a/airflow/migrations/versions/05f30312d566_merge_heads.py +++ b/airflow/migrations/versions/05f30312d566_merge_heads.py @@ -30,9 +30,9 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 pass -def downgrade(): +def downgrade(): # noqa: D103 pass diff --git a/airflow/migrations/versions/0a2a5b66e19d_add_task_reschedule_table.py b/airflow/migrations/versions/0a2a5b66e19d_add_task_reschedule_table.py index 6ada398506097..378c62dfde65e 100644 --- a/airflow/migrations/versions/0a2a5b66e19d_add_task_reschedule_table.py +++ b/airflow/migrations/versions/0a2a5b66e19d_add_task_reschedule_table.py @@ -41,19 +41,19 @@ # For Microsoft SQL Server, TIMESTAMP is a row-id type, # having nothing to do with date-time. DateTime() will # be sufficient. 
-def mssql_timestamp(): +def mssql_timestamp(): # noqa: D103 return sa.DateTime() -def mysql_timestamp(): +def mysql_timestamp(): # noqa: D103 return mysql.TIMESTAMP(fsp=6) -def sa_timestamp(): +def sa_timestamp(): # noqa: D103 return sa.TIMESTAMP(timezone=True) -def upgrade(): +def upgrade(): # noqa: D103 # See 0e2a74e0fc9f_add_time_zone_awareness conn = op.get_bind() if conn.dialect.name == 'mysql': @@ -89,6 +89,6 @@ def upgrade(): ) -def downgrade(): +def downgrade(): # noqa: D103 op.drop_index(INDEX_NAME, table_name=TABLE_NAME) op.drop_table(TABLE_NAME) diff --git a/airflow/migrations/versions/0e2a74e0fc9f_add_time_zone_awareness.py b/airflow/migrations/versions/0e2a74e0fc9f_add_time_zone_awareness.py index 3bbb7a46380a4..daa6a36f1ea6d 100644 --- a/airflow/migrations/versions/0e2a74e0fc9f_add_time_zone_awareness.py +++ b/airflow/migrations/versions/0e2a74e0fc9f_add_time_zone_awareness.py @@ -34,7 +34,7 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 conn = op.get_bind() if conn.dialect.name == "mysql": conn.execute("SET time_zone = '+00:00'") @@ -305,7 +305,7 @@ def upgrade(): ) -def downgrade(): +def downgrade(): # noqa: D103 conn = op.get_bind() if conn.dialect.name == "mysql": conn.execute("SET time_zone = '+00:00'") diff --git a/airflow/migrations/versions/127d2bf2dfa7_add_dag_id_state_index_on_dag_run_table.py b/airflow/migrations/versions/127d2bf2dfa7_add_dag_id_state_index_on_dag_run_table.py index 288a0b60aa821..df79a9512c965 100644 --- a/airflow/migrations/versions/127d2bf2dfa7_add_dag_id_state_index_on_dag_run_table.py +++ b/airflow/migrations/versions/127d2bf2dfa7_add_dag_id_state_index_on_dag_run_table.py @@ -32,9 +32,9 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 op.create_index('dag_id_state', 'dag_run', ['dag_id', 'state'], unique=False) -def downgrade(): +def downgrade(): # noqa: D103 op.drop_index('dag_id_state', table_name='dag_run') diff --git 
a/airflow/migrations/versions/13eb55f81627_for_compatibility.py b/airflow/migrations/versions/13eb55f81627_for_compatibility.py index a15b20c6c6ad8..ec43294be6111 100644 --- a/airflow/migrations/versions/13eb55f81627_for_compatibility.py +++ b/airflow/migrations/versions/13eb55f81627_for_compatibility.py @@ -31,9 +31,9 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 pass -def downgrade(): +def downgrade(): # noqa: D103 pass diff --git a/airflow/migrations/versions/1507a7289a2f_create_is_encrypted.py b/airflow/migrations/versions/1507a7289a2f_create_is_encrypted.py index 882d7ce3007a4..876f79f63a15b 100644 --- a/airflow/migrations/versions/1507a7289a2f_create_is_encrypted.py +++ b/airflow/migrations/versions/1507a7289a2f_create_is_encrypted.py @@ -41,7 +41,7 @@ ) -def upgrade(): +def upgrade(): # noqa: D103 # first check if the user already has this done. This should only be # true for users who are upgrading from a previous version of Airflow # that predates Alembic integration @@ -65,5 +65,5 @@ def upgrade(): ) -def downgrade(): +def downgrade(): # noqa: D103 op.drop_column('connection', 'is_encrypted') diff --git a/airflow/migrations/versions/1968acfc09e3_add_is_encrypted_column_to_variable_.py b/airflow/migrations/versions/1968acfc09e3_add_is_encrypted_column_to_variable_.py index 3e7c694eb018a..d9b4fe015932e 100644 --- a/airflow/migrations/versions/1968acfc09e3_add_is_encrypted_column_to_variable_.py +++ b/airflow/migrations/versions/1968acfc09e3_add_is_encrypted_column_to_variable_.py @@ -33,9 +33,9 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 op.add_column('variable', sa.Column('is_encrypted', sa.Boolean, default=False)) -def downgrade(): +def downgrade(): # noqa: D103 op.drop_column('variable', 'is_encrypted') diff --git a/airflow/migrations/versions/1b38cef5b76e_add_dagrun.py b/airflow/migrations/versions/1b38cef5b76e_add_dagrun.py index dc839e5507124..8edcbb4d59618 100644 --- 
a/airflow/migrations/versions/1b38cef5b76e_add_dagrun.py +++ b/airflow/migrations/versions/1b38cef5b76e_add_dagrun.py @@ -34,7 +34,7 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 op.create_table('dag_run', sa.Column('id', sa.Integer(), nullable=False), sa.Column('dag_id', sa.String(length=250), nullable=True), @@ -47,5 +47,5 @@ def upgrade(): sa.UniqueConstraint('dag_id', 'run_id')) -def downgrade(): +def downgrade(): # noqa: D103 op.drop_table('dag_run') diff --git a/airflow/migrations/versions/211e584da130_add_ti_state_index.py b/airflow/migrations/versions/211e584da130_add_ti_state_index.py index 86c2e7de3ef75..a6f946332118b 100644 --- a/airflow/migrations/versions/211e584da130_add_ti_state_index.py +++ b/airflow/migrations/versions/211e584da130_add_ti_state_index.py @@ -32,9 +32,9 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 op.create_index('ti_state', 'task_instance', ['state'], unique=False) -def downgrade(): +def downgrade(): # noqa: D103 op.drop_index('ti_state', table_name='task_instance') diff --git a/airflow/migrations/versions/27c6a30d7c24_add_executor_config_to_task_instance.py b/airflow/migrations/versions/27c6a30d7c24_add_executor_config_to_task_instance.py index c02a1f5242eac..768a67fe529bd 100644 --- a/airflow/migrations/versions/27c6a30d7c24_add_executor_config_to_task_instance.py +++ b/airflow/migrations/versions/27c6a30d7c24_add_executor_config_to_task_instance.py @@ -38,9 +38,9 @@ NEW_COLUMN = "executor_config" -def upgrade(): +def upgrade(): # noqa: D103 op.add_column(TASK_INSTANCE_TABLE, sa.Column(NEW_COLUMN, sa.PickleType(pickler=dill))) -def downgrade(): +def downgrade(): # noqa: D103 op.drop_column(TASK_INSTANCE_TABLE, NEW_COLUMN) diff --git a/airflow/migrations/versions/2e541a1dcfed_task_duration.py b/airflow/migrations/versions/2e541a1dcfed_task_duration.py index d31a8d98c5bad..b071f1c4b8d1e 100644 --- a/airflow/migrations/versions/2e541a1dcfed_task_duration.py +++ 
b/airflow/migrations/versions/2e541a1dcfed_task_duration.py @@ -35,7 +35,7 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 # use batch_alter_table to support SQLite workaround with op.batch_alter_table("task_instance") as batch_op: batch_op.alter_column('duration', @@ -44,5 +44,5 @@ def upgrade(): existing_nullable=True) -def downgrade(): +def downgrade(): # noqa: D103 pass diff --git a/airflow/migrations/versions/2e82aab8ef20_rename_user_table.py b/airflow/migrations/versions/2e82aab8ef20_rename_user_table.py index cf461c0323031..0acc346d9abb4 100644 --- a/airflow/migrations/versions/2e82aab8ef20_rename_user_table.py +++ b/airflow/migrations/versions/2e82aab8ef20_rename_user_table.py @@ -32,9 +32,9 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 op.rename_table('user', 'users') -def downgrade(): +def downgrade(): # noqa: D103 op.rename_table('users', 'user') diff --git a/airflow/migrations/versions/338e90f54d61_more_logging_into_task_isntance.py b/airflow/migrations/versions/338e90f54d61_more_logging_into_task_isntance.py index b8ab2908c404b..50245a3a8cb83 100644 --- a/airflow/migrations/versions/338e90f54d61_more_logging_into_task_isntance.py +++ b/airflow/migrations/versions/338e90f54d61_more_logging_into_task_isntance.py @@ -33,11 +33,11 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 op.add_column('task_instance', sa.Column('operator', sa.String(length=1000), nullable=True)) op.add_column('task_instance', sa.Column('queued_dttm', sa.DateTime(), nullable=True)) -def downgrade(): +def downgrade(): # noqa: D103 op.drop_column('task_instance', 'queued_dttm') op.drop_column('task_instance', 'operator') diff --git a/airflow/migrations/versions/33ae817a1ff4_add_kubernetes_resource_checkpointing.py b/airflow/migrations/versions/33ae817a1ff4_add_kubernetes_resource_checkpointing.py index dfaa4c51a73a7..d22d87864a859 100644 --- a/airflow/migrations/versions/33ae817a1ff4_add_kubernetes_resource_checkpointing.py +++ 
b/airflow/migrations/versions/33ae817a1ff4_add_kubernetes_resource_checkpointing.py @@ -35,7 +35,7 @@ RESOURCE_TABLE = "kube_resource_version" -def upgrade(): +def upgrade(): # noqa: D103 columns_and_constraints = [ sa.Column("one_row_id", sa.Boolean, server_default=sa.true(), primary_key=True), sa.Column("resource_version", sa.String(255)) @@ -63,5 +63,5 @@ def upgrade(): ]) -def downgrade(): +def downgrade(): # noqa: D103 op.drop_table(RESOURCE_TABLE) diff --git a/airflow/migrations/versions/40e67319e3a9_dagrun_config.py b/airflow/migrations/versions/40e67319e3a9_dagrun_config.py index 7ce183827fbe8..d123f8e383d55 100644 --- a/airflow/migrations/versions/40e67319e3a9_dagrun_config.py +++ b/airflow/migrations/versions/40e67319e3a9_dagrun_config.py @@ -33,9 +33,9 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 op.add_column('dag_run', sa.Column('conf', sa.PickleType(), nullable=True)) -def downgrade(): +def downgrade(): # noqa: D103 op.drop_column('dag_run', 'conf') diff --git a/airflow/migrations/versions/41f5f12752f8_add_superuser_field.py b/airflow/migrations/versions/41f5f12752f8_add_superuser_field.py index 8b3e9b4a41de5..a7d5767f99bd1 100644 --- a/airflow/migrations/versions/41f5f12752f8_add_superuser_field.py +++ b/airflow/migrations/versions/41f5f12752f8_add_superuser_field.py @@ -33,9 +33,9 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 op.add_column('users', sa.Column('superuser', sa.Boolean(), default=False)) -def downgrade(): +def downgrade(): # noqa: D103 op.drop_column('users', 'superuser') diff --git a/airflow/migrations/versions/4446e08588_dagrun_start_end.py b/airflow/migrations/versions/4446e08588_dagrun_start_end.py index 48c85d5c94b70..ec20c807b7a03 100644 --- a/airflow/migrations/versions/4446e08588_dagrun_start_end.py +++ b/airflow/migrations/versions/4446e08588_dagrun_start_end.py @@ -34,11 +34,11 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 op.add_column('dag_run', 
sa.Column('end_date', sa.DateTime(), nullable=True)) op.add_column('dag_run', sa.Column('start_date', sa.DateTime(), nullable=True)) -def downgrade(): +def downgrade(): # noqa: D103 op.drop_column('dag_run', 'start_date') op.drop_column('dag_run', 'end_date') diff --git a/airflow/migrations/versions/4addfa1236f1_add_fractional_seconds_to_mysql_tables.py b/airflow/migrations/versions/4addfa1236f1_add_fractional_seconds_to_mysql_tables.py index 551c0beb40b6d..20d77ce037286 100644 --- a/airflow/migrations/versions/4addfa1236f1_add_fractional_seconds_to_mysql_tables.py +++ b/airflow/migrations/versions/4addfa1236f1_add_fractional_seconds_to_mysql_tables.py @@ -34,7 +34,7 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 if context.config.get_main_option('sqlalchemy.url').startswith('mysql'): op.alter_column(table_name='dag', column_name='last_scheduler_run', type_=mysql.DATETIME(fsp=6)) @@ -97,7 +97,7 @@ def upgrade(): type_=mysql.DATETIME(fsp=6)) -def downgrade(): +def downgrade(): # noqa: D103 if context.config.get_main_option('sqlalchemy.url').startswith('mysql'): op.alter_column(table_name='dag', column_name='last_scheduler_run', type_=mysql.DATETIME()) diff --git a/airflow/migrations/versions/4ebbffe0a39a_merge_heads.py b/airflow/migrations/versions/4ebbffe0a39a_merge_heads.py index 951d7581d664c..28bc60716f507 100644 --- a/airflow/migrations/versions/4ebbffe0a39a_merge_heads.py +++ b/airflow/migrations/versions/4ebbffe0a39a_merge_heads.py @@ -31,9 +31,9 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 pass -def downgrade(): +def downgrade(): # noqa: D103 pass diff --git a/airflow/migrations/versions/502898887f84_adding_extra_to_log.py b/airflow/migrations/versions/502898887f84_adding_extra_to_log.py index 1bf5f0d3924ae..606b6b8b5a1be 100644 --- a/airflow/migrations/versions/502898887f84_adding_extra_to_log.py +++ b/airflow/migrations/versions/502898887f84_adding_extra_to_log.py @@ -33,9 +33,9 @@ depends_on = None -def upgrade(): 
+def upgrade(): # noqa: D103 op.add_column('log', sa.Column('extra', sa.Text(), nullable=True)) -def downgrade(): +def downgrade(): # noqa: D103 op.drop_column('log', 'extra') diff --git a/airflow/migrations/versions/52d714495f0_job_id_indices.py b/airflow/migrations/versions/52d714495f0_job_id_indices.py index b6bdfbb851d97..2006271a48cee 100644 --- a/airflow/migrations/versions/52d714495f0_job_id_indices.py +++ b/airflow/migrations/versions/52d714495f0_job_id_indices.py @@ -32,10 +32,10 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 op.create_index('idx_job_state_heartbeat', 'job', ['state', 'latest_heartbeat'], unique=False) -def downgrade(): +def downgrade(): # noqa: D103 op.drop_index('idx_job_state_heartbeat', table_name='job') diff --git a/airflow/migrations/versions/561833c1c74b_add_password_column_to_user.py b/airflow/migrations/versions/561833c1c74b_add_password_column_to_user.py index 5a83b113b8ea8..bbf835c5cca62 100644 --- a/airflow/migrations/versions/561833c1c74b_add_password_column_to_user.py +++ b/airflow/migrations/versions/561833c1c74b_add_password_column_to_user.py @@ -33,9 +33,9 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 op.add_column('user', sa.Column('password', sa.String(255))) -def downgrade(): +def downgrade(): # noqa: D103 op.drop_column('user', 'password') diff --git a/airflow/migrations/versions/5e7d17757c7a_add_pid_field_to_taskinstance.py b/airflow/migrations/versions/5e7d17757c7a_add_pid_field_to_taskinstance.py index 062ec93aa6f71..7078f8132be25 100644 --- a/airflow/migrations/versions/5e7d17757c7a_add_pid_field_to_taskinstance.py +++ b/airflow/migrations/versions/5e7d17757c7a_add_pid_field_to_taskinstance.py @@ -35,8 +35,10 @@ def upgrade(): + """Add pid column to task_instance table.""" op.add_column('task_instance', sa.Column('pid', sa.Integer)) def downgrade(): + """Drop pid column from task_instance table.""" op.drop_column('task_instance', 'pid') diff --git 
a/airflow/migrations/versions/64de9cddf6c9_add_task_fails_journal_table.py b/airflow/migrations/versions/64de9cddf6c9_add_task_fails_journal_table.py index 9d93736eb2190..d798a7b67b36e 100644 --- a/airflow/migrations/versions/64de9cddf6c9_add_task_fails_journal_table.py +++ b/airflow/migrations/versions/64de9cddf6c9_add_task_fails_journal_table.py @@ -35,7 +35,7 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 op.create_table( 'task_fail', sa.Column('id', sa.Integer(), nullable=False), @@ -49,5 +49,5 @@ def upgrade(): ) -def downgrade(): +def downgrade(): # noqa: D103 op.drop_table('task_fail') diff --git a/airflow/migrations/versions/8504051e801b_xcom_dag_task_indices.py b/airflow/migrations/versions/8504051e801b_xcom_dag_task_indices.py index 20d8111cc205e..7eadb56000137 100644 --- a/airflow/migrations/versions/8504051e801b_xcom_dag_task_indices.py +++ b/airflow/migrations/versions/8504051e801b_xcom_dag_task_indices.py @@ -34,9 +34,11 @@ def upgrade(): + """Create Index.""" op.create_index('idx_xcom_dag_task_date', 'xcom', ['dag_id', 'task_id', 'execution_date'], unique=False) def downgrade(): + """Drop Index.""" op.drop_index('idx_xcom_dag_task_date', table_name='xcom') diff --git a/airflow/migrations/versions/856955da8476_fix_sqlite_foreign_key.py b/airflow/migrations/versions/856955da8476_fix_sqlite_foreign_key.py index 1cb84f7480678..9b804ce9c9243 100644 --- a/airflow/migrations/versions/856955da8476_fix_sqlite_foreign_key.py +++ b/airflow/migrations/versions/856955da8476_fix_sqlite_foreign_key.py @@ -34,6 +34,7 @@ def upgrade(): + """Fix broken foreign-key constraint for existing SQLite DBs.""" conn = op.get_bind() if conn.dialect.name == 'sqlite': # Fix broken foreign-key constraint for existing SQLite DBs. @@ -65,6 +66,6 @@ def upgrade(): ['user_id'], ['id']) -def downgrade(): +def downgrade(): # noqa: D103 # Downgrade would fail because the broken FK constraint can't be re-created. 
pass diff --git a/airflow/migrations/versions/86770d1215c0_add_kubernetes_scheduler_uniqueness.py b/airflow/migrations/versions/86770d1215c0_add_kubernetes_scheduler_uniqueness.py index 520c505f55b2b..b5928fdcd82ea 100644 --- a/airflow/migrations/versions/86770d1215c0_add_kubernetes_scheduler_uniqueness.py +++ b/airflow/migrations/versions/86770d1215c0_add_kubernetes_scheduler_uniqueness.py @@ -35,7 +35,7 @@ RESOURCE_TABLE = "kube_worker_uuid" -def upgrade(): +def upgrade(): # noqa: D103 columns_and_constraints = [ sa.Column("one_row_id", sa.Boolean, server_default=sa.true(), primary_key=True), @@ -64,5 +64,5 @@ def upgrade(): ]) -def downgrade(): +def downgrade(): # noqa: D103 op.drop_table(RESOURCE_TABLE) diff --git a/airflow/migrations/versions/939bb1e647c8_task_reschedule_fk_on_cascade_delete.py b/airflow/migrations/versions/939bb1e647c8_task_reschedule_fk_on_cascade_delete.py index 547e55ae35698..797c1f3e66bba 100644 --- a/airflow/migrations/versions/939bb1e647c8_task_reschedule_fk_on_cascade_delete.py +++ b/airflow/migrations/versions/939bb1e647c8_task_reschedule_fk_on_cascade_delete.py @@ -32,7 +32,7 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 with op.batch_alter_table('task_reschedule') as batch_op: batch_op.drop_constraint( 'task_reschedule_dag_task_date_fkey', @@ -47,7 +47,7 @@ def upgrade(): ) -def downgrade(): +def downgrade(): # noqa: D103 with op.batch_alter_table('task_reschedule') as batch_op: batch_op.drop_constraint( 'task_reschedule_dag_task_date_fkey', diff --git a/airflow/migrations/versions/947454bf1dff_add_ti_job_id_index.py b/airflow/migrations/versions/947454bf1dff_add_ti_job_id_index.py index e7c948e66afdc..b18224e6a6158 100644 --- a/airflow/migrations/versions/947454bf1dff_add_ti_job_id_index.py +++ b/airflow/migrations/versions/947454bf1dff_add_ti_job_id_index.py @@ -32,9 +32,9 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 op.create_index('ti_job_id', 'task_instance', ['job_id'], unique=False) 
-def downgrade(): +def downgrade(): # noqa: D103 op.drop_index('ti_job_id', table_name='task_instance') diff --git a/airflow/migrations/versions/952da73b5eff_add_dag_code_table.py b/airflow/migrations/versions/952da73b5eff_add_dag_code_table.py index 81d6ec9fdf833..7875015c6cba6 100644 --- a/airflow/migrations/versions/952da73b5eff_add_dag_code_table.py +++ b/airflow/migrations/versions/952da73b5eff_add_dag_code_table.py @@ -37,6 +37,7 @@ def upgrade(): + """Create DagCode Table.""" from sqlalchemy.ext.declarative import declarative_base Base = declarative_base() diff --git a/airflow/migrations/versions/9635ae0956e7_index_faskfail.py b/airflow/migrations/versions/9635ae0956e7_index_faskfail.py index 3de600f4bb3dc..9508170dcf0a8 100644 --- a/airflow/migrations/versions/9635ae0956e7_index_faskfail.py +++ b/airflow/migrations/versions/9635ae0956e7_index_faskfail.py @@ -31,11 +31,11 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 op.create_index('idx_task_fail_dag_task_date', 'task_fail', ['dag_id', 'task_id', 'execution_date'], unique=False) -def downgrade(): +def downgrade(): # noqa: D103 op.drop_index('idx_task_fail_dag_task_date', table_name='task_fail') diff --git a/airflow/migrations/versions/a4c2fd67d16b_add_pool_slots_field_to_task_instance.py b/airflow/migrations/versions/a4c2fd67d16b_add_pool_slots_field_to_task_instance.py index 4dd825e8b7de4..1b742e4245649 100644 --- a/airflow/migrations/versions/a4c2fd67d16b_add_pool_slots_field_to_task_instance.py +++ b/airflow/migrations/versions/a4c2fd67d16b_add_pool_slots_field_to_task_instance.py @@ -34,9 +34,9 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 op.add_column('task_instance', sa.Column('pool_slots', sa.Integer, default=1)) -def downgrade(): +def downgrade(): # noqa: D103 op.drop_column('task_instance', 'pool_slots') diff --git a/airflow/migrations/versions/a56c9515abdc_remove_dag_stat_table.py b/airflow/migrations/versions/a56c9515abdc_remove_dag_stat_table.py index 
a8c95d7cf7dba..05adcae8b93ff 100644 --- a/airflow/migrations/versions/a56c9515abdc_remove_dag_stat_table.py +++ b/airflow/migrations/versions/a56c9515abdc_remove_dag_stat_table.py @@ -35,10 +35,12 @@ def upgrade(): + """Drop dag_stats table""" op.drop_table("dag_stats") def downgrade(): + """Create dag_stats table""" op.create_table('dag_stats', sa.Column('dag_id', sa.String(length=250), nullable=False), sa.Column('state', sa.String(length=50), nullable=False), diff --git a/airflow/migrations/versions/b0125267960b_merge_heads.py b/airflow/migrations/versions/b0125267960b_merge_heads.py index ee91e236c9c00..d0f546e065b04 100644 --- a/airflow/migrations/versions/b0125267960b_merge_heads.py +++ b/airflow/migrations/versions/b0125267960b_merge_heads.py @@ -31,9 +31,9 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 pass -def downgrade(): +def downgrade(): # noqa: D103 pass diff --git a/airflow/migrations/versions/bba5a7cfc896_add_a_column_to_track_the_encryption_.py b/airflow/migrations/versions/bba5a7cfc896_add_a_column_to_track_the_encryption_.py index 22d1f0d045e6a..b03b42c61d7cf 100644 --- a/airflow/migrations/versions/bba5a7cfc896_add_a_column_to_track_the_encryption_.py +++ b/airflow/migrations/versions/bba5a7cfc896_add_a_column_to_track_the_encryption_.py @@ -34,10 +34,10 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 op.add_column('connection', sa.Column('is_extra_encrypted', sa.Boolean, default=False)) -def downgrade(): +def downgrade(): # noqa: D103 op.drop_column('connection', 'is_extra_encrypted') diff --git a/airflow/migrations/versions/bbc73705a13e_add_notification_sent_column_to_sla_miss.py b/airflow/migrations/versions/bbc73705a13e_add_notification_sent_column_to_sla_miss.py index 4c6393836bfff..f5d94f208ace2 100644 --- a/airflow/migrations/versions/bbc73705a13e_add_notification_sent_column_to_sla_miss.py +++ b/airflow/migrations/versions/bbc73705a13e_add_notification_sent_column_to_sla_miss.py @@ -33,9 +33,9 @@ 
depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 op.add_column('sla_miss', sa.Column('notification_sent', sa.Boolean, default=False)) -def downgrade(): +def downgrade(): # noqa: D103 op.drop_column('sla_miss', 'notification_sent') diff --git a/airflow/migrations/versions/bdaa763e6c56_make_xcom_value_column_a_large_binary.py b/airflow/migrations/versions/bdaa763e6c56_make_xcom_value_column_a_large_binary.py index d69d24aed6e91..1bb0cdcd21dc9 100644 --- a/airflow/migrations/versions/bdaa763e6c56_make_xcom_value_column_a_large_binary.py +++ b/airflow/migrations/versions/bdaa763e6c56_make_xcom_value_column_a_large_binary.py @@ -34,7 +34,7 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 # There can be data truncation here as LargeBinary can be smaller than the pickle # type. # use batch_alter_table to support SQLite workaround @@ -42,7 +42,7 @@ def upgrade(): batch_op.alter_column('value', type_=sa.LargeBinary()) -def downgrade(): +def downgrade(): # noqa: D103 # use batch_alter_table to support SQLite workaround with op.batch_alter_table("xcom") as batch_op: batch_op.alter_column('value', type_=sa.PickleType(pickler=dill)) diff --git a/airflow/migrations/versions/bf00311e1990_add_index_to_taskinstance.py b/airflow/migrations/versions/bf00311e1990_add_index_to_taskinstance.py index 528bd53b366e5..d03868241ed36 100644 --- a/airflow/migrations/versions/bf00311e1990_add_index_to_taskinstance.py +++ b/airflow/migrations/versions/bf00311e1990_add_index_to_taskinstance.py @@ -33,7 +33,7 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 op.create_index( 'ti_dag_date', 'task_instance', @@ -42,5 +42,5 @@ def upgrade(): ) -def downgrade(): +def downgrade(): # noqa: D103 op.drop_index('ti_dag_date', table_name='task_instance') diff --git a/airflow/migrations/versions/c8ffec048a3b_add_fields_to_dag.py b/airflow/migrations/versions/c8ffec048a3b_add_fields_to_dag.py index ff02efbf9f1a0..bd0453a07dd37 100644 --- 
a/airflow/migrations/versions/c8ffec048a3b_add_fields_to_dag.py +++ b/airflow/migrations/versions/c8ffec048a3b_add_fields_to_dag.py @@ -34,11 +34,11 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 op.add_column('dag', sa.Column('description', sa.Text(), nullable=True)) op.add_column('dag', sa.Column('default_view', sa.String(25), nullable=True)) -def downgrade(): +def downgrade(): # noqa: D103 op.drop_column('dag', 'description') op.drop_column('dag', 'default_view') diff --git a/airflow/migrations/versions/cc1e65623dc7_add_max_tries_column_to_task_instance.py b/airflow/migrations/versions/cc1e65623dc7_add_max_tries_column_to_task_instance.py index 133631d8051be..83c15a9e7abed 100644 --- a/airflow/migrations/versions/cc1e65623dc7_add_max_tries_column_to_task_instance.py +++ b/airflow/migrations/versions/cc1e65623dc7_add_max_tries_column_to_task_instance.py @@ -54,7 +54,7 @@ class TaskInstance(Base): # noqa: D101 try_number = Column(Integer, default=0) -def upgrade(): +def upgrade(): # noqa: D103 op.add_column('task_instance', sa.Column('max_tries', sa.Integer, server_default="-1")) # Check if table task_instance exist before data migration. This check is # needed for database that does not create table until migration finishes. 
@@ -100,7 +100,7 @@ def upgrade(): session.commit() -def downgrade(): +def downgrade(): # noqa: D103 engine = settings.engine if engine.dialect.has_table(engine, 'task_instance'): connection = op.get_bind() diff --git a/airflow/migrations/versions/cf5dc11e79ad_drop_user_and_chart.py b/airflow/migrations/versions/cf5dc11e79ad_drop_user_and_chart.py index 3c40f77ea6eb9..1a48f3629142a 100644 --- a/airflow/migrations/versions/cf5dc11e79ad_drop_user_and_chart.py +++ b/airflow/migrations/versions/cf5dc11e79ad_drop_user_and_chart.py @@ -34,7 +34,7 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 # We previously had a KnownEvent's table, but we deleted the table without # a down migration to remove it (so we didn't delete anyone's data if they # were happing to use the feature. @@ -51,7 +51,7 @@ def upgrade(): op.drop_table("users") -def downgrade(): +def downgrade(): # noqa: D103 conn = op.get_bind() op.create_table( diff --git a/airflow/migrations/versions/d2ae31099d61_increase_text_size_for_mysql.py b/airflow/migrations/versions/d2ae31099d61_increase_text_size_for_mysql.py index fb198d0557faf..bb6f2a54f902b 100644 --- a/airflow/migrations/versions/d2ae31099d61_increase_text_size_for_mysql.py +++ b/airflow/migrations/versions/d2ae31099d61_increase_text_size_for_mysql.py @@ -33,11 +33,11 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 if context.config.get_main_option('sqlalchemy.url').startswith('mysql'): op.alter_column(table_name='variable', column_name='val', type_=mysql.MEDIUMTEXT) -def downgrade(): +def downgrade(): # noqa: D103 if context.config.get_main_option('sqlalchemy.url').startswith('mysql'): op.alter_column(table_name='variable', column_name='val', type_=mysql.TEXT) diff --git a/airflow/migrations/versions/dd25f486b8ea_add_idx_log_dag.py b/airflow/migrations/versions/dd25f486b8ea_add_idx_log_dag.py index 560b763963dd6..c3530945d8269 100644 --- a/airflow/migrations/versions/dd25f486b8ea_add_idx_log_dag.py +++ 
b/airflow/migrations/versions/dd25f486b8ea_add_idx_log_dag.py @@ -31,9 +31,9 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 op.create_index('idx_log_dag', 'log', ['dag_id'], unique=False) -def downgrade(): +def downgrade(): # noqa: D103 op.drop_index('idx_log_dag', table_name='log') diff --git a/airflow/migrations/versions/dd4ecb8fbee3_add_schedule_interval_to_dag.py b/airflow/migrations/versions/dd4ecb8fbee3_add_schedule_interval_to_dag.py index 776527e16633b..ed335d2e3508b 100644 --- a/airflow/migrations/versions/dd4ecb8fbee3_add_schedule_interval_to_dag.py +++ b/airflow/migrations/versions/dd4ecb8fbee3_add_schedule_interval_to_dag.py @@ -34,9 +34,9 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 op.add_column('dag', sa.Column('schedule_interval', sa.Text(), nullable=True)) -def downgrade(): +def downgrade(): # noqa: D103 op.drop_column('dag', 'schedule_interval') diff --git a/airflow/migrations/versions/e3a246e0dc1_current_schema.py b/airflow/migrations/versions/e3a246e0dc1_current_schema.py index a7cd8648f0f76..dfa4f5818c69f 100644 --- a/airflow/migrations/versions/e3a246e0dc1_current_schema.py +++ b/airflow/migrations/versions/e3a246e0dc1_current_schema.py @@ -38,7 +38,7 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 conn = op.get_bind() inspector = Inspector.from_engine(conn) tables = inspector.get_table_names() @@ -240,7 +240,7 @@ def upgrade(): ) -def downgrade(): +def downgrade(): # noqa: D103 op.drop_table('chart') op.drop_table('variable') op.drop_table('user') diff --git a/airflow/migrations/versions/f23433877c24_fix_mysql_not_null_constraint.py b/airflow/migrations/versions/f23433877c24_fix_mysql_not_null_constraint.py index 77424ec76505c..f9725d38e4118 100644 --- a/airflow/migrations/versions/f23433877c24_fix_mysql_not_null_constraint.py +++ b/airflow/migrations/versions/f23433877c24_fix_mysql_not_null_constraint.py @@ -32,7 +32,7 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 
conn = op.get_bind() if conn.dialect.name == 'mysql': conn.execute("SET time_zone = '+00:00'") @@ -41,7 +41,7 @@ def upgrade(): op.alter_column('xcom', 'timestamp', existing_type=mysql.TIMESTAMP(fsp=6), nullable=False) -def downgrade(): +def downgrade(): # noqa: D103 conn = op.get_bind() if conn.dialect.name == 'mysql': conn.execute("SET time_zone = '+00:00'") diff --git a/airflow/migrations/versions/f2ca10b85618_add_dag_stats_table.py b/airflow/migrations/versions/f2ca10b85618_add_dag_stats_table.py index e3679830bc8ad..7ab2fe943726f 100644 --- a/airflow/migrations/versions/f2ca10b85618_add_dag_stats_table.py +++ b/airflow/migrations/versions/f2ca10b85618_add_dag_stats_table.py @@ -33,7 +33,7 @@ depends_on = None -def upgrade(): +def upgrade(): # noqa: D103 op.create_table('dag_stats', sa.Column('dag_id', sa.String(length=250), nullable=False), sa.Column('state', sa.String(length=50), nullable=False), @@ -42,5 +42,5 @@ def upgrade(): sa.PrimaryKeyConstraint('dag_id', 'state')) -def downgrade(): +def downgrade(): # noqa: D103 op.drop_table('dag_stats') diff --git a/airflow/models/connection.py b/airflow/models/connection.py index 6a59d564435f8..e8951dd344add 100644 --- a/airflow/models/connection.py +++ b/airflow/models/connection.py @@ -89,6 +89,7 @@ # Python automatically converts all letters to lowercase in hostname # See: https://issues.apache.org/jira/browse/AIRFLOW-3615 def parse_netloc_to_hostname(uri_parts): + """Parse a URI string to get correct Hostname.""" hostname = unquote(uri_parts.hostname or '') if '/' in hostname: hostname = uri_parts.netloc diff --git a/airflow/providers/docker/example_dags/example_docker_copy_data.py b/airflow/providers/docker/example_dags/example_docker_copy_data.py index e3a61269161ad..0ae47eaf61474 100644 --- a/airflow/providers/docker/example_dags/example_docker_copy_data.py +++ b/airflow/providers/docker/example_dags/example_docker_copy_data.py @@ -61,6 +61,7 @@ def is_data_available(*args, **kwargs): + """Return True if 
data exists in XCom table for view_file task, false otherwise.""" ti = kwargs["ti"] data = ti.xcom_pull(key=None, task_ids="view_file") return not data == "" diff --git a/airflow/stats.py b/airflow/stats.py index c3c0d5f33cc5c..feabc40e1dbee 100644 --- a/airflow/stats.py +++ b/airflow/stats.py @@ -71,6 +71,9 @@ def timing(cls, stat, dt): def stat_name_default_handler(stat_name, max_length=250) -> str: + """Validate the statsd stat name, apply changes to the stat name + if necessary, and return the transformed stat name. + """ if not isinstance(stat_name, str): raise InvalidStatsNameException('The stat_name has to be a string') if len(stat_name) > max_length: @@ -87,10 +90,14 @@ def stat_name_default_handler(stat_name, max_length=250) -> str: def get_current_handle_stat_name_func() -> Callable[[str], str]: + """Get Stat Name Handler from airflow.cfg""" return conf.getimport('scheduler', 'stat_name_handler') or stat_name_default_handler def validate_stat(fn): + """Check if stat name contains invalid characters. + Log and do not emit stats if name is invalid + """ @wraps(fn) def wrapper(_self, stat, *args, **kwargs): try: diff --git a/airflow/www/utils.py b/airflow/www/utils.py index 14c565d5e5442..a65260e67e0ec 100644 --- a/airflow/www/utils.py +++ b/airflow/www/utils.py @@ -51,6 +51,7 @@ def get_sensitive_variables_fields(): + """Get comma-separated sensitive Variable Fields from airflow.cfg.""" sensitive_fields = set(DEFAULT_SENSITIVE_VARIABLE_FIELDS) sensitive_variable_fields = conf.get('admin', 'sensitive_variable_fields') if sensitive_variable_fields: @@ -59,6 +60,7 @@ def get_sensitive_variables_fields(): def should_hide_value_for_key(key_name): + """Returns True if hide_sensitive_variable_fields is True, else False """ # It is possible via importing variables from file that a key is empty.
if key_name: config_set = conf.getboolean('admin', 'hide_sensitive_variable_fields') @@ -69,6 +71,7 @@ def should_hide_value_for_key(key_name): def get_params(**kwargs): + """Return URL-encoded params""" return urlencode({d: v for d, v in kwargs.items() if v is not None}) @@ -205,6 +208,7 @@ def make_cache_key(*args, **kwargs): def task_instance_link(attr): + """Generates a URL to the Graph View for a TaskInstance.""" dag_id = attr.get('dag_id') task_id = attr.get('task_id') execution_date = attr.get('execution_date') @@ -231,6 +235,7 @@ def task_instance_link(attr): def state_token(state): + """Returns a formatted string with HTML for a given State""" color = State.color(state) return Markup( '' @@ -238,11 +243,13 @@ def state_token(state): def state_f(attr): + """Gets 'state' & returns a formatted string with HTML for a given State""" state = attr.get('state') return state_token(state) def nobr_f(attr_name): + """Returns a formatted string with HTML with a Non-breaking Text element""" def nobr(attr): f = attr.get(attr_name) return Markup("{}").format(f) @@ -250,6 +257,7 @@ def nobr(attr): def datetime_f(attr_name): + """Returns a formatted string with HTML for given DateTime""" def dt(attr): f = attr.get(attr_name) as_iso = f.isoformat() if f else '' @@ -264,6 +272,7 @@ def dt(attr): def dag_link(attr): + """Generates a URL to the Graph View for a Dag.""" dag_id = attr.get('dag_id') execution_date = attr.get('execution_date') url = url_for( @@ -275,6 +284,7 @@ def dag_link(attr): def dag_run_link(attr): + """Generates a URL to the Graph View for a DagRun.""" dag_id = attr.get('dag_id') run_id = attr.get('run_id') execution_date = attr.get('execution_date') @@ -288,14 +298,12 @@ def dag_run_link(attr): def pygment_html_render(s, lexer=lexers.TextLexer): - return highlight( - s, - lexer(), - HtmlFormatter(linenos=True), - ) + """Highlight text using a given Lexer""" + return highlight(s, lexer(), HtmlFormatter(linenos=True)) def render(obj, lexer): + """Render a
given Python object with a given Pygments lexer""" out = "" if isinstance(obj, str): out = Markup(pygment_html_render(obj, lexer)) @@ -311,6 +319,7 @@ def render(obj, lexer): def wrapped_markdown(s, css_class=None): + """Convert a Markdown string to HTML.""" if s is None: return None @@ -320,6 +329,7 @@ def wrapped_markdown(s, css_class=None): def get_attr_renderer(): + """Return Dictionary containing different Pygments Lexers for Rendering & Highlighting""" return { 'bash_command': lambda x: render(x, lexers.BashLexer), 'hql': lambda x: render(x, lexers.SqlLexer), @@ -333,7 +343,7 @@ def get_attr_renderer(): } -def recurse_tasks(tasks, task_ids, dag_ids, task_id_to_dag): +def recurse_tasks(tasks, task_ids, dag_ids, task_id_to_dag): # noqa: D103 if isinstance(tasks, list): for task in tasks: recurse_tasks(task, task_ids, dag_ids, task_id_to_dag) diff --git a/airflow/www/views.py b/airflow/www/views.py index f00773568033d..3fdf4f78c145b 100644 --- a/airflow/www/views.py +++ b/airflow/www/views.py @@ -83,6 +83,7 @@ def get_date_time_num_runs_dag_runs_form_data(request, session, dag): + """Get Execution Data, Base Date & Number of runs from a Request """ dttm = request.args.get('execution_date') if dttm: dttm = timezone.parse(dttm) @@ -139,6 +140,7 @@ def get_date_time_num_runs_dag_runs_form_data(request, session, dag): ###################################################################################### def circles(error): + """Show Circles on screen for any error in the Webserver""" return render_template( 'airflow/circles.html', hostname=socket.getfqdn() if conf.getboolean( 'webserver', @@ -147,6 +149,7 @@ def circles(error): def show_traceback(error): + """Show Traceback for a given error""" from airflow.utils import asciiart as ascii_ return render_template( 'airflow/traceback.html',