diff --git a/airflow/providers/amazon/aws/hooks/batch_client.py b/airflow/providers/amazon/aws/hooks/batch_client.py
index 1f9917e86bf2e..624869a06b128 100644
--- a/airflow/providers/amazon/aws/hooks/batch_client.py
+++ b/airflow/providers/amazon/aws/hooks/batch_client.py
@@ -419,7 +419,7 @@ def get_job_awslogs_info(self, job_id: str) -> dict[str, str] | None:
             return None
         if len(all_info) > 1:
             self.log.warning(
-                f"AWS Batch job ({job_id}) has more than one log stream, " f"only returning the first one."
+                f"AWS Batch job ({job_id}) has more than one log stream, only returning the first one."
             )
         return all_info[0]
diff --git a/airflow/providers/amazon/aws/operators/batch.py b/airflow/providers/amazon/aws/operators/batch.py
index 6acd2fd3a93de..272122d1093b9 100644
--- a/airflow/providers/amazon/aws/operators/batch.py
+++ b/airflow/providers/amazon/aws/operators/batch.py
@@ -314,7 +314,7 @@ def monitor_job(self, context: Context):
             if len(awslogs) > 1:
                 # there can be several log streams on multi-node jobs
                 self.log.warning(
-                    "out of all those logs, we can only link to one in the UI. " "Using the first one."
+                    "out of all those logs, we can only link to one in the UI. Using the first one."
                 )
 
             CloudWatchEventsLink.persist(
diff --git a/airflow/providers/openlineage/plugins/adapter.py b/airflow/providers/openlineage/plugins/adapter.py
index b74ac0c481b5a..1cb7ccf84bdf6 100644
--- a/airflow/providers/openlineage/plugins/adapter.py
+++ b/airflow/providers/openlineage/plugins/adapter.py
@@ -52,7 +52,7 @@
     "openlineage", "namespace", fallback=os.getenv("OPENLINEAGE_NAMESPACE", _DAG_DEFAULT_NAMESPACE)
 )
 
-_PRODUCER = f"https://github.com/apache/airflow/tree/providers-openlineage/" f"{OPENLINEAGE_PROVIDER_VERSION}"
+_PRODUCER = f"https://github.com/apache/airflow/tree/providers-openlineage/{OPENLINEAGE_PROVIDER_VERSION}"
 
 set_producer(_PRODUCER)
diff --git a/airflow/providers/openlineage/utils/utils.py b/airflow/providers/openlineage/utils/utils.py
index 2a180a59733b9..84ad41e23701f 100644
--- a/airflow/providers/openlineage/utils/utils.py
+++ b/airflow/providers/openlineage/utils/utils.py
@@ -367,7 +367,7 @@ def _redact(self, item: Redactable, name: str | None, depth: int, max_depth: int
             return super()._redact(item, name, depth, max_depth)
         except Exception as e:
             log.warning(
-                "Unable to redact %s" "Error was: %s: %s",
+                "Unable to redact %s. Error was: %s: %s",
                 repr(item),
                 type(e).__name__,
                 str(e),
diff --git a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
index 8af5360284f0f..c9bf21589ed7d 100644
--- a/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
+++ b/dev/breeze/src/airflow_breeze/commands/release_management_commands.py
@@ -608,7 +608,7 @@ def install_provider_packages(
         package_format=package_format, install_selected_providers=install_selected_providers
     )
     get_console().print(
-        f"[info]Splitting {len(list_of_all_providers)} " f"providers into max {parallelism} chunks"
+        f"[info]Splitting {len(list_of_all_providers)} providers into max {parallelism} chunks"
     )
     provider_chunks = [sorted(list_of_all_providers[i::parallelism]) for i in range(parallelism)]
     # filter out empty ones
diff --git a/pyproject.toml b/pyproject.toml
index 0568ed62f9674..b479dc7ac02df 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -46,6 +46,8 @@ extend-select = [
     "UP", # Pyupgrade
     "RUF100", # Unused noqa (auto-fixable)
+    # implicit single-line string concatenation
+    "ISC001",
     # We ignore more pydocstyle than we enable, so be more selective at what we enable
     "D101",
     "D106",
diff --git a/tests/providers/google/cloud/triggers/test_gcs.py b/tests/providers/google/cloud/triggers/test_gcs.py
index 9a83cf473a9b3..f40173b5748f1 100644
--- a/tests/providers/google/cloud/triggers/test_gcs.py
+++ b/tests/providers/google/cloud/triggers/test_gcs.py
@@ -160,7 +160,7 @@ def test_gcs_prefix_blob_trigger_serialization(self):
 
     @pytest.mark.asyncio
     @async_mock.patch(
-        "airflow.providers.google.cloud.triggers.gcs.GCSPrefixBlobTrigger" "._list_blobs_with_prefix"
+        "airflow.providers.google.cloud.triggers.gcs.GCSPrefixBlobTrigger._list_blobs_with_prefix"
     )
     async def test_gcs_prefix_blob_trigger_success(self, mock_list_blobs_with_prefixs):
         """
@@ -177,7 +177,7 @@ async def test_gcs_prefix_blob_trigger_success(self, mock_list_blobs_with_prefix
 
     @pytest.mark.asyncio
     @async_mock.patch(
-        "airflow.providers.google.cloud.triggers.gcs.GCSPrefixBlobTrigger" "._list_blobs_with_prefix"
+        "airflow.providers.google.cloud.triggers.gcs.GCSPrefixBlobTrigger._list_blobs_with_prefix"
     )
     async def test_gcs_prefix_blob_trigger_exception(self, mock_list_blobs_with_prefixs):
         """
@@ -191,7 +191,7 @@ async def test_gcs_prefix_blob_trigger_exception(self, mock_list_blobs_with_pref
 
     @pytest.mark.asyncio
     @async_mock.patch(
-        "airflow.providers.google.cloud.triggers.gcs.GCSPrefixBlobTrigger" "._list_blobs_with_prefix"
+        "airflow.providers.google.cloud.triggers.gcs.GCSPrefixBlobTrigger._list_blobs_with_prefix"
     )
     async def test_gcs_prefix_blob_trigger_pending(self, mock_list_blobs_with_prefixs):
         """
diff --git a/tests/providers/microsoft/azure/log/test_wasb_task_handler.py b/tests/providers/microsoft/azure/log/test_wasb_task_handler.py
index 73001ae21bfc2..4dfe1e1e7cfd3 100644
--- a/tests/providers/microsoft/azure/log/test_wasb_task_handler.py
+++ b/tests/providers/microsoft/azure/log/test_wasb_task_handler.py
@@ -113,7 +113,7 @@ def test_wasb_read(self, mock_hook_cls, ti):
         [
             (
                 "localhost",
-                "*** Found remote logs:\n" "*** * wasb://wasb-container/abc/hello.log\n" "Log line",
+                "*** Found remote logs:\n*** * wasb://wasb-container/abc/hello.log\nLog line",
             )
         ]
     ],
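
Note: ruff's ISC001 rule (single-line-implicit-string-concatenation), enabled in the pyproject.toml hunk above, flags adjacent string literals on one line, which is exactly the pattern every other hunk collapses into a single literal. A minimal sketch of what the rule catches (the variable name is illustrative, not from the patch):

    # Flagged by ISC001: the two literals are silently joined at parse time,
    # which can hide bugs such as the missing ". " separator fixed in
    # airflow/providers/openlineage/utils/utils.py above.
    message = "Unable to redact %s" "Error was: %s: %s"

    # Preferred form, as in the "+" lines of this diff:
    message = "Unable to redact %s. Error was: %s: %s"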