Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion airflow/providers/amazon/aws/hooks/batch_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -419,7 +419,7 @@ def get_job_awslogs_info(self, job_id: str) -> dict[str, str] | None:
return None
if len(all_info) > 1:
self.log.warning(
f"AWS Batch job ({job_id}) has more than one log stream, " f"only returning the first one."
f"AWS Batch job ({job_id}) has more than one log stream, only returning the first one."
)
return all_info[0]

Expand Down
2 changes: 1 addition & 1 deletion airflow/providers/amazon/aws/operators/batch.py
Original file line number Diff line number Diff line change
Expand Up @@ -314,7 +314,7 @@ def monitor_job(self, context: Context):
if len(awslogs) > 1:
# there can be several log streams on multi-node jobs
self.log.warning(
"out of all those logs, we can only link to one in the UI. " "Using the first one."
"out of all those logs, we can only link to one in the UI. Using the first one."
)

CloudWatchEventsLink.persist(
Expand Down
2 changes: 1 addition & 1 deletion airflow/providers/openlineage/plugins/adapter.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@
"openlineage", "namespace", fallback=os.getenv("OPENLINEAGE_NAMESPACE", _DAG_DEFAULT_NAMESPACE)
)

_PRODUCER = f"https://github.com/apache/airflow/tree/providers-openlineage/" f"{OPENLINEAGE_PROVIDER_VERSION}"
_PRODUCER = f"https://github.com/apache/airflow/tree/providers-openlineage/{OPENLINEAGE_PROVIDER_VERSION}"

set_producer(_PRODUCER)

Expand Down
2 changes: 1 addition & 1 deletion airflow/providers/openlineage/utils/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -367,7 +367,7 @@ def _redact(self, item: Redactable, name: str | None, depth: int, max_depth: int
return super()._redact(item, name, depth, max_depth)
except Exception as e:
log.warning(
"Unable to redact %s" "Error was: %s: %s",
"Unable to redact %s. Error was: %s: %s",
repr(item),
type(e).__name__,
str(e),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -608,7 +608,7 @@ def install_provider_packages(
package_format=package_format, install_selected_providers=install_selected_providers
)
get_console().print(
f"[info]Splitting {len(list_of_all_providers)} " f"providers into max {parallelism} chunks"
f"[info]Splitting {len(list_of_all_providers)} providers into max {parallelism} chunks"
)
provider_chunks = [sorted(list_of_all_providers[i::parallelism]) for i in range(parallelism)]
# filter out empty ones
Expand Down
2 changes: 2 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,8 @@ extend-select = [
"UP", # Pyupgrade
"RUF100", # Unused noqa (auto-fixable)

# implicit single-line string concatenation
"ISC001",
# We ignore more pydocstyle than we enable, so be more selective at what we enable
"D101",
"D106",
Expand Down
6 changes: 3 additions & 3 deletions tests/providers/google/cloud/triggers/test_gcs.py
Original file line number Diff line number Diff line change
Expand Up @@ -160,7 +160,7 @@ def test_gcs_prefix_blob_trigger_serialization(self):

@pytest.mark.asyncio
@async_mock.patch(
"airflow.providers.google.cloud.triggers.gcs.GCSPrefixBlobTrigger" "._list_blobs_with_prefix"
"airflow.providers.google.cloud.triggers.gcs.GCSPrefixBlobTrigger._list_blobs_with_prefix"
)
async def test_gcs_prefix_blob_trigger_success(self, mock_list_blobs_with_prefixs):
"""
Expand All @@ -177,7 +177,7 @@ async def test_gcs_prefix_blob_trigger_success(self, mock_list_blobs_with_prefix

@pytest.mark.asyncio
@async_mock.patch(
"airflow.providers.google.cloud.triggers.gcs.GCSPrefixBlobTrigger" "._list_blobs_with_prefix"
"airflow.providers.google.cloud.triggers.gcs.GCSPrefixBlobTrigger._list_blobs_with_prefix"
)
async def test_gcs_prefix_blob_trigger_exception(self, mock_list_blobs_with_prefixs):
"""
Expand All @@ -191,7 +191,7 @@ async def test_gcs_prefix_blob_trigger_exception(self, mock_list_blobs_with_pref

@pytest.mark.asyncio
@async_mock.patch(
"airflow.providers.google.cloud.triggers.gcs.GCSPrefixBlobTrigger" "._list_blobs_with_prefix"
"airflow.providers.google.cloud.triggers.gcs.GCSPrefixBlobTrigger._list_blobs_with_prefix"
)
async def test_gcs_prefix_blob_trigger_pending(self, mock_list_blobs_with_prefixs):
"""
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,7 @@ def test_wasb_read(self, mock_hook_cls, ti):
[
(
"localhost",
"*** Found remote logs:\n" "*** * wasb://wasb-container/abc/hello.log\n" "Log line",
"*** Found remote logs:\n*** * wasb://wasb-container/abc/hello.log\nLog line",
)
]
],
Expand Down