From 7c0ed930f46b5337c2763333f0df88982f8960d4 Mon Sep 17 00:00:00 2001
From: Alex Stephen
Date: Tue, 11 Nov 2025 14:46:53 -0800
Subject: [PATCH] Re-enable rule B028

---
 pyiceberg/avro/decoder.py    | 2 +-
 pyiceberg/io/__init__.py     | 2 +-
 pyiceberg/io/pyarrow.py      | 4 ++--
 pyiceberg/table/__init__.py  | 8 ++++----
 pyiceberg/table/snapshots.py | 2 +-
 ruff.toml                    | 1 -
 tests/io/test_pyarrow.py     | 2 +-
 7 files changed, 10 insertions(+), 11 deletions(-)

diff --git a/pyiceberg/avro/decoder.py b/pyiceberg/avro/decoder.py
index 75b3209027..d30475acf1 100644
--- a/pyiceberg/avro/decoder.py
+++ b/pyiceberg/avro/decoder.py
@@ -181,6 +181,6 @@ def new_decoder(b: bytes) -> BinaryDecoder:
     except ModuleNotFoundError:
         import warnings
 
-        warnings.warn("Falling back to pure Python Avro decoder, missing Cython implementation")
+        warnings.warn("Falling back to pure Python Avro decoder, missing Cython implementation", stacklevel=2)
 
     return StreamingBinaryDecoder(b)
diff --git a/pyiceberg/io/__init__.py b/pyiceberg/io/__init__.py
index 1915afcd0b..981e03394f 100644
--- a/pyiceberg/io/__init__.py
+++ b/pyiceberg/io/__init__.py
@@ -340,7 +340,7 @@ def _infer_file_io_from_scheme(path: str, properties: Properties) -> FileIO | None:
                 if file_io := _import_file_io(file_io_path, properties):
                     return file_io
         else:
-            warnings.warn(f"No preferred file implementation for scheme: {parsed_url.scheme}")
+            warnings.warn(f"No preferred file implementation for scheme: {parsed_url.scheme}", stacklevel=2)
     return None
 
 
diff --git a/pyiceberg/io/pyarrow.py b/pyiceberg/io/pyarrow.py
index 5be4c5d241..179e9e8928 100644
--- a/pyiceberg/io/pyarrow.py
+++ b/pyiceberg/io/pyarrow.py
@@ -232,7 +232,7 @@ def _import_retry_strategy(impl: str) -> S3RetryStrategy | None:
         class_ = getattr(module, class_name)
         return class_()
     except (ModuleNotFoundError, AttributeError):
-        warnings.warn(f"Could not initialize S3 retry strategy: {impl}")
+        warnings.warn(f"Could not initialize S3 retry strategy: {impl}", stacklevel=2)
         return None
 
 
@@ -2768,7 +2768,7 @@ def _get_parquet_writer_kwargs(table_properties: Properties) -> Dict[str, Any]:
         f"{TableProperties.PARQUET_BLOOM_FILTER_COLUMN_ENABLED_PREFIX}.*",
     ]:
         if unsupported_keys := fnmatch.filter(table_properties, key_pattern):
-            warnings.warn(f"Parquet writer option(s) {unsupported_keys} not implemented")
+            warnings.warn(f"Parquet writer option(s) {unsupported_keys} not implemented", stacklevel=2)
 
     compression_codec = table_properties.get(TableProperties.PARQUET_COMPRESSION, TableProperties.PARQUET_COMPRESSION_DEFAULT)
     compression_level = property_as_int(
diff --git a/pyiceberg/table/__init__.py b/pyiceberg/table/__init__.py
index cc7c1c6af0..29b862ea1b 100644
--- a/pyiceberg/table/__init__.py
+++ b/pyiceberg/table/__init__.py
@@ -663,7 +663,7 @@ def delete(
             self.table_metadata.properties.get(TableProperties.DELETE_MODE, TableProperties.DELETE_MODE_DEFAULT)
             == TableProperties.DELETE_MODE_MERGE_ON_READ
         ):
-            warnings.warn("Merge on read is not yet supported, falling back to copy-on-write")
+            warnings.warn("Merge on read is not yet supported, falling back to copy-on-write", stacklevel=2)
 
         if isinstance(delete_filter, str):
             delete_filter = _parse_row_filter(delete_filter)
@@ -731,7 +731,7 @@ def delete(
                     overwrite_snapshot.append_data_file(replaced_data_file)
 
         if not delete_snapshot.files_affected and not delete_snapshot.rewrites_needed:
-            warnings.warn("Delete operation did not match any records")
+            warnings.warn("Delete operation did not match any records", stacklevel=2)
 
     def upsert(
         self,
@@ -1502,7 +1502,7 @@ def _do_commit(self, updates: Tuple[TableUpdate, ...], requirements: Tuple[TableRequirement, ...]) -> None:
             try:
                 self.catalog._delete_old_metadata(self.io, self.metadata, response.metadata)
             except Exception as e:
-                warnings.warn(f"Failed to delete old metadata after commit: {e}")
+                warnings.warn(f"Failed to delete old metadata after commit: {e}", stacklevel=2)
 
         self.metadata = response.metadata
         self.metadata_location = response.metadata_location
@@ -1728,7 +1728,7 @@ def projection(self) -> Schema:
                         schema for schema in self.table_metadata.schemas if schema.schema_id == snapshot.schema_id
                     )
                 except StopIteration:
-                    warnings.warn(f"Metadata does not contain schema with id: {snapshot.schema_id}")
+                    warnings.warn(f"Metadata does not contain schema with id: {snapshot.schema_id}", stacklevel=2)
         else:
             raise ValueError(f"Snapshot not found: {self.snapshot_id}")
 
diff --git a/pyiceberg/table/snapshots.py b/pyiceberg/table/snapshots.py
index 14b5fa833c..bc76569211 100644
--- a/pyiceberg/table/snapshots.py
+++ b/pyiceberg/table/snapshots.py
@@ -187,7 +187,7 @@ class Summary(IcebergBaseModel, Mapping[str, str]):
 
     def __init__(self, operation: Operation | None = None, **data: Any) -> None:
         if operation is None:
-            warnings.warn("Encountered invalid snapshot summary: operation is missing, defaulting to overwrite")
+            warnings.warn("Encountered invalid snapshot summary: operation is missing, defaulting to overwrite", stacklevel=2)
             operation = Operation.OVERWRITE
         super().__init__(operation=operation, **data)
         self._additional_properties = data
diff --git a/ruff.toml b/ruff.toml
index ab0ef4f90c..a52fb5a138 100644
--- a/ruff.toml
+++ b/ruff.toml
@@ -60,7 +60,6 @@ select = [
 ignore = [
   "E501",
   "B024",
-  "B028",
   "UP037",
   "UP035",
   "UP006"
diff --git a/tests/io/test_pyarrow.py b/tests/io/test_pyarrow.py
index 3765ea6de6..3bec6fd157 100644
--- a/tests/io/test_pyarrow.py
+++ b/tests/io/test_pyarrow.py
@@ -2804,7 +2804,7 @@ def test_pyarrow_io_multi_fs() -> None:
 class SomeRetryStrategy(AwsDefaultS3RetryStrategy):
     def __init__(self) -> None:
         super().__init__()
-        warnings.warn("Initialized SomeRetryStrategy 👍", stacklevel=2)
+        warnings.warn("Initialized SomeRetryStrategy 👍", stacklevel=2)
 
 
 def test_retry_strategy() -> None:
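
Note on the rule: B028 (no-explicit-stacklevel) flags warnings.warn() calls
that omit the stacklevel argument. With the default stacklevel=1, the warning
is attributed to the warnings.warn() line inside the library itself; with
stacklevel=2 it is attributed to the caller, which is almost always the more
useful location. A minimal standalone sketch of the difference (the module
and function names below are invented for illustration and are not part of
this patch):

    # mylib.py -- hypothetical library module
    import warnings

    def load(path: str) -> None:
        # stacklevel=2 makes the warning point at the line that called
        # load(), not at this warnings.warn() line inside the library.
        warnings.warn(f"Falling back to slow path for {path}", stacklevel=2)

    # main.py -- hypothetical caller
    # import mylib
    # mylib.load("data.avro")  # the warning is now reported against this line

Because the stacklevel-adjusted frame also determines the filename, lineno,
and module used by warning filters, this makes -W options and pytest
filterwarnings entries match at the call site in user code rather than deep
inside pyiceberg.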
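To verify locally that the rule is active after removing it from the ignore
list, ruff can be limited to just this diagnostic (the paths are an assumption
about the checkout layout; --select itself is standard ruff CLI):

    ruff check --select B028 pyiceberg tests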