Commit 2817c61

Cleanup
1 parent 6dd9308 commit 2817c61

File tree

1 file changed: +1 -6 lines changed

pyiceberg/io/pyarrow.py

Lines changed: 1 addition & 6 deletions
@@ -1595,14 +1595,10 @@ def to_record_batches(self, tasks: Iterable[FileScanTask]) -> Iterator[pa.RecordBatch]:
             ValueError: When a field type in the file cannot be projected to the schema type
         """
         deletes_per_file = _read_all_delete_files(self._io, tasks)
-        # Always use large types, since we cannot infer it in a streaming fashion,
-        # without fetching all the schemas first, which defeats the purpose of streaming
         return self._record_batches_from_scan_tasks_and_deletes(tasks, deletes_per_file)
 
     def _record_batches_from_scan_tasks_and_deletes(
-        self,
-        tasks: Iterable[FileScanTask],
-        deletes_per_file: Dict[str, List[ChunkedArray]],
+        self, tasks: Iterable[FileScanTask], deletes_per_file: Dict[str, List[ChunkedArray]]
     ) -> Iterator[pa.RecordBatch]:
         total_row_count = 0
         for task in tasks:
@@ -1650,7 +1646,6 @@ class ArrowProjectionVisitor(SchemaWithPartnerVisitor[pa.Array, Optional[pa.Array]]):
     _file_schema: Schema
     _include_field_ids: bool
     _downcast_ns_timestamp_to_us: bool
-    _use_large_types: Optional[bool]
 
     def __init__(
         self,
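The comment removed in the first hunk refers to Arrow's large string/binary types. As a rough sketch of that reasoning (not part of this commit; the column name and values are made up), a streaming reader that may receive both string and large_string columns from different files can only emit a consistent schema by casting each batch as it arrives, since fetching every file's schema up front would defeat the purpose of streaming:

import pyarrow as pa

# Sketch only: two hypothetical source files whose Arrow schemas differ in string width.
small = pa.array(["a", "b"], type=pa.string())
large = pa.array(["c"], type=pa.large_string())

# Cast each incoming batch to the large variant so the output stream keeps one schema,
# without inspecting every file's schema before streaming starts.
for arr in (small, large):
    batch = pa.RecordBatch.from_arrays([arr.cast(pa.large_string())], names=["name"])
    print(batch.schema)  # name: large_string for both batches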

0 commit comments
