From afed13dc86b12a04d44b565e3d25f09f9582dcab Mon Sep 17 00:00:00 2001 From: Matthijs Brobbel Date: Mon, 25 Aug 2025 14:05:57 +0200 Subject: [PATCH 01/12] Bump MSRV to 1.85 --- Cargo.toml | 2 +- arrow-array/src/arithmetic.rs | 8 ++++---- arrow-pyarrow-integration-testing/Cargo.toml | 2 +- arrow-pyarrow-testing/Cargo.toml | 2 +- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 722a1cd7ea19..de3d5e8b77b9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -81,7 +81,7 @@ include = [ "NOTICE.txt", ] edition = "2021" -rust-version = "1.84" +rust-version = "1.85" [workspace.dependencies] arrow = { version = "56.1.0", path = "./arrow", default-features = false } diff --git a/arrow-array/src/arithmetic.rs b/arrow-array/src/arithmetic.rs index 031864cb0809..0e2aa5a28ca9 100644 --- a/arrow-array/src/arithmetic.rs +++ b/arrow-array/src/arithmetic.rs @@ -420,13 +420,13 @@ native_type_float_op!( 1., unsafe { // Need to allow in clippy because - // current MSRV (Minimum Supported Rust Version) is `1.84.0` but this item is stable since `1.87.0` + // current MSRV (Minimum Supported Rust Version) is `1.85.0` but this item is stable since `1.87.0` #[allow(unnecessary_transmutes)] std::mem::transmute(-1_i32) }, unsafe { // Need to allow in clippy because - // current MSRV (Minimum Supported Rust Version) is `1.84.0` but this item is stable since `1.87.0` + // current MSRV (Minimum Supported Rust Version) is `1.85.0` but this item is stable since `1.87.0` #[allow(unnecessary_transmutes)] std::mem::transmute(i32::MAX) } @@ -437,13 +437,13 @@ native_type_float_op!( 1., unsafe { // Need to allow in clippy because - // current MSRV (Minimum Supported Rust Version) is `1.84.0` but this item is stable since `1.87.0` + // current MSRV (Minimum Supported Rust Version) is `1.85.0` but this item is stable since `1.87.0` #[allow(unnecessary_transmutes)] std::mem::transmute(-1_i64) }, unsafe { // Need to allow in clippy because - // current MSRV (Minimum Supported Rust Version) is `1.84.0` but this item is stable since `1.87.0` + // current MSRV (Minimum Supported Rust Version) is `1.85.0` but this item is stable since `1.87.0` #[allow(unnecessary_transmutes)] std::mem::transmute(i64::MAX) } diff --git a/arrow-pyarrow-integration-testing/Cargo.toml b/arrow-pyarrow-integration-testing/Cargo.toml index c757f6739373..38a7a9bd3492 100644 --- a/arrow-pyarrow-integration-testing/Cargo.toml +++ b/arrow-pyarrow-integration-testing/Cargo.toml @@ -25,7 +25,7 @@ authors = ["Apache Arrow "] license = "Apache-2.0" keywords = ["arrow"] edition = "2021" -rust-version = "1.84" +rust-version = "1.85" publish = false [lib] diff --git a/arrow-pyarrow-testing/Cargo.toml b/arrow-pyarrow-testing/Cargo.toml index 8bbf364f2e08..499182ffda9a 100644 --- a/arrow-pyarrow-testing/Cargo.toml +++ b/arrow-pyarrow-testing/Cargo.toml @@ -40,7 +40,7 @@ authors = ["Apache Arrow "] license = "Apache-2.0" keywords = ["arrow"] edition = "2021" -rust-version = "1.84" +rust-version = "1.85" publish = false From 965b4c236a702d4d9a66628476251e0d0a864f4e Mon Sep 17 00:00:00 2001 From: Matthijs Brobbel Date: Tue, 26 Aug 2025 10:15:04 +0200 Subject: [PATCH 02/12] Migrate to Rust 2024 --- Cargo.toml | 2 +- arrow-array/src/array/binary_array.rs | 4 +- arrow-array/src/array/boolean_array.rs | 12 +- arrow-array/src/array/byte_array.rs | 8 +- arrow-array/src/array/byte_view_array.rs | 24 +- arrow-array/src/array/dictionary_array.rs | 4 +- .../src/array/fixed_size_binary_array.rs | 8 +- arrow-array/src/array/list_array.rs | 4 +- 
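The two patches above go together: the 2024 edition requires rustc 1.85 or newer (edition 2024 was stabilized in Rust 1.85.0), so the MSRV bump in the first patch is a prerequisite for the `edition = "2024"` switch in the second. The `#[allow(unnecessary_transmutes)]` comments refer to the lint that (per the comment, stable since 1.87) would otherwise suggest replacing these integer-to-float transmutes with the safe bit-casting constructors. A quick, self-contained check of that equivalence, independent of the arrow macros involved:

fn main() {
    // `transmute(-1_i32)` reinterprets the all-ones bit pattern as an f32 ...
    let a: f32 = unsafe { std::mem::transmute(-1_i32) };
    // ... which is exactly what the suggested safe replacement produces.
    let b = f32::from_bits((-1_i32) as u32);
    assert_eq!(a.to_bits(), b.to_bits());

    let c: f32 = unsafe { std::mem::transmute(i32::MAX) };
    let d = f32::from_bits(i32::MAX as u32);
    assert_eq!(c.to_bits(), d.to_bits());
}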
arrow-array/src/array/list_view_array.rs | 8 +- arrow-array/src/array/map_array.rs | 4 +- arrow-array/src/array/mod.rs | 4 +- arrow-array/src/array/primitive_array.rs | 16 +- arrow-array/src/array/run_array.rs | 4 +- arrow-array/src/array/string_array.rs | 4 +- arrow-array/src/array/union_array.rs | 4 +- .../src/builder/generic_bytes_view_builder.rs | 25 +- arrow-array/src/ffi.rs | 8 +- arrow-array/src/ffi_stream.rs | 12 +- arrow-array/src/trusted_len.rs | 4 +- arrow-array/src/types.rs | 4 +- arrow-buffer/src/buffer/immutable.rs | 16 +- arrow-buffer/src/buffer/mutable.rs | 16 +- arrow-buffer/src/util/bit_mask.rs | 12 +- arrow-buffer/src/util/bit_util.rs | 12 +- arrow-cast/src/cast/mod.rs | 150 ++++++++---- arrow-cast/src/pretty.rs | 6 +- arrow-data/src/data.rs | 12 +- arrow-data/src/ffi.rs | 8 +- arrow-data/src/transform/utils.rs | 4 +- arrow-flight/src/client.rs | 8 +- arrow-flight/src/lib.rs | 60 ++--- arrow-integration-test/src/field.rs | 14 +- arrow-integration-testing/src/lib.rs | 14 +- arrow-ipc/benches/ipc_reader.rs | 4 +- arrow-ipc/src/convert.rs | 14 +- arrow-ipc/src/gen/File.rs | 26 +-- arrow-ipc/src/gen/Message.rs | 66 +++--- arrow-ipc/src/gen/Schema.rs | 220 +++++++++--------- arrow-ipc/src/gen/SparseTensor.rs | 44 ++-- arrow-ipc/src/gen/Tensor.rs | 18 +- arrow-ipc/src/lib.rs | 12 +- arrow-ipc/src/reader.rs | 40 ++-- arrow-ipc/src/writer.rs | 35 ++- arrow-ord/src/cmp.rs | 16 +- arrow-pyarrow-integration-testing/Cargo.toml | 2 +- arrow-pyarrow-testing/Cargo.toml | 2 +- arrow-row/src/fixed.rs | 4 +- arrow-row/src/lib.rs | 8 +- arrow-row/src/list.rs | 8 +- arrow-row/src/run.rs | 4 +- arrow-row/src/variable.rs | 8 +- arrow-schema/src/ffi.rs | 8 +- arrow/src/util/test_util.rs | 12 +- parquet/src/arrow/arrow_reader/mod.rs | 37 +-- parquet/src/arrow/arrow_writer/mod.rs | 75 +++--- parquet/src/arrow/buffer/view_buffer.rs | 4 +- parquet/src/arrow/schema/mod.rs | 23 +- parquet/src/column/page.rs | 10 +- parquet/src/encodings/decoding.rs | 8 +- parquet/src/encodings/rle.rs | 6 +- parquet/src/errors.rs | 2 +- parquet/src/file/metadata/mod.rs | 8 +- parquet/src/record/api.rs | 54 +++-- parquet/src/record/reader.rs | 6 +- parquet/src/schema/types.rs | 2 +- parquet/src/util/test_common/rand_gen.rs | 25 +- parquet_derive/src/lib.rs | 4 +- parquet_derive/src/parquet_field.rs | 68 +++--- 68 files changed, 720 insertions(+), 658 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index de3d5e8b77b9..3e8be50b4240 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -80,7 +80,7 @@ include = [ "LICENSE.txt", "NOTICE.txt", ] -edition = "2021" +edition = "2024" rust-version = "1.85" [workspace.dependencies] diff --git a/arrow-array/src/array/binary_array.rs b/arrow-array/src/array/binary_array.rs index 8e2158416f49..48a2602a425e 100644 --- a/arrow-array/src/array/binary_array.rs +++ b/arrow-array/src/array/binary_array.rs @@ -89,9 +89,9 @@ impl GenericBinaryArray { pub unsafe fn take_iter_unchecked<'a>( &'a self, indexes: impl Iterator> + 'a, - ) -> impl Iterator> { + ) -> impl Iterator> { unsafe { indexes.map(|opt_index| opt_index.map(|index| self.value_unchecked(index))) - } + }} } impl From>> for GenericBinaryArray { diff --git a/arrow-array/src/array/boolean_array.rs b/arrow-array/src/array/boolean_array.rs index fe7ad85b7a05..8353fb7f2515 100644 --- a/arrow-array/src/array/boolean_array.rs +++ b/arrow-array/src/array/boolean_array.rs @@ -183,9 +183,9 @@ impl BooleanArray { /// /// # Safety /// This doesn't check bounds, the caller must ensure that index < self.len() - pub unsafe fn value_unchecked(&self, i: 
usize) -> bool { + pub unsafe fn value_unchecked(&self, i: usize) -> bool { unsafe { self.values.value_unchecked(i) - } + }} /// Returns the boolean value at index `i`. /// @@ -221,9 +221,9 @@ impl BooleanArray { pub unsafe fn take_iter_unchecked<'a>( &'a self, indexes: impl Iterator> + 'a, - ) -> impl Iterator> + 'a { + ) -> impl Iterator> + 'a { unsafe { indexes.map(|opt_index| opt_index.map(|index| self.value_unchecked(index))) - } + }} /// Create a [`BooleanArray`] by evaluating the operation for /// each element of the provided array @@ -354,9 +354,9 @@ impl ArrayAccessor for &BooleanArray { BooleanArray::value(self, index) } - unsafe fn value_unchecked(&self, index: usize) -> Self::Item { + unsafe fn value_unchecked(&self, index: usize) -> Self::Item { unsafe { BooleanArray::value_unchecked(self, index) - } + }} } impl From> for BooleanArray { diff --git a/arrow-array/src/array/byte_array.rs b/arrow-array/src/array/byte_array.rs index 2ff9e9f4f658..2c580165f422 100644 --- a/arrow-array/src/array/byte_array.rs +++ b/arrow-array/src/array/byte_array.rs @@ -282,7 +282,7 @@ impl GenericByteArray { /// /// # Safety /// Caller is responsible for ensuring that the index is within the bounds of the array - pub unsafe fn value_unchecked(&self, i: usize) -> &T::Native { + pub unsafe fn value_unchecked(&self, i: usize) -> &T::Native { unsafe { let end = *self.value_offsets().get_unchecked(i + 1); let start = *self.value_offsets().get_unchecked(i); @@ -305,7 +305,7 @@ impl GenericByteArray { // SAFETY: // ArrayData is valid T::Native::from_bytes_unchecked(b) - } + }} /// Returns the element at index `i` /// @@ -508,9 +508,9 @@ impl<'a, T: ByteArrayType> ArrayAccessor for &'a GenericByteArray { GenericByteArray::value(self, index) } - unsafe fn value_unchecked(&self, index: usize) -> Self::Item { + unsafe fn value_unchecked(&self, index: usize) -> Self::Item { unsafe { GenericByteArray::value_unchecked(self, index) - } + }} } impl From for GenericByteArray { diff --git a/arrow-array/src/array/byte_view_array.rs b/arrow-array/src/array/byte_view_array.rs index 7c8993d6028e..855c28f3008b 100644 --- a/arrow-array/src/array/byte_view_array.rs +++ b/arrow-array/src/array/byte_view_array.rs @@ -323,7 +323,7 @@ impl GenericByteViewArray { /// /// Caller is responsible for ensuring that the index is within the bounds /// of the array - pub unsafe fn value_unchecked(&self, idx: usize) -> &T::Native { + pub unsafe fn value_unchecked(&self, idx: usize) -> &T::Native { unsafe { let v = self.views.get_unchecked(idx); let len = *v as u32; let b = if len <= MAX_INLINE_VIEW_LEN { @@ -335,7 +335,7 @@ impl GenericByteViewArray { data.get_unchecked(offset..offset + len as usize) }; T::Native::from_bytes_unchecked(b) - } + }} /// Returns the first `len` bytes the inline value of the view. /// @@ -343,10 +343,10 @@ impl GenericByteViewArray { /// - The `view` must be a valid element from `Self::views()` that adheres to the view layout. /// - The `len` must be the length of the inlined value. It should never be larger than [`MAX_INLINE_VIEW_LEN`]. 
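Most of the churn in this patch is the same mechanical change seen above: in the 2024 edition the body of an `unsafe fn` is no longer an implicit `unsafe` block (`unsafe_op_in_unsafe_fn` warns by default), so each unsafe operation gets wrapped explicitly, which is why so many functions now end in `}}`. A minimal sketch of the pattern, with `get_unchecked` standing in for the slice and pointer accesses used in these arrays:

/// # Safety
/// Caller must ensure that `i < data.len()`.
pub unsafe fn value_unchecked(data: &[u8], i: usize) -> u8 {
    // Edition 2021 accepted `*data.get_unchecked(i)` bare here;
    // edition 2024 requires the explicit inner `unsafe` block.
    unsafe { *data.get_unchecked(i) }
}

fn main() {
    let data = [1u8, 2, 3];
    // SAFETY: index 1 is within bounds of `data`.
    assert_eq!(unsafe { value_unchecked(&data, 1) }, 2);
}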
#[inline(always)] - pub unsafe fn inline_value(view: &u128, len: usize) -> &[u8] { + pub unsafe fn inline_value(view: &u128, len: usize) -> &[u8] { unsafe { debug_assert!(len <= MAX_INLINE_VIEW_LEN as usize); std::slice::from_raw_parts((view as *const u128 as *const u8).wrapping_add(4), len) - } + }} /// Constructs a new iterator for iterating over the values of this array pub fn iter(&self) -> ArrayIter<&Self> { @@ -539,7 +539,7 @@ impl GenericByteViewArray { /// `buffer_index` reset to `0` and its `offset` updated so that it points /// into the bytes just appended at the end of `data_buf`. #[inline(always)] - unsafe fn copy_view_to_buffer(&self, i: usize, data_buf: &mut Vec) -> u128 { + unsafe fn copy_view_to_buffer(&self, i: usize, data_buf: &mut Vec) -> u128 { unsafe { // SAFETY: `i < self.len()` ensures this is in‑bounds. let raw_view = *self.views().get_unchecked(i); let mut bv = ByteView::from(raw_view); @@ -563,7 +563,7 @@ impl GenericByteViewArray { bv.offset = new_offset; bv.into() } - } + }} /// Returns the total number of bytes used by all non inlined views in all /// buffers. @@ -623,7 +623,7 @@ impl GenericByteViewArray { left_idx: usize, right: &GenericByteViewArray, right_idx: usize, - ) -> Ordering { + ) -> Ordering { unsafe { let l_view = left.views().get_unchecked(left_idx); let l_byte_view = ByteView::from(*l_view); @@ -654,7 +654,7 @@ impl GenericByteViewArray { let r_full_data: &[u8] = unsafe { right.value_unchecked(right_idx).as_ref() }; l_full_data.cmp(r_full_data) - } + }} /// Builds a 128-bit composite key for an inline value: /// @@ -852,9 +852,9 @@ impl<'a, T: ByteViewType + ?Sized> ArrayAccessor for &'a GenericByteViewArray GenericByteViewArray::value(self, index) } - unsafe fn value_unchecked(&self, index: usize) -> Self::Item { + unsafe fn value_unchecked(&self, index: usize) -> Self::Item { unsafe { GenericByteViewArray::value_unchecked(self, index) - } + }} } impl<'a, T: ByteViewType + ?Sized> IntoIterator for &'a GenericByteViewArray { @@ -998,9 +998,9 @@ impl BinaryViewArray { /// Convert the [`BinaryViewArray`] to [`StringViewArray`] /// # Safety /// Caller is responsible for ensuring that items in array are utf8 data. 
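For context on what `inline_value` above is reading: a byte view is a 16-byte (`u128`) value whose first four bytes hold the length and, for values of at most 12 bytes, the bytes themselves follow inline, which is why the code offsets the view pointer by 4. A rough, self-contained model of that layout (`make_inline_view` is made up for illustration and assumes a little-endian target, as the pointer arithmetic above does):

fn make_inline_view(s: &str) -> u128 {
    assert!(s.len() <= 12, "only short values are stored inline");
    let mut bytes = [0u8; 16];
    bytes[..4].copy_from_slice(&(s.len() as u32).to_le_bytes());
    bytes[4..4 + s.len()].copy_from_slice(s.as_bytes());
    u128::from_le_bytes(bytes)
}

fn main() {
    let view = make_inline_view("hello");
    let len = view as u32 as usize; // the low 32 bits are the length
    // SAFETY: the view is 16 bytes and len <= 12, so 4 + len stays in bounds.
    let inline = unsafe {
        std::slice::from_raw_parts((&view as *const u128 as *const u8).add(4), len)
    };
    assert_eq!(inline, b"hello");
}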
- pub unsafe fn to_string_view_unchecked(self) -> StringViewArray { + pub unsafe fn to_string_view_unchecked(self) -> StringViewArray { unsafe { StringViewArray::new_unchecked(self.views, self.buffers, self.nulls) - } + }} } impl From> for BinaryViewArray { diff --git a/arrow-array/src/array/dictionary_array.rs b/arrow-array/src/array/dictionary_array.rs index acbdcb8b60fa..34b245d2cfcc 100644 --- a/arrow-array/src/array/dictionary_array.rs +++ b/arrow-array/src/array/dictionary_array.rs @@ -946,7 +946,7 @@ where unsafe { self.value_unchecked(index) } } - unsafe fn value_unchecked(&self, index: usize) -> Self::Item { + unsafe fn value_unchecked(&self, index: usize) -> Self::Item { unsafe { let val = self.dictionary.keys.value_unchecked(index); let value_idx = val.as_usize(); @@ -956,7 +956,7 @@ where true => self.values.value_unchecked(value_idx), false => Default::default(), } - } + }} } /// A [`DictionaryArray`] with the key type erased diff --git a/arrow-array/src/array/fixed_size_binary_array.rs b/arrow-array/src/array/fixed_size_binary_array.rs index 76d9db04704e..37beb8bdd9a8 100644 --- a/arrow-array/src/array/fixed_size_binary_array.rs +++ b/arrow-array/src/array/fixed_size_binary_array.rs @@ -167,14 +167,14 @@ impl FixedSizeBinaryArray { /// /// Caller is responsible for ensuring that the index is within the bounds /// of the array - pub unsafe fn value_unchecked(&self, i: usize) -> &[u8] { + pub unsafe fn value_unchecked(&self, i: usize) -> &[u8] { unsafe { let offset = i + self.offset(); let pos = self.value_offset_at(offset); std::slice::from_raw_parts( self.value_data.as_ptr().offset(pos as isize), (self.value_offset_at(offset + 1) - pos) as usize, ) - } + }} /// Returns the offset for the element at index `i`. /// @@ -653,9 +653,9 @@ impl<'a> ArrayAccessor for &'a FixedSizeBinaryArray { FixedSizeBinaryArray::value(self, index) } - unsafe fn value_unchecked(&self, index: usize) -> Self::Item { + unsafe fn value_unchecked(&self, index: usize) -> Self::Item { unsafe { FixedSizeBinaryArray::value_unchecked(self, index) - } + }} } impl<'a> IntoIterator for &'a FixedSizeBinaryArray { diff --git a/arrow-array/src/array/list_array.rs b/arrow-array/src/array/list_array.rs index 8836b5b0f73d..990733acfbb8 100644 --- a/arrow-array/src/array/list_array.rs +++ b/arrow-array/src/array/list_array.rs @@ -333,11 +333,11 @@ impl GenericListArray { /// /// # Safety /// Caller must ensure that the index is within the array bounds - pub unsafe fn value_unchecked(&self, i: usize) -> ArrayRef { + pub unsafe fn value_unchecked(&self, i: usize) -> ArrayRef { unsafe { let end = self.value_offsets().get_unchecked(i + 1).as_usize(); let start = self.value_offsets().get_unchecked(i).as_usize(); self.values.slice(start, end - start) - } + }} /// Returns ith value of this list array. /// diff --git a/arrow-array/src/array/list_view_array.rs b/arrow-array/src/array/list_view_array.rs index 7d66d10d263c..1522402fb126 100644 --- a/arrow-array/src/array/list_view_array.rs +++ b/arrow-array/src/array/list_view_array.rs @@ -289,11 +289,11 @@ impl GenericListViewArray { /// /// # Safety /// Caller must ensure that the index is within the array bounds - pub unsafe fn value_unchecked(&self, i: usize) -> ArrayRef { + pub unsafe fn value_unchecked(&self, i: usize) -> ArrayRef { unsafe { let offset = self.value_offsets().get_unchecked(i).as_usize(); let length = self.value_sizes().get_unchecked(i).as_usize(); self.values.slice(offset, length) - } + }} /// Returns ith value of this list view array. 
/// @@ -365,9 +365,9 @@ impl ArrayAccessor for &GenericListViewArray Self::Item { + unsafe fn value_unchecked(&self, index: usize) -> Self::Item { unsafe { GenericListViewArray::value_unchecked(self, index) - } + }} } impl Array for GenericListViewArray { diff --git a/arrow-array/src/array/map_array.rs b/arrow-array/src/array/map_array.rs index 9a1e04c7f1c0..586c65f6eee9 100644 --- a/arrow-array/src/array/map_array.rs +++ b/arrow-array/src/array/map_array.rs @@ -190,12 +190,12 @@ impl MapArray { /// /// # Safety /// Caller must ensure that the index is within the array bounds - pub unsafe fn value_unchecked(&self, i: usize) -> StructArray { + pub unsafe fn value_unchecked(&self, i: usize) -> StructArray { unsafe { let end = *self.value_offsets().get_unchecked(i + 1); let start = *self.value_offsets().get_unchecked(i); self.entries .slice(start.to_usize().unwrap(), (end - start).to_usize().unwrap()) - } + }} /// Returns ith value of this map array. /// diff --git a/arrow-array/src/array/mod.rs b/arrow-array/src/array/mod.rs index 5fdfb9fb2244..df2a60da0e73 100644 --- a/arrow-array/src/array/mod.rs +++ b/arrow-array/src/array/mod.rs @@ -815,7 +815,7 @@ pub fn make_array(data: ArrayData) -> ArrayRef { DataType::Map(_, _) => Arc::new(MapArray::from(data)) as ArrayRef, DataType::Union(_, _) => Arc::new(UnionArray::from(data)) as ArrayRef, DataType::FixedSizeList(_, _) => Arc::new(FixedSizeListArray::from(data)) as ArrayRef, - DataType::Dictionary(ref key_type, _) => match key_type.as_ref() { + DataType::Dictionary(key_type, _) => match key_type.as_ref() { DataType::Int8 => Arc::new(DictionaryArray::::from(data)) as ArrayRef, DataType::Int16 => Arc::new(DictionaryArray::::from(data)) as ArrayRef, DataType::Int32 => Arc::new(DictionaryArray::::from(data)) as ArrayRef, @@ -826,7 +826,7 @@ pub fn make_array(data: ArrayData) -> ArrayRef { DataType::UInt64 => Arc::new(DictionaryArray::::from(data)) as ArrayRef, dt => panic!("Unexpected dictionary key type {dt:?}"), }, - DataType::RunEndEncoded(ref run_ends_type, _) => match run_ends_type.data_type() { + DataType::RunEndEncoded(run_ends_type, _) => match run_ends_type.data_type() { DataType::Int16 => Arc::new(RunArray::::from(data)) as ArrayRef, DataType::Int32 => Arc::new(RunArray::::from(data)) as ArrayRef, DataType::Int64 => Arc::new(RunArray::::from(data)) as ArrayRef, diff --git a/arrow-array/src/array/primitive_array.rs b/arrow-array/src/array/primitive_array.rs index 42594e7a129d..fa262a5fbdc8 100644 --- a/arrow-array/src/array/primitive_array.rs +++ b/arrow-array/src/array/primitive_array.rs @@ -727,9 +727,9 @@ impl PrimitiveArray { /// /// caller must ensure that the passed in offset is less than the array len() #[inline] - pub unsafe fn value_unchecked(&self, i: usize) -> T::Native { + pub unsafe fn value_unchecked(&self, i: usize) -> T::Native { unsafe { *self.values.get_unchecked(i) - } + }} /// Returns the primitive value at index `i`. /// @@ -795,9 +795,9 @@ impl PrimitiveArray { pub unsafe fn take_iter_unchecked<'a>( &'a self, indexes: impl Iterator> + 'a, - ) -> impl Iterator> + 'a { + ) -> impl Iterator> + 'a { unsafe { indexes.map(|opt_index| opt_index.map(|index| self.value_unchecked(index))) - } + }} /// Returns a zero-copy slice of this array with the indicated offset and length. 
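The `ref` removals in `make_array` above (and in the similar `match`/`if let` sites later in this patch) come from the 2024 edition's match-ergonomics reservations: when the scrutinee is already behind a reference, such as the `&DataType` returned by `data.data_type()`, the default binding mode is by-reference and an explicit `ref`/`ref mut` modifier is rejected, while the plain binding yields the same reference it always did. A small sketch with a made-up enum (not an arrow type):

#[derive(Debug)]
enum Shape {
    Circle { radius: f64 },
    Square { side: f64 },
}

fn area(shape: &Shape) -> f64 {
    match shape {
        // Rust 2021 also tolerated `Shape::Circle { ref radius }` here;
        // under 2024 the explicit `ref` is an error, and `radius` is
        // already bound as `&f64` by match ergonomics.
        Shape::Circle { radius } => std::f64::consts::PI * radius * radius,
        Shape::Square { side } => side * side,
    }
}

fn main() {
    for shape in [Shape::Circle { radius: 1.0 }, Shape::Square { side: 2.0 }] {
        println!("{shape:?} has area {}", area(&shape));
    }
}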
pub fn slice(&self, offset: usize, length: usize) -> Self { @@ -1229,9 +1229,9 @@ impl ArrayAccessor for &PrimitiveArray { } #[inline] - unsafe fn value_unchecked(&self, index: usize) -> Self::Item { + unsafe fn value_unchecked(&self, index: usize) -> Self::Item { unsafe { PrimitiveArray::value_unchecked(self, index) - } + }} } impl PrimitiveArray @@ -1466,7 +1466,7 @@ impl PrimitiveArray { where P: std::borrow::Borrow::Native>>, I: IntoIterator, - { + { unsafe { let iterator = iter.into_iter(); let (_, upper) = iterator.size_hint(); let len = upper.expect("trusted_len_unzip requires an upper limit"); @@ -1476,7 +1476,7 @@ impl PrimitiveArray { let data = ArrayData::new_unchecked(T::DATA_TYPE, len, None, Some(null), 0, vec![buffer], vec![]); PrimitiveArray::from(data) - } + }} } // TODO: the macro is needed here because we'd get "conflicting implementations" error diff --git a/arrow-array/src/array/run_array.rs b/arrow-array/src/array/run_array.rs index 05cfa2d17135..c63f0b327180 100644 --- a/arrow-array/src/array/run_array.rs +++ b/arrow-array/src/array/run_array.rs @@ -639,10 +639,10 @@ where unsafe { self.value_unchecked(logical_index) } } - unsafe fn value_unchecked(&self, logical_index: usize) -> Self::Item { + unsafe fn value_unchecked(&self, logical_index: usize) -> Self::Item { unsafe { let physical_index = self.run_array.get_physical_index(logical_index); self.values().value_unchecked(physical_index) - } + }} } impl<'a, R, V> IntoIterator for TypedRunArray<'a, R, V> diff --git a/arrow-array/src/array/string_array.rs b/arrow-array/src/array/string_array.rs index ed70e5744fff..05daf813b5af 100644 --- a/arrow-array/src/array/string_array.rs +++ b/arrow-array/src/array/string_array.rs @@ -47,9 +47,9 @@ impl GenericStringArray { pub unsafe fn take_iter_unchecked<'a>( &'a self, indexes: impl Iterator> + 'a, - ) -> impl Iterator> { + ) -> impl Iterator> { unsafe { indexes.map(|opt_index| opt_index.map(|index| self.value_unchecked(index))) - } + }} /// Fallibly creates a [`GenericStringArray`] from a [`GenericBinaryArray`] returning /// an error if [`GenericBinaryArray`] contains invalid UTF-8 data diff --git a/arrow-array/src/array/union_array.rs b/arrow-array/src/array/union_array.rs index d105876723da..091953ddfec0 100644 --- a/arrow-array/src/array/union_array.rs +++ b/arrow-array/src/array/union_array.rs @@ -151,7 +151,7 @@ impl UnionArray { type_ids: ScalarBuffer, offsets: Option>, children: Vec, - ) -> Self { + ) -> Self { unsafe { let mode = if offsets.is_some() { UnionMode::Dense } else { @@ -169,7 +169,7 @@ impl UnionArray { None => builder.build_unchecked(), }; Self::from(data) - } + }} /// Attempts to create a new `UnionArray`, validating the inputs provided. /// diff --git a/arrow-array/src/builder/generic_bytes_view_builder.rs b/arrow-array/src/builder/generic_bytes_view_builder.rs index cba2bb428e53..419fc1cfbb2a 100644 --- a/arrow-array/src/builder/generic_bytes_view_builder.rs +++ b/arrow-array/src/builder/generic_bytes_view_builder.rs @@ -22,8 +22,8 @@ use std::sync::Arc; use arrow_buffer::{Buffer, NullBufferBuilder, ScalarBuffer}; use arrow_data::{ByteView, MAX_INLINE_VIEW_LEN}; use arrow_schema::ArrowError; -use hashbrown::hash_table::Entry; use hashbrown::HashTable; +use hashbrown::hash_table::Entry; use crate::builder::ArrayBuilder; use crate::types::bytes::ByteArrayNativeType; @@ -121,7 +121,7 @@ impl GenericByteViewBuilder { /// growing buffer size exponentially from 8KB up to 2MB. The /// first buffer allocated is 8KB, then 16KB, then 32KB, etc up to 2MB. 
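As a side note on the buffer-size documentation above: the default growth schedule it describes (8KB doubling up to 2MB) can be written down directly. The helper below is an illustrative model of that schedule only, not the builder's actual implementation:

fn default_block_size(block_index: u32) -> u32 {
    const STARTING_BLOCK_SIZE: u32 = 8 * 1024; // 8KB, as documented above
    const MAX_BLOCK_SIZE: u32 = 2 * 1024 * 1024; // 2MB cap
    STARTING_BLOCK_SIZE
        .saturating_mul(1u32 << block_index.min(8))
        .min(MAX_BLOCK_SIZE)
}

fn main() {
    assert_eq!(default_block_size(0), 8 * 1024); // first buffer: 8KB
    assert_eq!(default_block_size(1), 16 * 1024); // then 16KB, 32KB, ...
    assert_eq!(default_block_size(20), 2 * 1024 * 1024); // capped at 2MB
}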
/// - /// If this method is used, any new buffers allocated are + /// If this method is used, any new buffers allocated are /// exactly this size. This can be useful for advanced users /// that want to control the memory usage and buffer count. /// @@ -187,7 +187,7 @@ impl GenericByteViewBuilder { /// (1) The block must have been added using [`Self::append_block`] /// (2) The range `offset..offset+length` must be within the bounds of the block /// (3) The data in the block must be valid of type `T` - pub unsafe fn append_view_unchecked(&mut self, block: u32, offset: u32, len: u32) { + pub unsafe fn append_view_unchecked(&mut self, block: u32, offset: u32, len: u32) { unsafe { let b = self.completed.get_unchecked(block as usize); let start = offset as usize; let end = start.saturating_add(len as usize); @@ -196,7 +196,7 @@ impl GenericByteViewBuilder { let view = make_view(b, block, offset); self.views_buffer.push(view); self.null_buffer_builder.append_non_null(); - } + }} /// Appends an array to the builder. /// This will flush any in-progress block and append the data buffers @@ -390,7 +390,7 @@ impl GenericByteViewBuilder { self.flush_in_progress(); let completed = std::mem::take(&mut self.completed); let nulls = self.null_buffer_builder.finish(); - if let Some((ref mut ht, _)) = self.string_tracker.as_mut() { + if let Some((ht, _)) = self.string_tracker.as_mut() { ht.clear(); } let views = std::mem::take(&mut self.views_buffer); @@ -688,7 +688,10 @@ mod tests { ); let err = v.try_append_view(0, u32::MAX, 1).unwrap_err(); - assert_eq!(err.to_string(), "Invalid argument error: Range 4294967295..4294967296 out of bounds for block of length 17"); + assert_eq!( + err.to_string(), + "Invalid argument error: Range 4294967295..4294967296 out of bounds for block of length 17" + ); let err = v.try_append_view(0, 1, u32::MAX).unwrap_err(); assert_eq!( @@ -739,10 +742,12 @@ mod tests { assert_eq!(fixed_builder.completed.len(), 2_usize.pow(i + 1) - 1); // Every buffer is fixed size - assert!(fixed_builder - .completed - .iter() - .all(|b| b.len() == STARTING_BLOCK_SIZE as usize)); + assert!( + fixed_builder + .completed + .iter() + .all(|b| b.len() == STARTING_BLOCK_SIZE as usize) + ); } // Add one more value, and the buffer stop growing. diff --git a/arrow-array/src/ffi.rs b/arrow-array/src/ffi.rs index 83eaa3d6544a..e9f2ae64c0b4 100644 --- a/arrow-array/src/ffi.rs +++ b/arrow-array/src/ffi.rs @@ -129,7 +129,7 @@ pub unsafe fn export_array_into_raw( src: ArrayRef, out_array: *mut FFI_ArrowArray, out_schema: *mut FFI_ArrowSchema, -) -> Result<()> { +) -> Result<()> { unsafe { let data = src.to_data(); let array = FFI_ArrowArray::new(&data); let schema = FFI_ArrowSchema::try_from(data.data_type())?; @@ -138,7 +138,7 @@ pub unsafe fn export_array_into_raw( std::ptr::write_unaligned(out_schema, schema); Ok(()) -} +}} // returns the number of bits that buffer `i` (in the C data interface) is expected to have. 
// This is set by the Arrow specification @@ -244,13 +244,13 @@ unsafe fn create_buffer( array: &FFI_ArrowArray, index: usize, len: usize, -) -> Option { +) -> Option { unsafe { if array.num_buffers() == 0 { return None; } NonNull::new(array.buffer(index) as _) .map(|ptr| Buffer::from_custom_allocation(ptr, len, owner)) -} +}} /// Export to the C Data Interface pub fn to_ffi(data: &ArrayData) -> Result<(FFI_ArrowArray, FFI_ArrowSchema)> { diff --git a/arrow-array/src/ffi_stream.rs b/arrow-array/src/ffi_stream.rs index 3d4e89e80b89..6ec916e8b2fa 100644 --- a/arrow-array/src/ffi_stream.rs +++ b/arrow-array/src/ffi_stream.rs @@ -101,7 +101,7 @@ pub struct FFI_ArrowArrayStream { unsafe impl Send for FFI_ArrowArrayStream {} // callback used to drop [FFI_ArrowArrayStream] when it is exported. -unsafe extern "C" fn release_stream(stream: *mut FFI_ArrowArrayStream) { +unsafe extern "C" fn release_stream(stream: *mut FFI_ArrowArrayStream) { unsafe { if stream.is_null() { return; } @@ -115,7 +115,7 @@ unsafe extern "C" fn release_stream(stream: *mut FFI_ArrowArrayStream) { drop(private_data); stream.release = None; -} +}} struct StreamPrivateData { batch_reader: Box, @@ -187,9 +187,9 @@ impl FFI_ArrowArrayStream { /// /// [move]: https://arrow.apache.org/docs/format/CDataInterface.html#moving-an-array /// [valid]: https://doc.rust-lang.org/std/ptr/index.html#safety - pub unsafe fn from_raw(raw_stream: *mut FFI_ArrowArrayStream) -> Self { + pub unsafe fn from_raw(raw_stream: *mut FFI_ArrowArrayStream) -> Self { unsafe { std::ptr::replace(raw_stream, Self::empty()) - } + }} /// Creates a new empty [FFI_ArrowArrayStream]. Used to import from the C Stream Interface. pub fn empty() -> Self { @@ -329,9 +329,9 @@ impl ArrowArrayStreamReader { /// # Safety /// /// See [`FFI_ArrowArrayStream::from_raw`] - pub unsafe fn from_raw(raw_stream: *mut FFI_ArrowArrayStream) -> Result { + pub unsafe fn from_raw(raw_stream: *mut FFI_ArrowArrayStream) -> Result { unsafe { Self::try_new(FFI_ArrowArrayStream::from_raw(raw_stream)) - } + }} /// Get the last error from `ArrowArrayStreamReader` fn get_stream_last_error(&mut self) -> Option { diff --git a/arrow-array/src/trusted_len.rs b/arrow-array/src/trusted_len.rs index 781cad38f7e9..31fd53243dc2 100644 --- a/arrow-array/src/trusted_len.rs +++ b/arrow-array/src/trusted_len.rs @@ -28,7 +28,7 @@ where T: ArrowNativeType, P: std::borrow::Borrow>, I: Iterator, -{ +{ unsafe { let (_, upper) = iterator.size_hint(); let upper = upper.expect("trusted_len_unzip requires an upper limit"); let len = upper * std::mem::size_of::(); @@ -55,7 +55,7 @@ where ); buffer.set_len(len); (null.into(), buffer.into()) -} +}} #[cfg(test)] mod tests { diff --git a/arrow-array/src/types.rs b/arrow-array/src/types.rs index 144de8dbecbd..c8a54a6bb630 100644 --- a/arrow-array/src/types.rs +++ b/arrow-array/src/types.rs @@ -1578,9 +1578,9 @@ pub(crate) mod bytes { } #[inline] - unsafe fn from_bytes_unchecked(b: &[u8]) -> &Self { + unsafe fn from_bytes_unchecked(b: &[u8]) -> &Self { unsafe { std::str::from_utf8_unchecked(b) - } + }} } } diff --git a/arrow-buffer/src/buffer/immutable.rs b/arrow-buffer/src/buffer/immutable.rs index 57f30edf1eb8..41b358ce0482 100644 --- a/arrow-buffer/src/buffer/immutable.rs +++ b/arrow-buffer/src/buffer/immutable.rs @@ -171,16 +171,16 @@ impl Buffer { ptr: NonNull, len: usize, owner: Arc, - ) -> Self { + ) -> Self { unsafe { Buffer::build_with_arguments(ptr, len, Deallocation::Custom(owner, len)) - } + }} /// Auxiliary method to create a new Buffer unsafe fn 
build_with_arguments( ptr: NonNull, len: usize, deallocation: Deallocation, - ) -> Self { + ) -> Self { unsafe { let bytes = Bytes::new(ptr, len, deallocation); let ptr = bytes.as_ptr(); Buffer { @@ -188,7 +188,7 @@ impl Buffer { data: Arc::new(bytes), length: len, } - } + }} /// Returns the number of bytes in the buffer #[inline] @@ -560,9 +560,9 @@ impl Buffer { #[inline] pub unsafe fn from_trusted_len_iter>( iterator: I, - ) -> Self { + ) -> Self { unsafe { MutableBuffer::from_trusted_len_iter(iterator).into() - } + }} /// Creates a [`Buffer`] from an [`Iterator`] with a trusted (upper) length or errors /// if any of the items of the iterator is an error. @@ -577,9 +577,9 @@ impl Buffer { I: Iterator>, >( iterator: I, - ) -> Result { + ) -> Result { unsafe { Ok(MutableBuffer::try_from_trusted_len_iter(iterator)?.into()) - } + }} } impl FromIterator for Buffer { diff --git a/arrow-buffer/src/buffer/mutable.rs b/arrow-buffer/src/buffer/mutable.rs index 63fdbf598bdb..d01c2fe6da1f 100644 --- a/arrow-buffer/src/buffer/mutable.rs +++ b/arrow-buffer/src/buffer/mutable.rs @@ -457,13 +457,13 @@ impl MutableBuffer { /// # Safety /// Caller must ensure that the capacity()-len()>=`size_of`() #[inline] - pub unsafe fn push_unchecked(&mut self, item: T) { + pub unsafe fn push_unchecked(&mut self, item: T) { unsafe { let additional = std::mem::size_of::(); let src = item.to_byte_slice().as_ptr(); let dst = self.data.as_ptr().add(self.len); std::ptr::copy_nonoverlapping(src, dst, additional); self.len += additional; - } + }} /// Extends the buffer by `additional` bytes equal to `0u8`, incrementing its capacity if needed. #[inline] @@ -628,7 +628,7 @@ impl MutableBuffer { #[inline] pub unsafe fn from_trusted_len_iter>( iterator: I, - ) -> Self { + ) -> Self { unsafe { let item_size = std::mem::size_of::(); let (_, upper) = iterator.size_hint(); let upper = upper.expect("from_trusted_len_iter requires an upper limit"); @@ -650,7 +650,7 @@ impl MutableBuffer { ); buffer.len = len; buffer - } + }} /// Creates a [`MutableBuffer`] from a boolean [`Iterator`] with a trusted (upper) length. /// # use arrow_buffer::buffer::MutableBuffer; @@ -690,7 +690,7 @@ impl MutableBuffer { I: Iterator>, >( iterator: I, - ) -> Result { + ) -> Result { unsafe { let item_size = std::mem::size_of::(); let (_, upper) = iterator.size_hint(); let upper = upper.expect("try_from_trusted_len_iter requires an upper limit"); @@ -708,17 +708,17 @@ impl MutableBuffer { } // try_from_trusted_len_iter is instantiated a lot, so we extract part of it into a less // generic method to reduce compile time - unsafe fn finalize_buffer(dst: *mut u8, buffer: &mut MutableBuffer, len: usize) { + unsafe fn finalize_buffer(dst: *mut u8, buffer: &mut MutableBuffer, len: usize) { unsafe { assert_eq!( dst.offset_from(buffer.data.as_ptr()) as usize, len, "Trusted iterator length was not accurately reported" ); buffer.len = len; - } + }} finalize_buffer(dst, &mut buffer, len); Ok(buffer) - } + }} } impl Default for MutableBuffer { diff --git a/arrow-buffer/src/util/bit_mask.rs b/arrow-buffer/src/util/bit_mask.rs index 6030cb4b1b8c..864641a6cde2 100644 --- a/arrow-buffer/src/util/bit_mask.rs +++ b/arrow-buffer/src/util/bit_mask.rs @@ -129,7 +129,7 @@ unsafe fn set_upto_64bits( /// # Safety /// The caller must ensure `data` has `offset..(offset + 8)` range, and `count <= 8`. 
#[inline] -unsafe fn read_bytes_to_u64(data: &[u8], offset: usize, count: usize) -> u64 { +unsafe fn read_bytes_to_u64(data: &[u8], offset: usize, count: usize) -> u64 { unsafe { debug_assert!(count <= 8); let mut tmp: u64 = 0; let src = data.as_ptr().add(offset); @@ -137,15 +137,15 @@ unsafe fn read_bytes_to_u64(data: &[u8], offset: usize, count: usize) -> u64 { std::ptr::copy_nonoverlapping(src, &mut tmp as *mut _ as *mut u8, count); } tmp -} +}} /// # Safety /// The caller must ensure `data` has `offset..(offset + 8)` range #[inline] -unsafe fn write_u64_bytes(data: &mut [u8], offset: usize, chunk: u64) { +unsafe fn write_u64_bytes(data: &mut [u8], offset: usize, chunk: u64) { unsafe { let ptr = data.as_mut_ptr().add(offset) as *mut u64; ptr.write_unaligned(chunk); -} +}} /// Similar to `write_u64_bytes`, but this method ORs the offset addressed `data` and `chunk` /// instead of overwriting @@ -153,11 +153,11 @@ unsafe fn write_u64_bytes(data: &mut [u8], offset: usize, chunk: u64) { /// # Safety /// The caller must ensure `data` has `offset..(offset + 8)` range #[inline] -unsafe fn or_write_u64_bytes(data: &mut [u8], offset: usize, chunk: u64) { +unsafe fn or_write_u64_bytes(data: &mut [u8], offset: usize, chunk: u64) { unsafe { let ptr = data.as_mut_ptr().add(offset); let chunk = chunk | (*ptr) as u64; (ptr as *mut u64).write_unaligned(chunk); -} +}} #[cfg(test)] mod tests { diff --git a/arrow-buffer/src/util/bit_util.rs b/arrow-buffer/src/util/bit_util.rs index c297321bdcf9..d9b5d22263aa 100644 --- a/arrow-buffer/src/util/bit_util.rs +++ b/arrow-buffer/src/util/bit_util.rs @@ -46,9 +46,9 @@ pub fn get_bit(data: &[u8], i: usize) -> bool { /// Note this doesn't do any bound checking, for performance reason. The caller is /// responsible to guarantee that `i` is within bounds. #[inline] -pub unsafe fn get_bit_raw(data: *const u8, i: usize) -> bool { +pub unsafe fn get_bit_raw(data: *const u8, i: usize) -> bool { unsafe { (*data.add(i / 8) & (1 << (i % 8))) != 0 -} +}} /// Sets bit at position `i` for `data` to 1 #[inline] @@ -63,9 +63,9 @@ pub fn set_bit(data: &mut [u8], i: usize) { /// Note this doesn't do any bound checking, for performance reason. The caller is /// responsible to guarantee that `i` is within bounds. #[inline] -pub unsafe fn set_bit_raw(data: *mut u8, i: usize) { +pub unsafe fn set_bit_raw(data: *mut u8, i: usize) { unsafe { *data.add(i / 8) |= 1 << (i % 8); -} +}} /// Sets bit at position `i` for `data` to 0 #[inline] @@ -80,9 +80,9 @@ pub fn unset_bit(data: &mut [u8], i: usize) { /// Note this doesn't do any bound checking, for performance reason. The caller is /// responsible to guarantee that `i` is within bounds. 
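A self-contained usage sketch of the raw bit helpers shown above (the two functions are copied locally so the snippet stands alone): set bit 10 of a zeroed buffer, then read it back; bit 10 lives in byte 1 at bit position 2.

unsafe fn set_bit_raw(data: *mut u8, i: usize) {
    unsafe { *data.add(i / 8) |= 1 << (i % 8) }
}

unsafe fn get_bit_raw(data: *const u8, i: usize) -> bool {
    unsafe { (*data.add(i / 8) & (1 << (i % 8))) != 0 }
}

fn main() {
    let mut data = [0u8; 4];
    // SAFETY: 10 / 8 == 1, which is within the 4-byte buffer.
    unsafe {
        set_bit_raw(data.as_mut_ptr(), 10);
        assert!(get_bit_raw(data.as_ptr(), 10));
    }
    assert_eq!(data, [0, 0b0000_0100, 0, 0]);
}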
#[inline] -pub unsafe fn unset_bit_raw(data: *mut u8, i: usize) { +pub unsafe fn unset_bit_raw(data: *mut u8, i: usize) { unsafe { *data.add(i / 8) &= !(1 << (i % 8)); -} +}} /// Returns the ceil of `value`/`divisor` #[inline] diff --git a/arrow-cast/src/cast/mod.rs b/arrow-cast/src/cast/mod.rs index e2bb3db85984..48923e671e0b 100644 --- a/arrow-cast/src/cast/mod.rs +++ b/arrow-cast/src/cast/mod.rs @@ -56,13 +56,13 @@ use std::sync::Arc; use crate::display::{ArrayFormatter, FormatOptions}; use crate::parse::{ - parse_interval_day_time, parse_interval_month_day_nano, parse_interval_year_month, - string_to_datetime, Parser, + Parser, parse_interval_day_time, parse_interval_month_day_nano, parse_interval_year_month, + string_to_datetime, }; use arrow_array::{builder::*, cast::*, temporal_conversions::*, timezone::Tz, types::*, *}; -use arrow_buffer::{i256, ArrowNativeType, OffsetBuffer}; -use arrow_data::transform::MutableArrayData; +use arrow_buffer::{ArrowNativeType, OffsetBuffer, i256}; use arrow_data::ArrayData; +use arrow_data::transform::MutableArrayData; use arrow_schema::*; use arrow_select::take::take; use num::cast::AsPrimitive; @@ -869,9 +869,9 @@ pub fn cast_with_options( array.nulls().cloned(), )?)) } - (_, List(ref to)) => cast_values_to_list::(array, to, cast_options), - (_, LargeList(ref to)) => cast_values_to_list::(array, to, cast_options), - (_, FixedSizeList(ref to, size)) if *size == 1 => { + (_, List(to)) => cast_values_to_list::(array, to, cast_options), + (_, LargeList(to)) => cast_values_to_list::(array, to, cast_options), + (_, FixedSizeList(to, size)) if *size == 1 => { cast_values_to_fixed_size_list(array, to, *size, cast_options) } (FixedSizeList(_, size), _) if *size == 1 => { @@ -2922,8 +2922,10 @@ mod tests { }; let result_unsafe = cast_with_options(&array, &DataType::Decimal32(2, 2), &options); - assert_eq!("Invalid argument error: 12345600 is too large to store in a Decimal32 of precision 2. Max is 99", - result_unsafe.unwrap_err().to_string()); + assert_eq!( + "Invalid argument error: 12345600 is too large to store in a Decimal32 of precision 2. Max is 99", + result_unsafe.unwrap_err().to_string() + ); } #[test] @@ -2956,8 +2958,10 @@ mod tests { }; let result_unsafe = cast_with_options(&array, &DataType::Decimal64(2, 2), &options); - assert_eq!("Invalid argument error: 12345600 is too large to store in a Decimal64 of precision 2. Max is 99", - result_unsafe.unwrap_err().to_string()); + assert_eq!( + "Invalid argument error: 12345600 is too large to store in a Decimal64 of precision 2. Max is 99", + result_unsafe.unwrap_err().to_string() + ); } #[test] @@ -2990,8 +2994,10 @@ mod tests { }; let result_unsafe = cast_with_options(&array, &DataType::Decimal128(2, 2), &options); - assert_eq!("Invalid argument error: 12345600 is too large to store in a Decimal128 of precision 2. Max is 99", - result_unsafe.unwrap_err().to_string()); + assert_eq!( + "Invalid argument error: 12345600 is too large to store in a Decimal128 of precision 2. Max is 99", + result_unsafe.unwrap_err().to_string() + ); } #[test] @@ -3118,8 +3124,10 @@ mod tests { format_options: FormatOptions::default(), }, ); - assert_eq!("Cast error: Cannot cast to Decimal128(38, 38). Overflowing on 170141183460469231731687303715884105727", - result.unwrap_err().to_string()); + assert_eq!( + "Cast error: Cannot cast to Decimal128(38, 38). 
Overflowing on 170141183460469231731687303715884105727", + result.unwrap_err().to_string() + ); } #[test] @@ -3138,8 +3146,10 @@ mod tests { format_options: FormatOptions::default(), }, ); - assert_eq!("Cast error: Cannot cast to Decimal256(76, 76). Overflowing on 170141183460469231731687303715884105727", - result.unwrap_err().to_string()); + assert_eq!( + "Cast error: Cannot cast to Decimal256(76, 76). Overflowing on 170141183460469231731687303715884105727", + result.unwrap_err().to_string() + ); } #[test] @@ -3215,8 +3225,10 @@ mod tests { format_options: FormatOptions::default(), }, ); - assert_eq!("Cast error: Cannot cast to Decimal128(38, 7). Overflowing on 170141183460469231731687303715884105727", - result.unwrap_err().to_string()); + assert_eq!( + "Cast error: Cannot cast to Decimal128(38, 7). Overflowing on 170141183460469231731687303715884105727", + result.unwrap_err().to_string() + ); } #[test] @@ -3234,8 +3246,10 @@ mod tests { format_options: FormatOptions::default(), }, ); - assert_eq!("Cast error: Cannot cast to Decimal256(76, 55). Overflowing on 170141183460469231731687303715884105727", - result.unwrap_err().to_string()); + assert_eq!( + "Cast error: Cannot cast to Decimal256(76, 55). Overflowing on 170141183460469231731687303715884105727", + result.unwrap_err().to_string() + ); } #[test] @@ -4297,9 +4311,11 @@ mod tests { match casted { Ok(_) => panic!("expected error"), Err(e) => { - assert!(e - .to_string() - .contains("Cast error: Cannot cast value 'invalid' to value of Boolean type")) + assert!( + e.to_string().contains( + "Cast error: Cannot cast value 'invalid' to value of Boolean type" + ) + ) } } } @@ -4877,7 +4893,10 @@ mod tests { format_options: FormatOptions::default(), }; let err = cast_with_options(array, &to_type, &options).unwrap_err(); - assert_eq!(err.to_string(), "Cast error: Cannot cast string '08:08:61.091323414' to value of Time32(Second) type"); + assert_eq!( + err.to_string(), + "Cast error: Cannot cast string '08:08:61.091323414' to value of Time32(Second) type" + ); } } @@ -4919,7 +4938,10 @@ mod tests { format_options: FormatOptions::default(), }; let err = cast_with_options(array, &to_type, &options).unwrap_err(); - assert_eq!(err.to_string(), "Cast error: Cannot cast string '08:08:61.091323414' to value of Time32(Millisecond) type"); + assert_eq!( + err.to_string(), + "Cast error: Cannot cast string '08:08:61.091323414' to value of Time32(Millisecond) type" + ); } } @@ -4953,7 +4975,10 @@ mod tests { format_options: FormatOptions::default(), }; let err = cast_with_options(array, &to_type, &options).unwrap_err(); - assert_eq!(err.to_string(), "Cast error: Cannot cast string 'Not a valid time' to value of Time64(Microsecond) type"); + assert_eq!( + err.to_string(), + "Cast error: Cannot cast string 'Not a valid time' to value of Time64(Microsecond) type" + ); } } @@ -4987,7 +5012,10 @@ mod tests { format_options: FormatOptions::default(), }; let err = cast_with_options(array, &to_type, &options).unwrap_err(); - assert_eq!(err.to_string(), "Cast error: Cannot cast string 'Not a valid time' to value of Time64(Nanosecond) type"); + assert_eq!( + err.to_string(), + "Cast error: Cannot cast string 'Not a valid time' to value of Time64(Nanosecond) type" + ); } } @@ -8369,8 +8397,10 @@ mod tests { }, ); assert!(res.is_err()); - assert!(format!("{res:?}") - .contains("Cannot cast to FixedSizeList(3): value at index 1 has length 2")); + assert!( + format!("{res:?}") + .contains("Cannot cast to FixedSizeList(3): value at index 1 has length 2") + ); // When 
safe=true (default), the cast will fill nulls for lists that are // too short and truncate lists that are too long. @@ -9584,16 +9614,20 @@ mod tests { format_options: FormatOptions::default(), }; let casted_err = cast_with_options(&array, &output_type, &option).unwrap_err(); - assert!(casted_err - .to_string() - .contains("Cannot cast string '4.4.5' to value of Decimal128(38, 10) type")); + assert!( + casted_err + .to_string() + .contains("Cannot cast string '4.4.5' to value of Decimal128(38, 10) type") + ); let str_array = StringArray::from(vec![". 0.123"]); let array = Arc::new(str_array) as ArrayRef; let casted_err = cast_with_options(&array, &output_type, &option).unwrap_err(); - assert!(casted_err - .to_string() - .contains("Cannot cast string '. 0.123' to value of Decimal128(38, 10) type")); + assert!( + casted_err + .to_string() + .contains("Cannot cast string '. 0.123' to value of Decimal128(38, 10) type") + ); } fn test_cast_string_to_decimal128_overflow(overflow_array: ArrayRef) { @@ -9637,7 +9671,10 @@ mod tests { format_options: FormatOptions::default(), }, ); - assert_eq!("Invalid argument error: 100000000000 is too large to store in a Decimal128 of precision 10. Max is 9999999999", err.unwrap_err().to_string()); + assert_eq!( + "Invalid argument error: 100000000000 is too large to store in a Decimal128 of precision 10. Max is 9999999999", + err.unwrap_err().to_string() + ); } #[test] @@ -9720,7 +9757,10 @@ mod tests { format_options: FormatOptions::default(), }, ); - assert_eq!("Invalid argument error: 100000000000 is too large to store in a Decimal256 of precision 10. Max is 9999999999", err.unwrap_err().to_string()); + assert_eq!( + "Invalid argument error: 100000000000 is too large to store in a Decimal256 of precision 10. Max is 9999999999", + err.unwrap_err().to_string() + ); } #[test] @@ -10125,7 +10165,10 @@ mod tests { format_options: FormatOptions::default(), }, ); - assert_eq!("Invalid argument error: 1234567000 is too large to store in a Decimal128 of precision 7. Max is 9999999", err.unwrap_err().to_string()); + assert_eq!( + "Invalid argument error: 1234567000 is too large to store in a Decimal128 of precision 7. Max is 9999999", + err.unwrap_err().to_string() + ); } #[test] @@ -10151,7 +10194,10 @@ mod tests { format_options: FormatOptions::default(), }, ); - assert_eq!("Invalid argument error: 1234567000 is too large to store in a Decimal256 of precision 7. Max is 9999999", err.unwrap_err().to_string()); + assert_eq!( + "Invalid argument error: 1234567000 is too large to store in a Decimal256 of precision 7. Max is 9999999", + err.unwrap_err().to_string() + ); } /// helper function to test casting from duration to interval @@ -11066,8 +11112,10 @@ mod tests { ..Default::default() }; let result = cast_with_options(&array, &output_type, &options); - assert_eq!(result.unwrap_err().to_string(), - "Invalid argument error: 123456789 is too large to store in a Decimal128 of precision 6. Max is 999999"); + assert_eq!( + result.unwrap_err().to_string(), + "Invalid argument error: 123456789 is too large to store in a Decimal128 of precision 6. Max is 999999" + ); } #[test] @@ -11112,8 +11160,10 @@ mod tests { ..Default::default() }; let result = cast_with_options(&array, &output_type, &options); - assert_eq!(result.unwrap_err().to_string(), - "Invalid argument error: 1234568 is too large to store in a Decimal128 of precision 6. 
Max is 999999"); + assert_eq!( + result.unwrap_err().to_string(), + "Invalid argument error: 1234568 is too large to store in a Decimal128 of precision 6. Max is 999999" + ); } #[test] @@ -11129,8 +11179,10 @@ mod tests { ..Default::default() }; let result = cast_with_options(&array, &output_type, &options); - assert_eq!(result.unwrap_err().to_string(), - "Invalid argument error: 1234567890 is too large to store in a Decimal128 of precision 6. Max is 999999"); + assert_eq!( + result.unwrap_err().to_string(), + "Invalid argument error: 1234567890 is too large to store in a Decimal128 of precision 6. Max is 999999" + ); } #[test] @@ -11146,8 +11198,10 @@ mod tests { ..Default::default() }; let result = cast_with_options(&array, &output_type, &options); - assert_eq!(result.unwrap_err().to_string(), - "Invalid argument error: 123456789 is too large to store in a Decimal256 of precision 6. Max is 999999"); + assert_eq!( + result.unwrap_err().to_string(), + "Invalid argument error: 123456789 is too large to store in a Decimal256 of precision 6. Max is 999999" + ); } #[test] diff --git a/arrow-cast/src/pretty.rs b/arrow-cast/src/pretty.rs index eee1bd959198..99130ee9b300 100644 --- a/arrow-cast/src/pretty.rs +++ b/arrow-cast/src/pretty.rs @@ -130,7 +130,7 @@ pub fn pretty_format_batches_with_schema( pub fn pretty_format_batches_with_options( results: &[RecordBatch], options: &FormatOptions, -) -> Result { +) -> Result, ArrowError> { create_table(None, results, options) } @@ -154,7 +154,7 @@ pub fn pretty_format_columns_with_options( col_name: &str, results: &[ArrayRef], options: &FormatOptions, -) -> Result { +) -> Result, ArrowError> { create_column(col_name, results, options) } @@ -265,7 +265,7 @@ mod tests { use arrow_buffer::{IntervalDayTime, IntervalMonthDayNano, ScalarBuffer}; use arrow_schema::*; - use crate::display::{array_value_to_string, DurationFormat}; + use crate::display::{DurationFormat, array_value_to_string}; use super::*; diff --git a/arrow-data/src/data.rs b/arrow-data/src/data.rs index fca19bc3aafe..0f62bce85562 100644 --- a/arrow-data/src/data.rs +++ b/arrow-data/src/data.rs @@ -278,7 +278,7 @@ impl ArrayData { offset: usize, buffers: Vec, child_data: Vec, - ) -> Self { + ) -> Self { unsafe { let mut skip_validation = UnsafeFlag::new(); // SAFETY: caller responsible for ensuring data is valid skip_validation.set(true); @@ -297,7 +297,7 @@ impl ArrayData { } .build() .unwrap() - } + }} /// Create a new ArrayData, validating that the provided buffers form a valid /// Arrow array of the specified data type. @@ -1998,9 +1998,9 @@ impl ArrayDataBuilder { /// /// The same caveats as [`ArrayData::new_unchecked`] /// apply. 
- pub unsafe fn build_unchecked(self) -> ArrayData { + pub unsafe fn build_unchecked(self) -> ArrayData { unsafe { self.skip_validation(true).build().unwrap() - } + }} /// Creates an `ArrayData`, consuming `self` /// @@ -2097,10 +2097,10 @@ impl ArrayDataBuilder { /// /// If validation is skipped, the buffers must form a valid Arrow array, /// otherwise undefined behavior will result - pub unsafe fn skip_validation(mut self, skip_validation: bool) -> Self { + pub unsafe fn skip_validation(mut self, skip_validation: bool) -> Self { unsafe { self.skip_validation.set(skip_validation); self - } + }} } impl From for ArrayDataBuilder { diff --git a/arrow-data/src/ffi.rs b/arrow-data/src/ffi.rs index 3b446ef255fe..9da9c23559f2 100644 --- a/arrow-data/src/ffi.rs +++ b/arrow-data/src/ffi.rs @@ -67,7 +67,7 @@ unsafe impl Send for FFI_ArrowArray {} unsafe impl Sync for FFI_ArrowArray {} // callback used to drop [FFI_ArrowArray] when it is exported -unsafe extern "C" fn release_array(array: *mut FFI_ArrowArray) { +unsafe extern "C" fn release_array(array: *mut FFI_ArrowArray) { unsafe { if array.is_null() { return; } @@ -83,7 +83,7 @@ unsafe extern "C" fn release_array(array: *mut FFI_ArrowArray) { } array.release = None; -} +}} /// Aligns the provided `nulls` to the provided `data_offset` /// @@ -221,9 +221,9 @@ impl FFI_ArrowArray { /// /// [move]: https://arrow.apache.org/docs/format/CDataInterface.html#moving-an-array /// [valid]: https://doc.rust-lang.org/std/ptr/index.html#safety - pub unsafe fn from_raw(array: *mut FFI_ArrowArray) -> Self { + pub unsafe fn from_raw(array: *mut FFI_ArrowArray) -> Self { unsafe { std::ptr::replace(array, Self::empty()) - } + }} /// create an empty `FFI_ArrowArray`, which can be used to import data into pub fn empty() -> Self { diff --git a/arrow-data/src/transform/utils.rs b/arrow-data/src/transform/utils.rs index 5407f68e0d0c..67341952d085 100644 --- a/arrow-data/src/transform/utils.rs +++ b/arrow-data/src/transform/utils.rs @@ -45,7 +45,7 @@ pub(super) fn extend_offsets( } #[inline] -pub(super) unsafe fn get_last_offset(offset_buffer: &MutableBuffer) -> T { +pub(super) unsafe fn get_last_offset(offset_buffer: &MutableBuffer) -> T { unsafe { // JUSTIFICATION // Benefit // 20% performance improvement extend of variable sized arrays (see bench `mutable_array`) @@ -55,7 +55,7 @@ pub(super) unsafe fn get_last_offset(offset_buffer: &Mutable let (prefix, offsets, suffix) = offset_buffer.as_slice().align_to::(); debug_assert!(prefix.is_empty() && suffix.is_empty()); *offsets.get_unchecked(offsets.len() - 1) -} +}} #[cfg(test)] mod tests { diff --git a/arrow-flight/src/client.rs b/arrow-flight/src/client.rs index 9b4c10e9a093..dac086271cb7 100644 --- a/arrow-flight/src/client.rs +++ b/arrow-flight/src/client.rs @@ -16,19 +16,19 @@ // under the License. 
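The `std::ptr::replace(array, Self::empty())` in `from_raw` above implements the C data interface's move semantics: importing reads the exported struct out of the caller's storage and leaves an empty value behind, so the exporting side no longer owns anything to release. A small stand-alone sketch of that idiom (`Handle` is a made-up stand-in for `FFI_ArrowArray`):

#[derive(Debug, PartialEq)]
struct Handle(Option<u32>);

impl Handle {
    fn empty() -> Self {
        Handle(None)
    }
}

/// # Safety
/// `raw` must be valid for reads and writes and properly aligned.
unsafe fn take_ownership(raw: *mut Handle) -> Handle {
    // Move the value out and leave an empty one in its place.
    unsafe { std::ptr::replace(raw, Handle::empty()) }
}

fn main() {
    let mut exported = Handle(Some(42));
    let imported = unsafe { take_ownership(&mut exported) };
    assert_eq!(imported, Handle(Some(42)));
    assert_eq!(exported, Handle::empty()); // the source is left empty
}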
use crate::{ + Action, ActionType, Criteria, Empty, FlightData, FlightDescriptor, FlightEndpoint, FlightInfo, + HandshakeRequest, PollInfo, PutResult, Ticket, decode::FlightRecordBatchStream, flight_service_client::FlightServiceClient, - gen::{CancelFlightInfoRequest, CancelFlightInfoResult, RenewFlightEndpointRequest}, + r#gen::{CancelFlightInfoRequest, CancelFlightInfoResult, RenewFlightEndpointRequest}, trailers::extract_lazy_trailers, - Action, ActionType, Criteria, Empty, FlightData, FlightDescriptor, FlightEndpoint, FlightInfo, - HandshakeRequest, PollInfo, PutResult, Ticket, }; use arrow_schema::Schema; use bytes::Bytes; use futures::{ + Stream, StreamExt, TryStreamExt, future::ready, stream::{self, BoxStream}, - Stream, StreamExt, TryStreamExt, }; use prost::Message; use tonic::{metadata::MetadataMap, transport::Channel}; diff --git a/arrow-flight/src/lib.rs b/arrow-flight/src/lib.rs index 8043d5b4a72b..c527b57d16b7 100644 --- a/arrow-flight/src/lib.rs +++ b/arrow-flight/src/lib.rs @@ -51,8 +51,8 @@ use arrow_ipc::{convert, writer, writer::EncodedData, writer::IpcWriteOptions}; use arrow_schema::{ArrowError, Schema}; use arrow_ipc::convert::try_schema_from_ipc_buffer; -use base64::prelude::BASE64_STANDARD; use base64::Engine; +use base64::prelude::BASE64_STANDARD; use bytes::Bytes; use prost_types::Timestamp; use std::{fmt, ops::Deref}; @@ -60,7 +60,7 @@ use std::{fmt, ops::Deref}; type ArrowResult = std::result::Result; #[allow(clippy::all)] -mod gen { +mod r#gen { // Since this file is auto-generated, we suppress all warnings #![allow(missing_docs)] include!("arrow.flight.protocol.rs"); @@ -68,22 +68,22 @@ mod gen { /// Defines a `Flight` for generation or retrieval. pub mod flight_descriptor { - use super::gen; - pub use gen::flight_descriptor::DescriptorType; + use super::r#gen; + pub use r#gen::flight_descriptor::DescriptorType; } /// Low Level [tonic] [`FlightServiceClient`](gen::flight_service_client::FlightServiceClient). pub mod flight_service_client { - use super::gen; - pub use gen::flight_service_client::FlightServiceClient; + use super::r#gen; + pub use r#gen::flight_service_client::FlightServiceClient; } /// Low Level [tonic] [`FlightServiceServer`](gen::flight_service_server::FlightServiceServer) /// and [`FlightService`](gen::flight_service_server::FlightService). 
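The `r#gen` renames above exist because `gen` is a reserved keyword in the 2024 edition (set aside for `gen` blocks), so a module named `gen` and every path through it must now use the raw-identifier spelling. A toy sketch of the same shape (the inner items here are made-up stand-ins for the generated flight protocol code):

mod r#gen {
    pub mod flight_service_client {
        // Stand-in for the tonic-generated client type.
        pub struct FlightServiceClient;
    }
}

// Re-exporting keeps the raw identifier out of the crate's public paths.
pub use r#gen::flight_service_client::FlightServiceClient;

fn main() {
    let _client = FlightServiceClient;
}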
pub mod flight_service_server { - use super::gen; - pub use gen::flight_service_server::FlightService; - pub use gen::flight_service_server::FlightServiceServer; + use super::r#gen; + pub use r#gen::flight_service_server::FlightService; + pub use r#gen::flight_service_server::FlightServiceServer; } /// Mid Level [`FlightClient`] @@ -101,27 +101,27 @@ pub mod encode; /// Common error types pub mod error; -pub use gen::Action; -pub use gen::ActionType; -pub use gen::BasicAuth; -pub use gen::CancelFlightInfoRequest; -pub use gen::CancelFlightInfoResult; -pub use gen::CancelStatus; -pub use gen::Criteria; -pub use gen::Empty; -pub use gen::FlightData; -pub use gen::FlightDescriptor; -pub use gen::FlightEndpoint; -pub use gen::FlightInfo; -pub use gen::HandshakeRequest; -pub use gen::HandshakeResponse; -pub use gen::Location; -pub use gen::PollInfo; -pub use gen::PutResult; -pub use gen::RenewFlightEndpointRequest; -pub use gen::Result; -pub use gen::SchemaResult; -pub use gen::Ticket; +pub use r#gen::Action; +pub use r#gen::ActionType; +pub use r#gen::BasicAuth; +pub use r#gen::CancelFlightInfoRequest; +pub use r#gen::CancelFlightInfoResult; +pub use r#gen::CancelStatus; +pub use r#gen::Criteria; +pub use r#gen::Empty; +pub use r#gen::FlightData; +pub use r#gen::FlightDescriptor; +pub use r#gen::FlightEndpoint; +pub use r#gen::FlightInfo; +pub use r#gen::HandshakeRequest; +pub use r#gen::HandshakeResponse; +pub use r#gen::Location; +pub use r#gen::PollInfo; +pub use r#gen::PutResult; +pub use r#gen::RenewFlightEndpointRequest; +pub use r#gen::Result; +pub use r#gen::SchemaResult; +pub use r#gen::Ticket; /// Helper to extract HTTP/gRPC trailers from a tonic stream. mod trailers; diff --git a/arrow-integration-test/src/field.rs b/arrow-integration-test/src/field.rs index 4b896ed391be..8b0ca264e02e 100644 --- a/arrow-integration-test/src/field.rs +++ b/arrow-integration-test/src/field.rs @@ -142,7 +142,7 @@ pub fn field_from_json(json: &serde_json::Value) -> Result { Some(_) => { return Err(ArrowError::ParseError( "Field 'children' must be an array".to_string(), - )) + )); } None => { return Err(ArrowError::ParseError( @@ -158,7 +158,7 @@ pub fn field_from_json(json: &serde_json::Value) -> Result { Some(_) => { return Err(ArrowError::ParseError( "Field 'children' must be an array".to_string(), - )) + )); } None => { return Err(ArrowError::ParseError( @@ -177,15 +177,15 @@ pub fn field_from_json(json: &serde_json::Value) -> Result { } t => { return Err(ArrowError::ParseError(format!( - "Map children should be a struct with 2 fields, found {t:?}" - ))) + "Map children should be a struct with 2 fields, found {t:?}" + ))); } } } Some(_) => { return Err(ArrowError::ParseError( "Field 'children' must be an array with 1 element".to_string(), - )) + )); } None => { return Err(ArrowError::ParseError( @@ -207,7 +207,7 @@ pub fn field_from_json(json: &serde_json::Value) -> Result { Some(_) => { return Err(ArrowError::ParseError( "Field 'children' must be an array".to_string(), - )) + )); } None => { return Err(ArrowError::ParseError( @@ -275,7 +275,7 @@ pub fn field_to_json(field: &Field) -> serde_json::Value { }; match field.data_type() { - DataType::Dictionary(ref index_type, ref value_type) => { + DataType::Dictionary(index_type, value_type) => { #[allow(deprecated)] let dict_id = field.dict_id().unwrap(); serde_json::json!({ diff --git a/arrow-integration-testing/src/lib.rs b/arrow-integration-testing/src/lib.rs index 10512a00eb9d..cf572d769df5 100644 --- a/arrow-integration-testing/src/lib.rs +++ 
b/arrow-integration-testing/src/lib.rs @@ -25,12 +25,12 @@ use serde_json::Value; use arrow::array::{Array, StructArray}; use arrow::datatypes::{DataType, Field, Fields, Schema}; use arrow::error::{ArrowError, Result}; -use arrow::ffi::{from_ffi_and_data_type, FFI_ArrowArray, FFI_ArrowSchema}; +use arrow::ffi::{FFI_ArrowArray, FFI_ArrowSchema, from_ffi_and_data_type}; use arrow::record_batch::RecordBatch; use arrow::util::test_util::arrow_test_data; use arrow_integration_test::*; use std::collections::HashMap; -use std::ffi::{c_char, c_int, CStr, CString}; +use std::ffi::{CStr, CString, c_char, c_int}; use std::fs::File; use std::io::BufReader; use std::iter::zip; @@ -261,7 +261,7 @@ fn result_to_c_error(result: &std::result::Result /// # Safety /// /// The pointer is assumed to have been obtained using CString::into_raw. -#[no_mangle] +#[unsafe(no_mangle)] pub unsafe extern "C" fn arrow_rs_free_error(c_error: *mut c_char) { if !c_error.is_null() { drop(unsafe { CString::from_raw(c_error) }); @@ -269,7 +269,7 @@ pub unsafe extern "C" fn arrow_rs_free_error(c_error: *mut c_char) { } /// A C-ABI for exporting an Arrow schema from a JSON file -#[no_mangle] +#[unsafe(no_mangle)] pub extern "C" fn arrow_rs_cdata_integration_export_schema_from_json( c_json_name: *const c_char, out: *mut FFI_ArrowSchema, @@ -279,7 +279,7 @@ pub extern "C" fn arrow_rs_cdata_integration_export_schema_from_json( } /// A C-ABI to compare an Arrow schema against a JSON file -#[no_mangle] +#[unsafe(no_mangle)] pub extern "C" fn arrow_rs_cdata_integration_import_schema_and_compare_to_json( c_json_name: *const c_char, c_schema: *mut FFI_ArrowSchema, @@ -289,7 +289,7 @@ pub extern "C" fn arrow_rs_cdata_integration_import_schema_and_compare_to_json( } /// A C-ABI for exporting a RecordBatch from a JSON file -#[no_mangle] +#[unsafe(no_mangle)] pub extern "C" fn arrow_rs_cdata_integration_export_batch_from_json( c_json_name: *const c_char, batch_num: c_int, @@ -300,7 +300,7 @@ pub extern "C" fn arrow_rs_cdata_integration_export_batch_from_json( } /// A C-ABI to compare a RecordBatch against a JSON file -#[no_mangle] +#[unsafe(no_mangle)] pub extern "C" fn arrow_rs_cdata_integration_import_batch_and_compare_to_json( c_json_name: *const c_char, batch_num: c_int, diff --git a/arrow-ipc/benches/ipc_reader.rs b/arrow-ipc/benches/ipc_reader.rs index ab77449eeb7d..fcde18ba00cf 100644 --- a/arrow-ipc/benches/ipc_reader.rs +++ b/arrow-ipc/benches/ipc_reader.rs @@ -239,10 +239,10 @@ impl IPCBufferDecoder { } } - unsafe fn with_skip_validation(mut self, skip_validation: bool) -> Self { + unsafe fn with_skip_validation(mut self, skip_validation: bool) -> Self { unsafe { self.decoder = self.decoder.with_skip_validation(skip_validation); self - } + }} fn num_batches(&self) -> usize { self.batches.len() diff --git a/arrow-ipc/src/convert.rs b/arrow-ipc/src/convert.rs index af0bdb1df3eb..24beb1f83adc 100644 --- a/arrow-ipc/src/convert.rs +++ b/arrow-ipc/src/convert.rs @@ -29,7 +29,7 @@ use std::fmt::{Debug, Formatter}; use std::sync::Arc; use crate::writer::DictionaryTracker; -use crate::{KeyValue, Message, CONTINUATION_MARKER}; +use crate::{CONTINUATION_MARKER, KeyValue, Message}; use DataType::*; /// Low level Arrow [Schema] to IPC bytes converter @@ -279,9 +279,9 @@ pub fn try_schema_from_ipc_buffer(buffer: &[u8]) -> Result { if buffer.len() < len as usize { let actual_len = buffer.len(); - return Err(ArrowError::ParseError( - format!("The buffer length ({actual_len}) is less than the encapsulated message's reported length ({len})") - 
)); + return Err(ArrowError::ParseError(format!( + "The buffer length ({actual_len}) is less than the encapsulated message's reported length ({len})" + ))); } let msg = crate::root_as_message(buffer) @@ -760,7 +760,7 @@ pub(crate) fn get_fb_field_type<'a>( children: Some(fbb.create_vector(&empty_fields[..])), } } - List(ref list_type) => { + List(list_type) => { let child = build_field(fbb, dictionary_tracker, list_type); FBFieldType { type_type: crate::Type::List, @@ -769,7 +769,7 @@ pub(crate) fn get_fb_field_type<'a>( } } ListView(_) | LargeListView(_) => unimplemented!("ListView/LargeListView not implemented"), - LargeList(ref list_type) => { + LargeList(list_type) => { let child = build_field(fbb, dictionary_tracker, list_type); FBFieldType { type_type: crate::Type::LargeList, @@ -777,7 +777,7 @@ pub(crate) fn get_fb_field_type<'a>( children: Some(fbb.create_vector(&[child])), } } - FixedSizeList(ref list_type, len) => { + FixedSizeList(list_type, len) => { let child = build_field(fbb, dictionary_tracker, list_type); let mut builder = crate::FixedSizeListBuilder::new(fbb); builder.add_listSize(*len); diff --git a/arrow-ipc/src/gen/File.rs b/arrow-ipc/src/gen/File.rs index 427cf75de096..cbbef797af87 100644 --- a/arrow-ipc/src/gen/File.rs +++ b/arrow-ipc/src/gen/File.rs @@ -18,7 +18,7 @@ #![allow(dead_code)] #![allow(unused_imports)] -use crate::gen::Schema::*; +use crate::r#gen::Schema::*; use flatbuffers::EndianScalar; use std::{cmp::Ordering, mem}; // automatically generated by the FlatBuffers compiler, do not modify @@ -48,24 +48,24 @@ impl flatbuffers::SimpleToVerifyInSlice for Block {} impl<'a> flatbuffers::Follow<'a> for Block { type Inner = &'a Block; #[inline] - unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { unsafe { <&'a Block>::follow(buf, loc) - } + }} } impl<'a> flatbuffers::Follow<'a> for &'a Block { type Inner = &'a Block; #[inline] - unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { unsafe { flatbuffers::follow_cast_ref::(buf, loc) - } + }} } impl<'b> flatbuffers::Push for Block { type Output = Block; #[inline] - unsafe fn push(&self, dst: &mut [u8], _written_len: usize) { + unsafe fn push(&self, dst: &mut [u8], _written_len: usize) { unsafe { let src = ::core::slice::from_raw_parts(self as *const Block as *const u8, Self::size()); dst.copy_from_slice(src); - } + }} #[inline] fn alignment() -> flatbuffers::PushAlignment { flatbuffers::PushAlignment::new(8) @@ -198,11 +198,11 @@ pub struct Footer<'a> { impl<'a> flatbuffers::Follow<'a> for Footer<'a> { type Inner = Footer<'a>; #[inline] - unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { + unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner { unsafe { Self { _tab: flatbuffers::Table::new(buf, loc), } - } + }} } impl<'a> Footer<'a> { @@ -469,16 +469,16 @@ pub fn size_prefixed_root_as_footer_with_opts<'b, 'o>( /// Assumes, without verification, that a buffer of bytes contains a Footer and returns it. /// # Safety /// Callers must trust the given bytes do indeed contain a valid `Footer`. -pub unsafe fn root_as_footer_unchecked(buf: &[u8]) -> Footer { +pub unsafe fn root_as_footer_unchecked(buf: &[u8]) -> Footer { unsafe { flatbuffers::root_unchecked::
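One more edition-2024 change worth noting from the arrow-integration-testing hunks above: `no_mangle` (like `export_name` and `link_section`) is now an unsafe attribute, because exporting an unmangled symbol can clash with other definitions in the final binary, so it has to be written `#[unsafe(no_mangle)]`. A minimal sketch (the exported function name here is invented for illustration):

#[unsafe(no_mangle)]
pub extern "C" fn arrow_rs_example_status() -> i32 {
    0
}

fn main() {
    assert_eq!(arrow_rs_example_status(), 0);
}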