diff --git a/bindgen-tests/tests/expectations/tests/nested_flexarray.rs b/bindgen-tests/tests/expectations/tests/nested_flexarray.rs
new file mode 100644
index 0000000000..a6f73eaf44
--- /dev/null
+++ b/bindgen-tests/tests/expectations/tests/nested_flexarray.rs
@@ -0,0 +1,271 @@
+#![allow(dead_code, non_snake_case, non_camel_case_types, non_upper_case_globals)]
+#![cfg(feature = "nightly")]
+#![feature(ptr_metadata, layout_for_ptr)]
+#[repr(C)]
+#[derive(Debug, Default)]
+pub struct Field<FAM: ?Sized = [::std::os::raw::c_int; 0]> {
+    pub count: ::std::os::raw::c_int,
+    pub data: FAM,
+}
+#[allow(clippy::unnecessary_operation, clippy::identity_op)]
+const _: () = {
+    ["Size of Field"][::std::mem::size_of::<Field>() - 4usize];
+    ["Alignment of Field"][::std::mem::align_of::<Field>() - 4usize];
+    ["Offset of field: Field::count"][::std::mem::offset_of!(Field, count) - 0usize];
+    ["Offset of field: Field::data"][::std::mem::offset_of!(Field, data) - 4usize];
+};
+impl Field<[::std::os::raw::c_int]> {
+    pub fn layout(len: usize) -> ::std::alloc::Layout {
+        unsafe {
+            let p: *const Self = ::std::ptr::from_raw_parts(
+                ::std::ptr::null::<()>(),
+                len,
+            );
+            ::std::alloc::Layout::for_value_raw(p)
+        }
+    }
+    #[inline]
+    pub fn fixed(&self) -> (&Field<[::std::os::raw::c_int; 0]>, usize) {
+        unsafe {
+            let (ptr, len) = (self as *const Self).to_raw_parts();
+            (&*(ptr as *const Field<[::std::os::raw::c_int; 0]>), len)
+        }
+    }
+    #[inline]
+    pub fn fixed_mut(&mut self) -> (&mut Field<[::std::os::raw::c_int; 0]>, usize) {
+        unsafe {
+            let (ptr, len) = (self as *mut Self).to_raw_parts();
+            (&mut *(ptr as *mut Field<[::std::os::raw::c_int; 0]>), len)
+        }
+    }
+}
+impl Field<[::std::os::raw::c_int; 0]> {
+    /// Convert a sized prefix to an unsized structure with the given length.
+    ///
+    /// SAFETY: Underlying storage is initialized up to at least `len` elements.
+    pub unsafe fn flex_ref(&self, len: usize) -> &Field<[::std::os::raw::c_int]> {
+        Self::flex_ptr(self, len)
+    }
+    /// Convert a mutable sized prefix to an unsized structure with the given length.
+    ///
+    /// SAFETY: Underlying storage is initialized up to at least `len` elements.
+    #[inline]
+    pub unsafe fn flex_ref_mut(
+        &mut self,
+        len: usize,
+    ) -> &mut Field<[::std::os::raw::c_int]> {
+        Self::flex_ptr_mut(self, len).assume_init()
+    }
+    /// Construct DST variant from a pointer and a size.
+    ///
+    /// NOTE: lifetime of returned reference is not tied to any underlying storage.
+    /// SAFETY: `ptr` is valid. Underlying storage is fully initialized up to at least `len` elements.
+    #[inline]
+    pub unsafe fn flex_ptr<'unbounded>(
+        ptr: *const Self,
+        len: usize,
+    ) -> &'unbounded Field<[::std::os::raw::c_int]> {
+        &*::std::ptr::from_raw_parts(ptr as *const (), len)
+    }
+    /// Construct mutable DST variant from a pointer and a
+    /// size. The returned `&mut` reference is initialized
+    /// pointing to memory referenced by `ptr`, but there's
+    /// no requirement that that memory be initialized.
+    ///
+    /// NOTE: lifetime of returned reference is not tied to any underlying storage.
+    /// SAFETY: `ptr` is valid. Underlying storage has space for at least `len` elements.
+    #[inline]
+    pub unsafe fn flex_ptr_mut<'unbounded>(
+        ptr: *mut Self,
+        len: usize,
+    ) -> ::std::mem::MaybeUninit<&'unbounded mut Field<[::std::os::raw::c_int]>> {
+        let mut uninit = ::std::mem::MaybeUninit::<
+            &mut Field<[::std::os::raw::c_int]>,
+        >::uninit();
+        (uninit.as_mut_ptr() as *mut *mut Field<[::std::os::raw::c_int]>)
+            .write(::std::ptr::from_raw_parts_mut(ptr as *mut (), len));
+        uninit
+    }
+}
+#[repr(C)]
+#[derive(Debug, Default)]
+pub struct Name<FAM: ?Sized = [::std::os::raw::c_int; 0]> {
+    pub id: ::std::os::raw::c_int,
+    pub field: Field<FAM>,
+}
+#[allow(clippy::unnecessary_operation, clippy::identity_op)]
+const _: () = {
+    ["Size of Name"][::std::mem::size_of::<Name>() - 8usize];
+    ["Alignment of Name"][::std::mem::align_of::<Name>() - 4usize];
+    ["Offset of field: Name::id"][::std::mem::offset_of!(Name, id) - 0usize];
+    ["Offset of field: Name::field"][::std::mem::offset_of!(Name, field) - 4usize];
+};
+impl Name<[::std::os::raw::c_int]> {
+    pub fn layout(len: usize) -> ::std::alloc::Layout {
+        unsafe {
+            let p: *const Self = ::std::ptr::from_raw_parts(
+                ::std::ptr::null::<()>(),
+                len,
+            );
+            ::std::alloc::Layout::for_value_raw(p)
+        }
+    }
+    #[inline]
+    pub fn fixed(&self) -> (&Name<[::std::os::raw::c_int; 0]>, usize) {
+        unsafe {
+            let (ptr, len) = (self as *const Self).to_raw_parts();
+            (&*(ptr as *const Name<[::std::os::raw::c_int; 0]>), len)
+        }
+    }
+    #[inline]
+    pub fn fixed_mut(&mut self) -> (&mut Name<[::std::os::raw::c_int; 0]>, usize) {
+        unsafe {
+            let (ptr, len) = (self as *mut Self).to_raw_parts();
+            (&mut *(ptr as *mut Name<[::std::os::raw::c_int; 0]>), len)
+        }
+    }
+}
+impl Name<[::std::os::raw::c_int; 0]> {
+    /// Convert a sized prefix to an unsized structure with the given length.
+    ///
+    /// SAFETY: Underlying storage is initialized up to at least `len` elements.
+    pub unsafe fn flex_ref(&self, len: usize) -> &Name<[::std::os::raw::c_int]> {
+        Self::flex_ptr(self, len)
+    }
+    /// Convert a mutable sized prefix to an unsized structure with the given length.
+    ///
+    /// SAFETY: Underlying storage is initialized up to at least `len` elements.
+    #[inline]
+    pub unsafe fn flex_ref_mut(
+        &mut self,
+        len: usize,
+    ) -> &mut Name<[::std::os::raw::c_int]> {
+        Self::flex_ptr_mut(self, len).assume_init()
+    }
+    /// Construct DST variant from a pointer and a size.
+    ///
+    /// NOTE: lifetime of returned reference is not tied to any underlying storage.
+    /// SAFETY: `ptr` is valid. Underlying storage is fully initialized up to at least `len` elements.
+    #[inline]
+    pub unsafe fn flex_ptr<'unbounded>(
+        ptr: *const Self,
+        len: usize,
+    ) -> &'unbounded Name<[::std::os::raw::c_int]> {
+        &*::std::ptr::from_raw_parts(ptr as *const (), len)
+    }
+    /// Construct mutable DST variant from a pointer and a
+    /// size. The returned `&mut` reference is initialized
+    /// pointing to memory referenced by `ptr`, but there's
+    /// no requirement that that memory be initialized.
+    ///
+    /// NOTE: lifetime of returned reference is not tied to any underlying storage.
+    /// SAFETY: `ptr` is valid. Underlying storage has space for at least `len` elements.
+    #[inline]
+    pub unsafe fn flex_ptr_mut<'unbounded>(
+        ptr: *mut Self,
+        len: usize,
+    ) -> ::std::mem::MaybeUninit<&'unbounded mut Name<[::std::os::raw::c_int]>> {
+        let mut uninit = ::std::mem::MaybeUninit::<
+            &mut Name<[::std::os::raw::c_int]>,
+        >::uninit();
+        (uninit.as_mut_ptr() as *mut *mut Name<[::std::os::raw::c_int]>)
+            .write(::std::ptr::from_raw_parts_mut(ptr as *mut (), len));
+        uninit
+    }
+}
+#[repr(C, packed)]
+pub struct NamePacked<FAM: ?Sized = [::std::os::raw::c_int; 0]> {
+    pub id: ::std::os::raw::c_int,
+    pub field: ::std::mem::ManuallyDrop<Field<FAM>>,
+}
+#[allow(clippy::unnecessary_operation, clippy::identity_op)]
+const _: () = {
+    ["Size of NamePacked"][::std::mem::size_of::<NamePacked>() - 8usize];
+    ["Alignment of NamePacked"][::std::mem::align_of::<NamePacked>() - 1usize];
+    ["Offset of field: NamePacked::id"][::std::mem::offset_of!(NamePacked, id) - 0usize];
+    [
+        "Offset of field: NamePacked::field",
+    ][::std::mem::offset_of!(NamePacked, field) - 4usize];
+};
+impl NamePacked<[::std::os::raw::c_int]> {
+    pub fn layout(len: usize) -> ::std::alloc::Layout {
+        unsafe {
+            let p: *const Self = ::std::ptr::from_raw_parts(
+                ::std::ptr::null::<()>(),
+                len,
+            );
+            ::std::alloc::Layout::for_value_raw(p)
+        }
+    }
+    #[inline]
+    pub fn fixed(&self) -> (&NamePacked<[::std::os::raw::c_int; 0]>, usize) {
+        unsafe {
+            let (ptr, len) = (self as *const Self).to_raw_parts();
+            (&*(ptr as *const NamePacked<[::std::os::raw::c_int; 0]>), len)
+        }
+    }
+    #[inline]
+    pub fn fixed_mut(&mut self) -> (&mut NamePacked<[::std::os::raw::c_int; 0]>, usize) {
+        unsafe {
+            let (ptr, len) = (self as *mut Self).to_raw_parts();
+            (&mut *(ptr as *mut NamePacked<[::std::os::raw::c_int; 0]>), len)
+        }
+    }
+}
+impl NamePacked<[::std::os::raw::c_int; 0]> {
+    /// Convert a sized prefix to an unsized structure with the given length.
+    ///
+    /// SAFETY: Underlying storage is initialized up to at least `len` elements.
+    pub unsafe fn flex_ref(&self, len: usize) -> &NamePacked<[::std::os::raw::c_int]> {
+        Self::flex_ptr(self, len)
+    }
+    /// Convert a mutable sized prefix to an unsized structure with the given length.
+    ///
+    /// SAFETY: Underlying storage is initialized up to at least `len` elements.
+    #[inline]
+    pub unsafe fn flex_ref_mut(
+        &mut self,
+        len: usize,
+    ) -> &mut NamePacked<[::std::os::raw::c_int]> {
+        Self::flex_ptr_mut(self, len).assume_init()
+    }
+    /// Construct DST variant from a pointer and a size.
+    ///
+    /// NOTE: lifetime of returned reference is not tied to any underlying storage.
+    /// SAFETY: `ptr` is valid. Underlying storage is fully initialized up to at least `len` elements.
+    #[inline]
+    pub unsafe fn flex_ptr<'unbounded>(
+        ptr: *const Self,
+        len: usize,
+    ) -> &'unbounded NamePacked<[::std::os::raw::c_int]> {
+        &*::std::ptr::from_raw_parts(ptr as *const (), len)
+    }
+    /// Construct mutable DST variant from a pointer and a
+    /// size. The returned `&mut` reference is initialized
+    /// pointing to memory referenced by `ptr`, but there's
+    /// no requirement that that memory be initialized.
+    ///
+    /// NOTE: lifetime of returned reference is not tied to any underlying storage.
+    /// SAFETY: `ptr` is valid. Underlying storage has space for at least `len` elements.
+    #[inline]
+    pub unsafe fn flex_ptr_mut<'unbounded>(
+        ptr: *mut Self,
+        len: usize,
+    ) -> ::std::mem::MaybeUninit<&'unbounded mut NamePacked<[::std::os::raw::c_int]>> {
+        let mut uninit = ::std::mem::MaybeUninit::<
+            &mut NamePacked<[::std::os::raw::c_int]>,
+        >::uninit();
+        (uninit.as_mut_ptr() as *mut *mut NamePacked<[::std::os::raw::c_int]>)
+            .write(::std::ptr::from_raw_parts_mut(ptr as *mut (), len));
+        uninit
+    }
+}
+impl Default for NamePacked<[::std::os::raw::c_int; 0]> {
+    fn default() -> Self {
+        let mut s = ::std::mem::MaybeUninit::<Self>::uninit();
+        unsafe {
+            ::std::ptr::write_bytes(s.as_mut_ptr(), 0, 1);
+            s.assume_init()
+        }
+    }
+}
diff --git a/bindgen-tests/tests/headers/nested_flexarray.hpp b/bindgen-tests/tests/headers/nested_flexarray.hpp
new file mode 100644
index 0000000000..dddfa623c9
--- /dev/null
+++ b/bindgen-tests/tests/headers/nested_flexarray.hpp
@@ -0,0 +1,19 @@
+// bindgen-flags: --rust-target nightly --flexarray-dst --raw-line '#![cfg(feature = "nightly")]' --raw-line '#![feature(ptr_metadata, layout_for_ptr)]'
+
+// Test for nested flexible array members
+struct Field {
+    int count;
+    int data[]; // FAM
+};
+
+struct Name {
+    int id;
+    struct Field field; // Last field is a struct with FAM
+};
+
+#pragma pack(1)
+struct NamePacked {
+    int id;
+    struct Field field; // Last field is a struct with FAM, in a packed struct
+};
+#pragma pack()
diff --git a/bindgen/codegen/mod.rs b/bindgen/codegen/mod.rs
index 712b2d01f7..c64eb99235 100644
--- a/bindgen/codegen/mod.rs
+++ b/bindgen/codegen/mod.rs
@@ -1540,6 +1540,28 @@ impl FieldCodegen<'_> for FieldData {
                     syn::parse_quote! { __IncompleteArrayField<#inner> }
                 }
             }
+        } else if let TypeKind::Comp(ref comp) = field_ty.kind() {
+            // Nested FAM: the field is a struct that itself has a FAM.
+            // Only treat it as a FAM if it's the last field.
+            if ctx.options().flexarray_dst &&
+                last_field &&
+                comp.flex_array_member(ctx).is_some()
+            {
+                let layout = parent_item.expect_type().layout(ctx);
+                let is_packed = parent.is_packed(ctx, layout.as_ref());
+                struct_layout.saw_flexible_array();
+
+                // For nested FAMs, we need to parameterize the field type with FAM.
+                // For packed structs, wrap the field in ManuallyDrop.
+                if is_packed {
+                    let prefix = ctx.trait_prefix();
+                    syn::parse_quote! { ::#prefix::mem::ManuallyDrop<#ty> }
+                } else {
+                    syn::parse_quote! { #ty }
+                }
+            } else {
+                ty
+            }
         } else {
             ty
         };
diff --git a/bindgen/ir/comp.rs b/bindgen/ir/comp.rs
index c67f9a2597..7ba65de9cc 100644
--- a/bindgen/ir/comp.rs
+++ b/bindgen/ir/comp.rs
@@ -790,6 +790,10 @@ impl CompFields {
     }
 
     /// Return the flex array member for the struct/class, if any.
+    ///
+    /// This method recursively checks whether the last field is either:
+    /// 1. An incomplete array (direct FAM)
+    /// 2. A struct/union that itself has a FAM (nested FAM)
     fn flex_array_member(&self, ctx: &BindgenContext) -> Option<TypeId> {
         let fields = match self {
             CompFields::Before(_) => panic!("raw fields"),
@@ -799,10 +803,23 @@

         match fields.last()? {
             Field::Bitfields(..) => None,
-            Field::DataMember(FieldData { ty, .. }) => ctx
-                .resolve_type(*ty)
-                .is_incomplete_array(ctx)
-                .map(|item| item.expect_type_id(ctx)),
+            Field::DataMember(FieldData { ty, .. }) => {
+                let resolved_ty = ctx.resolve_type(*ty);
+
+                // Check if it's an incomplete array (a direct FAM) first.
+                if let Some(item) = resolved_ty.is_incomplete_array(ctx) {
+                    return Some(item.expect_type_id(ctx));
+                }
+
+                // Check if it's a compound type with a FAM (need to resolve through type refs).
+                let canonical_ty = resolved_ty.canonical_type(ctx);
+                if let super::ty::TypeKind::Comp(ref comp) = canonical_ty.kind()
+                {
+                    return comp.flex_array_member(ctx);
+                }
+
+                None
+            }
         }
     }
 }
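A minimal usage sketch (not part of the diff) of the DST helpers generated above, assuming a nightly toolchain with `ptr_metadata` and `layout_for_ptr` enabled at the crate root and the generated `Name` bindings in scope; the heap-allocation approach is only illustrative, not something the bindings prescribe:

use std::alloc::{alloc_zeroed, dealloc, handle_alloc_error};
use std::os::raw::c_int;

fn main() {
    let len = 4usize;
    unsafe {
        // Layout of the full DST: the fixed prefix (`id`, `field.count`)
        // plus `len` trailing ints in the nested `field.data`.
        let layout = Name::<[c_int]>::layout(len);
        let raw = alloc_zeroed(layout) as *mut Name;
        if raw.is_null() {
            handle_alloc_error(layout);
        }
        // Initialize the sized prefix through the default `[c_int; 0]` view.
        (*raw).id = 1;
        (*raw).field.count = len as c_int;
        // Attach the runtime length to obtain the `Name<[c_int]>` DST view.
        let dst: &Name<[c_int]> = (*raw).flex_ref(len);
        assert_eq!(dst.field.data.len(), len);
        dealloc(raw.cast::<u8>(), layout);
    }
}

The same `layout`/`fixed`/`flex_ref` family is generated for `Field` and `NamePacked` as well; only `NamePacked` additionally wraps the nested struct in `ManuallyDrop` because the outer struct is packed.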