From e5d85f917b8965a5e62513c17cbb887366b152bc Mon Sep 17 00:00:00 2001
From: Erik Desjardins
Date: Sun, 16 Aug 2020 19:25:24 -0400
Subject: [PATCH] allow reordering of the last field of a MaybeUnsized struct if it's a ZST

---
 compiler/rustc_middle/src/ty/layout.rs | 19 +++++++++++++------
 1 file changed, 13 insertions(+), 6 deletions(-)

diff --git a/compiler/rustc_middle/src/ty/layout.rs b/compiler/rustc_middle/src/ty/layout.rs
index 08bd131565bfa..4038deb323341 100644
--- a/compiler/rustc_middle/src/ty/layout.rs
+++ b/compiler/rustc_middle/src/ty/layout.rs
@@ -289,25 +289,32 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
 
         let optimize = !repr.inhibit_struct_field_reordering_opt();
         if optimize {
-            let end =
-                if let StructKind::MaybeUnsized = kind { fields.len() - 1 } else { fields.len() };
-            let optimizing = &mut inverse_memory_index[..end];
             let field_align = |f: &TyAndLayout<'_>| {
                 if let Some(pack) = pack { f.align.abi.min(pack) } else { f.align.abi }
             };
             match kind {
-                StructKind::AlwaysSized | StructKind::MaybeUnsized => {
-                    optimizing.sort_by_key(|&x| {
+                StructKind::AlwaysSized => {
+                    inverse_memory_index.sort_by_key(|&x| {
                         // Place ZSTs first to avoid "interesting offsets",
                         // especially with only one or two non-ZST fields.
                         let f = &fields[x as usize];
                         (!f.is_zst(), cmp::Reverse(field_align(f)))
                     });
                 }
+                StructKind::MaybeUnsized => {
+                    // Sort in descending alignment, except for the last field,
+                    // which may be accessed through an unsized type.
+                    inverse_memory_index[..fields.len() - 1]
+                        .sort_by_key(|&x| cmp::Reverse(field_align(&fields[x as usize])));
+                    // Place ZSTs first to avoid "interesting offsets".
+                    // This will reorder the last field if it is a ZST, which is okay because
+                    // there's nothing in memory that could be accessed through an unsized type.
+                    inverse_memory_index.sort_by_key(|&x| !fields[x as usize].is_zst());
+                }
                 StructKind::Prefixed(..) => {
                     // Sort in ascending alignment so that the layout stay optimal
                     // regardless of the prefix
-                    optimizing.sort_by_key(|&x| field_align(&fields[x as usize]));
+                    inverse_memory_index.sort_by_key(|&x| field_align(&fields[x as usize]));
                 }
             }
         }
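
For illustration, the kind of type this change affects is a struct whose declared last field is ?Sized (so rustc lays it out with the MaybeUnsized strategy) but which is instantiated with a ZST. The sketch below is not part of the patch; the type and field names are hypothetical, and since default repr(Rust) layout is unspecified, the printed offsets depend on the compiler version. Before this change the ZST tail stayed pinned to the end of the layout; with it, the compiler is free to move the tail, e.g. to offset 0.

use std::ptr::addr_of;

// The declared tail is `?Sized`, so the struct is laid out as "maybe unsized"
// even when instantiated with a sized (here zero-sized) tail.
struct WithTail<T: ?Sized> {
    small: u8, // low alignment
    big: u64,  // high alignment
    tail: T,   // maybe-unsized tail; a ZST in the instantiation below
}

fn main() {
    let v: WithTail<()> = WithTail { small: 1, big: 2, tail: () };
    let base = addr_of!(v) as usize;
    // Field offsets are not guaranteed for the default repr; before this patch
    // the ZST tail was placed last, with it the compiler may reorder it.
    println!("small at offset {}", addr_of!(v.small) as usize - base);
    println!("big   at offset {}", addr_of!(v.big) as usize - base);
    println!("tail  at offset {}", addr_of!(v.tail) as usize - base);
}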