Auto merge of #25646 - huonw:align, r=alexcrichton
This removes a footgun, since it is reasonable to assume that pointers to
`T` are aligned to `align_of::<T>()`. This also matches the behaviour of
C/C++. `min_align_of` is now deprecated.

Closes #21611.
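
To make the new contract concrete, here is a small sketch (ours, not part of the commit) of what `align_of` and `align_of_val` report after this change; the exact numbers are platform-dependent:

```rust
use std::mem;

fn main() {
    // `align_of` now returns the ABI-required minimum alignment (the
    // alignment used for struct fields and respected by the allocator)
    // rather than the preferred alignment, which can be larger; on some
    // 32-bit x86 targets, for example, u64 prefers 8-byte alignment but
    // its field alignment is only 4.
    assert!(mem::align_of::<u64>().is_power_of_two());

    // `align_of_val` computes the same quantity for a concrete value,
    // including unsized ones such as slices.
    let xs: &[u32] = &[1, 2, 3];
    assert_eq!(mem::align_of_val(xs), mem::align_of::<u32>());
}
```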
bors committed Jun 26, 2015
2 parents b70d1c6 + 225b116 commit 378a370
Showing 8 changed files with 47 additions and 53 deletions.
6 changes: 3 additions & 3 deletions src/liballoc/arc.rs
@@ -77,7 +77,7 @@ use core::atomic;
use core::atomic::Ordering::{Relaxed, Release, Acquire, SeqCst};
use core::fmt;
use core::cmp::Ordering;
- use core::mem::{min_align_of_val, size_of_val};
+ use core::mem::{align_of_val, size_of_val};
use core::intrinsics::drop_in_place;
use core::mem;
use core::nonzero::NonZero;
@@ -241,7 +241,7 @@ impl<T: ?Sized> Arc<T> {

if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
- deallocate(ptr as *mut u8, size_of_val(&*ptr), min_align_of_val(&*ptr))
+ deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr))
}
}
}
@@ -565,7 +565,7 @@ impl<T: ?Sized> Drop for Weak<T> {
atomic::fence(Acquire);
unsafe { deallocate(ptr as *mut u8,
size_of_val(&*ptr),
- min_align_of_val(&*ptr)) }
+ align_of_val(&*ptr)) }
}
}
}
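
Every hunk in this file follows one invariant: the alignment passed to `deallocate` must match the one the memory was allocated with. A minimal sketch of that round-trip, written against today's `std::alloc` API rather than the pre-`Layout` allocator used above:

```rust
use std::alloc::{alloc, dealloc, Layout};

// Allocate and free a buffer for one T, handing the same layout (size
// plus ABI-minimum alignment) to both sides of the round-trip.
fn alloc_roundtrip<T>() {
    let layout = Layout::new::<T>();
    if layout.size() == 0 {
        return; // zero-sized layouts must not be passed to `alloc`
    }
    // SAFETY: the layout has non-zero size, and the pointer is freed
    // with exactly the layout it was allocated with.
    unsafe {
        let ptr = alloc(layout);
        if !ptr.is_null() {
            dealloc(ptr, layout);
        }
    }
}

fn main() {
    alloc_roundtrip::<u64>();
}
```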
8 changes: 4 additions & 4 deletions src/liballoc/rc.rs
@@ -162,7 +162,7 @@ use core::fmt;
use core::hash::{Hasher, Hash};
use core::intrinsics::{assume, drop_in_place};
use core::marker::{self, Unsize};
- use core::mem::{self, min_align_of, size_of, min_align_of_val, size_of_val, forget};
+ use core::mem::{self, align_of, size_of, align_of_val, size_of_val, forget};
use core::nonzero::NonZero;
use core::ops::{CoerceUnsized, Deref};
use core::ptr;
@@ -246,7 +246,7 @@ impl<T> Rc<T> {
// destruct the box and skip our Drop
// we can ignore the refcounts because we know we're unique
deallocate(*rc._ptr as *mut u8, size_of::<RcBox<T>>(),
- min_align_of::<RcBox<T>>());
+ align_of::<RcBox<T>>());
forget(rc);
Ok(val)
}
@@ -496,7 +496,7 @@ impl<T: ?Sized> Drop for Rc<T> {
if self.weak() == 0 {
deallocate(ptr as *mut u8,
size_of_val(&*ptr),
- min_align_of_val(&*ptr))
+ align_of_val(&*ptr))
}
}
}
@@ -805,7 +805,7 @@ impl<T: ?Sized> Drop for Weak<T> {
// the strong pointers have disappeared.
if self.weak() == 0 {
deallocate(ptr as *mut u8, size_of_val(&*ptr),
- min_align_of_val(&*ptr))
+ align_of_val(&*ptr))
}
}
}
12 changes: 6 additions & 6 deletions src/libarena/lib.rs
@@ -244,7 +244,7 @@ impl<'longer_than_self> Arena<'longer_than_self> {
fn alloc_copy<T, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
unsafe {
let ptr = self.alloc_copy_inner(mem::size_of::<T>(),
- mem::min_align_of::<T>());
+ mem::align_of::<T>());
let ptr = ptr as *mut T;
ptr::write(&mut (*ptr), op());
return &mut *ptr;
@@ -300,7 +300,7 @@ impl<'longer_than_self> Arena<'longer_than_self> {
let tydesc = get_tydesc::<T>();
let (ty_ptr, ptr) =
self.alloc_noncopy_inner(mem::size_of::<T>(),
- mem::min_align_of::<T>());
+ mem::align_of::<T>());
let ty_ptr = ty_ptr as *mut usize;
let ptr = ptr as *mut T;
// Write in our tydesc along with a bit indicating that it
@@ -393,7 +393,7 @@ struct TypedArenaChunk<T> {

fn calculate_size<T>(capacity: usize) -> usize {
let mut size = mem::size_of::<TypedArenaChunk<T>>();
- size = round_up(size, mem::min_align_of::<T>());
+ size = round_up(size, mem::align_of::<T>());
let elem_size = mem::size_of::<T>();
let elems_size = elem_size.checked_mul(capacity).unwrap();
size = size.checked_add(elems_size).unwrap();
@@ -405,7 +405,7 @@ impl<T> TypedArenaChunk<T> {
unsafe fn new(next: *mut TypedArenaChunk<T>, capacity: usize)
-> *mut TypedArenaChunk<T> {
let size = calculate_size::<T>(capacity);
- let chunk = allocate(size, mem::min_align_of::<TypedArenaChunk<T>>())
+ let chunk = allocate(size, mem::align_of::<TypedArenaChunk<T>>())
as *mut TypedArenaChunk<T>;
if chunk.is_null() { alloc::oom() }
(*chunk).next = next;
@@ -431,7 +431,7 @@ impl<T> TypedArenaChunk<T> {
let size = calculate_size::<T>(self.capacity);
let self_ptr: *mut TypedArenaChunk<T> = self;
deallocate(self_ptr as *mut u8, size,
- mem::min_align_of::<TypedArenaChunk<T>>());
+ mem::align_of::<TypedArenaChunk<T>>());
if !next.is_null() {
let capacity = (*next).capacity;
(*next).destroy(capacity);
@@ -444,7 +444,7 @@ impl<T> TypedArenaChunk<T> {
let this: *const TypedArenaChunk<T> = self;
unsafe {
mem::transmute(round_up(this.offset(1) as usize,
- mem::min_align_of::<T>()))
+ mem::align_of::<T>()))
}
}

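
`calculate_size` above rounds the chunk header size up to the element alignment before appending the element array. A sketch of the kind of `round_up` helper the arena relies on (assuming, as alignments are, that `align` is a power of two):

```rust
// Round `n` up to the next multiple of `align` using the usual mask
// trick: add `align - 1`, then clear the low bits.
fn round_up(n: usize, align: usize) -> usize {
    debug_assert!(align.is_power_of_two());
    n.checked_add(align - 1).unwrap() & !(align - 1)
}

fn main() {
    assert_eq!(round_up(13, 8), 16);
    assert_eq!(round_up(16, 8), 16);
}
```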
10 changes: 5 additions & 5 deletions src/libcollections/btree/node.rs
@@ -163,12 +163,12 @@ fn test_offset_calculation() {
}

fn calculate_allocation_generic<K, V>(capacity: usize, is_leaf: bool) -> (usize, usize) {
- let (keys_size, keys_align) = (capacity * mem::size_of::<K>(), mem::min_align_of::<K>());
- let (vals_size, vals_align) = (capacity * mem::size_of::<V>(), mem::min_align_of::<V>());
+ let (keys_size, keys_align) = (capacity * mem::size_of::<K>(), mem::align_of::<K>());
+ let (vals_size, vals_align) = (capacity * mem::size_of::<V>(), mem::align_of::<V>());
let (edges_size, edges_align) = if is_leaf {
(0, 1)
} else {
- ((capacity + 1) * mem::size_of::<Node<K, V>>(), mem::min_align_of::<Node<K, V>>())
+ ((capacity + 1) * mem::size_of::<Node<K, V>>(), mem::align_of::<Node<K, V>>())
};

calculate_allocation(
@@ -181,11 +181,11 @@ fn calculate_allocation_generic<K, V>(capacity: usize, is_leaf: bool) -> (usize,
fn calculate_offsets_generic<K, V>(capacity: usize, is_leaf: bool) -> (usize, usize) {
let keys_size = capacity * mem::size_of::<K>();
let vals_size = capacity * mem::size_of::<V>();
- let vals_align = mem::min_align_of::<V>();
+ let vals_align = mem::align_of::<V>();
let edges_align = if is_leaf {
1
} else {
- mem::min_align_of::<Node<K, V>>()
+ mem::align_of::<Node<K, V>>()
};

calculate_offsets(
14 changes: 7 additions & 7 deletions src/libcollections/vec.rs
@@ -219,7 +219,7 @@ impl<T> Vec<T> {
} else {
let size = capacity.checked_mul(mem::size_of::<T>())
.expect("capacity overflow");
- let ptr = unsafe { allocate(size, mem::min_align_of::<T>()) };
+ let ptr = unsafe { allocate(size, mem::align_of::<T>()) };
if ptr.is_null() { ::alloc::oom() }
unsafe { Vec::from_raw_parts(ptr as *mut T, 0, capacity) }
}
@@ -393,7 +393,7 @@ impl<T> Vec<T> {
let ptr = reallocate(*self.ptr as *mut u8,
self.cap * mem::size_of::<T>(),
self.len * mem::size_of::<T>(),
- mem::min_align_of::<T>()) as *mut T;
+ mem::align_of::<T>()) as *mut T;
if ptr.is_null() { ::alloc::oom() }
self.ptr = Unique::new(ptr);
}
@@ -866,9 +866,9 @@ impl<T> Vec<T> {
// FIXME: Assert statically that the types `T` and `U` have the
// same minimal alignment in case they are not zero-sized.

- // These asserts are necessary because the `min_align_of` of the
+ // These asserts are necessary because the `align_of` of the
// types are passed to the allocator by `Vec`.
- assert!(mem::min_align_of::<T>() == mem::min_align_of::<U>());
+ assert!(mem::align_of::<T>() == mem::align_of::<U>());

// This `as isize` cast is safe, because the size of the elements of the
// vector is not 0, and:
@@ -1269,9 +1269,9 @@ impl<T> Vec<T> {
#[inline(never)]
unsafe fn alloc_or_realloc<T>(ptr: *mut T, old_size: usize, size: usize) -> *mut T {
if old_size == 0 {
- allocate(size, mem::min_align_of::<T>()) as *mut T
+ allocate(size, mem::align_of::<T>()) as *mut T
} else {
- reallocate(ptr as *mut u8, old_size, size, mem::min_align_of::<T>()) as *mut T
+ reallocate(ptr as *mut u8, old_size, size, mem::align_of::<T>()) as *mut T
}
}

@@ -1280,7 +1280,7 @@ unsafe fn dealloc<T>(ptr: *mut T, len: usize) {
if mem::size_of::<T>() != 0 {
deallocate(ptr as *mut u8,
len * mem::size_of::<T>(),
- mem::min_align_of::<T>())
+ mem::align_of::<T>())
}
}

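
The asserts in the `map_in_place` hunk encode a simple rule: a `Vec<T>` buffer may only be reinterpreted as a `Vec<U>` if the allocator sees the same size and alignment for both element types, because those values are passed back on deallocation. A sketch of that check (`layout_compatible` is our name, not the stdlib's):

```rust
use std::mem::{align_of, size_of};

// True when T and U have identical allocation parameters, so a buffer
// allocated for [T] can later be deallocated as [U].
fn layout_compatible<T, U>() -> bool {
    size_of::<T>() == size_of::<U>() && align_of::<T>() == align_of::<U>()
}

fn main() {
    assert!(layout_compatible::<u32, i32>());
    assert!(!layout_compatible::<u32, u64>());
}
```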
8 changes: 4 additions & 4 deletions src/libcollections/vec_deque.rs
@@ -67,7 +67,7 @@ impl<T> Drop for VecDeque<T> {
if mem::size_of::<T>() != 0 {
heap::deallocate(*self.ptr as *mut u8,
self.cap * mem::size_of::<T>(),
- mem::min_align_of::<T>())
+ mem::align_of::<T>())
}
}
}
@@ -172,7 +172,7 @@ impl<T> VecDeque<T> {

let ptr = unsafe {
if mem::size_of::<T>() != 0 {
- let ptr = heap::allocate(size, mem::min_align_of::<T>()) as *mut T;;
+ let ptr = heap::allocate(size, mem::align_of::<T>()) as *mut T;;
if ptr.is_null() { ::alloc::oom() }
Unique::new(ptr)
} else {
@@ -340,7 +340,7 @@ impl<T> VecDeque<T> {
let ptr = heap::reallocate(*self.ptr as *mut u8,
old,
new,
- mem::min_align_of::<T>()) as *mut T;
+ mem::align_of::<T>()) as *mut T;
if ptr.is_null() { ::alloc::oom() }
self.ptr = Unique::new(ptr);
}
@@ -460,7 +460,7 @@ impl<T> VecDeque<T> {
let ptr = heap::reallocate(*self.ptr as *mut u8,
old,
new_size,
- mem::min_align_of::<T>()) as *mut T;
+ mem::align_of::<T>()) as *mut T;
if ptr.is_null() { ::alloc::oom() }
self.ptr = Unique::new(ptr);
}
20 changes: 7 additions & 13 deletions src/libcore/mem.rs
@@ -155,6 +155,7 @@ pub fn size_of_val<T: ?Sized>(val: &T) -> usize {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[deprecated(reason = "use `align_of` instead", since = "1.1.0")]
pub fn min_align_of<T>() -> usize {
unsafe { intrinsics::min_align_of::<T>() }
}
@@ -170,14 +171,14 @@ pub fn min_align_of<T>() -> usize {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[deprecated(reason = "use `align_of_val` instead", since = "1.1.0")]
pub fn min_align_of_val<T: ?Sized>(val: &T) -> usize {
unsafe { intrinsics::min_align_of_val(val) }
}

/// Returns the alignment in memory for a type.
///
- /// This function will return the alignment, in bytes, of a type in memory. If the alignment
- /// returned is adhered to, then the type is guaranteed to function properly.
+ /// This is the alignment used for struct fields. It may be smaller than the preferred alignment.
///
/// # Examples
///
@@ -189,17 +190,10 @@ pub fn min_align_of_val<T: ?Sized>(val: &T) -> usize {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn align_of<T>() -> usize {
- // We use the preferred alignment as the default alignment for a type. This
- // appears to be what clang migrated towards as well:
- //
- // http://lists.cs.uiuc.edu/pipermail/cfe-commits/Week-of-Mon-20110725/044411.html
- unsafe { intrinsics::pref_align_of::<T>() }
+ unsafe { intrinsics::min_align_of::<T>() }
}

- /// Returns the alignment of the type of the value that `_val` points to.
- ///
- /// This is similar to `align_of`, but function will properly handle types such as trait objects
- /// (in the future), returning the alignment for an arbitrary value at runtime.
+ /// Returns the ABI-required minimum alignment of the type of the value that `val` points to
///
/// # Examples
///
@@ -210,8 +204,8 @@ pub fn align_of<T>() -> usize {
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
- pub fn align_of_val<T>(_val: &T) -> usize {
-     align_of::<T>()
+ pub fn align_of_val<T: ?Sized>(val: &T) -> usize {
+     unsafe { intrinsics::min_align_of_val(val) }
}

/// Creates a value initialized to zero.
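
With the `?Sized` bound added above, `align_of_val` now computes the alignment of unsized values at runtime instead of falling back to `align_of::<T>()`. A short sketch of the calls this enables:

```rust
use std::mem::{align_of, align_of_val};

fn main() {
    // Sized values simply defer to the type's alignment.
    assert_eq!(align_of_val(&0u32), align_of::<u32>());

    // Unsized values (slices, str) are now accepted directly.
    let xs: &[u64] = &[1, 2, 3];
    assert_eq!(align_of_val(xs), align_of::<u64>());
    assert_eq!(align_of_val("hello"), 1);
}
```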
22 changes: 11 additions & 11 deletions src/libstd/collections/hash/table.rs
@@ -15,7 +15,7 @@ use cmp;
use hash::{Hash, Hasher};
use iter::{Iterator, ExactSizeIterator};
use marker::{Copy, Send, Sync, Sized, self};
- use mem::{min_align_of, size_of};
+ use mem::{align_of, size_of};
use mem;
use num::wrapping::OverflowingOps;
use ops::{Deref, DerefMut, Drop};
@@ -553,9 +553,9 @@ fn calculate_allocation(hash_size: usize, hash_align: usize,
vals_align);
let (end_of_vals, oflo2) = vals_offset.overflowing_add(vals_size);

- let min_align = cmp::max(hash_align, cmp::max(keys_align, vals_align));
+ let align = cmp::max(hash_align, cmp::max(keys_align, vals_align));

- (min_align, hash_offset, end_of_vals, oflo || oflo2)
+ (align, hash_offset, end_of_vals, oflo || oflo2)
}

#[test]
@@ -597,9 +597,9 @@ impl<K, V> RawTable<K, V> {
// factored out into a different function.
let (malloc_alignment, hash_offset, size, oflo) =
calculate_allocation(
- hashes_size, min_align_of::<u64>(),
- keys_size, min_align_of::< K >(),
- vals_size, min_align_of::< V >());
+ hashes_size, align_of::<u64>(),
+ keys_size, align_of::< K >(),
+ vals_size, align_of::< V >());

assert!(!oflo, "capacity overflow");

@@ -630,8 +630,8 @@ impl<K, V> RawTable<K, V> {
let buffer = *self.hashes as *mut u8;
let (keys_offset, vals_offset, oflo) =
calculate_offsets(hashes_size,
- keys_size, min_align_of::<K>(),
- min_align_of::<V>());
+ keys_size, align_of::<K>(),
+ align_of::<V>());
debug_assert!(!oflo, "capacity overflow");
unsafe {
RawBucket {
@@ -1005,9 +1005,9 @@ impl<K, V> Drop for RawTable<K, V> {
let keys_size = self.capacity * size_of::<K>();
let vals_size = self.capacity * size_of::<V>();
let (align, _, size, oflo) =
- calculate_allocation(hashes_size, min_align_of::<u64>(),
- keys_size, min_align_of::<K>(),
- vals_size, min_align_of::<V>());
+ calculate_allocation(hashes_size, align_of::<u64>(),
+ keys_size, align_of::<K>(),
+ vals_size, align_of::<V>());

debug_assert!(!oflo, "should be impossible");

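
All of the hash-table call sites feed `calculate_allocation`, whose `cmp::max` line captures the underlying rule: one buffer holding the hash, key, and value arrays must satisfy the strictest of their alignments. A sketch of that rule in isolation (`table_align` is our helper, not stdlib code):

```rust
use std::mem::align_of;

// The single allocation backing the table must be aligned for every
// component array it contains, so take the maximum requirement.
fn table_align<K, V>() -> usize {
    align_of::<u64>() // the hash array
        .max(align_of::<K>())
        .max(align_of::<V>())
}

fn main() {
    assert!(table_align::<u8, u8>() >= align_of::<u64>());
}
```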
