[file: src/liballoc/collections/binary_heap.rs (bracketed paths in this extract are inferred from the code; the original per-file headers were not captured)]
@@ -145,11 +145,11 @@
#![allow(missing_docs)]
#![stable(feature = "rust1", since = "1.0.0")]

use core::ops::{Deref, DerefMut};
use core::fmt;
use core::iter::{FromIterator, FusedIterator};
use core::mem::{swap, size_of, ManuallyDrop};
use core::mem::{size_of, swap, ManuallyDrop};
use core::ops::{Deref, DerefMut};
use core::ptr;
use core::fmt;

use slice;
use vec::{self, Vec};
@@ -229,9 +229,7 @@ pub struct PeekMut<'a, T: 'a + Ord> {
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<'a, T: Ord + fmt::Debug> fmt::Debug for PeekMut<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("PeekMut")
.field(&self.heap.data[0])
.finish()
f.debug_tuple("PeekMut").field(&self.heap.data[0]).finish()
}
}

@@ -272,7 +270,9 @@ impl<'a, T: Ord> PeekMut<'a, T> {
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Clone> Clone for BinaryHeap<T> {
fn clone(&self) -> Self {
BinaryHeap { data: self.data.clone() }
BinaryHeap {
data: self.data.clone(),
}
}

fn clone_from(&mut self, source: &Self) {
@@ -329,7 +329,9 @@ impl<T: Ord> BinaryHeap<T> {
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn with_capacity(capacity: usize) -> BinaryHeap<T> {
BinaryHeap { data: Vec::with_capacity(capacity) }
BinaryHeap {
data: Vec::with_capacity(capacity),
}
}

/// Returns an iterator visiting all values in the underlying vector, in
@@ -350,7 +352,9 @@ impl<T: Ord> BinaryHeap<T> {
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn iter(&self) -> Iter<T> {
Iter { iter: self.data.iter() }
Iter {
iter: self.data.iter(),
}
}

/// Returns the greatest item in the binary heap, or `None` if it is empty.
@@ -519,7 +523,7 @@ impl<T: Ord> BinaryHeap<T> {
/// assert!(heap.capacity() >= 10);
/// ```
#[inline]
#[unstable(feature = "shrink_to", reason = "new API", issue="56431")]
#[unstable(feature = "shrink_to", reason = "new API", issue = "56431")]
pub fn shrink_to(&mut self, min_capacity: usize) {
self.data.shrink_to(min_capacity)
}
@@ -762,7 +766,9 @@ impl<T: Ord> BinaryHeap<T> {
#[inline]
#[stable(feature = "drain", since = "1.6.0")]
pub fn drain(&mut self) -> Drain<T> {
Drain { iter: self.data.drain(..) }
Drain {
iter: self.data.drain(..),
}
}

/// Drops all items from the binary heap.
@@ -934,17 +940,17 @@ pub struct Iter<'a, T: 'a> {
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<'a, T: 'a + fmt::Debug> fmt::Debug for Iter<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("Iter")
.field(&self.iter.as_slice())
.finish()
f.debug_tuple("Iter").field(&self.iter.as_slice()).finish()
}
}

// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Clone for Iter<'a, T> {
fn clone(&self) -> Iter<'a, T> {
Iter { iter: self.iter.clone() }
Iter {
iter: self.iter.clone(),
}
}
}

@@ -998,8 +1004,8 @@ pub struct IntoIter<T> {
impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("IntoIter")
.field(&self.iter.as_slice())
.finish()
.field(&self.iter.as_slice())
.finish()
}
}

@@ -1129,13 +1135,16 @@ impl<T: Ord> IntoIterator for BinaryHeap<T> {
/// }
/// ```
fn into_iter(self) -> IntoIter<T> {
IntoIter { iter: self.data.into_iter() }
IntoIter {
iter: self.data.into_iter(),
}
}
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> IntoIterator for &'a BinaryHeap<T>
where T: Ord
where
T: Ord,
{
type Item = &'a T;
type IntoIter = Iter<'a, T>;
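
Note: the hunks above read as a mechanical formatter (rustfmt-style) pass. One recurring rule in binary_heap.rs is that a struct literal which no longer fits on one line gets one field per line with a trailing comma. A minimal compilable sketch, using a hypothetical `Heap` type that stands in for `BinaryHeap`:

pub struct Heap<T> {
    data: Vec<T>,
}

impl<T> Heap<T> {
    // Before formatting this read: `Heap { data: Vec::with_capacity(capacity) }`.
    pub fn with_capacity(capacity: usize) -> Heap<T> {
        Heap {
            data: Vec::with_capacity(capacity),
        }
    }
}
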
[file: src/liballoc/collections/btree/map.rs (likely, judging by file order): large diff not rendered by GitHub]
[file: src/liballoc/collections/btree/mod.rs]
@@ -1,6 +1,6 @@
pub mod map;
mod node;
mod search;
pub mod map;
pub mod set;

#[doc(hidden)]
[file: src/liballoc/collections/btree/node.rs (likely, judging by file order): large diff not rendered by GitHub]
[file: src/liballoc/collections/btree/search.rs]
@@ -2,22 +2,24 @@ use core::cmp::Ordering;

use borrow::Borrow;

use super::node::{Handle, NodeRef, marker};
use super::node::{marker, Handle, NodeRef};

use super::node::ForceResult::*;
use self::SearchResult::*;
use super::node::ForceResult::*;

pub enum SearchResult<BorrowType, K, V, FoundType, GoDownType> {
Found(Handle<NodeRef<BorrowType, K, V, FoundType>, marker::KV>),
GoDown(Handle<NodeRef<BorrowType, K, V, GoDownType>, marker::Edge>)
GoDown(Handle<NodeRef<BorrowType, K, V, GoDownType>, marker::Edge>),
}

pub fn search_tree<BorrowType, K, V, Q: ?Sized>(
mut node: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
key: &Q
key: &Q,
) -> SearchResult<BorrowType, K, V, marker::LeafOrInternal, marker::Leaf>
where Q: Ord, K: Borrow<Q> {

where
Q: Ord,
K: Borrow<Q>,
{
loop {
match search_node(node, key) {
Found(handle) => return Found(handle),
@@ -27,38 +29,38 @@ pub fn search_tree<BorrowType, K, V, Q: ?Sized>(
node = internal.descend();
continue;
}
}
},
}
}
}

pub fn search_node<BorrowType, K, V, Type, Q: ?Sized>(
node: NodeRef<BorrowType, K, V, Type>,
key: &Q
key: &Q,
) -> SearchResult<BorrowType, K, V, Type, Type>
where Q: Ord, K: Borrow<Q> {

where
Q: Ord,
K: Borrow<Q>,
{
match search_linear(&node, key) {
(idx, true) => Found(
Handle::new_kv(node, idx)
),
(idx, false) => SearchResult::GoDown(
Handle::new_edge(node, idx)
)
(idx, true) => Found(Handle::new_kv(node, idx)),
(idx, false) => SearchResult::GoDown(Handle::new_edge(node, idx)),
}
}

pub fn search_linear<BorrowType, K, V, Type, Q: ?Sized>(
node: &NodeRef<BorrowType, K, V, Type>,
key: &Q
key: &Q,
) -> (usize, bool)
where Q: Ord, K: Borrow<Q> {

where
Q: Ord,
K: Borrow<Q>,
{
for (i, k) in node.keys().iter().enumerate() {
match key.cmp(k.borrow()) {
Ordering::Greater => {},
Ordering::Greater => {}
Ordering::Equal => return (i, true),
Ordering::Less => return (i, false)
Ordering::Less => return (i, false),
}
}
(node.keys().len(), false)
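
Note: the dominant change in the search functions above is the block `where`-clause style: bounds leave the signature line, `where` gets its own line, and each bound follows on its own line with a trailing comma. A self-contained sketch of the convention (the function itself is illustrative, not from this diff):

use std::borrow::Borrow;

// `where` sits on its own line; each bound gets its own line, ending in a comma.
fn linear_position<T, Q: ?Sized>(items: &[T], key: &Q) -> Option<usize>
where
    T: Borrow<Q>,
    Q: Ord,
{
    items.iter().position(|item| item.borrow() == key)
}
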
[file: src/liballoc/collections/btree/set.rs]
@@ -1,16 +1,16 @@
// This is pretty much entirely stolen from TreeSet, since BTreeMap has an identical interface
// to TreeMap

use core::cmp::Ordering::{self, Less, Greater, Equal};
use core::cmp::{min, max};
use core::fmt::Debug;
use core::cmp::Ordering::{self, Equal, Greater, Less};
use core::cmp::{max, min};
use core::fmt;
use core::iter::{Peekable, FromIterator, FusedIterator};
use core::ops::{BitOr, BitAnd, BitXor, Sub, RangeBounds};
use core::fmt::Debug;
use core::iter::{FromIterator, FusedIterator, Peekable};
use core::ops::{BitAnd, BitOr, BitXor, RangeBounds, Sub};

use super::Recover;
use borrow::Borrow;
use collections::btree_map::{self, BTreeMap, Keys};
use super::Recover;

// FIXME(conventions): implement bounded iterators

@@ -78,9 +78,7 @@ pub struct Iter<'a, T: 'a> {
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<'a, T: 'a + fmt::Debug> fmt::Debug for Iter<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("Iter")
.field(&self.iter.clone())
.finish()
f.debug_tuple("Iter").field(&self.iter.clone()).finish()
}
}

@@ -127,9 +125,9 @@ pub struct Difference<'a, T: 'a> {
impl<'a, T: 'a + fmt::Debug> fmt::Debug for Difference<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("Difference")
.field(&self.a)
.field(&self.b)
.finish()
.field(&self.a)
.field(&self.b)
.finish()
}
}

@@ -150,9 +148,9 @@ pub struct SymmetricDifference<'a, T: 'a> {
impl<'a, T: 'a + fmt::Debug> fmt::Debug for SymmetricDifference<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("SymmetricDifference")
.field(&self.a)
.field(&self.b)
.finish()
.field(&self.a)
.field(&self.b)
.finish()
}
}

@@ -173,9 +171,9 @@ pub struct Intersection<'a, T: 'a> {
impl<'a, T: 'a + fmt::Debug> fmt::Debug for Intersection<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("Intersection")
.field(&self.a)
.field(&self.b)
.finish()
.field(&self.a)
.field(&self.b)
.finish()
}
}

@@ -196,9 +194,9 @@ pub struct Union<'a, T: 'a> {
impl<'a, T: 'a + fmt::Debug> fmt::Debug for Union<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("Union")
.field(&self.a)
.field(&self.b)
.finish()
.field(&self.a)
.field(&self.b)
.finish()
}
}

@@ -215,7 +213,9 @@ impl<T: Ord> BTreeSet<T> {
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn new() -> BTreeSet<T> {
BTreeSet { map: BTreeMap::new() }
BTreeSet {
map: BTreeMap::new(),
}
}

/// Constructs a double-ended iterator over a sub-range of elements in the set.
@@ -242,9 +242,14 @@ impl<T: Ord> BTreeSet<T> {
/// ```
#[stable(feature = "btree_range", since = "1.17.0")]
pub fn range<K: ?Sized, R>(&self, range: R) -> Range<T>
where K: Ord, T: Borrow<K>, R: RangeBounds<K>
where
K: Ord,
T: Borrow<K>,
R: RangeBounds<K>,
{
Range { iter: self.map.range(range) }
Range {
iter: self.map.range(range),
}
}

/// Visits the values representing the difference,
@@ -296,9 +301,10 @@ impl<T: Ord> BTreeSet<T> {
/// assert_eq!(sym_diff, [1, 3]);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn symmetric_difference<'a>(&'a self,
other: &'a BTreeSet<T>)
-> SymmetricDifference<'a, T> {
pub fn symmetric_difference<'a>(
&'a self,
other: &'a BTreeSet<T>,
) -> SymmetricDifference<'a, T> {
SymmetricDifference {
a: self.iter().peekable(),
b: other.iter().peekable(),
@@ -393,8 +399,9 @@ impl<T: Ord> BTreeSet<T> {
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn contains<Q: ?Sized>(&self, value: &Q) -> bool
where T: Borrow<Q>,
Q: Ord
where
T: Borrow<Q>,
Q: Ord,
{
self.map.contains_key(value)
}
@@ -416,8 +423,9 @@ impl<T: Ord> BTreeSet<T> {
/// ```
#[stable(feature = "set_recovery", since = "1.9.0")]
pub fn get<Q: ?Sized>(&self, value: &Q) -> Option<&T>
where T: Borrow<Q>,
Q: Ord
where
T: Borrow<Q>,
Q: Ord,
{
Recover::get(&self.map, value)
}
@@ -577,8 +585,9 @@ impl<T: Ord> BTreeSet<T> {
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn remove<Q: ?Sized>(&mut self, value: &Q) -> bool
where T: Borrow<Q>,
Q: Ord
where
T: Borrow<Q>,
Q: Ord,
{
self.map.remove(value).is_some()
}
@@ -600,8 +609,9 @@ impl<T: Ord> BTreeSet<T> {
/// ```
#[stable(feature = "set_recovery", since = "1.9.0")]
pub fn take<Q: ?Sized>(&mut self, value: &Q) -> Option<T>
where T: Borrow<Q>,
Q: Ord
where
T: Borrow<Q>,
Q: Ord,
{
Recover::take(&mut self.map, value)
}
@@ -669,8 +679,13 @@ impl<T: Ord> BTreeSet<T> {
/// assert!(b.contains(&41));
/// ```
#[stable(feature = "btree_split_off", since = "1.11.0")]
pub fn split_off<Q: ?Sized + Ord>(&mut self, key: &Q) -> Self where T: Borrow<Q> {
BTreeSet { map: self.map.split_off(key) }
pub fn split_off<Q: ?Sized + Ord>(&mut self, key: &Q) -> Self
where
T: Borrow<Q>,
{
BTreeSet {
map: self.map.split_off(key),
}
}
}

@@ -704,7 +719,9 @@ impl<T> BTreeSet<T> {
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn iter(&self) -> Iter<T> {
Iter { iter: self.map.keys() }
Iter {
iter: self.map.keys(),
}
}

/// Returns the number of elements in the set.
@@ -769,7 +786,9 @@ impl<T> IntoIterator for BTreeSet<T> {
/// assert_eq!(v, [1, 2, 3, 4]);
/// ```
fn into_iter(self) -> IntoIter<T> {
IntoIter { iter: self.map.into_iter() }
IntoIter {
iter: self.map.into_iter(),
}
}
}

@@ -910,7 +929,9 @@ impl<T: Debug> Debug for BTreeSet<T> {
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Clone for Iter<'a, T> {
fn clone(&self) -> Iter<'a, T> {
Iter { iter: self.iter.clone() }
Iter {
iter: self.iter.clone(),
}
}
}
#[stable(feature = "rust1", since = "1.0.0")]
@@ -932,7 +953,9 @@ impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> ExactSizeIterator for Iter<'a, T> {
fn len(&self) -> usize { self.iter.len() }
fn len(&self) -> usize {
self.iter.len()
}
}

#[stable(feature = "fused", since = "1.26.0")]
@@ -957,7 +980,9 @@ impl<T> DoubleEndedIterator for IntoIter<T> {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for IntoIter<T> {
fn len(&self) -> usize { self.iter.len() }
fn len(&self) -> usize {
self.iter.len()
}
}

#[stable(feature = "fused", since = "1.26.0")]
@@ -966,7 +991,9 @@ impl<T> FusedIterator for IntoIter<T> {}
#[stable(feature = "btree_range", since = "1.17.0")]
impl<'a, T> Clone for Range<'a, T> {
fn clone(&self) -> Range<'a, T> {
Range { iter: self.iter.clone() }
Range {
iter: self.iter.clone(),
}
}
}
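
Note: the `symmetric_difference` hunk above shows the rule for overflowing signatures: one parameter per line, with the closing parenthesis and return type on their own line. A compilable sketch (the helper is illustrative, not part of the diff):

use std::collections::BTreeSet;

// One parameter per line once the signature overflows; the return type
// follows the dedented closing parenthesis.
pub fn symmetric_difference_count<T: Ord>(
    left: &BTreeSet<T>,
    right: &BTreeSet<T>,
) -> usize {
    left.symmetric_difference(right).count()
}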

[file: src/liballoc/collections/linked_list.rs]
@@ -14,14 +14,14 @@

use core::cmp::Ordering;
use core::fmt;
use core::hash::{Hasher, Hash};
use core::hash::{Hash, Hasher};
use core::iter::{FromIterator, FusedIterator};
use core::marker::PhantomData;
use core::mem;
use core::ptr::NonNull;

use boxed::Box;
use super::SpecExtend;
use boxed::Box;

/// A doubly-linked list with owned nodes.
///
@@ -63,9 +63,7 @@ pub struct Iter<'a, T: 'a> {
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<'a, T: 'a + fmt::Debug> fmt::Debug for Iter<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("Iter")
.field(&self.len)
.finish()
f.debug_tuple("Iter").field(&self.len).finish()
}
}

@@ -96,9 +94,9 @@ pub struct IterMut<'a, T: 'a> {
impl<'a, T: 'a + fmt::Debug> fmt::Debug for IterMut<'a, T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("IterMut")
.field(&self.list)
.field(&self.len)
.finish()
.field(&self.list)
.field(&self.len)
.finish()
}
}

@@ -118,9 +116,7 @@ pub struct IntoIter<T> {
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("IntoIter")
.field(&self.list)
.finish()
f.debug_tuple("IntoIter").field(&self.list).finish()
}
}

@@ -466,7 +462,8 @@ impl<T> LinkedList<T> {
/// ```
#[stable(feature = "linked_list_contains", since = "1.12.0")]
pub fn contains(&self, x: &T) -> bool
where T: PartialEq<T>
where
T: PartialEq<T>,
{
self.iter().any(|e| e == x)
}
@@ -488,9 +485,7 @@ impl<T> LinkedList<T> {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn front(&self) -> Option<&T> {
unsafe {
self.head.as_ref().map(|node| &node.as_ref().element)
}
unsafe { self.head.as_ref().map(|node| &node.as_ref().element) }
}

/// Provides a mutable reference to the front element, or `None` if the list
@@ -516,9 +511,7 @@ impl<T> LinkedList<T> {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn front_mut(&mut self) -> Option<&mut T> {
unsafe {
self.head.as_mut().map(|node| &mut node.as_mut().element)
}
unsafe { self.head.as_mut().map(|node| &mut node.as_mut().element) }
}

/// Provides a reference to the back element, or `None` if the list is
@@ -538,9 +531,7 @@ impl<T> LinkedList<T> {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn back(&self) -> Option<&T> {
unsafe {
self.tail.as_ref().map(|node| &node.as_ref().element)
}
unsafe { self.tail.as_ref().map(|node| &node.as_ref().element) }
}

/// Provides a mutable reference to the back element, or `None` if the list
@@ -566,9 +557,7 @@ impl<T> LinkedList<T> {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn back_mut(&mut self) -> Option<&mut T> {
unsafe {
self.tail.as_mut().map(|node| &mut node.as_mut().element)
}
unsafe { self.tail.as_mut().map(|node| &mut node.as_mut().element) }
}

/// Adds an element first in the list.
@@ -765,7 +754,8 @@ impl<T> LinkedList<T> {
/// ```
#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
pub fn drain_filter<F>(&mut self, filter: F) -> DrainFilter<T, F>
where F: FnMut(&mut T) -> bool
where
F: FnMut(&mut T) -> bool,
{
// avoid borrow issues.
let it = self.head;
@@ -911,9 +901,11 @@ impl<'a, T> IterMut<'a, T> {
/// }
/// ```
#[inline]
#[unstable(feature = "linked_list_extras",
reason = "this is probably better handled by a cursor type -- we'll see",
issue = "27794")]
#[unstable(
feature = "linked_list_extras",
reason = "this is probably better handled by a cursor type -- we'll see",
issue = "27794"
)]
pub fn insert_next(&mut self, element: T) {
match self.head {
None => self.list.push_back(element),
@@ -955,24 +947,25 @@ impl<'a, T> IterMut<'a, T> {
/// assert_eq!(it.next().unwrap(), &2);
/// ```
#[inline]
#[unstable(feature = "linked_list_extras",
reason = "this is probably better handled by a cursor type -- we'll see",
issue = "27794")]
#[unstable(
feature = "linked_list_extras",
reason = "this is probably better handled by a cursor type -- we'll see",
issue = "27794"
)]
pub fn peek_next(&mut self) -> Option<&mut T> {
if self.len == 0 {
None
} else {
unsafe {
self.head.as_mut().map(|node| &mut node.as_mut().element)
}
unsafe { self.head.as_mut().map(|node| &mut node.as_mut().element) }
}
}
}

/// An iterator produced by calling `drain_filter` on LinkedList.
#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
pub struct DrainFilter<'a, T: 'a, F: 'a>
where F: FnMut(&mut T) -> bool,
where
F: FnMut(&mut T) -> bool,
{
list: &'a mut LinkedList<T>,
it: Option<NonNull<Node<T>>>,
@@ -983,7 +976,8 @@ pub struct DrainFilter<'a, T: 'a, F: 'a>

#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
impl<'a, T, F> Iterator for DrainFilter<'a, T, F>
where F: FnMut(&mut T) -> bool,
where
F: FnMut(&mut T) -> bool,
{
type Item = T;

@@ -1010,7 +1004,8 @@ impl<'a, T, F> Iterator for DrainFilter<'a, T, F>

#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
impl<'a, T, F> Drop for DrainFilter<'a, T, F>
where F: FnMut(&mut T) -> bool,
where
F: FnMut(&mut T) -> bool,
{
fn drop(&mut self) {
self.for_each(drop);
@@ -1019,12 +1014,11 @@ impl<'a, T, F> Drop for DrainFilter<'a, T, F>

#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
impl<'a, T: 'a + fmt::Debug, F> fmt::Debug for DrainFilter<'a, T, F>
where F: FnMut(&mut T) -> bool
where
F: FnMut(&mut T) -> bool,
{
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_tuple("DrainFilter")
.field(&self.list)
.finish()
f.debug_tuple("DrainFilter").field(&self.list).finish()
}
}

@@ -1350,22 +1344,24 @@ mod tests {
}
check_links(&m);
assert_eq!(m.len(), 3 + len * 2);
assert_eq!(m.into_iter().collect::<Vec<_>>(),
[-2, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1]);
assert_eq!(
m.into_iter().collect::<Vec<_>>(),
[-2, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1]
);
}

#[test]
#[cfg_attr(target_os = "emscripten", ignore)]
fn test_send() {
let n = list_from(&[1, 2, 3]);
thread::spawn(move || {
check_links(&n);
let a: &[_] = &[&1, &2, &3];
assert_eq!(a, &*n.iter().collect::<Vec<_>>());
})
.join()
.ok()
.unwrap();
check_links(&n);
let a: &[_] = &[&1, &2, &3];
assert_eq!(a, &*n.iter().collect::<Vec<_>>());
})
.join()
.ok()
.unwrap();
}

#[test]
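
Note: the accessor hunks above (`front`, `back`, and friends) collapse an `unsafe` block whose body is a single expression onto one line. A small compilable sketch with a hypothetical raw-pointer holder:

struct Slot {
    head: *const i32,
}

impl Slot {
    fn front(&self) -> Option<&i32> {
        // Before: the `unsafe { ... }` body was spread over three lines.
        unsafe { self.head.as_ref() }
    }
}
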
[file: src/liballoc/collections/mod.rs]
@@ -45,7 +45,7 @@ use alloc::{AllocErr, LayoutErr};

/// Augments `AllocErr` with a CapacityOverflow variant.
#[derive(Clone, PartialEq, Eq, Debug)]
#[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
#[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
pub enum CollectionAllocErr {
/// Error due to the computed capacity exceeding the collection's maximum
/// (usually `isize::MAX` bytes).
@@ -54,15 +54,15 @@ pub enum CollectionAllocErr {
AllocErr,
}

#[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
#[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
impl From<AllocErr> for CollectionAllocErr {
#[inline]
fn from(AllocErr: AllocErr) -> Self {
CollectionAllocErr::AllocErr
}
}

#[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
#[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
impl From<LayoutErr> for CollectionAllocErr {
#[inline]
fn from(_: LayoutErr) -> Self {
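
Note: the `CollectionAllocErr` hunks change only attribute spacing: `issue="48043"` becomes `issue = "48043"`. `#[unstable]` is internal to rustc, so this compilable sketch shows the same `key = "value"` spacing on the stable `#[deprecated]` attribute instead:

// Spaces are added around `=` inside attribute arguments.
#[deprecated(since = "1.0.0", note = "spacing illustration only")]
pub fn old_api() {}
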
[file: src/liballoc/collections/vec_deque.rs (likely, judging by file order): large diff not rendered by GitHub]
[file: src/liballoc/fmt.rs]
@@ -508,24 +508,24 @@

#[unstable(feature = "fmt_internals", issue = "0")]
pub use core::fmt::rt;
#[stable(feature = "fmt_flags_align", since = "1.28.0")]
pub use core::fmt::Alignment;
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::fmt::{Formatter, Result, Write};
pub use core::fmt::Error;
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::fmt::{write, ArgumentV1, Arguments};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::fmt::{Binary, Octal};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::fmt::{Debug, Display};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::fmt::{LowerHex, Pointer, UpperHex};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::fmt::{LowerExp, UpperExp};
pub use core::fmt::{DebugList, DebugMap, DebugSet, DebugStruct, DebugTuple};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::fmt::Error;
pub use core::fmt::{Formatter, Result, Write};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::fmt::{write, ArgumentV1, Arguments};
pub use core::fmt::{LowerExp, UpperExp};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::fmt::{DebugList, DebugMap, DebugSet, DebugStruct, DebugTuple};
#[stable(feature = "fmt_flags_align", since = "1.28.0")]
pub use core::fmt::{Alignment};
pub use core::fmt::{LowerHex, Pointer, UpperHex};

use string;
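
Note: the churn above is the formatter reordering `pub use` re-exports alphabetically by path; each `#[stable]`/`#[unstable]` attribute stays glued to the item directly below it, which is why the stability markers appear to jump around. An illustrative module (not from this diff):

mod greek {
    pub fn alpha() {}
    pub fn omega() {}
}

// Before reordering, the `omega` re-export came first; an attribute on a
// re-export always moves together with its item.
pub use self::greek::alpha;
pub use self::greek::omega;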

[file: src/liballoc/lib.rs]
@@ -51,25 +51,26 @@
//! default global allocator. It is not compatible with the libc allocator API.

#![allow(unused_attributes)]
#![unstable(feature = "alloc",
reason = "this library is unlikely to be stabilized in its current \
form or name",
issue = "27783")]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/",
issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/",
test(no_crate_inject, attr(allow(unused_variables), deny(warnings))))]
#![unstable(
feature = "alloc",
reason = "this library is unlikely to be stabilized in its current \
form or name",
issue = "27783"
)]
#![doc(
html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/",
issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/",
test(no_crate_inject, attr(allow(unused_variables), deny(warnings)))
)]
#![no_std]
#![needs_allocator]

#![deny(intra_doc_link_resolution_failure)]
#![deny(missing_debug_implementations)]

#![cfg_attr(not(test), feature(fn_traits))]
#![cfg_attr(not(test), feature(generator_trait))]
#![cfg_attr(test, feature(test))]

#![feature(allocator_api)]
#![feature(allow_internal_unstable)]
#![feature(arbitrary_self_types)]
@@ -119,9 +120,9 @@
#[macro_use]
extern crate std;
#[cfg(test)]
extern crate test;
#[cfg(test)]
extern crate rand;
#[cfg(test)]
extern crate test;

// Module with internal macros used by other modules (needs to be included before other modules).
#[macro_use]
@@ -131,9 +132,11 @@ mod macros;

pub mod alloc;

#[unstable(feature = "futures_api",
reason = "futures in libcore are unstable",
issue = "50547")]
#[unstable(
feature = "futures_api",
reason = "futures in libcore are unstable",
issue = "50547"
)]
pub mod task;
// Primitive types using the heaps above

@@ -146,22 +149,22 @@ pub mod boxed;
mod boxed {
pub use std::boxed::Box;
}
pub mod borrow;
#[cfg(test)]
mod boxed_test;
pub mod collections;
#[cfg(all(target_has_atomic = "ptr", target_has_atomic = "cas"))]
pub mod sync;
pub mod rc;
pub mod raw_vec;
pub mod prelude;
pub mod borrow;
pub mod fmt;
pub mod prelude;
pub mod raw_vec;
pub mod rc;
pub mod slice;
pub mod str;
pub mod string;
#[cfg(all(target_has_atomic = "ptr", target_has_atomic = "cas"))]
pub mod sync;
pub mod vec;

#[cfg(not(test))]
mod std {
pub use core::ops; // RangeFull
pub use core::ops; // RangeFull
}
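
Note: in lib.rs, crate-level attributes whose arguments overflow the line are broken one argument per line, with `)]` dedented back to the attribute's indent. A crate-root fragment in the new style (it only compiles at the top of a crate; the URLs are the ones quoted in the diff):

#![doc(
    html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
    html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
    html_root_url = "https://doc.rust-lang.org/nightly/"
)]
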
[file: src/liballoc/prelude.rs]
@@ -12,8 +12,13 @@

#![unstable(feature = "alloc", issue = "27783")]

#[unstable(feature = "alloc", issue = "27783")] pub use borrow::ToOwned;
#[unstable(feature = "alloc", issue = "27783")] pub use boxed::Box;
#[unstable(feature = "alloc", issue = "27783")] pub use slice::SliceConcatExt;
#[unstable(feature = "alloc", issue = "27783")] pub use string::{String, ToString};
#[unstable(feature = "alloc", issue = "27783")] pub use vec::Vec;
#[unstable(feature = "alloc", issue = "27783")]
pub use borrow::ToOwned;
#[unstable(feature = "alloc", issue = "27783")]
pub use boxed::Box;
#[unstable(feature = "alloc", issue = "27783")]
pub use slice::SliceConcatExt;
#[unstable(feature = "alloc", issue = "27783")]
pub use string::{String, ToString};
#[unstable(feature = "alloc", issue = "27783")]
pub use vec::Vec;
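
Note: the prelude hunk splits an attribute that shared a line with its item onto its own line. A compilable sketch (module and names are illustrative):

mod util {
    pub fn helper() {}
}

// Before: `#[allow(unused_imports)] pub use self::util::helper;` on one line.
#[allow(unused_imports)]
pub use self::util::helper;
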
[file: src/liballoc/raw_vec.rs]
@@ -7,10 +7,10 @@ use core::ops::Drop;
use core::ptr::{self, NonNull, Unique};
use core::slice;

use alloc::{Alloc, Layout, Global, handle_alloc_error};
use alloc::{handle_alloc_error, Alloc, Global, Layout};
use boxed::Box;
use collections::CollectionAllocErr;
use collections::CollectionAllocErr::*;
use boxed::Box;

/// A low-level utility for more ergonomically allocating, reallocating, and deallocating
/// a buffer of memory on the heap without having to worry about all the corner cases
@@ -81,7 +81,9 @@ impl<T, A: Alloc> RawVec<T, A> {
unsafe {
let elem_size = mem::size_of::<T>();

let alloc_size = cap.checked_mul(elem_size).unwrap_or_else(|| capacity_overflow());
let alloc_size = cap
.checked_mul(elem_size)
.unwrap_or_else(|| capacity_overflow());
alloc_guard(alloc_size).unwrap_or_else(|_| capacity_overflow());

// handles ZSTs and `cap = 0` alike
@@ -307,14 +309,15 @@ impl<T, A: Alloc> RawVec<T, A> {
let new_cap = 2 * self.cap;
let new_size = new_cap * elem_size;
alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow());
let ptr_res = self.a.realloc(NonNull::from(self.ptr).cast(),
cur,
new_size);
let ptr_res = self
.a
.realloc(NonNull::from(self.ptr).cast(), cur, new_size);
match ptr_res {
Ok(ptr) => (new_cap, ptr.cast().into()),
Err(_) => handle_alloc_error(
Layout::from_size_align_unchecked(new_size, cur.align())
),
Err(_) => handle_alloc_error(Layout::from_size_align_unchecked(
new_size,
cur.align(),
)),
}
}
None => {
@@ -368,23 +371,26 @@ impl<T, A: Alloc> RawVec<T, A> {
let new_cap = 2 * self.cap;
let new_size = new_cap * elem_size;
alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow());
match self.a.grow_in_place(NonNull::from(self.ptr).cast(), old_layout, new_size) {
match self
.a
.grow_in_place(NonNull::from(self.ptr).cast(), old_layout, new_size)
{
Ok(_) => {
// We can't directly divide `size`.
self.cap = new_cap;
true
}
Err(_) => {
false
}
Err(_) => false,
}
}
}

/// The same as `reserve_exact`, but returns on errors instead of panicking or aborting.
pub fn try_reserve_exact(&mut self, used_cap: usize, needed_extra_cap: usize)
-> Result<(), CollectionAllocErr> {

pub fn try_reserve_exact(
&mut self,
used_cap: usize,
needed_extra_cap: usize,
) -> Result<(), CollectionAllocErr> {
self.reserve_internal(used_cap, needed_extra_cap, Fallible, Exact)
}

@@ -413,26 +419,33 @@ impl<T, A: Alloc> RawVec<T, A> {
Err(CapacityOverflow) => capacity_overflow(),
Err(AllocErr) => unreachable!(),
Ok(()) => { /* yay */ }
}
}
}
}

/// Calculates the buffer's new size given that it'll hold `used_cap +
/// needed_extra_cap` elements. This logic is used in amortized reserve methods.
/// Returns `(new_capacity, new_alloc_size)`.
fn amortized_new_size(&self, used_cap: usize, needed_extra_cap: usize)
-> Result<usize, CollectionAllocErr> {

fn amortized_new_size(
&self,
used_cap: usize,
needed_extra_cap: usize,
) -> Result<usize, CollectionAllocErr> {
// Nothing we can really do about these checks :(
let required_cap = used_cap.checked_add(needed_extra_cap).ok_or(CapacityOverflow)?;
let required_cap = used_cap
.checked_add(needed_extra_cap)
.ok_or(CapacityOverflow)?;
// Cannot overflow, because `cap <= isize::MAX`, and type of `cap` is `usize`.
let double_cap = self.cap * 2;
// `double_cap` guarantees exponential growth.
Ok(cmp::max(double_cap, required_cap))
}

/// The same as `reserve`, but returns on errors instead of panicking or aborting.
pub fn try_reserve(&mut self, used_cap: usize, needed_extra_cap: usize)
-> Result<(), CollectionAllocErr> {
pub fn try_reserve(
&mut self,
used_cap: usize,
needed_extra_cap: usize,
) -> Result<(), CollectionAllocErr> {
self.reserve_internal(used_cap, needed_extra_cap, Fallible, Amortized)
}

@@ -530,7 +543,8 @@ impl<T, A: Alloc> RawVec<T, A> {
return false;
}

let new_cap = self.amortized_new_size(used_cap, needed_extra_cap)
let new_cap = self
.amortized_new_size(used_cap, needed_extra_cap)
.unwrap_or_else(|_| capacity_overflow());

// Here, `cap < used_cap + needed_extra_cap <= new_cap`
@@ -541,15 +555,15 @@ impl<T, A: Alloc> RawVec<T, A> {
// FIXME: may crash and burn on over-reserve
alloc_guard(new_layout.size()).unwrap_or_else(|_| capacity_overflow());
match self.a.grow_in_place(
NonNull::from(self.ptr).cast(), old_layout, new_layout.size(),
NonNull::from(self.ptr).cast(),
old_layout,
new_layout.size(),
) {
Ok(_) => {
self.cap = new_cap;
true
}
Err(_) => {
false
}
Err(_) => false,
}
}
}
@@ -602,13 +616,14 @@ impl<T, A: Alloc> RawVec<T, A> {
let new_size = elem_size * amount;
let align = mem::align_of::<T>();
let old_layout = Layout::from_size_align_unchecked(old_size, align);
match self.a.realloc(NonNull::from(self.ptr).cast(),
old_layout,
new_size) {
match self
.a
.realloc(NonNull::from(self.ptr).cast(), old_layout, new_size)
{
Ok(p) => self.ptr = p.cast().into(),
Err(_) => handle_alloc_error(
Layout::from_size_align_unchecked(new_size, align)
),
Err(_) => {
handle_alloc_error(Layout::from_size_align_unchecked(new_size, align))
}
}
}
self.cap = amount;
@@ -654,7 +669,9 @@ impl<T, A: Alloc> RawVec<T, A> {

// Nothing we can really do about these checks :(
let new_cap = match strategy {
Exact => used_cap.checked_add(needed_extra_cap).ok_or(CapacityOverflow)?,
Exact => used_cap
.checked_add(needed_extra_cap)
.ok_or(CapacityOverflow)?,
Amortized => self.amortized_new_size(used_cap, needed_extra_cap)?,
};
let new_layout = Layout::array::<T>(new_cap).map_err(|_| CapacityOverflow)?;
@@ -664,7 +681,8 @@ impl<T, A: Alloc> RawVec<T, A> {
let res = match self.current_layout() {
Some(layout) => {
debug_assert!(new_layout.align() == layout.align());
self.a.realloc(NonNull::from(self.ptr).cast(), layout, new_layout.size())
self.a
.realloc(NonNull::from(self.ptr).cast(), layout, new_layout.size())
}
None => self.a.alloc(new_layout),
};
@@ -680,7 +698,6 @@ impl<T, A: Alloc> RawVec<T, A> {
Ok(())
}
}

}

impl<T> RawVec<T, Global> {
@@ -716,12 +733,12 @@ impl<T, A: Alloc> RawVec<T, A> {
unsafe impl<#[may_dangle] T, A: Alloc> Drop for RawVec<T, A> {
/// Frees the memory owned by the RawVec *without* trying to Drop its contents.
fn drop(&mut self) {
unsafe { self.dealloc_buffer(); }
unsafe {
self.dealloc_buffer();
}
}
}



// We need to guarantee the following:
// * We don't ever allocate `> isize::MAX` byte-size objects
// * We don't overflow `usize::MAX` and actually allocate too little
@@ -767,15 +784,20 @@ mod tests {

// A dumb allocator that consumes a fixed amount of fuel
// before allocation attempts start failing.
struct BoundedAlloc { fuel: usize }
struct BoundedAlloc {
fuel: usize,
}
unsafe impl Alloc for BoundedAlloc {
unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, AllocErr> {
let size = layout.size();
if size > self.fuel {
return Err(AllocErr);
}
match Global.alloc(layout) {
ok @ Ok(_) => { self.fuel -= size; ok }
ok @ Ok(_) => {
self.fuel -= size;
ok
}
err @ Err(_) => err,
}
}
@@ -823,5 +845,4 @@ mod tests {
}
}


}
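
Note: the raw_vec.rs hunks repeatedly apply chain-breaking: when a method chain overflows the line, the formatter breaks after the receiver and stacks one `.call` per line, as in the `checked_mul` and `realloc` call sites above. A compilable sketch (hypothetical helper):

// An overflowing call chain is stacked one segment per line.
fn alloc_size(cap: usize, elem_size: usize) -> usize {
    cap.checked_mul(elem_size)
        .unwrap_or_else(|| panic!("capacity overflow"))
}
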
[file: src/liballoc/rc.rs]
@@ -237,20 +237,20 @@ use core::any::Any;
use core::borrow;
use core::cell::Cell;
use core::cmp::Ordering;
use core::convert::From;
use core::fmt;
use core::hash::{Hash, Hasher};
use core::intrinsics::abort;
use core::marker;
use core::marker::{Unpin, Unsize, PhantomData};
use core::marker::{PhantomData, Unpin, Unsize};
use core::mem::{self, align_of_val, forget, size_of_val};
use core::ops::{Deref, Receiver};
use core::ops::{CoerceUnsized, DispatchFromDyn};
use core::ops::{Deref, Receiver};
use core::pin::Pin;
use core::ptr::{self, NonNull};
use core::convert::From;
use core::usize;

use alloc::{Global, Alloc, Layout, box_free, handle_alloc_error};
use alloc::{box_free, handle_alloc_error, Alloc, Global, Layout};
use string::String;
use vec::Vec;

@@ -533,9 +533,7 @@ impl<T: ?Sized> Rc<T> {
#[stable(feature = "rc_unique", since = "1.4.0")]
pub fn get_mut(this: &mut Self) -> Option<&mut T> {
if Rc::is_unique(this) {
unsafe {
Some(&mut this.ptr.as_mut().value)
}
unsafe { Some(&mut this.ptr.as_mut().value) }
} else {
None
}
@@ -616,9 +614,7 @@ impl<T: Clone> Rc<T> {
// reference count is guaranteed to be 1 at this point, and we required
// the `Rc<T>` itself to be `mut`, so we're returning the only possible
// reference to the inner value.
unsafe {
&mut this.ptr.as_mut().value
}
unsafe { &mut this.ptr.as_mut().value }
}
}

@@ -649,7 +645,10 @@ impl Rc<dyn Any> {
if (*self).is::<T>() {
let ptr = self.ptr.cast::<RcBox<T>>();
forget(self);
Ok(Rc { ptr, phantom: PhantomData })
Ok(Rc {
ptr,
phantom: PhantomData,
})
} else {
Err(self)
}
@@ -664,10 +663,14 @@ impl<T: ?Sized> Rc<T> {
// `&*(ptr as *const RcBox<T>)`, but this created a misaligned
// reference (see #54908).
let layout = Layout::new::<RcBox<()>>()
.extend(Layout::for_value(&*ptr)).unwrap().0
.pad_to_align().unwrap();

let mem = Global.alloc(layout)
.extend(Layout::for_value(&*ptr))
.unwrap()
.0
.pad_to_align()
.unwrap();

let mem = Global
.alloc(layout)
.unwrap_or_else(|_| handle_alloc_error(layout));

// Initialize the RcBox
@@ -692,12 +695,16 @@ impl<T: ?Sized> Rc<T> {
ptr::copy_nonoverlapping(
bptr as *const T as *const u8,
&mut (*ptr).value as *mut _ as *mut u8,
value_size);
value_size,
);

// Free the allocation without dropping its contents
box_free(box_unique);

Rc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData }
Rc {
ptr: NonNull::new_unchecked(ptr),
phantom: PhantomData,
}
}
}
}
@@ -719,12 +726,12 @@ impl<T> Rc<[T]> {
let v_ptr = v as *const [T];
let ptr = Self::allocate_for_ptr(v_ptr);

ptr::copy_nonoverlapping(
v.as_ptr(),
&mut (*ptr).value as *mut [T] as *mut T,
v.len());
ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).value as *mut [T] as *mut T, v.len());

Rc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData }
Rc {
ptr: NonNull::new_unchecked(ptr),
phantom: PhantomData,
}
}
}

@@ -768,7 +775,7 @@ impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
// Pointer to first element
let elems = &mut (*ptr).value as *mut [T] as *mut T;

let mut guard = Guard{
let mut guard = Guard {
mem: NonNull::new_unchecked(mem),
elems: elems,
layout: layout,
@@ -783,7 +790,10 @@ impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
// All clear. Forget the guard so it doesn't free the new RcBox.
forget(guard);

Rc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData }
Rc {
ptr: NonNull::new_unchecked(ptr),
phantom: PhantomData,
}
}
}
}
@@ -875,7 +885,10 @@ impl<T: ?Sized> Clone for Rc<T> {
#[inline]
fn clone(&self) -> Rc<T> {
self.inc_strong();
Rc { ptr: self.ptr, phantom: PhantomData }
Rc {
ptr: self.ptr,
phantom: PhantomData,
}
}
}

@@ -1282,7 +1295,10 @@ impl<T: ?Sized> Weak<T> {
None
} else {
inner.inc_strong();
Some(Rc { ptr: self.ptr, phantom: PhantomData })
Some(Rc {
ptr: self.ptr,
phantom: PhantomData,
})
}
}

@@ -1459,7 +1475,9 @@ trait RcBoxPtr<T: ?Sized> {
// nevertheless, we insert an abort here to hint LLVM at
// an otherwise missed optimization.
if self.strong() == 0 || self.strong() == usize::max_value() {
unsafe { abort(); }
unsafe {
abort();
}
}
self.inner().strong.set(self.strong() + 1);
}
@@ -1481,7 +1499,9 @@ trait RcBoxPtr<T: ?Sized> {
// nevertheless, we insert an abort here to hint LLVM at
// an otherwise missed optimization.
if self.weak() == 0 || self.weak() == usize::max_value() {
unsafe { abort(); }
unsafe {
abort();
}
}
self.inner().weak.set(self.weak() + 1);
}
@@ -1495,9 +1515,7 @@ trait RcBoxPtr<T: ?Sized> {
impl<T: ?Sized> RcBoxPtr<T> for Rc<T> {
#[inline(always)]
fn inner(&self) -> &RcBox<T> {
unsafe {
self.ptr.as_ref()
}
unsafe { self.ptr.as_ref() }
}
}

@@ -1513,12 +1531,12 @@ mod tests {
use super::{Rc, Weak};
use std::boxed::Box;
use std::cell::RefCell;
use std::clone::Clone;
use std::convert::From;
use std::mem::drop;
use std::option::Option;
use std::option::Option::{None, Some};
use std::result::Result::{Err, Ok};
use std::mem::drop;
use std::clone::Clone;
use std::convert::From;

#[test]
fn test_clone() {
@@ -1569,7 +1587,9 @@ mod tests {
x: RefCell<Option<Weak<Cycle>>>,
}

let a = Rc::new(Cycle { x: RefCell::new(None) });
let a = Rc::new(Cycle {
x: RefCell::new(None),
});
let b = Rc::downgrade(&a.clone());
*a.x.borrow_mut() = Some(b);

@@ -1927,4 +1947,4 @@ impl<T: ?Sized> AsRef<T> for Rc<T> {
}

#[stable(feature = "pin", since = "1.33.0")]
impl<T: ?Sized> Unpin for Rc<T> { }
impl<T: ?Sized> Unpin for Rc<T> {}
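
Note: the `Unpin` impl above shows an empty body `{ }` normalizing to `{}`; the raw_vec.rs hunks earlier showed the companion rule for match arms, where a single-expression arm loses its braces and gains a trailing comma (`Err(_) => { false }` becomes `Err(_) => false,`). A compilable sketch of the arm rule:

// Single-expression arms drop their braces and end with a comma.
fn succeeded(res: Result<(), ()>) -> bool {
    match res {
        Ok(()) => true,
        Err(_) => false,
    }
}
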
[file: src/liballoc/slice.rs]
@@ -88,35 +88,35 @@
#![cfg_attr(test, allow(unused_imports, dead_code))]

use core::cmp::Ordering::{self, Less};
use core::mem::size_of;
use core::mem;
use core::mem::size_of;
use core::ptr;
use core::{u8, u16, u32};
use core::{u16, u32, u8};

use borrow::{Borrow, BorrowMut, ToOwned};
use boxed::Box;
use vec::Vec;

#[stable(feature = "slice_get_slice", since = "1.28.0")]
pub use core::slice::SliceIndex;
#[stable(feature = "from_ref", since = "1.28.0")]
pub use core::slice::{from_mut, from_ref};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::slice::{Chunks, Windows};
pub use core::slice::{from_raw_parts, from_raw_parts_mut};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::slice::{Iter, IterMut};
pub use core::slice::{Chunks, Windows};
#[stable(feature = "chunks_exact", since = "1.31.0")]
pub use core::slice::{ChunksExact, ChunksExactMut};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::slice::{SplitMut, ChunksMut, Split};
pub use core::slice::{ChunksMut, Split, SplitMut};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::slice::{SplitN, RSplitN, SplitNMut, RSplitNMut};
pub use core::slice::{Iter, IterMut};
#[stable(feature = "rchunks", since = "1.31.0")]
pub use core::slice::{RChunks, RChunksExact, RChunksExactMut, RChunksMut};
#[stable(feature = "slice_rsplit", since = "1.27.0")]
pub use core::slice::{RSplit, RSplitMut};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::slice::{from_raw_parts, from_raw_parts_mut};
#[stable(feature = "from_ref", since = "1.28.0")]
pub use core::slice::{from_ref, from_mut};
#[stable(feature = "slice_get_slice", since = "1.28.0")]
pub use core::slice::SliceIndex;
#[stable(feature = "chunks_exact", since = "1.31.0")]
pub use core::slice::{ChunksExact, ChunksExactMut};
#[stable(feature = "rchunks", since = "1.31.0")]
pub use core::slice::{RChunks, RChunksMut, RChunksExact, RChunksExactMut};
pub use core::slice::{RSplitN, RSplitNMut, SplitN, SplitNMut};

////////////////////////////////////////////////////////////////////////////////
// Basic slice extension methods
@@ -154,7 +154,8 @@ mod hack {

#[inline]
pub fn to_vec<T>(s: &[T]) -> Vec<T>
where T: Clone
where
T: Clone,
{
let mut vector = Vec::with_capacity(s.len());
vector.extend_from_slice(s);
@@ -194,7 +195,8 @@ impl<T> [T] {
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn sort(&mut self)
where T: Ord
where
T: Ord,
{
merge_sort(self, |a, b| a.lt(b));
}
@@ -247,7 +249,8 @@ impl<T> [T] {
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn sort_by<F>(&mut self, mut compare: F)
where F: FnMut(&T, &T) -> Ordering
where
F: FnMut(&T, &T) -> Ordering,
{
merge_sort(self, |a, b| compare(a, b) == Less);
}
@@ -282,7 +285,9 @@ impl<T> [T] {
#[stable(feature = "slice_sort_by_key", since = "1.7.0")]
#[inline]
pub fn sort_by_key<K, F>(&mut self, mut f: F)
where F: FnMut(&T) -> K, K: Ord
where
F: FnMut(&T) -> K,
K: Ord,
{
merge_sort(self, |a, b| f(a).lt(&f(b)));
}
@@ -323,13 +328,19 @@ impl<T> [T] {
#[unstable(feature = "slice_sort_by_cached_key", issue = "34447")]
#[inline]
pub fn sort_by_cached_key<K, F>(&mut self, f: F)
where F: FnMut(&T) -> K, K: Ord
where
F: FnMut(&T) -> K,
K: Ord,
{
// Helper macro for indexing our vector by the smallest possible type, to reduce allocation.
macro_rules! sort_by_key {
($t:ty, $slice:ident, $f:ident) => ({
let mut indices: Vec<_> =
$slice.iter().map($f).enumerate().map(|(i, k)| (k, i as $t)).collect();
($t:ty, $slice:ident, $f:ident) => {{
let mut indices: Vec<_> = $slice
.iter()
.map($f)
.enumerate()
.map(|(i, k)| (k, i as $t))
.collect();
// The elements of `indices` are unique, as they are indexed, so any sort will be
// stable with respect to the original slice. We use `sort_unstable` here because
// it requires less memory allocation.
@@ -342,19 +353,27 @@ impl<T> [T] {
indices[i].1 = index;
$slice.swap(i, index as usize);
}
})
}};
}

let sz_u8 = mem::size_of::<(K, u8)>();
let sz_u16 = mem::size_of::<(K, u16)>();
let sz_u32 = mem::size_of::<(K, u32)>();
let sz_u8 = mem::size_of::<(K, u8)>();
let sz_u16 = mem::size_of::<(K, u16)>();
let sz_u32 = mem::size_of::<(K, u32)>();
let sz_usize = mem::size_of::<(K, usize)>();

let len = self.len();
if len < 2 { return }
if sz_u8 < sz_u16 && len <= ( u8::MAX as usize) { return sort_by_key!( u8, self, f) }
if sz_u16 < sz_u32 && len <= (u16::MAX as usize) { return sort_by_key!(u16, self, f) }
if sz_u32 < sz_usize && len <= (u32::MAX as usize) { return sort_by_key!(u32, self, f) }
if len < 2 {
return;
}
if sz_u8 < sz_u16 && len <= (u8::MAX as usize) {
return sort_by_key!(u8, self, f);
}
if sz_u16 < sz_u32 && len <= (u16::MAX as usize) {
return sort_by_key!(u16, self, f);
}
if sz_u32 < sz_usize && len <= (u32::MAX as usize) {
return sort_by_key!(u32, self, f);
}
sort_by_key!(usize, self, f)
}

@@ -371,7 +390,8 @@ impl<T> [T] {
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn to_vec(&self) -> Vec<T>
where T: Clone
where
T: Clone,
{
// NB see hack module in this file
hack::to_vec(self)
@@ -425,10 +445,15 @@ impl<T> [T] {
/// b"0123456789abcdef".repeat(usize::max_value());
/// }
/// ```
#[unstable(feature = "repeat_generic_slice",
reason = "it's on str, why not on slice?",
issue = "48784")]
pub fn repeat(&self, n: usize) -> Vec<T> where T: Copy {
#[unstable(
feature = "repeat_generic_slice",
reason = "it's on str, why not on slice?",
issue = "48784"
)]
pub fn repeat(&self, n: usize) -> Vec<T>
where
T: Copy,
{
if n == 0 {
return Vec::new();
}
@@ -525,9 +550,11 @@ impl [u8] {
////////////////////////////////////////////////////////////////////////////////
// Extension traits for slices over specific kinds of data
////////////////////////////////////////////////////////////////////////////////
#[unstable(feature = "slice_concat_ext",
reason = "trait should not have to exist",
issue = "27747")]
#[unstable(
feature = "slice_concat_ext",
reason = "trait should not have to exist",
issue = "27747"
)]
/// An extension trait for concatenating slices
///
/// While this trait is unstable, the methods are stable. `SliceConcatExt` is
@@ -538,9 +565,11 @@ impl [u8] {
/// [`join()`]: #tymethod.join
/// [`concat()`]: #tymethod.concat
pub trait SliceConcatExt<T: ?Sized> {
#[unstable(feature = "slice_concat_ext",
reason = "trait should not have to exist",
issue = "27747")]
#[unstable(
feature = "slice_concat_ext",
reason = "trait should not have to exist",
issue = "27747"
)]
/// The resulting type after concatenation
type Output;

@@ -572,9 +601,11 @@ pub trait SliceConcatExt<T: ?Sized> {
fn connect(&self, sep: &T) -> Self::Output;
}

#[unstable(feature = "slice_concat_ext",
reason = "trait should not have to exist",
issue = "27747")]
#[unstable(
feature = "slice_concat_ext",
reason = "trait should not have to exist",
issue = "27747"
)]
impl<T: Clone, V: Borrow<[T]>> SliceConcatExt<T> for [V] {
type Output = Vec<T>;

@@ -662,7 +693,8 @@ impl<T: Clone> ToOwned for [T] {
///
/// This is the integral subroutine of insertion sort.
fn insert_head<T, F>(v: &mut [T], is_less: &mut F)
where F: FnMut(&T, &T) -> bool
where
F: FnMut(&T, &T) -> bool,
{
if v.len() >= 2 && is_less(&v[1], &v[0]) {
unsafe {
@@ -720,7 +752,9 @@ fn insert_head<T, F>(v: &mut [T], is_less: &mut F)

impl<T> Drop for InsertionHole<T> {
fn drop(&mut self) {
unsafe { ptr::copy_nonoverlapping(self.src, self.dest, 1); }
unsafe {
ptr::copy_nonoverlapping(self.src, self.dest, 1);
}
}
}
}
@@ -733,7 +767,8 @@ fn insert_head<T, F>(v: &mut [T], is_less: &mut F)
/// The two slices must be non-empty and `mid` must be in bounds. Buffer `buf` must be long enough
/// to hold a copy of the shorter slice. Also, `T` must not be a zero-sized type.
unsafe fn merge<T, F>(v: &mut [T], mid: usize, buf: *mut T, is_less: &mut F)
where F: FnMut(&T, &T) -> bool
where
F: FnMut(&T, &T) -> bool,
{
let len = v.len();
let v = v.as_mut_ptr();
@@ -833,7 +868,9 @@ unsafe fn merge<T, F>(v: &mut [T], mid: usize, buf: *mut T, is_less: &mut F)
fn drop(&mut self) {
// `T` is not a zero-sized type, so it's okay to divide by its size.
let len = (self.end as usize - self.start as usize) / mem::size_of::<T>();
unsafe { ptr::copy_nonoverlapping(self.start, self.dest, len); }
unsafe {
ptr::copy_nonoverlapping(self.start, self.dest, len);
}
}
}
}
@@ -851,7 +888,8 @@ unsafe fn merge<T, F>(v: &mut [T], mid: usize, buf: *mut T, is_less: &mut F)
///
/// The invariants ensure that the total running time is `O(n log n)` worst-case.
fn merge_sort<T, F>(v: &mut [T], mut is_less: F)
where F: FnMut(&T, &T) -> bool
where
F: FnMut(&T, &T) -> bool,
{
// Slices of up to this length get sorted using insertion sort.
const MAX_INSERTION: usize = 20;
@@ -868,7 +906,7 @@ fn merge_sort<T, F>(v: &mut [T], mut is_less: F)
// Short arrays get sorted in-place via insertion sort to avoid allocations.
if len <= MAX_INSERTION {
if len >= 2 {
for i in (0..len-1).rev() {
for i in (0..len - 1).rev() {
insert_head(&mut v[i..], &mut is_less);
}
}
@@ -894,14 +932,13 @@ fn merge_sort<T, F>(v: &mut [T], mut is_less: F)
start -= 1;
unsafe {
if is_less(v.get_unchecked(start + 1), v.get_unchecked(start)) {
while start > 0 && is_less(v.get_unchecked(start),
v.get_unchecked(start - 1)) {
while start > 0 && is_less(v.get_unchecked(start), v.get_unchecked(start - 1)) {
start -= 1;
}
v[start..end].reverse();
} else {
while start > 0 && !is_less(v.get_unchecked(start),
v.get_unchecked(start - 1)) {
while start > 0 && !is_less(v.get_unchecked(start), v.get_unchecked(start - 1))
{
start -= 1;
}
}
@@ -927,8 +964,12 @@ fn merge_sort<T, F>(v: &mut [T], mut is_less: F)
let left = runs[r + 1];
let right = runs[r];
unsafe {
merge(&mut v[left.start .. right.start + right.len], left.len, buf.as_mut_ptr(),
&mut is_less);
merge(
&mut v[left.start..right.start + right.len],
left.len,
buf.as_mut_ptr(),
&mut is_less,
);
}
runs[r] = Run {
start: left.start,
@@ -958,10 +999,12 @@ fn merge_sort<T, F>(v: &mut [T], mut is_less: F)
#[inline]
fn collapse(runs: &[Run]) -> Option<usize> {
let n = runs.len();
if n >= 2 && (runs[n - 1].start == 0 ||
runs[n - 2].len <= runs[n - 1].len ||
(n >= 3 && runs[n - 3].len <= runs[n - 2].len + runs[n - 1].len) ||
(n >= 4 && runs[n - 4].len <= runs[n - 3].len + runs[n - 2].len)) {
if n >= 2
&& (runs[n - 1].start == 0
|| runs[n - 2].len <= runs[n - 1].len
|| (n >= 3 && runs[n - 3].len <= runs[n - 2].len + runs[n - 1].len)
|| (n >= 4 && runs[n - 4].len <= runs[n - 3].len + runs[n - 2].len))
{
if n >= 3 && runs[n - 3].len < runs[n - 1].len {
Some(n - 3)
} else {
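
Note: two slice.rs rules worth calling out: a macro arm that expands to a block is written `=> {{ ... }}` instead of the older `=> ({ ... })`, and a one-line `if len < 2 { return }` grows into a braced, multi-line form with a semicolon. A compilable sketch of both (the macro and function are illustrative):

macro_rules! doubled {
    ($e:expr) => {{
        let v = $e;
        v + v
    }};
}

fn first_doubled(values: &[i32]) -> Option<i32> {
    // Before: `if values.is_empty() { return None }` on one line.
    if values.is_empty() {
        return None;
    }
    Some(doubled!(values[0]))
}
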
[file: src/liballoc/str.rs]
@@ -29,12 +29,12 @@
#![allow(unused_imports)]

use core::fmt;
use core::str as core_str;
use core::str::pattern::Pattern;
use core::str::pattern::{Searcher, ReverseSearcher, DoubleEndedSearcher};
use core::iter::FusedIterator;
use core::mem;
use core::ptr;
use core::iter::FusedIterator;
use core::str as core_str;
use core::str::pattern::Pattern;
use core::str::pattern::{DoubleEndedSearcher, ReverseSearcher, Searcher};
use core::unicode::conversions;

use borrow::{Borrow, ToOwned};
@@ -44,36 +44,38 @@ use string::String;
use vec::Vec;

#[stable(feature = "rust1", since = "1.0.0")]
pub use core::str::{FromStr, Utf8Error};
#[allow(deprecated)]
pub use core::str::pattern;
#[stable(feature = "encode_utf16", since = "1.8.0")]
pub use core::str::EncodeUtf16;
#[unstable(feature = "split_ascii_whitespace", issue = "48656")]
pub use core::str::SplitAsciiWhitespace;
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::str::{Lines, LinesAny};
pub use core::str::SplitWhitespace;
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::str::{Split, RSplit};
pub use core::str::{from_utf8, from_utf8_mut, Bytes, CharIndices, Chars};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::str::{SplitN, RSplitN};
pub use core::str::{from_utf8_unchecked, from_utf8_unchecked_mut, ParseBoolError};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::str::{SplitTerminator, RSplitTerminator};
pub use core::str::{FromStr, Utf8Error};
#[allow(deprecated)]
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::str::{Matches, RMatches};
pub use core::str::{Lines, LinesAny};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::str::{MatchIndices, RMatchIndices};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::str::{from_utf8, from_utf8_mut, Chars, CharIndices, Bytes};
pub use core::str::{Matches, RMatches};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::str::{from_utf8_unchecked, from_utf8_unchecked_mut, ParseBoolError};
pub use core::str::{RSplit, Split};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::str::SplitWhitespace;
pub use core::str::{RSplitN, SplitN};
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::str::pattern;
#[stable(feature = "encode_utf16", since = "1.8.0")]
pub use core::str::EncodeUtf16;
#[unstable(feature = "split_ascii_whitespace", issue = "48656")]
pub use core::str::SplitAsciiWhitespace;
pub use core::str::{RSplitTerminator, SplitTerminator};

#[unstable(feature = "slice_concat_ext",
reason = "trait should not have to exist",
issue = "27747")]
#[unstable(
feature = "slice_concat_ext",
reason = "trait should not have to exist",
issue = "27747"
)]
impl<S: Borrow<str>> SliceConcatExt<str> for [S] {
type Output = String;

@@ -82,9 +84,7 @@ impl<S: Borrow<str>> SliceConcatExt<str> for [S] {
}

fn join(&self, sep: &str) -> String {
unsafe {
String::from_utf8_unchecked( join_generic_copy(self, sep.as_bytes()) )
}
unsafe { String::from_utf8_unchecked(join_generic_copy(self, sep.as_bytes())) }
}

fn connect(&self, sep: &str) -> String {
@@ -122,10 +122,10 @@ macro_rules! spezialize_for_lengths {
macro_rules! copy_slice_and_advance {
($target:expr, $bytes:expr) => {
let len = $bytes.len();
let (head, tail) = {$target}.split_at_mut(len);
let (head, tail) = { $target }.split_at_mut(len);
head.copy_from_slice($bytes);
$target = tail;
}
};
}

// Optimized join implementation that works for both Vec<T> (T: Copy) and String's inner vec
@@ -155,11 +155,15 @@ where
// if the `len` calculation overflows, we'll panic
// we would have run out of memory anyway and the rest of the function requires
// the entire Vec pre-allocated for safety
let len = sep_len.checked_mul(iter.len()).and_then(|n| {
slice.iter()
let len = sep_len
.checked_mul(iter.len())
.and_then(|n| {
slice
.iter()
.map(|s| s.borrow().as_ref().len())
.try_fold(n, usize::checked_add)
}).expect("attempt to join into collection with len > usize::MAX");
})
.expect("attempt to join into collection with len > usize::MAX");

// crucial for safety
let mut result = Vec::with_capacity(len);
@@ -381,13 +385,13 @@ impl str {
// See http://www.unicode.org/versions/Unicode7.0.0/ch03.pdf#G33992
// for the definition of `Final_Sigma`.
debug_assert!('Σ'.len_utf8() == 2);
let is_word_final = case_ignoreable_then_cased(from[..i].chars().rev()) &&
!case_ignoreable_then_cased(from[i + 2..].chars());
let is_word_final = case_ignoreable_then_cased(from[..i].chars().rev())
&& !case_ignoreable_then_cased(from[i + 2..].chars());
to.push_str(if is_word_final { "ς" } else { "σ" });
}

fn case_ignoreable_then_cased<I: Iterator<Item = char>>(iter: I) -> bool {
use core::unicode::derived_property::{Cased, Case_Ignorable};
use core::unicode::derived_property::{Case_Ignorable, Cased};
match iter.skip_while(|&c| Case_Ignorable(c)).next() {
Some(c) => Cased(c),
None => false,
@@ -449,9 +453,11 @@ impl str {
/// escaped.
///
/// [`char::escape_debug`]: primitive.char.html#method.escape_debug
#[unstable(feature = "str_escape",
reason = "return type may change to be an iterator",
issue = "27791")]
#[unstable(
feature = "str_escape",
reason = "return type may change to be an iterator",
issue = "27791"
)]
pub fn escape_debug(&self) -> String {
let mut string = String::with_capacity(self.len());
let mut chars = self.chars();
@@ -465,19 +471,23 @@ impl str {
/// Escapes each char in `s` with [`char::escape_default`].
///
/// [`char::escape_default`]: primitive.char.html#method.escape_default
#[unstable(feature = "str_escape",
reason = "return type may change to be an iterator",
issue = "27791")]
#[unstable(
feature = "str_escape",
reason = "return type may change to be an iterator",
issue = "27791"
)]
pub fn escape_default(&self) -> String {
self.chars().flat_map(|c| c.escape_default()).collect()
}

/// Escapes each char in `s` with [`char::escape_unicode`].
///
/// [`char::escape_unicode`]: primitive.char.html#method.escape_unicode
#[unstable(feature = "str_escape",
reason = "return type may change to be an iterator",
issue = "27791")]
#[unstable(
feature = "str_escape",
reason = "return type may change to be an iterator",
issue = "27791"
)]
pub fn escape_unicode(&self) -> String {
self.chars().flat_map(|c| c.escape_unicode()).collect()
}
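
Note: in the `is_word_final` hunk above (and in slice.rs's `collapse`), a wrapped boolean expression now carries the operator at the head of the continuation line rather than at the tail of the previous one. An illustrative sketch (short enough that the formatter would normally keep it on one line):

// `&&`/`||` lead each continuation line of a wrapped condition.
fn is_interior(start: usize, end: usize, len: usize) -> bool {
    start > 0
        && end < len
        && start <= end
}
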
[file: src/liballoc/string.rs]
@@ -53,13 +53,13 @@ use core::iter::{FromIterator, FusedIterator};
use core::ops::Bound::{Excluded, Included, Unbounded};
use core::ops::{self, Add, AddAssign, Index, IndexMut, RangeBounds};
use core::ptr;
use core::str::pattern::Pattern;
use core::str::lossy;
use core::str::pattern::Pattern;

use collections::CollectionAllocErr;
use borrow::{Cow, ToOwned};
use boxed::Box;
use str::{self, from_boxed_utf8_unchecked, FromStr, Utf8Error, Chars};
use collections::CollectionAllocErr;
use str::{self, from_boxed_utf8_unchecked, Chars, FromStr, Utf8Error};
use vec::Vec;

/// A UTF-8 encoded, growable string.
@@ -415,7 +415,9 @@ impl String {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn with_capacity(capacity: usize) -> String {
String { vec: Vec::with_capacity(capacity) }
String {
vec: Vec::with_capacity(capacity),
}
}

// HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is
@@ -493,12 +495,10 @@ impl String {
pub fn from_utf8(vec: Vec<u8>) -> Result<String, FromUtf8Error> {
match str::from_utf8(&vec) {
Ok(..) => Ok(String { vec }),
Err(e) => {
Err(FromUtf8Error {
bytes: vec,
error: e,
})
}
Err(e) => Err(FromUtf8Error {
bytes: vec,
error: e,
}),
}
}

@@ -648,7 +648,9 @@ impl String {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn from_utf16_lossy(v: &[u16]) -> String {
decode_utf16(v.iter().cloned()).map(|r| r.unwrap_or(REPLACEMENT_CHARACTER)).collect()
decode_utf16(v.iter().cloned())
.map(|r| r.unwrap_or(REPLACEMENT_CHARACTER))
.collect()
}

/// Creates a new `String` from a length, capacity, and pointer.
@@ -695,7 +697,9 @@ impl String {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub unsafe fn from_raw_parts(buf: *mut u8, length: usize, capacity: usize) -> String {
String { vec: Vec::from_raw_parts(buf, length, capacity) }
String {
vec: Vec::from_raw_parts(buf, length, capacity),
}
}

/// Converts a vector of bytes to a `String` without checking that the
@@ -953,7 +957,7 @@ impl String {
/// }
/// # process_data("rust").expect("why is the test harness OOMing on 4 bytes?");
/// ```
#[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
#[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
self.vec.try_reserve(additional)
}
@@ -991,8 +995,8 @@ impl String {
/// }
/// # process_data("rust").expect("why is the test harness OOMing on 4 bytes?");
/// ```
#[unstable(feature = "try_reserve", reason = "new API", issue="48043")]
pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
#[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
self.vec.try_reserve_exact(additional)
}

@@ -1040,7 +1044,7 @@ impl String {
/// assert!(s.capacity() >= 3);
/// ```
#[inline]
#[unstable(feature = "shrink_to", reason = "new API", issue="56431")]
#[unstable(feature = "shrink_to", reason = "new API", issue = "56431")]
pub fn shrink_to(&mut self, min_capacity: usize) {
self.vec.shrink_to(min_capacity)
}
@@ -1067,7 +1071,9 @@ impl String {
pub fn push(&mut self, ch: char) {
match ch.len_utf8() {
1 => self.vec.push(ch as u8),
_ => self.vec.extend_from_slice(ch.encode_utf8(&mut [0; 4]).as_bytes()),
_ => self
.vec
.extend_from_slice(ch.encode_utf8(&mut [0; 4]).as_bytes()),
}
}

@@ -1190,9 +1196,11 @@ impl String {
let next = idx + ch.len_utf8();
let len = self.len();
unsafe {
ptr::copy(self.vec.as_ptr().add(next),
self.vec.as_mut_ptr().add(idx),
len - next);
ptr::copy(
self.vec.as_ptr().add(next),
self.vec.as_mut_ptr().add(idx),
len - next,
);
self.vec.set_len(len - (next - idx));
}
ch
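
The `ptr::copy` above shifts the tail of the buffer left over the removed character; from the outside:

fn main() {
    let mut s = String::from("héllo");
    // 'é' occupies bytes 1..3; removing it moves "llo" left by two bytes
    // and shortens the string accordingly.
    assert_eq!(s.remove(1), 'é');
    assert_eq!(s, "hllo");
}
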
@@ -1216,25 +1224,26 @@ impl String {
#[inline]
#[stable(feature = "string_retain", since = "1.26.0")]
pub fn retain<F>(&mut self, mut f: F)
where F: FnMut(char) -> bool
where
F: FnMut(char) -> bool,
{
let len = self.len();
let mut del_bytes = 0;
let mut idx = 0;

while idx < len {
let ch = unsafe {
self.get_unchecked(idx..len).chars().next().unwrap()
};
let ch = unsafe { self.get_unchecked(idx..len).chars().next().unwrap() };
let ch_len = ch.len_utf8();

if !f(ch) {
del_bytes += ch_len;
} else if del_bytes > 0 {
unsafe {
ptr::copy(self.vec.as_ptr().add(idx),
self.vec.as_mut_ptr().add(idx - del_bytes),
ch_len);
ptr::copy(
self.vec.as_ptr().add(idx),
self.vec.as_mut_ptr().add(idx - del_bytes),
ch_len,
);
}
}

@@ -1243,7 +1252,9 @@ impl String {
}

if del_bytes > 0 {
unsafe { self.vec.set_len(len - del_bytes); }
unsafe {
self.vec.set_len(len - del_bytes);
}
}
}
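
`retain` compacts the survivors in place with the `ptr::copy` above, in a single pass and without reallocating; a minimal sketch:

fn main() {
    let mut s = String::from("f_o_ob_ar");
    s.retain(|c| c != '_');
    assert_eq!(s, "foobar");
}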

@@ -1289,12 +1300,12 @@ impl String {
let amt = bytes.len();
self.vec.reserve(amt);

ptr::copy(self.vec.as_ptr().add(idx),
self.vec.as_mut_ptr().add(idx + amt),
len - idx);
ptr::copy(bytes.as_ptr(),
self.vec.as_mut_ptr().add(idx),
amt);
ptr::copy(
self.vec.as_ptr().add(idx),
self.vec.as_mut_ptr().add(idx + amt),
len - idx,
);
ptr::copy(bytes.as_ptr(), self.vec.as_mut_ptr().add(idx), amt);
self.vec.set_len(len + amt);
}
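
This private helper backs the public `insert` and `insert_str` methods: the first copy opens a gap at `idx`, the second fills it. From the caller's side:

fn main() {
    let mut s = String::from("bar");
    s.insert_str(0, "foo"); // shift "bar" right, copy "foo" into the gap
    s.insert(3, '-');       // same dance for a single char
    assert_eq!(s, "foo-bar");
}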

@@ -1486,7 +1497,8 @@ impl String {
/// ```
#[stable(feature = "drain", since = "1.6.0")]
pub fn drain<R>(&mut self, range: R) -> Drain
where R: RangeBounds<usize>
where
R: RangeBounds<usize>,
{
// Memory safety
//
@@ -1546,27 +1558,26 @@ impl String {
/// ```
#[stable(feature = "splice", since = "1.27.0")]
pub fn replace_range<R>(&mut self, range: R, replace_with: &str)
where R: RangeBounds<usize>
where
R: RangeBounds<usize>,
{
// Memory safety
//
// Replace_range does not have the memory safety issues of the vector
// version of Splice. The data is just plain bytes.

match range.start_bound() {
Included(&n) => assert!(self.is_char_boundary(n)),
Excluded(&n) => assert!(self.is_char_boundary(n + 1)),
Unbounded => {},
Included(&n) => assert!(self.is_char_boundary(n)),
Excluded(&n) => assert!(self.is_char_boundary(n + 1)),
Unbounded => {}
};
match range.end_bound() {
Included(&n) => assert!(self.is_char_boundary(n + 1)),
Excluded(&n) => assert!(self.is_char_boundary(n)),
Unbounded => {},
Included(&n) => assert!(self.is_char_boundary(n + 1)),
Excluded(&n) => assert!(self.is_char_boundary(n)),
Unbounded => {}
};

unsafe {
self.as_mut_vec()
}.splice(range, replace_with.bytes());
unsafe { self.as_mut_vec() }.splice(range, replace_with.bytes());
}
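
Both bound checks above enforce char boundaries before the splice runs; a sketch of the happy path and the panic condition:

fn main() {
    let mut s = String::from("α is alpha");
    // 'α' is 2 bytes, so ..2 is a char boundary; `s.replace_range(..1, _)`
    // would trip the boundary assertions above and panic.
    s.replace_range(..2, "beta");
    assert_eq!(s, "beta is alpha");
}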

/// Converts this `String` into a [`Box`]`<`[`str`]`>`.
@@ -1684,7 +1695,9 @@ impl fmt::Display for FromUtf16Error {
#[stable(feature = "rust1", since = "1.0.0")]
impl Clone for String {
fn clone(&self) -> Self {
String { vec: self.vec.clone() }
String {
vec: self.vec.clone(),
}
}

fn clone_from(&mut self, source: &Self) {
@@ -1795,9 +1808,11 @@ impl<'a> Extend<Cow<'a, str>> for String {
}

/// A convenience impl that delegates to the impl for `&str`
#[unstable(feature = "pattern",
reason = "API not fully fleshed out and ready to be stabilized",
issue = "27721")]
#[unstable(
feature = "pattern",
reason = "API not fully fleshed out and ready to be stabilized",
issue = "27721"
)]
impl<'a, 'b> Pattern<'a> for &'b String {
type Searcher = <&'b str as Pattern<'a>>::Searcher;

@@ -1833,20 +1848,27 @@ macro_rules! impl_eq {
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, 'b> PartialEq<$rhs> for $lhs {
#[inline]
fn eq(&self, other: &$rhs) -> bool { PartialEq::eq(&self[..], &other[..]) }
fn eq(&self, other: &$rhs) -> bool {
PartialEq::eq(&self[..], &other[..])
}
#[inline]
fn ne(&self, other: &$rhs) -> bool { PartialEq::ne(&self[..], &other[..]) }
fn ne(&self, other: &$rhs) -> bool {
PartialEq::ne(&self[..], &other[..])
}
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, 'b> PartialEq<$lhs> for $rhs {
#[inline]
fn eq(&self, other: &$lhs) -> bool { PartialEq::eq(&self[..], &other[..]) }
fn eq(&self, other: &$lhs) -> bool {
PartialEq::eq(&self[..], &other[..])
}
#[inline]
fn ne(&self, other: &$lhs) -> bool { PartialEq::ne(&self[..], &other[..]) }
fn ne(&self, other: &$lhs) -> bool {
PartialEq::ne(&self[..], &other[..])
}
}

}
};
}

impl_eq! { String, str }
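
Each `impl_eq!` invocation generates `PartialEq` in both directions, so comparisons like these compile symmetrically:

fn main() {
    let s = String::from("abc");
    assert!(s == "abc"); // String == &str
    assert!("abc" == s); // &str == String
}
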
@@ -2159,7 +2181,7 @@ impl<T: fmt::Display + ?Sized> ToString for T {
use core::fmt::Write;
let mut buf = String::new();
buf.write_fmt(format_args!("{}", self))
.expect("a Display implementation returned an error unexpectedly");
.expect("a Display implementation returned an error unexpectedly");
buf.shrink_to_fit();
buf
}
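
Thanks to this blanket impl, any `Display` type gets `to_string` for free — a minimal sketch with a made-up `Celsius` type:

use std::fmt;

struct Celsius(f64);

impl fmt::Display for Celsius {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}°C", self.0)
    }
}

fn main() {
    // No ToString impl is written anywhere for Celsius.
    assert_eq!(Celsius(21.5).to_string(), "21.5°C");
}
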
@@ -7,23 +7,23 @@
//! [arc]: struct.Arc.html

use core::any::Any;
use core::sync::atomic;
use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};
use core::borrow;
use core::fmt;
use core::cmp::Ordering;
use core::convert::From;
use core::fmt;
use core::hash::{Hash, Hasher};
use core::intrinsics::abort;
use core::marker::{PhantomData, Unpin, Unsize};
use core::mem::{self, align_of_val, size_of_val};
use core::ops::{Deref, Receiver};
use core::ops::{CoerceUnsized, DispatchFromDyn};
use core::ops::{Deref, Receiver};
use core::pin::Pin;
use core::ptr::{self, NonNull};
use core::marker::{Unpin, Unsize, PhantomData};
use core::hash::{Hash, Hasher};
use core::sync::atomic;
use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};
use core::{isize, usize};
use core::convert::From;

use alloc::{Global, Alloc, Layout, box_free, handle_alloc_error};
use alloc::{box_free, handle_alloc_error, Alloc, Global, Layout};
use boxed::Box;
use rc::is_dangling;
use string::String;
@@ -290,7 +290,10 @@ impl<T> Arc<T> {
weak: atomic::AtomicUsize::new(1),
data,
};
Arc { ptr: Box::into_raw_non_null(x), phantom: PhantomData }
Arc {
ptr: Box::into_raw_non_null(x),
phantom: PhantomData,
}
}

/// Constructs a new `Pin<Arc<T>>`. If `T` does not implement `Unpin`, then
@@ -325,7 +328,12 @@ impl<T> Arc<T> {
#[stable(feature = "arc_unique", since = "1.4.0")]
pub fn try_unwrap(this: Self) -> Result<T, Self> {
// See `drop` for why all these atomics are like this
if this.inner().strong.compare_exchange(1, 0, Release, Relaxed).is_err() {
if this
.inner()
.strong
.compare_exchange(1, 0, Release, Relaxed)
.is_err()
{
return Err(this);
}
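
The CAS above only succeeds when the strong count is exactly 1; the observable behavior:

use std::sync::Arc;

fn main() {
    // Unique handle: the value moves out.
    let x = Arc::new(3);
    assert_eq!(Arc::try_unwrap(x), Ok(3));

    // Shared handle: the same Arc comes back as the error.
    let a = Arc::new(4);
    let _b = Arc::clone(&a);
    assert_eq!(*Arc::try_unwrap(a).unwrap_err(), 4);
}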

@@ -446,7 +454,11 @@ impl<T: ?Sized> Arc<T> {
// Unlike with Clone(), we need this to be an Acquire read to
// synchronize with the write coming from `is_unique`, so that the
// events prior to that write happen before this read.
match this.inner().weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) {
match this
.inner()
.weak
.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed)
{
Ok(_) => {
// Make sure we do not create a dangling Weak
debug_assert!(!is_dangling(this.ptr));
@@ -485,7 +497,11 @@ impl<T: ?Sized> Arc<T> {
let cnt = this.inner().weak.load(SeqCst);
// If the weak count is currently locked, the value of the
// count was 0 just before taking the lock.
if cnt == usize::MAX { 0 } else { cnt - 1 }
if cnt == usize::MAX {
0
} else {
cnt - 1
}
}
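
The `usize::MAX` branch above accounts for the locked state used by `is_unique`; in the common case the counts read as expected:

use std::sync::Arc;

fn main() {
    let five = Arc::new(5);
    let weak = Arc::downgrade(&five);

    assert_eq!(Arc::strong_count(&five), 1);
    assert_eq!(Arc::weak_count(&five), 1);

    // Counts are snapshots: other threads may race with these loads.
    drop(weak);
    assert_eq!(Arc::weak_count(&five), 0);
}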

/// Gets the number of strong (`Arc`) pointers to this value.
@@ -567,10 +583,14 @@ impl<T: ?Sized> Arc<T> {
// `&*(ptr as *const ArcInner<T>)`, but this created a misaligned
// reference (see #54908).
let layout = Layout::new::<ArcInner<()>>()
.extend(Layout::for_value(&*ptr)).unwrap().0
.pad_to_align().unwrap();

let mem = Global.alloc(layout)
.extend(Layout::for_value(&*ptr))
.unwrap()
.0
.pad_to_align()
.unwrap();

let mem = Global
.alloc(layout)
.unwrap_or_else(|_| handle_alloc_error(layout));

// Initialize the ArcInner
@@ -595,12 +615,16 @@ impl<T: ?Sized> Arc<T> {
ptr::copy_nonoverlapping(
bptr as *const T as *const u8,
&mut (*ptr).data as *mut _ as *mut u8,
value_size);
value_size,
);

// Free the allocation without dropping its contents
box_free(box_unique);

Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData }
Arc {
ptr: NonNull::new_unchecked(ptr),
phantom: PhantomData,
}
}
}
}
@@ -622,12 +646,12 @@ impl<T> Arc<[T]> {
let v_ptr = v as *const [T];
let ptr = Self::allocate_for_ptr(v_ptr);

ptr::copy_nonoverlapping(
v.as_ptr(),
&mut (*ptr).data as *mut [T] as *mut T,
v.len());
ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).data as *mut [T] as *mut T, v.len());

Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData }
Arc {
ptr: NonNull::new_unchecked(ptr),
phantom: PhantomData,
}
}
}

@@ -672,7 +696,7 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
// Pointer to first element
let elems = &mut (*ptr).data as *mut [T] as *mut T;

let mut guard = Guard{
let mut guard = Guard {
mem: NonNull::new_unchecked(mem),
elems: elems,
layout: layout,
@@ -687,7 +711,10 @@ impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
// All clear. Forget the guard so it doesn't free the new ArcInner.
mem::forget(guard);

Arc { ptr: NonNull::new_unchecked(ptr), phantom: PhantomData }
Arc {
ptr: NonNull::new_unchecked(ptr),
phantom: PhantomData,
}
}
}
}
@@ -745,7 +772,10 @@ impl<T: ?Sized> Clone for Arc<T> {
}
}

Arc { ptr: self.ptr, phantom: PhantomData }
Arc {
ptr: self.ptr,
phantom: PhantomData,
}
}
}

@@ -803,7 +833,12 @@ impl<T: Clone> Arc<T> {
// before release writes (i.e., decrements) to `strong`. Since we hold a
// weak count, there's no chance the ArcInner itself could be
// deallocated.
if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
if this
.inner()
.strong
.compare_exchange(1, 0, Acquire, Relaxed)
.is_err()
{
// Another strong pointer exists; clone
*this = Arc::new((**this).clone());
} else if this.inner().weak.load(Relaxed) != 1 {
@@ -841,9 +876,7 @@ impl<T: Clone> Arc<T> {

// As with `get_mut()`, the unsafety is ok because our reference was
// either unique to begin with, or became one upon cloning the contents.
unsafe {
&mut this.ptr.as_mut().data
}
unsafe { &mut this.ptr.as_mut().data }
}
}
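
`make_mut` is clone-on-write: a failed CAS above means another strong handle exists, so the data is cloned before handing out `&mut`. A sketch:

use std::sync::Arc;

fn main() {
    let mut data = Arc::new(5);
    *Arc::make_mut(&mut data) += 1; // unique: mutates in place

    let other = Arc::clone(&data);
    *Arc::make_mut(&mut data) += 1; // shared: clones first, then mutates

    // The two handles now point at independent values.
    assert_eq!(*data, 7);
    assert_eq!(*other, 6);
}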

@@ -883,9 +916,7 @@ impl<T: ?Sized> Arc<T> {
// reference count is guaranteed to be 1 at this point, and we required
// the Arc itself to be `mut`, so we're returning the only possible
// reference to the inner data.
unsafe {
Some(&mut this.ptr.as_mut().data)
}
unsafe { Some(&mut this.ptr.as_mut().data) }
} else {
None
}
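
Unlike `make_mut`, `get_mut` never clones — it simply refuses when the handle is not unique:

use std::sync::Arc;

fn main() {
    let mut x = Arc::new(3);
    *Arc::get_mut(&mut x).unwrap() = 4; // unique: Some(&mut T)
    assert_eq!(*x, 4);

    let _y = Arc::clone(&x);
    assert!(Arc::get_mut(&mut x).is_none()); // shared: None
}
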
@@ -903,7 +934,12 @@ impl<T: ?Sized> Arc<T> {
// writes to `strong` (in particular in `Weak::upgrade`) prior to decrements
// of the `weak` count (via `Weak::drop`, which uses release). If the upgraded
// weak ref was never dropped, the CAS here will fail so we do not care to synchronize.
if self.inner().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() {
if self
.inner()
.weak
.compare_exchange(1, usize::MAX, Acquire, Relaxed)
.is_ok()
{
// This needs to be an `Acquire` to synchronize with the decrement of the `strong`
// counter in `drop` -- the only access that happens when any but the last reference
// is being dropped.
@@ -1024,7 +1060,10 @@ impl Arc<dyn Any + Send + Sync> {
if (*self).is::<T>() {
let ptr = self.ptr.cast::<ArcInner<T>>();
mem::forget(self);
Ok(Arc { ptr, phantom: PhantomData })
Ok(Arc {
ptr,
phantom: PhantomData,
})
} else {
Err(self)
}
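
On a type match the pointer is recast in place with no new allocation; on a mismatch the original `Arc` comes back in the `Err`. A sketch:

use std::any::Any;
use std::sync::Arc;

fn describe(value: Arc<dyn Any + Send + Sync>) -> String {
    match value.downcast::<String>() {
        Ok(s) => format!("a String of length {}", s.len()),
        Err(_) => "not a String".to_string(),
    }
}

fn main() {
    assert_eq!(describe(Arc::new("cogito".to_string())), "a String of length 6");
    assert_eq!(describe(Arc::new(0i8)), "not a String");
}
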
@@ -1106,12 +1145,17 @@ impl<T: ?Sized> Weak<T> {
}

// Relaxed is valid for the same reason it is on Arc's Clone impl
match inner.strong.compare_exchange_weak(n, n + 1, Relaxed, Relaxed) {
Ok(_) => return Some(Arc {
// null checked above
ptr: self.ptr,
phantom: PhantomData,
}),
match inner
.strong
.compare_exchange_weak(n, n + 1, Relaxed, Relaxed)
{
Ok(_) => {
return Some(Arc {
// null checked above
ptr: self.ptr,
phantom: PhantomData,
})
}
Err(old) => n = old,
}
}
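
The compare-exchange loop above increments the strong count only while it is still nonzero, which is exactly what `upgrade` exposes:

use std::sync::Arc;

fn main() {
    let five = Arc::new(5);
    let weak_five = Arc::downgrade(&five);

    // A strong handle still exists, so the loop succeeds.
    assert!(weak_five.upgrade().is_some());

    // Once the last strong handle drops, the count is 0 and upgrade
    // bails out with None.
    drop(five);
    assert!(weak_five.upgrade().is_none());
}
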
@@ -1273,14 +1317,12 @@ impl<T: ?Sized> Drop for Weak<T> {
let inner = if let Some(inner) = self.inner() {
inner
} else {
return
return;
};

if inner.weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
unsafe {
Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
}
unsafe { Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) }
}
}
}
@@ -1576,16 +1618,16 @@ impl<T> From<Vec<T>> for Arc<[T]> {
mod tests {
use std::boxed::Box;
use std::clone::Clone;
use std::sync::mpsc::channel;
use std::convert::From;
use std::mem::drop;
use std::ops::Drop;
use std::option::Option;
use std::option::Option::{None, Some};
use std::sync::atomic;
use std::sync::atomic::Ordering::{Acquire, SeqCst};
use std::thread;
use std::sync::mpsc::channel;
use std::sync::Mutex;
use std::convert::From;
use std::thread;

use super::{Arc, Weak};
use vec::Vec;
@@ -1768,7 +1810,9 @@ mod tests {
x: Mutex<Option<Weak<Cycle>>>,
}

let a = Arc::new(Cycle { x: Mutex::new(None) });
let a = Arc::new(Cycle {
x: Mutex::new(None),
});
let b = Arc::downgrade(&a.clone());
*a.x.lock().unwrap() = Some(b);

@@ -2043,4 +2087,4 @@ impl<T: ?Sized> AsRef<T> for Arc<T> {
}

#[stable(feature = "pin", since = "1.33.0")]
impl<T: ?Sized> Unpin for Arc<T> { }
impl<T: ?Sized> Unpin for Arc<T> {}
@@ -69,7 +69,8 @@ mod if_arc {
}

impl<T> From<Arc<T>> for Waker
where T: Wake + 'static,
where
T: Wake + 'static,
{
fn from(rc: Arc<T>) -> Self {
unsafe {
@@ -1,7 +1,7 @@
use std::any::Any;
use std::sync::{Arc, Weak};
use std::cell::RefCell;
use std::cmp::PartialEq;
use std::sync::{Arc, Weak};

#[test]
fn uninhabited() {
@@ -10,16 +10,16 @@ fn uninhabited() {
a = a.clone();
assert!(a.upgrade().is_none());

let mut a: Weak<dyn Any> = a; // Unsizing
let mut a: Weak<dyn Any> = a; // Unsizing
a = a.clone();
assert!(a.upgrade().is_none());
}

#[test]
fn slice() {
let a: Arc<[u32; 3]> = Arc::new([3, 2, 1]);
let a: Arc<[u32]> = a; // Unsizing
let b: Arc<[u32]> = Arc::from(&[3, 2, 1][..]); // Conversion
let a: Arc<[u32]> = a; // Unsizing
let b: Arc<[u32]> = Arc::from(&[3, 2, 1][..]); // Conversion
assert_eq!(a, b);

// Exercise is_dangling() with a DST
@@ -31,7 +31,7 @@ fn slice() {
#[test]
fn trait_object() {
let a: Arc<u32> = Arc::new(4);
let a: Arc<dyn Any> = a; // Unsizing
let a: Arc<dyn Any> = a; // Unsizing

// Exercise is_dangling() with a DST
let mut a = Arc::downgrade(&a);
@@ -41,7 +41,7 @@ fn trait_object() {
let mut b = Weak::<u32>::new();
b = b.clone();
assert!(b.upgrade().is_none());
let mut b: Weak<dyn Any> = b; // Unsizing
let mut b: Weak<dyn Any> = b; // Unsizing
b = b.clone();
assert!(b.upgrade().is_none());
}
@@ -55,7 +55,7 @@ fn float_nan_ne() {

#[test]
fn partial_eq() {
struct TestPEq (RefCell<usize>);
struct TestPEq(RefCell<usize>);
impl PartialEq for TestPEq {
fn eq(&self, other: &TestPEq) -> bool {
*self.0.borrow_mut() += 1;
@@ -72,7 +72,7 @@ fn partial_eq() {
#[test]
fn eq() {
#[derive(Eq)]
struct TestEq (RefCell<usize>);
struct TestEq(RefCell<usize>);
impl PartialEq for TestEq {
fn eq(&self, other: &TestEq) -> bool {
*self.0.borrow_mut() += 1;
@@ -1,10 +1,10 @@
use std::cmp;
use std::collections::BinaryHeap;
use std::collections::binary_heap::{Drain, PeekMut};
use std::collections::BinaryHeap;
use std::panic::{self, AssertUnwindSafe};
use std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT, Ordering};
use std::sync::atomic::{AtomicUsize, Ordering, ATOMIC_USIZE_INIT};

use rand::{thread_rng, seq::SliceRandom};
use rand::{seq::SliceRandom, thread_rng};

#[test]
fn test_iterator() {
@@ -315,10 +315,11 @@ fn panic_safe() {
for i in 1..=DATASZ {
DROP_COUNTER.store(0, Ordering::SeqCst);

let mut panic_ords: Vec<_> = data.iter()
.filter(|&&x| x != i)
.map(|&x| PanicOrd(x, false))
.collect();
let mut panic_ords: Vec<_> = data
.iter()
.filter(|&&x| x != i)
.map(|&x| PanicOrd(x, false))
.collect();
let panic_item = PanicOrd(i, true);

// heapify the sane items
@@ -1,10 +1,10 @@
use std::collections::BTreeMap;
use std::collections::btree_map::Entry::{Occupied, Vacant};
use std::collections::BTreeMap;
use std::ops::Bound::{self, Excluded, Included, Unbounded};
use std::rc::Rc;

use std::iter::FromIterator;
use super::DeterministicRng;
use std::iter::FromIterator;

#[test]
fn test_basic_large() {
@@ -75,7 +75,8 @@ fn test_iter() {
let mut map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();

fn test<T>(size: usize, mut iter: T)
where T: Iterator<Item = (usize, usize)>
where
T: Iterator<Item = (usize, usize)>,
{
for i in 0..size {
assert_eq!(iter.size_hint(), (size - i, Some(size - i)));
@@ -97,7 +98,8 @@ fn test_iter_rev() {
let mut map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();

fn test<T>(size: usize, mut iter: T)
where T: Iterator<Item = (usize, usize)>
where
T: Iterator<Item = (usize, usize)>,
{
for i in 0..size {
assert_eq!(iter.size_hint(), (size - i, Some(size - i)));
@@ -133,7 +135,8 @@ fn test_iter_mixed() {
let mut map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();

fn test<T>(size: usize, mut iter: T)
where T: Iterator<Item = (usize, usize)> + DoubleEndedIterator
where
T: Iterator<Item = (usize, usize)> + DoubleEndedIterator,
{
for i in 0..size / 4 {
assert_eq!(iter.size_hint(), (size - i * 2, Some(size - i * 2)));
@@ -175,8 +178,9 @@ fn test_range_inclusive() {
let map: BTreeMap<_, _> = (0..=size).map(|i| (i, i)).collect();

fn check<'a, L, R>(lhs: L, rhs: R)
where L: IntoIterator<Item=(&'a i32, &'a i32)>,
R: IntoIterator<Item=(&'a i32, &'a i32)>,
where
L: IntoIterator<Item = (&'a i32, &'a i32)>,
R: IntoIterator<Item = (&'a i32, &'a i32)>,
{
let lhs: Vec<_> = lhs.into_iter().collect();
let rhs: Vec<_> = rhs.into_iter().collect();
@@ -192,7 +196,10 @@ fn test_range_inclusive() {
check(map.range(-1..=size), map.range(..));
check(map.range(..=size), map.range(..));
check(map.range(..=200), map.range(..201));
check(map.range(5..=8), vec![(&5, &5), (&6, &6), (&7, &7), (&8, &8)]);
check(
map.range(5..=8),
vec![(&5, &5), (&6, &6), (&7, &7), (&8, &8)],
);
check(map.range(-1..=0), vec![(&0, &0)]);
check(map.range(-1..=2), vec![(&0, &0), (&1, &1), (&2, &2)]);
}
@@ -278,7 +285,7 @@ fn test_range_borrowed_key() {
map.insert("coyote".to_string(), 3);
map.insert("dingo".to_string(), 4);
// NOTE: would like to use simply "b".."d" here...
let mut iter = map.range::<str, _>((Included("b"),Excluded("d")));
let mut iter = map.range::<str, _>((Included("b"), Excluded("d")));
assert_eq!(iter.next(), Some((&"baboon".to_string(), &2)));
assert_eq!(iter.next(), Some((&"coyote".to_string(), &3)));
assert_eq!(iter.next(), None);
@@ -291,7 +298,9 @@ fn test_range() {

for i in 0..size {
for j in i..size {
let mut kvs = map.range((Included(&i), Included(&j))).map(|(&k, &v)| (k, v));
let mut kvs = map
.range((Included(&i), Included(&j)))
.map(|(&k, &v)| (k, v));
let mut pairs = (i..=j).map(|i| (i, i));

for (kv, pair) in kvs.by_ref().zip(pairs.by_ref()) {
@@ -310,7 +319,9 @@ fn test_range_mut() {

for i in 0..size {
for j in i..size {
let mut kvs = map.range_mut((Included(&i), Included(&j))).map(|(&k, &mut v)| (k, v));
let mut kvs = map
.range_mut((Included(&i), Included(&j)))
.map(|(&k, &mut v)| (k, v));
let mut pairs = (i..=j).map(|i| (i, i));

for (kv, pair) in kvs.by_ref().zip(pairs.by_ref()) {
@@ -367,7 +378,6 @@ fn test_entry() {
assert_eq!(map.get(&1).unwrap(), &100);
assert_eq!(map.len(), 6);


// Existing key (update)
match map.entry(2) {
Vacant(_) => unreachable!(),
@@ -389,7 +399,6 @@ fn test_entry() {
assert_eq!(map.get(&3), None);
assert_eq!(map.len(), 5);


// Nonexistent key (insert)
match map.entry(10) {
Occupied(_) => unreachable!(),
@@ -511,7 +520,7 @@ fn test_clone() {
#[test]
#[allow(dead_code)]
fn test_variance() {
use std::collections::btree_map::{Iter, IntoIter, Range, Keys, Values};
use std::collections::btree_map::{IntoIter, Iter, Keys, Range, Values};

fn map_key<'new>(v: BTreeMap<&'static str, ()>) -> BTreeMap<&'new str, ()> {
v
@@ -592,7 +601,7 @@ macro_rules! create_append_test {

let mut b = BTreeMap::new();
for i in 5..$len {
b.insert(i, 2*i);
b.insert(i, 2 * i);
}

a.append(&mut b);
@@ -604,12 +613,12 @@ macro_rules! create_append_test {
if i < 5 {
assert_eq!(a[&i], i);
} else {
assert_eq!(a[&i], 2*i);
assert_eq!(a[&i], 2 * i);
}
}

assert_eq!(a.remove(&($len-1)), Some(2*($len-1)));
assert_eq!(a.insert($len-1, 20), None);
assert_eq!(a.remove(&($len - 1)), Some(2 * ($len - 1)));
assert_eq!(a.insert($len - 1, 20), None);
}
};
}
@@ -672,6 +681,10 @@ fn test_split_off_large_random_sorted() {
let key = data[data.len() / 2].0;
let right = map.split_off(&key);

assert!(map.into_iter().eq(data.clone().into_iter().filter(|x| x.0 < key)));
assert!(right.into_iter().eq(data.into_iter().filter(|x| x.0 >= key)));
assert!(map
.into_iter()
.eq(data.clone().into_iter().filter(|x| x.0 < key)));
assert!(right
.into_iter()
.eq(data.into_iter().filter(|x| x.0 >= key)));
}
@@ -1,7 +1,7 @@
use std::collections::BTreeSet;

use std::iter::FromIterator;
use super::DeterministicRng;
use std::iter::FromIterator;

#[test]
fn test_clone_eq() {
@@ -30,7 +30,8 @@ fn test_hash() {
}

fn check<F>(a: &[i32], b: &[i32], expected: &[i32], f: F)
where F: FnOnce(&BTreeSet<i32>, &BTreeSet<i32>, &mut dyn FnMut(&i32) -> bool) -> bool
where
F: FnOnce(&BTreeSet<i32>, &BTreeSet<i32>, &mut dyn FnMut(&i32) -> bool) -> bool,
{
let mut set_a = BTreeSet::new();
let mut set_b = BTreeSet::new();
@@ -43,13 +44,11 @@ fn check<F>(a: &[i32], b: &[i32], expected: &[i32], f: F)
}

let mut i = 0;
f(&set_a,
&set_b,
&mut |&x| {
assert_eq!(x, expected[i]);
i += 1;
true
});
f(&set_a, &set_b, &mut |&x| {
assert_eq!(x, expected[i]);
i += 1;
true
});
assert_eq!(i, expected.len());
}

@@ -64,9 +63,11 @@ fn test_intersection() {
check_intersection(&[], &[1, 2, 3], &[]);
check_intersection(&[2], &[1, 2, 3], &[2]);
check_intersection(&[1, 2, 3], &[2], &[2]);
check_intersection(&[11, 1, 3, 77, 103, 5, -5],
&[2, 11, 77, -9, -42, 5, 3],
&[3, 5, 11, 77]);
check_intersection(
&[11, 1, 3, 77, 103, 5, -5],
&[2, 11, 77, -9, -42, 5, 3],
&[3, 5, 11, 77],
);
}

#[test]
@@ -79,9 +80,11 @@ fn test_difference() {
check_difference(&[1, 12], &[], &[1, 12]);
check_difference(&[], &[1, 2, 3, 9], &[]);
check_difference(&[1, 3, 5, 9, 11], &[3, 9], &[1, 5, 11]);
check_difference(&[-5, 11, 22, 33, 40, 42],
&[-12, -5, 14, 23, 34, 38, 39, 50],
&[11, 22, 33, 40, 42]);
check_difference(
&[-5, 11, 22, 33, 40, 42],
&[-12, -5, 14, 23, 34, 38, 39, 50],
&[11, 22, 33, 40, 42],
);
}

#[test]
@@ -93,9 +96,11 @@ fn test_symmetric_difference() {
check_symmetric_difference(&[], &[], &[]);
check_symmetric_difference(&[1, 2, 3], &[2], &[1, 3]);
check_symmetric_difference(&[2], &[1, 2, 3], &[1, 3]);
check_symmetric_difference(&[1, 3, 5, 9, 11],
&[-2, 3, 9, 14, 22],
&[-2, 1, 5, 11, 14, 22]);
check_symmetric_difference(
&[1, 3, 5, 9, 11],
&[-2, 3, 9, 14, 22],
&[-2, 1, 5, 11, 14, 22],
);
}

#[test]
@@ -107,9 +112,11 @@ fn test_union() {
check_union(&[], &[], &[]);
check_union(&[1, 2, 3], &[2], &[1, 2, 3]);
check_union(&[2], &[1, 2, 3], &[1, 2, 3]);
check_union(&[1, 3, 5, 9, 11, 16, 19, 24],
&[-2, 1, 5, 9, 13, 19],
&[-2, 1, 3, 5, 9, 11, 13, 16, 19, 24]);
check_union(
&[1, 3, 5, 9, 11, 16, 19, 24],
&[-2, 1, 5, 9, 13, 19],
&[-2, 1, 3, 5, 9, 11, 13, 16, 19, 24],
);
}

#[test]
@@ -316,6 +323,8 @@ fn test_split_off_large_random_sorted() {
let key = data[data.len() / 2];
let right = set.split_off(&key);

assert!(set.into_iter().eq(data.clone().into_iter().filter(|x| *x < key)));
assert!(set
.into_iter()
.eq(data.clone().into_iter().filter(|x| *x < key)));
assert!(right.into_iter().eq(data.into_iter().filter(|x| *x >= key)));
}
@@ -1,4 +1,4 @@
use std::alloc::{Global, Alloc, Layout, System};
use std::alloc::{Alloc, Global, Layout, System};

/// https://github.com/rust-lang/rust/issues/45955
#[test]
@@ -16,12 +16,19 @@ fn check_overalign_requests<T: Alloc>(mut allocator: T) {
let align = 16; // greater than size
let iterations = 100;
unsafe {
let pointers: Vec<_> = (0..iterations).map(|_| {
allocator.alloc(Layout::from_size_align(size, align).unwrap()).unwrap()
}).collect();
let pointers: Vec<_> = (0..iterations)
.map(|_| {
allocator
.alloc(Layout::from_size_align(size, align).unwrap())
.unwrap()
})
.collect();
for &ptr in &pointers {
assert_eq!((ptr.as_ptr() as usize) % align, 0,
"Got a pointer less aligned than requested")
assert_eq!(
(ptr.as_ptr() as usize) % align,
0,
"Got a pointer less aligned than requested"
)
}

// Clean up
@@ -13,8 +13,8 @@
extern crate core;
extern crate rand;

use std::hash::{Hash, Hasher};
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

mod arc;
mod binary_heap;
@@ -27,8 +27,8 @@ mod rc;
mod slice;
mod str;
mod string;
mod vec_deque;
mod vec;
mod vec_deque;

fn hash<T: Hash>(t: &T) -> u64 {
let mut s = DefaultHasher::new();