
Commit dae48a0: Register new snapshots

Also convert a number of `static mut` items to plain old `static` and remove
some unsafe blocks.
alexcrichton committed Oct 11, 2014
1 parent 1add4de commit dae48a0
Showing 36 changed files with 107 additions and 307 deletions.
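
Besides registering the new snapshots, the commit converts several `static mut` atomics and mutexes to plain `static`s, which in turn removes the `unsafe` blocks that guarded each access. As a rough illustration of the pattern, here is a sketch in present-day Rust (where `AtomicUsize::new` replaces the old `INIT_ATOMIC_UINT`); the `POOL_ID` name echoes the libgreen change below, and the helper function is invented for the example:

use std::sync::atomic::{AtomicUsize, Ordering};

// A plain `static` works because atomics provide interior mutability:
// mutation goes through `&self` methods, so neither `static mut` nor an
// `unsafe` block at every use site is required.
static POOL_ID: AtomicUsize = AtomicUsize::new(0);

fn next_pool_id() -> usize {
    // With the old `static mut` declaration this call had to be wrapped
    // in `unsafe { ... }`; with a plain `static` it is an ordinary call.
    POOL_ID.fetch_add(1, Ordering::SeqCst)
}

fn main() {
    assert_eq!(next_pool_id(), 0);
    assert_eq!(next_pool_id(), 1);
}
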
23 changes: 0 additions & 23 deletions src/libcollections/string.rs
@@ -928,29 +928,6 @@ impl<S: Str> Add<S, String> for String {
}
}

#[cfg(stage0)]
impl ops::Slice<uint, str> for String {
#[inline]
fn as_slice_<'a>(&'a self) -> &'a str {
self.as_slice()
}

#[inline]
fn slice_from_<'a>(&'a self, from: &uint) -> &'a str {
self[][*from..]
}

#[inline]
fn slice_to_<'a>(&'a self, to: &uint) -> &'a str {
self[][..*to]
}

#[inline]
fn slice_<'a>(&'a self, from: &uint, to: &uint) -> &'a str {
self[][*from..*to]
}
}
#[cfg(not(stage0))]
impl ops::Slice<uint, str> for String {
#[inline]
fn as_slice_<'a>(&'a self) -> &'a str {
18 changes: 0 additions & 18 deletions src/libcollections/trie.rs
@@ -389,15 +389,6 @@ macro_rules! bound {

impl<T> TrieMap<T> {
// If `upper` is true then returns upper_bound else returns lower_bound.
#[cfg(stage0)]
#[inline]
fn bound<'a>(&'a self, key: uint, upper: bool) -> Entries<'a, T> {
bound!(Entries, self = self,
key = key, is_upper = upper,
slice_from = slice_from_, iter = iter,
mutability = )
}
#[cfg(not(stage0))]
#[inline]
fn bound<'a>(&'a self, key: uint, upper: bool) -> Entries<'a, T> {
bound!(Entries, self = self,
@@ -440,15 +431,6 @@ impl<T> TrieMap<T> {
self.bound(key, true)
}
// If `upper` is true then returns upper_bound else returns lower_bound.
#[cfg(stage0)]
#[inline]
fn bound_mut<'a>(&'a mut self, key: uint, upper: bool) -> MutEntries<'a, T> {
bound!(MutEntries, self = self,
key = key, is_upper = upper,
slice_from = slice_from_mut_, iter = iter_mut,
mutability = mut)
}
#[cfg(not(stage0))]
#[inline]
fn bound_mut<'a>(&'a mut self, key: uint, upper: bool) -> MutEntries<'a, T> {
bound!(MutEntries, self = self,
44 changes: 0 additions & 44 deletions src/libcollections/vec.rs
@@ -461,28 +461,6 @@ impl<T> Index<uint,T> for Vec<T> {
}
}*/

#[cfg(stage0)]
impl<T> ops::Slice<uint, [T]> for Vec<T> {
#[inline]
fn as_slice_<'a>(&'a self) -> &'a [T] {
self.as_slice()
}

#[inline]
fn slice_from_<'a>(&'a self, start: &uint) -> &'a [T] {
self.as_slice().slice_from_(start)
}

#[inline]
fn slice_to_<'a>(&'a self, end: &uint) -> &'a [T] {
self.as_slice().slice_to_(end)
}
#[inline]
fn slice_<'a>(&'a self, start: &uint, end: &uint) -> &'a [T] {
self.as_slice().slice_(start, end)
}
}
#[cfg(not(stage0))]
impl<T> ops::Slice<uint, [T]> for Vec<T> {
#[inline]
fn as_slice_<'a>(&'a self) -> &'a [T] {
@@ -504,28 +482,6 @@ impl<T> ops::Slice<uint, [T]> for Vec<T> {
}
}

#[cfg(stage0)]
impl<T> ops::SliceMut<uint, [T]> for Vec<T> {
#[inline]
fn as_mut_slice_<'a>(&'a mut self) -> &'a mut [T] {
self.as_mut_slice()
}

#[inline]
fn slice_from_mut_<'a>(&'a mut self, start: &uint) -> &'a mut [T] {
self.as_mut_slice().slice_from_mut_(start)
}

#[inline]
fn slice_to_mut_<'a>(&'a mut self, end: &uint) -> &'a mut [T] {
self.as_mut_slice().slice_to_mut_(end)
}
#[inline]
fn slice_mut_<'a>(&'a mut self, start: &uint, end: &uint) -> &'a mut [T] {
self.as_mut_slice().slice_mut_(start, end)
}
}
#[cfg(not(stage0))]
impl<T> ops::SliceMut<uint, [T]> for Vec<T> {
#[inline]
fn as_mut_slice_<'a>(&'a mut self) -> &'a mut [T] {
1 change: 0 additions & 1 deletion src/libcore/intrinsics.rs
@@ -254,7 +254,6 @@ extern "rust-intrinsic" {
/// enabling further optimizations.
///
/// NB: This is very different from the `unreachable!()` macro!
#[cfg(not(stage0))]
pub fn unreachable() -> !;

/// Execute a breakpoint trap, for inspection by a debugger.
33 changes: 1 addition & 32 deletions src/libcore/ops.rs
@@ -711,7 +711,6 @@ pub trait IndexMut<Index, Result> {
* }
* ```
*/
#[cfg(not(stage0))]
#[lang="slice"]
pub trait Slice<Idx, Sized? Result> for Sized? {
/// The method for the slicing operation foo[]
@@ -723,21 +722,6 @@ pub trait Slice<Idx, Sized? Result> for Sized? {
/// The method for the slicing operation foo[from..to]
fn slice_or_fail<'a>(&'a self, from: &Idx, to: &Idx) -> &'a Result;
}
#[cfg(stage0)]
/**
*
*/
#[lang="slice"]
pub trait Slice<Idx, Sized? Result> for Sized? {
/// The method for the slicing operation foo[]
fn as_slice_<'a>(&'a self) -> &'a Result;
/// The method for the slicing operation foo[from..]
fn slice_from_<'a>(&'a self, from: &Idx) -> &'a Result;
/// The method for the slicing operation foo[..to]
fn slice_to_<'a>(&'a self, to: &Idx) -> &'a Result;
/// The method for the slicing operation foo[from..to]
fn slice_<'a>(&'a self, from: &Idx, to: &Idx) -> &'a Result;
}

/**
*
@@ -776,7 +760,6 @@ pub trait Slice<Idx, Sized? Result> for Sized? {
* }
* ```
*/
#[cfg(not(stage0))]
#[lang="slice_mut"]
pub trait SliceMut<Idx, Sized? Result> for Sized? {
/// The method for the slicing operation foo[]
@@ -788,21 +771,7 @@ pub trait SliceMut<Idx, Sized? Result> for Sized? {
/// The method for the slicing operation foo[from..to]
fn slice_or_fail_mut<'a>(&'a mut self, from: &Idx, to: &Idx) -> &'a mut Result;
}
#[cfg(stage0)]
/**
*
*/
#[lang="slice_mut"]
pub trait SliceMut<Idx, Sized? Result> for Sized? {
/// The method for the slicing operation foo[mut]
fn as_mut_slice_<'a>(&'a mut self) -> &'a mut Result;
/// The method for the slicing operation foo[mut from..]
fn slice_from_mut_<'a>(&'a mut self, from: &Idx) -> &'a mut Result;
/// The method for the slicing operation foo[mut ..to]
fn slice_to_mut_<'a>(&'a mut self, to: &Idx) -> &'a mut Result;
/// The method for the slicing operation foo[mut from..to]
fn slice_mut_<'a>(&'a mut self, from: &Idx, to: &Idx) -> &'a mut Result;
}

/**
*
* The `Deref` trait is used to specify the functionality of dereferencing
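
The `Slice` and `SliceMut` lang items above are what the `expr[]`, `expr[from..]`, `expr[..to]` and `expr[from..to]` forms desugared to at the time; the deleted `#[cfg(stage0)]` copies carried the older `as_slice_`/`slice_*_` method names only so the previous snapshot compiler could still bootstrap the library. In later Rust this machinery was folded into `Index` over range types; the following is a small sketch of that modern desugaring, using an invented `Wrapper` type rather than anything from this diff:

use std::ops::{Index, Range, RangeFull};

// `w[..]` and `w[a..b]` resolve to these `Index` impls, which play the
// role the old `Slice` lang item played for `expr[]` and `expr[from..to]`.
struct Wrapper(Vec<u32>);

impl Index<RangeFull> for Wrapper {
    type Output = [u32];
    fn index(&self, _: RangeFull) -> &[u32] {
        &self.0
    }
}

impl Index<Range<usize>> for Wrapper {
    type Output = [u32];
    fn index(&self, r: Range<usize>) -> &[u32] {
        &self.0[r]
    }
}

fn main() {
    let w = Wrapper(vec![1, 2, 3, 4]);
    assert_eq!(&w[..], &[1, 2, 3, 4]);
    let r: Range<usize> = 1..3;
    assert_eq!(&w[r], &[2, 3]);
}
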
59 changes: 0 additions & 59 deletions src/libcore/slice.rs
@@ -488,7 +488,6 @@ impl<'a,T> ImmutableSlice<'a, T> for &'a [T] {



#[cfg(not(stage0))]
impl<T> ops::Slice<uint, [T]> for [T] {
#[inline]
fn as_slice_<'a>(&'a self) -> &'a [T] {
@@ -516,36 +515,7 @@ impl<T> ops::Slice<uint, [T]> for [T] {
}
}
}
#[cfg(stage0)]
impl<T> ops::Slice<uint, [T]> for [T] {
#[inline]
fn as_slice_<'a>(&'a self) -> &'a [T] {
self
}

#[inline]
fn slice_from_<'a>(&'a self, start: &uint) -> &'a [T] {
self.slice_(start, &self.len())
}

#[inline]
fn slice_to_<'a>(&'a self, end: &uint) -> &'a [T] {
self.slice_(&0, end)
}
#[inline]
fn slice_<'a>(&'a self, start: &uint, end: &uint) -> &'a [T] {
assert!(*start <= *end);
assert!(*end <= self.len());
unsafe {
transmute(RawSlice {
data: self.as_ptr().offset(*start as int),
len: (*end - *start)
})
}
}
}

#[cfg(not(stage0))]
impl<T> ops::SliceMut<uint, [T]> for [T] {
#[inline]
fn as_mut_slice_<'a>(&'a mut self) -> &'a mut [T] {
@@ -574,35 +544,6 @@ impl<T> ops::SliceMut<uint, [T]> for [T] {
}
}
}
#[cfg(stage0)]
impl<T> ops::SliceMut<uint, [T]> for [T] {
#[inline]
fn as_mut_slice_<'a>(&'a mut self) -> &'a mut [T] {
self
}

#[inline]
fn slice_from_mut_<'a>(&'a mut self, start: &uint) -> &'a mut [T] {
let len = &self.len();
self.slice_mut_(start, len)
}

#[inline]
fn slice_to_mut_<'a>(&'a mut self, end: &uint) -> &'a mut [T] {
self.slice_mut_(&0, end)
}
#[inline]
fn slice_mut_<'a>(&'a mut self, start: &uint, end: &uint) -> &'a mut [T] {
assert!(*start <= *end);
assert!(*end <= self.len());
unsafe {
transmute(RawSlice {
data: self.as_ptr().offset(*start as int),
len: (*end - *start)
})
}
}
}

/// Extension methods for slices such that their elements are
/// mutable.
23 changes: 0 additions & 23 deletions src/libcore/str.rs
@@ -1164,29 +1164,6 @@ pub mod traits {
fn equiv(&self, other: &S) -> bool { eq_slice(*self, other.as_slice()) }
}

#[cfg(stage0)]
impl ops::Slice<uint, str> for str {
#[inline]
fn as_slice_<'a>(&'a self) -> &'a str {
self
}

#[inline]
fn slice_from_<'a>(&'a self, from: &uint) -> &'a str {
self.slice_from(*from)
}

#[inline]
fn slice_to_<'a>(&'a self, to: &uint) -> &'a str {
self.slice_to(*to)
}

#[inline]
fn slice_<'a>(&'a self, from: &uint, to: &uint) -> &'a str {
self.slice(*from, *to)
}
}
#[cfg(not(stage0))]
impl ops::Slice<uint, str> for str {
#[inline]
fn as_slice_<'a>(&'a self) -> &'a str {
14 changes: 6 additions & 8 deletions src/libcoretest/atomic.rs
@@ -69,15 +69,13 @@ fn int_xor() {
assert_eq!(x.load(SeqCst), 0xf731 ^ 0x137f);
}

static mut S_BOOL : AtomicBool = INIT_ATOMIC_BOOL;
static mut S_INT : AtomicInt = INIT_ATOMIC_INT;
static mut S_UINT : AtomicUint = INIT_ATOMIC_UINT;
static S_BOOL : AtomicBool = INIT_ATOMIC_BOOL;
static S_INT : AtomicInt = INIT_ATOMIC_INT;
static S_UINT : AtomicUint = INIT_ATOMIC_UINT;

#[test]
fn static_init() {
unsafe {
assert!(!S_BOOL.load(SeqCst));
assert!(S_INT.load(SeqCst) == 0);
assert!(S_UINT.load(SeqCst) == 0);
}
assert!(!S_BOOL.load(SeqCst));
assert!(S_INT.load(SeqCst) == 0);
assert!(S_UINT.load(SeqCst) == 0);
}
4 changes: 2 additions & 2 deletions src/libgreen/lib.rs
@@ -335,7 +335,7 @@ impl SchedPool {
/// This will configure the pool according to the `config` parameter, and
/// initially run `main` inside the pool of schedulers.
pub fn new(config: PoolConfig) -> SchedPool {
static mut POOL_ID: AtomicUint = INIT_ATOMIC_UINT;
static POOL_ID: AtomicUint = INIT_ATOMIC_UINT;

let PoolConfig {
threads: nscheds,
@@ -349,7 +349,7 @@ impl SchedPool {
threads: vec![],
handles: vec![],
stealers: vec![],
id: unsafe { POOL_ID.fetch_add(1, SeqCst) },
id: POOL_ID.fetch_add(1, SeqCst),
sleepers: SleeperList::new(),
stack_pool: StackPool::new(),
deque_pool: deque::BufferPool::new(),
2 changes: 1 addition & 1 deletion src/libgreen/sched.rs
@@ -1458,7 +1458,7 @@ mod test {
#[test]
fn test_spawn_sched_blocking() {
use std::rt::mutex::{StaticNativeMutex, NATIVE_MUTEX_INIT};
static mut LOCK: StaticNativeMutex = NATIVE_MUTEX_INIT;
static LOCK: StaticNativeMutex = NATIVE_MUTEX_INIT;

// Testing that a task in one scheduler can block in foreign code
// without affecting other schedulers
6 changes: 3 additions & 3 deletions src/libgreen/stack.rs
@@ -158,8 +158,8 @@ impl StackPool {
}

fn max_cached_stacks() -> uint {
static mut AMT: atomic::AtomicUint = atomic::INIT_ATOMIC_UINT;
match unsafe { AMT.load(atomic::SeqCst) } {
static AMT: atomic::AtomicUint = atomic::INIT_ATOMIC_UINT;
match AMT.load(atomic::SeqCst) {
0 => {}
n => return n - 1,
}
@@ -169,7 +169,7 @@ fn max_cached_stacks() -> uint {
let amt = amt.unwrap_or(10);
// 0 is our sentinel value, so ensure that we'll never see 0 after
// initialization has run
unsafe { AMT.store(amt + 1, atomic::SeqCst); }
AMT.store(amt + 1, atomic::SeqCst);
return amt;
}

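
In the `max_cached_stacks` hunk above, the atomic doubles as a one-shot cache: 0 is the "not yet computed" sentinel, and the stored value is offset by one so a genuine limit of 0 can never be mistaken for it. A rough sketch of the same idea in present-day Rust follows; the environment-variable name is a placeholder, not the one libgreen actually reads:

use std::sync::atomic::{AtomicUsize, Ordering};

// 0 means "not computed yet"; real values are stored as `limit + 1`.
static CACHED: AtomicUsize = AtomicUsize::new(0);

fn max_cached_stacks() -> usize {
    match CACHED.load(Ordering::SeqCst) {
        0 => {}
        n => return n - 1,
    }
    // Stand-in for the real lookup. Two racing threads may both compute
    // the value, but they store the same thing, so the race is benign.
    let amt: usize = std::env::var("EXAMPLE_MAX_CACHED_STACKS")
        .ok()
        .and_then(|s| s.parse().ok())
        .unwrap_or(10);
    CACHED.store(amt + 1, Ordering::SeqCst);
    amt
}

fn main() {
    // The second call returns the cached value without re-reading the
    // environment.
    let first = max_cached_stacks();
    assert_eq!(max_cached_stacks(), first);
}
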
4 changes: 2 additions & 2 deletions src/liblog/lib.rs
@@ -348,8 +348,8 @@ pub struct LogLocation {
/// module's log statement should be emitted or not.
#[doc(hidden)]
pub fn mod_enabled(level: u32, module: &str) -> bool {
static mut INIT: Once = ONCE_INIT;
unsafe { INIT.doit(init); }
static INIT: Once = ONCE_INIT;
INIT.doit(init);

// It's possible for many threads are in this function, only one of them
// will perform the global initialization, but all of them will need to check
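
The liblog hunk above applies the same conversion to the `Once` guard protecting logger initialization. In current Rust, `Once` lives in `std::sync` and the old `doit` method is named `call_once`; a minimal sketch of the pattern, with a made-up `init` body and return value:

use std::sync::Once;

static INIT: Once = Once::new();

fn init() {
    // One-time global setup goes here (liblog parses the RUST_LOG
    // directives at this point).
    println!("logger initialized");
}

fn mod_enabled() -> bool {
    // Many threads may race here; `call_once` guarantees `init` runs
    // exactly once and that every caller observes its effects afterwards.
    INIT.call_once(init);
    true // placeholder: the real function then consults the module filters
}

fn main() {
    assert!(mod_enabled());
    assert!(mod_enabled()); // `init` does not run a second time
}
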
4 changes: 2 additions & 2 deletions src/libnative/io/helper_thread.rs
@@ -55,8 +55,8 @@ pub struct Helper<M> {
pub initialized: UnsafeCell<bool>,
}

macro_rules! helper_init( (static mut $name:ident: Helper<$m:ty>) => (
static mut $name: Helper<$m> = Helper {
macro_rules! helper_init( (static $name:ident: Helper<$m:ty>) => (
static $name: Helper<$m> = Helper {
lock: ::std::rt::mutex::NATIVE_MUTEX_INIT,
chan: ::std::cell::UnsafeCell { value: 0 as *mut Sender<$m> },
signal: ::std::cell::UnsafeCell { value: 0 },
2 changes: 1 addition & 1 deletion src/libnative/io/net.rs
@@ -1063,7 +1063,7 @@ mod os {
unsafe {
use std::rt::mutex::{StaticNativeMutex, NATIVE_MUTEX_INIT};
static mut INITIALIZED: bool = false;
static mut LOCK: StaticNativeMutex = NATIVE_MUTEX_INIT;
static LOCK: StaticNativeMutex = NATIVE_MUTEX_INIT;

let _guard = LOCK.lock();
if !INITIALIZED {

5 comments on commit dae48a0

@bors (Contributor) commented on dae48a0, Oct 11, 2014:

saw approval from sfackler
at alexcrichton@dae48a0

@bors (Contributor) commented on dae48a0, Oct 11, 2014:

merging alexcrichton/rust/snapshots = dae48a0 into auto

@bors (Contributor) commented on dae48a0, Oct 11, 2014:

alexcrichton/rust/snapshots = dae48a0 merged ok, testing candidate = 9b98332

@bors (Contributor) commented on dae48a0, Oct 11, 2014.

@bors (Contributor) commented on dae48a0, Oct 11, 2014:

fast-forwarding master to auto = 9b98332
