diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index 24b4abbff4ae2..0617c604121f4 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -206,12 +206,12 @@ impl<T> Arc<T> {
 /// Get the number of weak references to this value.
 #[inline]
 #[unstable(feature = "alloc")]
-pub fn weak_count<T>(this: &Arc<T>) -> uint { this.inner().weak.load(SeqCst) - 1 }
+pub fn weak_count<T>(this: &Arc<T>) -> usize { this.inner().weak.load(SeqCst) - 1 }
 
 /// Get the number of strong references to this value.
 #[inline]
 #[unstable(feature = "alloc")]
-pub fn strong_count<T>(this: &Arc<T>) -> uint { this.inner().strong.load(SeqCst) }
+pub fn strong_count<T>(this: &Arc<T>) -> usize { this.inner().strong.load(SeqCst) }
 
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T> Clone for Arc<T> {
@@ -649,7 +649,7 @@ mod tests {
         let (tx, rx) = channel();
 
         let _t = Thread::spawn(move || {
-            let arc_v: Arc<Vec<int>> = rx.recv().unwrap();
+            let arc_v: Arc<Vec<i32>> = rx.recv().unwrap();
             assert_eq!((*arc_v)[3], 4);
         });
 
@@ -818,5 +818,5 @@ mod tests {
 
     // Make sure deriving works with Arc<T>
     #[derive(Eq, Ord, PartialEq, PartialOrd, Clone, Debug, Default)]
-    struct Foo { inner: Arc<int> }
+    struct Foo { inner: Arc<i32> }
 }
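An illustrative sketch (not from this patch): the two counters converted above survive in today's stable API as `Arc::strong_count` and `Arc::weak_count`, still returning `usize`:

    use std::sync::Arc;

    fn main() {
        let five = Arc::new(5);
        let also_five = five.clone();           // second strong handle
        let weak_five = Arc::downgrade(&five);  // one weak handle

        // Both counts are plain usize now, not uint.
        assert_eq!(Arc::strong_count(&five), 2);
        assert_eq!(Arc::weak_count(&five), 1);
        drop((also_five, weak_five));
    }
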
diff --git a/src/liballoc/boxed_test.rs b/src/liballoc/boxed_test.rs
index 8f65b8c42c928..b7bacaa0cae2b 100644
--- a/src/liballoc/boxed_test.rs
+++ b/src/liballoc/boxed_test.rs
@@ -22,7 +22,7 @@ use std::boxed::BoxAny;
 #[test]
 fn test_owned_clone() {
     let a = Box::new(5);
-    let b: Box<int> = a.clone();
+    let b: Box<i32> = a.clone();
     assert!(a == b);
 }
 
@@ -31,11 +31,11 @@ struct Test;
 
 #[test]
 fn any_move() {
-    let a = Box::new(8us) as Box<Any>;
+    let a = Box::new(8) as Box<Any>;
     let b = Box::new(Test) as Box<Any>;
 
-    match a.downcast::<uint>() {
-        Ok(a) => { assert!(a == Box::new(8us)); }
+    match a.downcast::<i32>() {
+        Ok(a) => { assert!(a == Box::new(8)); }
         Err(..) => panic!()
     }
     match b.downcast::<Test>() {
@@ -47,7 +47,7 @@ fn any_move() {
     let b = Box::new(Test) as Box<Any>;
 
     assert!(a.downcast::<Box<Test>>().is_err());
-    assert!(b.downcast::<Box<uint>>().is_err());
+    assert!(b.downcast::<Box<i32>>().is_err());
 }
 
 #[test]
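A sketch (not from this patch) of why the literal suffix matters in `any_move`: an `Any` downcast succeeds only for the exact concrete type, and an un-suffixed integer literal now defaults to `i32`, so the downcast target has to change along with the literal. In current Rust:

    use std::any::Any;

    fn main() {
        let a: Box<dyn Any> = Box::new(8); // un-suffixed literal defaults to i32
        assert!(a.downcast_ref::<usize>().is_none()); // wrong concrete type: fails
        match a.downcast::<i32>() {
            Ok(n) => assert_eq!(*n, 8), // exact type: succeeds
            Err(_) => panic!("downcast to i32 should succeed"),
        }
    }
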
diff --git a/src/liballoc/heap.rs b/src/liballoc/heap.rs
index 1d5637a6ad6b3..d3d86270d1e9f 100644
--- a/src/liballoc/heap.rs
+++ b/src/liballoc/heap.rs
@@ -21,7 +21,7 @@ use core::ptr::PtrExt;
 /// power of 2. The alignment must be no larger than the largest supported page
 /// size on the platform.
 #[inline]
-pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 {
+pub unsafe fn allocate(size: usize, align: usize) -> *mut u8 {
     imp::allocate(size, align)
 }
 
@@ -37,7 +37,7 @@ pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 {
 /// create the allocation referenced by `ptr`. The `old_size` parameter may be
 /// any value in range_inclusive(requested_size, usable_size).
 #[inline]
-pub unsafe fn reallocate(ptr: *mut u8, old_size: uint, size: uint, align: uint) -> *mut u8 {
+pub unsafe fn reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8 {
     imp::reallocate(ptr, old_size, size, align)
 }
 
@@ -54,7 +54,8 @@ pub unsafe fn reallocate(ptr: *mut u8, old_size: uint, size: uint, align: uint)
 /// create the allocation referenced by `ptr`. The `old_size` parameter may be
 /// any value in range_inclusive(requested_size, usable_size).
 #[inline]
-pub unsafe fn reallocate_inplace(ptr: *mut u8, old_size: uint, size: uint, align: uint) -> uint {
+pub unsafe fn reallocate_inplace(ptr: *mut u8, old_size: usize, size: usize,
+                                 align: usize) -> usize {
     imp::reallocate_inplace(ptr, old_size, size, align)
 }
 
@@ -66,14 +67,14 @@ pub unsafe fn reallocate_inplace(ptr: *mut u8, old_size: uint, size: uint, align
 /// create the allocation referenced by `ptr`. The `old_size` parameter may be
 /// any value in range_inclusive(requested_size, usable_size).
 #[inline]
-pub unsafe fn deallocate(ptr: *mut u8, old_size: uint, align: uint) {
+pub unsafe fn deallocate(ptr: *mut u8, old_size: usize, align: usize) {
     imp::deallocate(ptr, old_size, align)
 }
 
 /// Returns the usable size of an allocation created with the specified the
 /// `size` and `align`.
 #[inline]
-pub fn usable_size(size: uint, align: uint) -> uint {
+pub fn usable_size(size: usize, align: usize) -> usize {
     imp::usable_size(size, align)
 }
 
@@ -96,7 +97,7 @@ pub const EMPTY: *mut () = 0x1 as *mut ();
 #[cfg(not(test))]
 #[lang="exchange_malloc"]
 #[inline]
-unsafe fn exchange_malloc(size: uint, align: uint) -> *mut u8 {
+unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
     if size == 0 {
         EMPTY as *mut u8
     } else {
@@ -109,7 +110,7 @@ unsafe fn exchange_malloc(size: uint, align: uint) -> *mut u8 {
 #[cfg(not(test))]
 #[lang="exchange_free"]
 #[inline]
-unsafe fn exchange_free(ptr: *mut u8, old_size: uint, align: uint) {
+unsafe fn exchange_free(ptr: *mut u8, old_size: usize, align: usize) {
     deallocate(ptr, old_size, align);
 }
 
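The unstable runtime heap API above was later replaced; as a rough modern equivalent (not from this patch), the same size/align contract can be sketched with today's stable `std::alloc` API, where sizes and alignments are likewise `usize`, alignment must be a power of two, and the same size/align pair must be handed back when freeing:

    use std::alloc::{alloc, dealloc, Layout};

    fn main() {
        // 64 bytes, 16-byte aligned; Layout enforces the power-of-two rule.
        let layout = Layout::from_size_align(64, 16).unwrap();
        unsafe {
            let ptr = alloc(layout);
            assert!(!ptr.is_null(), "allocation failed");
            ptr.write(0xAB); // the block is ours to use
            dealloc(ptr, layout); // must match the allocating layout
        }
    }
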
@@ -122,49 +123,49 @@ unsafe fn exchange_free(ptr: *mut u8, old_size: uint, align: uint) {
               target_arch = "mips",
               target_arch = "mipsel",
               target_arch = "powerpc")))]
-const MIN_ALIGN: uint = 8;
+const MIN_ALIGN: usize = 8;
 
 #[cfg(all(not(feature = "external_funcs"),
           not(feature = "external_crate"),
           any(target_arch = "x86",
               target_arch = "x86_64",
               target_arch = "aarch64")))]
-const MIN_ALIGN: uint = 16;
+const MIN_ALIGN: usize = 16;
 
 #[cfg(feature = "external_funcs")]
 mod imp {
     extern {
-        fn rust_allocate(size: uint, align: uint) -> *mut u8;
-        fn rust_deallocate(ptr: *mut u8, old_size: uint, align: uint);
-        fn rust_reallocate(ptr: *mut u8, old_size: uint, size: uint, align: uint) -> *mut u8;
-        fn rust_reallocate_inplace(ptr: *mut u8, old_size: uint, size: uint,
-                                   align: uint) -> uint;
-        fn rust_usable_size(size: uint, align: uint) -> uint;
+        fn rust_allocate(size: usize, align: usize) -> *mut u8;
+        fn rust_deallocate(ptr: *mut u8, old_size: usize, align: usize);
+        fn rust_reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8;
+        fn rust_reallocate_inplace(ptr: *mut u8, old_size: usize, size: usize,
+                                   align: usize) -> usize;
+        fn rust_usable_size(size: usize, align: usize) -> usize;
         fn rust_stats_print();
     }
 
     #[inline]
-    pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 {
+    pub unsafe fn allocate(size: usize, align: usize) -> *mut u8 {
         rust_allocate(size, align)
     }
 
     #[inline]
-    pub unsafe fn deallocate(ptr: *mut u8, old_size: uint, align: uint) {
+    pub unsafe fn deallocate(ptr: *mut u8, old_size: usize, align: usize) {
         rust_deallocate(ptr, old_size, align)
     }
 
     #[inline]
-    pub unsafe fn reallocate(ptr: *mut u8, old_size: uint, size: uint, align: uint) -> *mut u8 {
+    pub unsafe fn reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8 {
         rust_reallocate(ptr, old_size, size, align)
     }
 
     #[inline]
-    pub unsafe fn reallocate_inplace(ptr: *mut u8, old_size: uint, size: uint,
-                                     align: uint) -> uint {
+    pub unsafe fn reallocate_inplace(ptr: *mut u8, old_size: usize, size: usize,
+                                     align: usize) -> usize {
         rust_reallocate_inplace(ptr, old_size, size, align)
     }
 
     #[inline]
-    pub fn usable_size(size: uint, align: uint) -> uint {
+    pub fn usable_size(size: usize, align: usize) -> usize {
         unsafe { rust_usable_size(size, align) }
     }
 
@@ -215,42 +216,42 @@ mod imp {
     // MALLOCX_ALIGN(a) macro
 
     #[inline(always)]
-    fn mallocx_align(a: uint) -> c_int { a.trailing_zeros() as c_int }
+    fn mallocx_align(a: usize) -> c_int { a.trailing_zeros() as c_int }
 
     #[inline(always)]
-    fn align_to_flags(align: uint) -> c_int {
+    fn align_to_flags(align: usize) -> c_int {
         if align <= MIN_ALIGN { 0 } else { mallocx_align(align) }
     }
 
     #[inline]
-    pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 {
+    pub unsafe fn allocate(size: usize, align: usize) -> *mut u8 {
         let flags = align_to_flags(align);
         je_mallocx(size as size_t, flags) as *mut u8
     }
 
     #[inline]
-    pub unsafe fn reallocate(ptr: *mut u8, _old_size: uint, size: uint, align: uint) -> *mut u8 {
+    pub unsafe fn reallocate(ptr: *mut u8, _old_size: usize, size: usize, align: usize) -> *mut u8 {
         let flags = align_to_flags(align);
         je_rallocx(ptr as *mut c_void, size as size_t, flags) as *mut u8
     }
 
     #[inline]
-    pub unsafe fn reallocate_inplace(ptr: *mut u8, _old_size: uint, size: uint,
-                                     align: uint) -> uint {
+    pub unsafe fn reallocate_inplace(ptr: *mut u8, _old_size: usize, size: usize,
+                                     align: usize) -> usize {
         let flags = align_to_flags(align);
-        je_xallocx(ptr as *mut c_void, size as size_t, 0, flags) as uint
+        je_xallocx(ptr as *mut c_void, size as size_t, 0, flags) as usize
     }
 
     #[inline]
-    pub unsafe fn deallocate(ptr: *mut u8, old_size: uint, align: uint) {
+    pub unsafe fn deallocate(ptr: *mut u8, old_size: usize, align: usize) {
         let flags = align_to_flags(align);
         je_sdallocx(ptr as *mut c_void, old_size as size_t, flags)
     }
 
     #[inline]
-    pub fn usable_size(size: uint, align: uint) -> uint {
+    pub fn usable_size(size: usize, align: usize) -> usize {
         let flags = align_to_flags(align);
-        unsafe { je_nallocx(size as size_t, flags) as uint }
+        unsafe { je_nallocx(size as size_t, flags) as usize }
     }
 
     pub fn stats_print() {
@@ -277,7 +278,7 @@ mod imp {
     }
 
     #[inline]
-    pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 {
+    pub unsafe fn allocate(size: usize, align: usize) -> *mut u8 {
         if align <= MIN_ALIGN {
             libc::malloc(size as libc::size_t) as *mut u8
         } else {
@@ -294,7 +295,7 @@ mod imp {
     }
 
     #[inline]
-    pub unsafe fn reallocate(ptr: *mut u8, old_size: uint, size: uint, align: uint) -> *mut u8 {
+    pub unsafe fn reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8 {
         if align <= MIN_ALIGN {
             libc::realloc(ptr as *mut libc::c_void, size as libc::size_t) as *mut u8
         } else {
@@ -306,18 +307,18 @@ mod imp {
     }
 
     #[inline]
-    pub unsafe fn reallocate_inplace(_ptr: *mut u8, old_size: uint, _size: uint,
-                                     _align: uint) -> uint {
+    pub unsafe fn reallocate_inplace(_ptr: *mut u8, old_size: usize, _size: usize,
+                                     _align: usize) -> usize {
         old_size
     }
 
     #[inline]
-    pub unsafe fn deallocate(ptr: *mut u8, _old_size: uint, _align: uint) {
+    pub unsafe fn deallocate(ptr: *mut u8, _old_size: usize, _align: usize) {
         libc::free(ptr as *mut libc::c_void)
     }
 
     #[inline]
-    pub fn usable_size(size: uint, _align: uint) -> uint {
+    pub fn usable_size(size: usize, _align: usize) -> usize {
         size
     }
 
@@ -341,7 +342,7 @@ mod imp {
     }
 
     #[inline]
-    pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 {
+    pub unsafe fn allocate(size: usize, align: usize) -> *mut u8 {
         if align <= MIN_ALIGN {
             libc::malloc(size as size_t) as *mut u8
         } else {
@@ -350,7 +351,7 @@ mod imp {
     }
 
     #[inline]
-    pub unsafe fn reallocate(ptr: *mut u8, _old_size: uint, size: uint, align: uint) -> *mut u8 {
+    pub unsafe fn reallocate(ptr: *mut u8, _old_size: usize, size: usize, align: usize) -> *mut u8 {
         if align <= MIN_ALIGN {
             libc::realloc(ptr as *mut c_void, size as size_t) as *mut u8
         } else {
@@ -359,13 +360,13 @@ mod imp {
     }
 
     #[inline]
-    pub unsafe fn reallocate_inplace(_ptr: *mut u8, old_size: uint, _size: uint,
-                                     _align: uint) -> uint {
+    pub unsafe fn reallocate_inplace(_ptr: *mut u8, old_size: usize, _size: usize,
+                                     _align: usize) -> usize {
         old_size
     }
 
     #[inline]
-    pub unsafe fn deallocate(ptr: *mut u8, _old_size: uint, align: uint) {
+    pub unsafe fn deallocate(ptr: *mut u8, _old_size: usize, align: usize) {
         if align <= MIN_ALIGN {
             libc::free(ptr as *mut libc::c_void)
         } else {
@@ -374,7 +375,7 @@ mod imp {
     }
 
     #[inline]
-    pub fn usable_size(size: uint, _align: uint) -> uint {
+    pub fn usable_size(size: usize, _align: usize) -> usize {
         size
     }
 
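An illustrative sketch (not from this patch) of the jemalloc flag encoding used in the `imp` module above: MALLOCX_ALIGN packs an alignment as its base-2 logarithm, which `trailing_zeros()` computes for any power of two, and alignments at or below MIN_ALIGN need no flag at all:

    fn mallocx_align(a: usize) -> i32 { a.trailing_zeros() as i32 }

    fn main() {
        const MIN_ALIGN: usize = 16; // the x86/x86_64/aarch64 value above
        for &align in &[8usize, 16, 32, 4096] {
            let flags = if align <= MIN_ALIGN { 0 } else { mallocx_align(align) };
            println!("align {:>4} -> flags {}", align, flags); // 0, 0, 5, 12
        }
    }
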
diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs
index 81391fd63eb85..0e6266f9cbc57 100644
--- a/src/liballoc/lib.rs
+++ b/src/liballoc/lib.rs
@@ -70,7 +70,6 @@
 #![feature(lang_items, unsafe_destructor)]
 #![feature(box_syntax)]
 #![feature(optin_builtin_traits)]
-#![feature(int_uint)]
 #![feature(unboxed_closures)]
 #![feature(core)]
 #![feature(hash)]
diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index 54ff4c1865472..ab3c0901bc956 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -40,7 +40,7 @@
 //! }
 //!
 //! struct Gadget {
-//!     id: int,
+//!     id: i32,
 //!     owner: Rc<Owner>
 //!     // ...other fields
 //! }
@@ -99,7 +99,7 @@
 //! }
 //!
 //! struct Gadget {
-//!     id: int,
+//!     id: i32,
 //!     owner: Rc<Owner>
 //!     // ...other fields
 //! }
@@ -166,8 +166,8 @@ use heap::deallocate;
 
 struct RcBox<T> {
     value: T,
-    strong: Cell<uint>,
-    weak: Cell<uint>
+    strong: Cell<usize>,
+    weak: Cell<usize>
 }
 
 /// An immutable reference-counted pointer type.
@@ -233,12 +233,12 @@ impl<T> Rc<T> {
 /// Get the number of weak references to this value.
 #[inline]
 #[unstable(feature = "alloc")]
-pub fn weak_count<T>(this: &Rc<T>) -> uint { this.weak() - 1 }
+pub fn weak_count<T>(this: &Rc<T>) -> usize { this.weak() - 1 }
 
 /// Get the number of strong references to this value.
 #[inline]
 #[unstable(feature = "alloc")]
-pub fn strong_count<T>(this: &Rc<T>) -> uint { this.strong() }
+pub fn strong_count<T>(this: &Rc<T>) -> usize { this.strong() }
 
 /// Returns true if there are no other `Rc` or `Weak` values that share the same inner value.
 ///
@@ -447,7 +447,7 @@ impl<T: Default> Default for Rc<T> {
     /// use std::rc::Rc;
     /// use std::default::Default;
     ///
-    /// let x: Rc<int> = Default::default();
+    /// let x: Rc<i32> = Default::default();
     /// ```
     #[inline]
     #[stable(feature = "rust1", since = "1.0.0")]
@@ -750,7 +750,7 @@ trait RcBoxPtr<T> {
     fn inner(&self) -> &RcBox<T>;
 
     #[inline]
-    fn strong(&self) -> uint { self.inner().strong.get() }
+    fn strong(&self) -> usize { self.inner().strong.get() }
 
     #[inline]
     fn inc_strong(&self) { self.inner().strong.set(self.strong() + 1); }
@@ -759,7 +759,7 @@ trait RcBoxPtr<T> {
     fn dec_strong(&self) { self.inner().strong.set(self.strong() - 1); }
 
     #[inline]
-    fn weak(&self) -> uint { self.inner().weak.get() }
+    fn weak(&self) -> usize { self.inner().weak.get() }
 
     #[inline]
     fn inc_weak(&self) { self.inner().weak.set(self.weak() + 1); }
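A sketch (not from this patch) of why `weak_count` subtracts one above: the strong handles collectively own a single implicit weak reference that keeps the backing allocation alive, and the public count hides it. With today's stable equivalents:

    use std::rc::Rc;

    fn main() {
        let owner = Rc::new("gadget owner");
        assert_eq!(Rc::weak_count(&owner), 0); // implicit weak is not reported

        let w1 = Rc::downgrade(&owner);
        let w2 = Rc::downgrade(&owner);
        assert_eq!(Rc::strong_count(&owner), 1);
        assert_eq!(Rc::weak_count(&owner), 2); // only user-created weaks
        drop((w1, w2));
    }
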
diff --git a/src/libarena/lib.rs b/src/libarena/lib.rs
index 62d103ae06a88..223c5111f8f4e 100644
--- a/src/libarena/lib.rs
+++ b/src/libarena/lib.rs
@@ -31,7 +31,6 @@
 #![feature(alloc)]
 #![feature(box_syntax)]
 #![feature(core)]
-#![feature(int_uint)]
 #![feature(staged_api)]
 #![feature(unboxed_closures)]
 #![feature(unsafe_destructor)]
@@ -55,12 +54,12 @@ use std::rt::heap::{allocate, deallocate};
 #[derive(Clone, PartialEq)]
 struct Chunk {
     data: Rc<RefCell<Vec<u8>>>,
-    fill: Cell<uint>,
+    fill: Cell<usize>,
     is_copy: Cell<bool>,
 }
 
 impl Chunk {
-    fn capacity(&self) -> uint {
+    fn capacity(&self) -> usize {
         self.data.borrow().capacity()
     }
 
@@ -105,7 +104,7 @@ impl Arena {
     }
 
     /// Allocates a new Arena with `initial_size` bytes preallocated.
-    pub fn new_with_size(initial_size: uint) -> Arena {
+    pub fn new_with_size(initial_size: usize) -> Arena {
         Arena {
             head: RefCell::new(chunk(initial_size, false)),
             copy_head: RefCell::new(chunk(initial_size, true)),
@@ -114,7 +113,7 @@ impl Arena {
     }
 }
 
-fn chunk(size: uint, is_copy: bool) -> Chunk {
+fn chunk(size: usize, is_copy: bool) -> Chunk {
     Chunk {
         data: Rc::new(RefCell::new(Vec::with_capacity(size))),
         fill: Cell::new(0),
@@ -137,7 +136,7 @@ impl Drop for Arena {
 }
 
 #[inline]
-fn round_up(base: uint, align: uint) -> uint {
+fn round_up(base: usize, align: usize) -> usize {
     (base.checked_add(align - 1)).unwrap() & !(align - 1)
 }
 
@@ -149,7 +148,7 @@ unsafe fn destroy_chunk(chunk: &Chunk) {
     let fill = chunk.fill.get();
 
     while idx < fill {
-        let tydesc_data: *const uint = mem::transmute(buf.offset(idx as int));
+        let tydesc_data: *const usize = mem::transmute(buf.offset(idx as isize));
         let (tydesc, is_done) = un_bitpack_tydesc_ptr(*tydesc_data);
         let (size, align) = ((*tydesc).size, (*tydesc).align);
 
@@ -160,7 +159,7 @@ unsafe fn destroy_chunk(chunk: &Chunk) {
         //debug!("freeing object: idx = {}, size = {}, align = {}, done = {}",
         //       start, size, align, is_done);
         if is_done {
-            ((*tydesc).drop_glue)(buf.offset(start as int) as *const i8);
+            ((*tydesc).drop_glue)(buf.offset(start as isize) as *const i8);
         }
 
         // Find where the next tydesc lives
@@ -173,21 +172,21 @@ unsafe fn destroy_chunk(chunk: &Chunk) {
 // is necessary in order to properly do cleanup if a panic occurs
 // during an initializer.
 #[inline]
-fn bitpack_tydesc_ptr(p: *const TyDesc, is_done: bool) -> uint {
-    p as uint | (is_done as uint)
+fn bitpack_tydesc_ptr(p: *const TyDesc, is_done: bool) -> usize {
+    p as usize | (is_done as usize)
 }
 #[inline]
-fn un_bitpack_tydesc_ptr(p: uint) -> (*const TyDesc, bool) {
+fn un_bitpack_tydesc_ptr(p: usize) -> (*const TyDesc, bool) {
     ((p & !1) as *const TyDesc, p & 1 == 1)
 }
 
 impl Arena {
-    fn chunk_size(&self) -> uint {
+    fn chunk_size(&self) -> usize {
         self.copy_head.borrow().capacity()
     }
 
     // Functions for the POD part of the arena
-    fn alloc_copy_grow(&self, n_bytes: uint, align: uint) -> *const u8 {
+    fn alloc_copy_grow(&self, n_bytes: usize, align: usize) -> *const u8 {
         // Allocate a new chunk.
         let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
         self.chunks.borrow_mut().push(self.copy_head.borrow().clone());
@@ -199,7 +198,7 @@ impl Arena {
     }
 
     #[inline]
-    fn alloc_copy_inner(&self, n_bytes: uint, align: uint) -> *const u8 {
+    fn alloc_copy_inner(&self, n_bytes: usize, align: usize) -> *const u8 {
         let start = round_up(self.copy_head.borrow().fill.get(), align);
         let end = start + n_bytes;
 
@@ -211,7 +210,7 @@ impl Arena {
         copy_head.fill.set(end);
 
         unsafe {
-            copy_head.as_ptr().offset(start as int)
+            copy_head.as_ptr().offset(start as isize)
         }
     }
 
@@ -227,8 +226,8 @@ impl Arena {
     }
 
     // Functions for the non-POD part of the arena
-    fn alloc_noncopy_grow(&self, n_bytes: uint,
-                          align: uint) -> (*const u8, *const u8) {
+    fn alloc_noncopy_grow(&self, n_bytes: usize,
+                          align: usize) -> (*const u8, *const u8) {
         // Allocate a new chunk.
         let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
         self.chunks.borrow_mut().push(self.head.borrow().clone());
@@ -240,8 +239,8 @@ impl Arena {
     }
 
     #[inline]
-    fn alloc_noncopy_inner(&self, n_bytes: uint,
-                           align: uint) -> (*const u8, *const u8) {
+    fn alloc_noncopy_inner(&self, n_bytes: usize,
+                           align: usize) -> (*const u8, *const u8) {
         // Be careful to not maintain any `head` borrows active, because
         // `alloc_noncopy_grow` borrows it mutably.
         let (start, end, tydesc_start, head_capacity) = {
@@ -265,7 +264,7 @@ impl Arena {
 
         unsafe {
             let buf = head.as_ptr();
-            return (buf.offset(tydesc_start as int), buf.offset(start as int));
+            return (buf.offset(tydesc_start as isize), buf.offset(start as isize));
         }
     }
 
@@ -276,7 +275,7 @@ impl Arena {
         let (ty_ptr, ptr) = self.alloc_noncopy_inner(mem::size_of::<T>(),
                                                      mem::min_align_of::<T>());
-        let ty_ptr = ty_ptr as *mut uint;
+        let ty_ptr = ty_ptr as *mut usize;
         let ptr = ptr as *mut T;
         // Write in our tydesc along with a bit indicating that it
         // has *not* been initialized yet.
@@ -320,7 +319,7 @@ fn test_arena_destructors() {
 
 #[test]
 fn test_arena_alloc_nested() {
-    struct Inner { value: uint }
+    struct Inner { value: usize }
     struct Outer<'a> { inner: &'a Inner }
 
     let arena = Arena::new();
@@ -343,10 +342,10 @@ fn test_arena_destructors_fail() {
         arena.alloc(|| { Rc::new(i) });
         // Allocate something with funny size and alignment, to keep
         // things interesting.
-        arena.alloc(|| { [0u8, 1u8, 2u8] });
+        arena.alloc(|| { [0u8, 1, 2] });
     }
     // Now, panic while allocating
-    arena.alloc::<Rc<int>, _>(|| {
+    arena.alloc::<Rc<i32>, _>(|| {
         panic!();
     });
 }
@@ -373,12 +372,12 @@ struct TypedArenaChunk<T> {
     next: *mut TypedArenaChunk<T>,
 
     /// The number of elements that this chunk can hold.
-    capacity: uint,
+    capacity: usize,
 
     // Objects follow here, suitably aligned.
 }
 
-fn calculate_size<T>(capacity: uint) -> uint {
+fn calculate_size<T>(capacity: usize) -> usize {
     let mut size = mem::size_of::<TypedArenaChunk<T>>();
     size = round_up(size, mem::min_align_of::<T>());
     let elem_size = mem::size_of::<T>();
@@ -389,7 +388,7 @@ fn calculate_size<T>(capacity: uint) -> uint {
 
 impl<T> TypedArenaChunk<T> {
     #[inline]
-    unsafe fn new(next: *mut TypedArenaChunk<T>, capacity: uint)
+    unsafe fn new(next: *mut TypedArenaChunk<T>, capacity: usize)
                   -> *mut TypedArenaChunk<T> {
         let size = calculate_size::<T>(capacity);
         let chunk = allocate(size, mem::min_align_of::<TypedArenaChunk<T>>())
@@ -403,13 +402,13 @@ impl<T> TypedArenaChunk<T> {
     /// Destroys this arena chunk. If the type descriptor is supplied, the
     /// drop glue is called; otherwise, drop glue is not called.
     #[inline]
-    unsafe fn destroy(&mut self, len: uint) {
+    unsafe fn destroy(&mut self, len: usize) {
         // Destroy all the allocated objects.
         if intrinsics::needs_drop::<T>() {
             let mut start = self.start();
             for _ in 0..len {
                 ptr::read(start as *const T); // run the destructor on the pointer
-                start = start.offset(mem::size_of::<T>() as int)
+                start = start.offset(mem::size_of::<T>() as isize)
             }
         }
 
@@ -429,7 +428,7 @@ impl<T> TypedArenaChunk<T> {
     fn start(&self) -> *const u8 {
         let this: *const TypedArenaChunk<T> = self;
         unsafe {
-            mem::transmute(round_up(this.offset(1) as uint,
+            mem::transmute(round_up(this.offset(1) as usize,
                                     mem::min_align_of::<T>()))
         }
     }
 
@@ -439,7 +438,7 @@ impl<T> TypedArenaChunk<T> {
     fn end(&self) -> *const u8 {
         unsafe {
             let size = mem::size_of::<T>().checked_mul(self.capacity).unwrap();
-            self.start().offset(size as int)
+            self.start().offset(size as isize)
         }
     }
 }
@@ -454,7 +453,7 @@ impl<T> TypedArena<T> {
     /// Creates a new `TypedArena` with preallocated space for the given number of
     /// objects.
     #[inline]
-    pub fn with_capacity(capacity: uint) -> TypedArena<T> {
+    pub fn with_capacity(capacity: usize) -> TypedArena<T> {
         unsafe {
             let chunk = TypedArenaChunk::<T>::new(ptr::null_mut(), capacity);
             TypedArena {
@@ -501,8 +500,8 @@ impl<T> Drop for TypedArena<T> {
     fn drop(&mut self) {
         unsafe {
             // Determine how much was filled.
-            let start = self.first.borrow().as_ref().unwrap().start() as uint;
-            let end = self.ptr.get() as uint;
+            let start = self.first.borrow().as_ref().unwrap().start() as usize;
+            let end = self.ptr.get() as usize;
             let diff = (end - start) / mem::size_of::<T>();
 
             // Pass that to the `destroy` method.
@@ -519,9 +518,9 @@ mod tests {
 
     #[allow(dead_code)]
     struct Point {
-        x: int,
-        y: int,
-        z: int,
+        x: i32,
+        y: i32,
+        z: i32,
     }
 
     #[test]
@@ -576,7 +575,7 @@ mod tests {
     #[allow(dead_code)]
     struct Noncopy {
         string: String,
-        array: Vec<int>,
+        array: Vec<i32>,
     }
 
     #[test]
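A standalone sketch (not from this patch) of the pointer bit-packing that `bitpack_tydesc_ptr` above relies on: because a `TyDesc` is at least word-aligned, the low bit of its address is always zero and can smuggle the `is_done` flag:

    fn bitpack(p: *const u64, is_done: bool) -> usize {
        p as usize | (is_done as usize) // low bit is free on an aligned pointer
    }

    fn un_bitpack(p: usize) -> (*const u64, bool) {
        ((p & !1) as *const u64, p & 1 == 1)
    }

    fn main() {
        let x: u64 = 42; // u64 stands in for TyDesc; both are multi-byte aligned
        let packed = bitpack(&x, true);
        let (ptr, done) = un_bitpack(packed);
        assert!(done);
        assert_eq!(unsafe { *ptr }, 42);
    }
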
diff --git a/src/liblibc/lib.rs b/src/liblibc/lib.rs
index 38d5c5eb27ae2..d95d0863ca81f 100644
--- a/src/liblibc/lib.rs
+++ b/src/liblibc/lib.rs
@@ -15,7 +15,6 @@
 #![cfg_attr(not(feature = "cargo-build"), feature(staged_api))]
 #![cfg_attr(not(feature = "cargo-build"), staged_api)]
 #![cfg_attr(not(feature = "cargo-build"), feature(core))]
-#![feature(int_uint)]
 #![feature(no_std)]
 #![no_std]
 #![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
@@ -1905,7 +1904,7 @@ pub mod types {
         #[repr(C)] #[derive(Copy)] pub struct WSAPROTOCOLCHAIN {
             pub ChainLen: c_int,
-            pub ChainEntries: [DWORD; MAX_PROTOCOL_CHAIN as uint],
+            pub ChainEntries: [DWORD; MAX_PROTOCOL_CHAIN as usize],
         }
 
         pub type LPWSAPROTOCOLCHAIN = *mut WSAPROTOCOLCHAIN;
@@ -1931,7 +1930,7 @@ pub mod types {
             pub iSecurityScheme: c_int,
             pub dwMessageSize: DWORD,
             pub dwProviderReserved: DWORD,
-            pub szProtocol: [u8; (WSAPROTOCOL_LEN as uint) + 1us],
+            pub szProtocol: [u8; WSAPROTOCOL_LEN as usize + 1],
         }
         pub type LPWSAPROTOCOL_INFO = *mut WSAPROTOCOL_INFO;
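A closing sketch (not from this patch): array lengths in Rust are `usize`, which is why the constant expressions above take an `as usize` cast and why the stray `1us` suffix could simply be dropped. Assuming the Winsock header value `WSAPROTOCOL_LEN = 255`:

    const WSAPROTOCOL_LEN: u32 = 255; // per the Winsock headers

    fn main() {
        let sz_protocol = [0u8; WSAPROTOCOL_LEN as usize + 1];
        assert_eq!(sz_protocol.len(), 256);
    }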