diff --git a/src/libcollectionstest/slice.rs b/src/libcollectionstest/slice.rs
index 0c3c82eea780f..4168fe88a4b56 100644
--- a/src/libcollectionstest/slice.rs
+++ b/src/libcollectionstest/slice.rs
@@ -1428,7 +1428,7 @@ mod bench {
             let mut v = Vec::<u8>::with_capacity(1024);
             unsafe {
                 let vp = v.as_mut_ptr();
-                ptr::set_memory(vp, 0, 1024);
+                ptr::write_bytes(vp, 0, 1024);
                 v.set_len(1024);
             }
             v
diff --git a/src/libcore/intrinsics.rs b/src/libcore/intrinsics.rs
index 1462d07652d08..297c3192ecb5e 100644
--- a/src/libcore/intrinsics.rs
+++ b/src/libcore/intrinsics.rs
@@ -44,6 +44,10 @@ use marker::Sized;
+#[cfg(stage0)] pub use self::copy_memory as copy;
+#[cfg(stage0)] pub use self::set_memory as write_bytes;
+#[cfg(stage0)] pub use self::copy_nonoverlapping_memory as copy_nonoverlapping;
+
 extern "rust-intrinsic" {
 
     // NB: These intrinsics take unsafe pointers because they mutate aliased
@@ -246,7 +250,7 @@ extern "rust-intrinsic" {
     /// Copies `count * size_of<T>` bytes from `src` to `dst`. The source
     /// and destination may *not* overlap.
     ///
-    /// `copy_nonoverlapping_memory` is semantically equivalent to C's `memcpy`.
+    /// `copy_nonoverlapping` is semantically equivalent to C's `memcpy`.
     ///
     /// # Safety
     ///
@@ -271,9 +275,9 @@ extern "rust-intrinsic" {
     ///         let mut t: T = mem::uninitialized();
     ///
     ///         // Perform the swap, `&mut` pointers never alias
-    ///         ptr::copy_nonoverlapping_memory(&mut t, &*x, 1);
-    ///         ptr::copy_nonoverlapping_memory(x, &*y, 1);
-    ///         ptr::copy_nonoverlapping_memory(y, &t, 1);
+    ///         ptr::copy_nonoverlapping(&mut t, &*x, 1);
+    ///         ptr::copy_nonoverlapping(x, &*y, 1);
+    ///         ptr::copy_nonoverlapping(y, &t, 1);
     ///
     ///         // y and t now point to the same thing, but we need to completely forget `tmp`
     ///         // because it's no longer relevant.
@@ -282,12 +286,18 @@ extern "rust-intrinsic" {
     /// }
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
+    #[cfg(not(stage0))]
+    pub fn copy_nonoverlapping<T>(dst: *mut T, src: *const T, count: usize);
+
+    /// dox
+    #[stable(feature = "rust1", since = "1.0.0")]
+    #[cfg(stage0)]
     pub fn copy_nonoverlapping_memory<T>(dst: *mut T, src: *const T, count: usize);
 
     /// Copies `count * size_of<T>` bytes from `src` to `dst`. The source
     /// and destination may overlap.
     ///
-    /// `copy_memory` is semantically equivalent to C's `memmove`.
+    /// `copy` is semantically equivalent to C's `memmove`.
     ///
     /// # Safety
     ///
@@ -306,16 +316,28 @@ extern "rust-intrinsic" {
     /// unsafe fn from_buf_raw<T>(ptr: *const T, elts: uint) -> Vec<T> {
     ///     let mut dst = Vec::with_capacity(elts);
     ///     dst.set_len(elts);
-    ///     ptr::copy_memory(dst.as_mut_ptr(), ptr, elts);
+    ///     ptr::copy(dst.as_mut_ptr(), ptr, elts);
     ///     dst
     /// }
     /// ```
     ///
+    #[cfg(not(stage0))]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn copy<T>(dst: *mut T, src: *const T, count: usize);
+
+    /// dox
+    #[cfg(stage0)]
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn copy_memory<T>(dst: *mut T, src: *const T, count: usize);
 
     /// Invokes memset on the specified pointer, setting `count * size_of::<T>()`
     /// bytes of memory starting at `dst` to `c`.
+    #[cfg(not(stage0))]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn write_bytes<T>(dst: *mut T, val: u8, count: usize);
+
+    /// dox
+    #[cfg(stage0)]
     #[stable(feature = "rust1", since = "1.0.0")]
     pub fn set_memory<T>(dst: *mut T, val: u8, count: usize);
diff --git a/src/libcore/ptr.rs b/src/libcore/ptr.rs
index 1cbea057e8842..0c0cb81a89ec7 100644
--- a/src/libcore/ptr.rs
+++ b/src/libcore/ptr.rs
@@ -105,27 +105,13 @@ use cmp::Ordering::{self, Less, Equal, Greater};
 
 // FIXME #19649: intrinsic docs don't render, so these have no docs :(
 
 #[stable(feature = "rust1", since = "1.0.0")]
-pub use intrinsics::copy_nonoverlapping_memory as copy_nonoverlapping;
+pub use intrinsics::copy_nonoverlapping;
 
 #[stable(feature = "rust1", since = "1.0.0")]
-pub use intrinsics::copy_memory as copy;
+pub use intrinsics::copy;
 
 #[stable(feature = "rust1", since = "1.0.0")]
-pub use intrinsics::set_memory as write_bytes;
-
-extern "rust-intrinsic" {
-    #[unstable(feature = "core")]
-    #[deprecated(since = "1.0.0", reason = "renamed to `copy_nonoverlapping`")]
-    pub fn copy_nonoverlapping_memory<T>(dst: *mut T, src: *const T, count: usize);
-    #[unstable(feature = "core")]
-    #[deprecated(since = "1.0.0", reason = "renamed to `copy`")]
-    pub fn copy_memory<T>(dst: *mut T, src: *const T, count: usize);
-
-    #[unstable(feature = "core",
-               reason = "uncertain about naming and semantics")]
-    #[deprecated(since = "1.0.0", reason = "renamed to `write_bytes`")]
-    pub fn set_memory<T>(dst: *mut T, val: u8, count: usize);
-}
+pub use intrinsics::write_bytes;
 
 /// Creates a null raw pointer.
 ///
diff --git a/src/libcoretest/ptr.rs b/src/libcoretest/ptr.rs
index 6a25c8be14e5a..adc15b9fbc27f 100644
--- a/src/libcoretest/ptr.rs
+++ b/src/libcoretest/ptr.rs
@@ -35,18 +35,18 @@ fn test() {
     let v0 = vec![32000u16, 32001u16, 32002u16];
     let mut v1 = vec![0u16, 0u16, 0u16];
 
-    copy_memory(v1.as_mut_ptr().offset(1),
-                v0.as_ptr().offset(1), 1);
+    copy(v1.as_mut_ptr().offset(1),
+         v0.as_ptr().offset(1), 1);
     assert!((v1[0] == 0u16 &&
              v1[1] == 32001u16 &&
              v1[2] == 0u16));
-    copy_memory(v1.as_mut_ptr(),
-                v0.as_ptr().offset(2), 1);
+    copy(v1.as_mut_ptr(),
+         v0.as_ptr().offset(2), 1);
     assert!((v1[0] == 32002u16 &&
             v1[1] == 32001u16 &&
             v1[2] == 0u16));
-    copy_memory(v1.as_mut_ptr().offset(2),
-                v0.as_ptr(), 1);
+    copy(v1.as_mut_ptr().offset(2),
+         v0.as_ptr(), 1);
     assert!((v1[0] == 32002u16 &&
             v1[1] == 32001u16 &&
             v1[2] == 32000u16));
@@ -164,7 +164,7 @@ fn test_ptr_subtraction() {
 fn test_set_memory() {
     let mut xs = [0u8; 20];
     let ptr = xs.as_mut_ptr();
-    unsafe { set_memory(ptr, 5u8, xs.len()); }
+    unsafe { write_bytes(ptr, 5u8, xs.len()); }
     assert!(xs == [5u8; 20]);
 }
diff --git a/src/librustc_trans/trans/intrinsic.rs b/src/librustc_trans/trans/intrinsic.rs
index 69ca9a5e81cbc..7f4b0b02d53fe 100644
--- a/src/librustc_trans/trans/intrinsic.rs
+++ b/src/librustc_trans/trans/intrinsic.rs
@@ -386,7 +386,7 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
             InBoundsGEP(bcx, ptr, &[offset])
         }
 
-        (_, "copy_nonoverlapping_memory") => {
+        (_, "copy_nonoverlapping") => {
             copy_intrinsic(bcx,
                            false,
                            false,
@@ -396,7 +396,7 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
                            llargs[2],
                            call_debug_location)
         }
-        (_, "copy_memory") => {
+        (_, "copy") => {
             copy_intrinsic(bcx,
                            true,
                            false,
@@ -406,7 +406,7 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
                            llargs[2],
                            call_debug_location)
         }
-        (_, "set_memory") => {
+        (_, "write_bytes") => {
             memset_intrinsic(bcx,
                              false,
                              *substs.types.get(FnSpace, 0),
diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs
index 45d4a1edc6b24..b9a6a05fda9e8 100644
--- a/src/librustc_typeck/check/mod.rs
+++ b/src/librustc_typeck/check/mod.rs
@@ -5365,7 +5365,7 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) {
                   mutbl: ast::MutImmutable
               }))
            }
-          "copy_memory" | "copy_nonoverlapping_memory" |
+          "copy" | "copy_nonoverlapping" |
           "volatile_copy_memory" | "volatile_copy_nonoverlapping_memory" => {
             (1,
              vec!(
@@ -5381,7 +5381,7 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &ast::ForeignItem) {
                ),
               ty::mk_nil(tcx))
           }
-          "set_memory" | "volatile_set_memory" => {
+          "write_bytes" | "volatile_set_memory" => {
             (1,
              vec!(
               ty::mk_ptr(tcx, ty::mt {
diff --git a/src/libstd/old_io/extensions.rs b/src/libstd/old_io/extensions.rs
index 2990c1c265d54..5b1b9471b075c 100644
--- a/src/libstd/old_io/extensions.rs
+++ b/src/libstd/old_io/extensions.rs
@@ -159,7 +159,7 @@ pub fn u64_to_be_bytes<T, F>(n: u64, size: uint, f: F) -> T where
 /// that many bytes are parsed. For example, if `size` is 4, then a
 /// 32-bit value is parsed.
 pub fn u64_from_be_bytes(data: &[u8], start: uint, size: uint) -> u64 {
-    use ptr::{copy_nonoverlapping_memory};
+    use ptr::{copy_nonoverlapping};
 
     assert!(size <= 8);
 
@@ -171,7 +171,7 @@ pub fn u64_from_be_bytes(data: &[u8], start: uint, size: uint) -> u64 {
     unsafe {
         let ptr = data.as_ptr().offset(start as int);
         let out = buf.as_mut_ptr();
-        copy_nonoverlapping_memory(out.offset((8 - size) as int), ptr, size);
+        copy_nonoverlapping(out.offset((8 - size) as int), ptr, size);
         (*(out as *const u64)).to_be()
     }
 }
diff --git a/src/test/bench/shootout-reverse-complement.rs b/src/test/bench/shootout-reverse-complement.rs
index 93aa5f2571bfb..0ab552cf047a0 100644
--- a/src/test/bench/shootout-reverse-complement.rs
+++ b/src/test/bench/shootout-reverse-complement.rs
@@ -46,7 +46,7 @@ extern crate libc;
 
 use std::old_io::stdio::{stdin_raw, stdout_raw};
 use std::old_io::*;
-use std::ptr::{copy_memory, Unique};
+use std::ptr::{copy, Unique};
 use std::thread;
 
 struct Tables {
@@ -181,8 +181,8 @@ fn reverse_complement(seq: &mut [u8], tables: &Tables) {
     let mut i = LINE_LEN;
     while i < len {
         unsafe {
-            copy_memory(seq.as_mut_ptr().offset((i - off + 1) as int),
-                        seq.as_ptr().offset((i - off) as int), off);
+            copy(seq.as_mut_ptr().offset((i - off + 1) as int),
+                 seq.as_ptr().offset((i - off) as int), off);
             *seq.get_unchecked_mut(i - off) = b'\n';
         }
         i += LINE_LEN + 1;
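
For reference, a minimal usage sketch (not part of the patch) of the renamed functions as they are declared above, with the destination pointer as the first argument; the buffers and values here are illustrative only:

```rust
use std::ptr;

fn main() {
    let src = [1u8, 2, 3, 4];
    let mut dst = [0u8; 4];

    unsafe {
        // copy_nonoverlapping (formerly copy_nonoverlapping_memory) is the
        // memcpy-like primitive: the two regions must not overlap.
        ptr::copy_nonoverlapping(dst.as_mut_ptr(), src.as_ptr(), src.len());
        assert_eq!(dst, [1, 2, 3, 4]);

        // copy (formerly copy_memory) is the memmove-like primitive and
        // tolerates overlap: shift the first three bytes up by one.
        ptr::copy(dst.as_mut_ptr().offset(1), dst.as_ptr(), 3);
        assert_eq!(dst, [1, 1, 2, 3]);

        // write_bytes (formerly set_memory) is the memset-like primitive.
        ptr::write_bytes(dst.as_mut_ptr(), 0, dst.len());
        assert_eq!(dst, [0; 4]);
    }
}
```

The `#[cfg(stage0)]` re-exports added at the top of `libcore/intrinsics.rs` make the new names resolve to the old intrinsics when building with the snapshot compiler, so the rest of the tree can switch to `copy`, `copy_nonoverlapping`, and `write_bytes` immediately while the old `*_memory` names remain only for bootstrapping.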