From ab45694198356ae78972025e0d3beece297431d1 Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Mon, 23 Feb 2015 11:39:16 -0800 Subject: [PATCH] std: Stabilize some `ptr` functions Specifically, the following actions were taken: * The `copy_memory` and `copy_nonoverlapping_memory` functions were renamed to `copy` and `copy_nonoverlapping`, dropping the `_memory` suffix (as it's implied by the functionality). Both functions are now marked as `#[stable]`. * The `set_memory` function was renamed to `write_bytes` and is now stable. * The `zero_memory` function is now deprecated in favor of `write_bytes` directly. * The `Unique` pointer type is now behind its own feature gate called `unique` to facilitate future stabilization. * All type parameters are now `T: ?Sized` wherever possible, and new `where` clauses were added to the `offset` functions to require that the pointee type is sized. [breaking-change] --- src/liballoc/lib.rs | 1 + src/libcollections/btree/node.rs | 24 +++--- src/libcollections/lib.rs | 1 + src/libcollections/slice.rs | 26 +++---- src/libcollections/string.rs | 18 ++--- src/libcollections/vec.rs | 10 +-- src/libcollections/vec_deque.rs | 22 +++--- src/libcore/intrinsics.rs | 7 +- src/libcore/mem.rs | 6 +- src/libcore/ptr.rs | 109 ++++++++++++++------------- src/libcore/slice.rs | 8 +- src/libflate/lib.rs | 1 + src/libstd/collections/hash/table.rs | 8 +- src/libstd/io/buffered.rs | 6 +- src/libstd/lib.rs | 1 + 15 files changed, 126 insertions(+), 122 deletions(-) diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs index 0cdc71b6f60..82bd13475c7 100644 --- a/src/liballoc/lib.rs +++ b/src/liballoc/lib.rs @@ -73,6 +73,7 @@ #![feature(unboxed_closures)] #![feature(unsafe_no_drop_flag)] #![feature(core)] +#![feature(unique)] #![cfg_attr(test, feature(test, alloc, rustc_private))] #![cfg_attr(all(not(feature = "external_funcs"), not(feature = "external_crate")), feature(libc))] diff --git a/src/libcollections/btree/node.rs b/src/libcollections/btree/node.rs index f0fc12da727..b8703f6e7d9 100644 --- a/src/libcollections/btree/node.rs +++ b/src/libcollections/btree/node.rs @@ -1136,12 +1136,12 @@ impl Node { // This must be followed by insert_edge on an internal node. #[inline] unsafe fn insert_kv(&mut self, index: usize, key: K, val: V) -> &mut V { - ptr::copy_memory( + ptr::copy( self.keys_mut().as_mut_ptr().offset(index as isize + 1), self.keys().as_ptr().offset(index as isize), self.len() - index ); - ptr::copy_memory( + ptr::copy( self.vals_mut().as_mut_ptr().offset(index as isize + 1), self.vals().as_ptr().offset(index as isize), self.len() - index @@ -1158,7 +1158,7 @@ impl Node { // This can only be called immediately after a call to insert_kv.
#[inline] unsafe fn insert_edge(&mut self, index: usize, edge: Node) { - ptr::copy_memory( + ptr::copy( self.edges_mut().as_mut_ptr().offset(index as isize + 1), self.edges().as_ptr().offset(index as isize), self.len() - index @@ -1191,12 +1191,12 @@ impl Node { let key = ptr::read(self.keys().get_unchecked(index)); let val = ptr::read(self.vals().get_unchecked(index)); - ptr::copy_memory( + ptr::copy( self.keys_mut().as_mut_ptr().offset(index as isize), self.keys().as_ptr().offset(index as isize + 1), self.len() - index - 1 ); - ptr::copy_memory( + ptr::copy( self.vals_mut().as_mut_ptr().offset(index as isize), self.vals().as_ptr().offset(index as isize + 1), self.len() - index - 1 @@ -1212,7 +1212,7 @@ impl Node { unsafe fn remove_edge(&mut self, index: usize) -> Node { let edge = ptr::read(self.edges().get_unchecked(index)); - ptr::copy_memory( + ptr::copy( self.edges_mut().as_mut_ptr().offset(index as isize), self.edges().as_ptr().offset(index as isize + 1), self.len() - index + 1 @@ -1239,18 +1239,18 @@ impl Node { unsafe { right._len = self.len() / 2; let right_offset = self.len() - right.len(); - ptr::copy_nonoverlapping_memory( + ptr::copy_nonoverlapping( right.keys_mut().as_mut_ptr(), self.keys().as_ptr().offset(right_offset as isize), right.len() ); - ptr::copy_nonoverlapping_memory( + ptr::copy_nonoverlapping( right.vals_mut().as_mut_ptr(), self.vals().as_ptr().offset(right_offset as isize), right.len() ); if !self.is_leaf() { - ptr::copy_nonoverlapping_memory( + ptr::copy_nonoverlapping( right.edges_mut().as_mut_ptr(), self.edges().as_ptr().offset(right_offset as isize), right.len() + 1 @@ -1280,18 +1280,18 @@ impl Node { ptr::write(self.keys_mut().get_unchecked_mut(old_len), key); ptr::write(self.vals_mut().get_unchecked_mut(old_len), val); - ptr::copy_nonoverlapping_memory( + ptr::copy_nonoverlapping( self.keys_mut().as_mut_ptr().offset(old_len as isize + 1), right.keys().as_ptr(), right.len() ); - ptr::copy_nonoverlapping_memory( + ptr::copy_nonoverlapping( self.vals_mut().as_mut_ptr().offset(old_len as isize + 1), right.vals().as_ptr(), right.len() ); if !self.is_leaf() { - ptr::copy_nonoverlapping_memory( + ptr::copy_nonoverlapping( self.edges_mut().as_mut_ptr().offset(old_len as isize + 1), right.edges().as_ptr(), right.len() + 1 diff --git a/src/libcollections/lib.rs b/src/libcollections/lib.rs index 1f3c54a4cb5..7b66bfee34f 100644 --- a/src/libcollections/lib.rs +++ b/src/libcollections/lib.rs @@ -30,6 +30,7 @@ #![feature(unboxed_closures)] #![feature(unicode)] #![feature(unsafe_destructor)] +#![feature(unique)] #![feature(unsafe_no_drop_flag)] #![cfg_attr(test, feature(rand, rustc_private, test))] #![cfg_attr(test, allow(deprecated))] // rand diff --git a/src/libcollections/slice.rs b/src/libcollections/slice.rs index 4b2b125fc99..8ca60d717f4 100644 --- a/src/libcollections/slice.rs +++ b/src/libcollections/slice.rs @@ -1331,12 +1331,10 @@ fn insertion_sort(v: &mut [T], mut compare: F) where F: FnMut(&T, &T) -> O if i != j { let tmp = ptr::read(read_ptr); - ptr::copy_memory(buf_v.offset(j + 1), - &*buf_v.offset(j), - (i - j) as usize); - ptr::copy_nonoverlapping_memory(buf_v.offset(j), - &tmp, - 1); + ptr::copy(buf_v.offset(j + 1), + &*buf_v.offset(j), + (i - j) as usize); + ptr::copy_nonoverlapping(buf_v.offset(j), &tmp, 1); mem::forget(tmp); } } @@ -1409,10 +1407,10 @@ fn merge_sort(v: &mut [T], mut compare: F) where F: FnMut(&T, &T) -> Order // j + 1 could be `len` (for the last `i`), but in // that case, `i == j` so we don't copy. 
The // `.offset(j)` is always in bounds. - ptr::copy_memory(buf_dat.offset(j + 1), - &*buf_dat.offset(j), - i - j as usize); - ptr::copy_nonoverlapping_memory(buf_dat.offset(j), read_ptr, 1); + ptr::copy(buf_dat.offset(j + 1), + &*buf_dat.offset(j), + i - j as usize); + ptr::copy_nonoverlapping(buf_dat.offset(j), read_ptr, 1); } } } @@ -1460,11 +1458,11 @@ fn merge_sort(v: &mut [T], mut compare: F) where F: FnMut(&T, &T) -> Order if left == right_start { // the number remaining in this run. let elems = (right_end as usize - right as usize) / mem::size_of::(); - ptr::copy_nonoverlapping_memory(out, &*right, elems); + ptr::copy_nonoverlapping(out, &*right, elems); break; } else if right == right_end { let elems = (right_start as usize - left as usize) / mem::size_of::(); - ptr::copy_nonoverlapping_memory(out, &*left, elems); + ptr::copy_nonoverlapping(out, &*left, elems); break; } @@ -1478,7 +1476,7 @@ fn merge_sort(v: &mut [T], mut compare: F) where F: FnMut(&T, &T) -> Order } else { step(&mut left) }; - ptr::copy_nonoverlapping_memory(out, &*to_copy, 1); + ptr::copy_nonoverlapping(out, &*to_copy, 1); step(&mut out); } } @@ -1492,7 +1490,7 @@ fn merge_sort(v: &mut [T], mut compare: F) where F: FnMut(&T, &T) -> Order // write the result to `v` in one go, so that there are never two copies // of the same object in `v`. unsafe { - ptr::copy_nonoverlapping_memory(v.as_mut_ptr(), &*buf_dat, len); + ptr::copy_nonoverlapping(v.as_mut_ptr(), &*buf_dat, len); } // increment the pointer, returning the old pointer. diff --git a/src/libcollections/string.rs b/src/libcollections/string.rs index 6c2624cd204..e141a49002b 100644 --- a/src/libcollections/string.rs +++ b/src/libcollections/string.rs @@ -568,9 +568,9 @@ impl String { let CharRange { ch, next } = self.char_range_at(idx); unsafe { - ptr::copy_memory(self.vec.as_mut_ptr().offset(idx as isize), - self.vec.as_ptr().offset(next as isize), - len - next); + ptr::copy(self.vec.as_mut_ptr().offset(idx as isize), + self.vec.as_ptr().offset(next as isize), + len - next); self.vec.set_len(len - (next - idx)); } ch @@ -598,12 +598,12 @@ impl String { let amt = ch.encode_utf8(&mut bits).unwrap(); unsafe { - ptr::copy_memory(self.vec.as_mut_ptr().offset((idx + amt) as isize), - self.vec.as_ptr().offset(idx as isize), - len - idx); - ptr::copy_memory(self.vec.as_mut_ptr().offset(idx as isize), - bits.as_ptr(), - amt); + ptr::copy(self.vec.as_mut_ptr().offset((idx + amt) as isize), + self.vec.as_ptr().offset(idx as isize), + len - idx); + ptr::copy(self.vec.as_mut_ptr().offset(idx as isize), + bits.as_ptr(), + amt); self.vec.set_len(len + amt); } } diff --git a/src/libcollections/vec.rs b/src/libcollections/vec.rs index 3f5f9288916..8097b94cd3d 100644 --- a/src/libcollections/vec.rs +++ b/src/libcollections/vec.rs @@ -267,7 +267,7 @@ impl Vec { pub unsafe fn from_raw_buf(ptr: *const T, elts: usize) -> Vec { let mut dst = Vec::with_capacity(elts); dst.set_len(elts); - ptr::copy_nonoverlapping_memory(dst.as_mut_ptr(), ptr, elts); + ptr::copy_nonoverlapping(dst.as_mut_ptr(), ptr, elts); dst } @@ -548,7 +548,7 @@ impl Vec { let p = self.as_mut_ptr().offset(index as isize); // Shift everything over to make space. (Duplicating the // `index`th element into two consecutive places.) - ptr::copy_memory(p.offset(1), &*p, len - index); + ptr::copy(p.offset(1), &*p, len - index); // Write it in, overwriting the first copy of the `index`th // element. 
ptr::write(&mut *p, element); @@ -585,7 +585,7 @@ impl Vec { ret = ptr::read(ptr); // Shift everything down to fill in that spot. - ptr::copy_memory(ptr, &*ptr.offset(1), len - index - 1); + ptr::copy(ptr, &*ptr.offset(1), len - index - 1); } self.set_len(len - 1); ret @@ -718,7 +718,7 @@ impl Vec { self.reserve(other.len()); let len = self.len(); unsafe { - ptr::copy_nonoverlapping_memory( + ptr::copy_nonoverlapping( self.get_unchecked_mut(len), other.as_ptr(), other.len()); @@ -1036,7 +1036,7 @@ impl Vec { self.set_len(at); other.set_len(other_len); - ptr::copy_nonoverlapping_memory( + ptr::copy_nonoverlapping( other.as_mut_ptr(), self.as_ptr().offset(at as isize), other.len()); diff --git a/src/libcollections/vec_deque.rs b/src/libcollections/vec_deque.rs index f65e644fa52..a7e93c40447 100644 --- a/src/libcollections/vec_deque.rs +++ b/src/libcollections/vec_deque.rs @@ -134,7 +134,7 @@ impl VecDeque { self.cap); debug_assert!(src + len <= self.cap, "dst={} src={} len={} cap={}", dst, src, len, self.cap); - ptr::copy_memory( + ptr::copy( self.ptr.offset(dst as isize), self.ptr.offset(src as isize), len); @@ -147,7 +147,7 @@ impl VecDeque { self.cap); debug_assert!(src + len <= self.cap, "dst={} src={} len={} cap={}", dst, src, len, self.cap); - ptr::copy_nonoverlapping_memory( + ptr::copy_nonoverlapping( self.ptr.offset(dst as isize), self.ptr.offset(src as isize), len); @@ -1343,22 +1343,22 @@ impl VecDeque { // `at` lies in the first half. let amount_in_first = first_len - at; - ptr::copy_nonoverlapping_memory(*other.ptr, - first_half.as_ptr().offset(at as isize), - amount_in_first); + ptr::copy_nonoverlapping(*other.ptr, + first_half.as_ptr().offset(at as isize), + amount_in_first); // just take all of the second half. - ptr::copy_nonoverlapping_memory(other.ptr.offset(amount_in_first as isize), - second_half.as_ptr(), - second_len); + ptr::copy_nonoverlapping(other.ptr.offset(amount_in_first as isize), + second_half.as_ptr(), + second_len); } else { // `at` lies in the second half, need to factor in the elements we skipped // in the first half. let offset = at - first_len; let amount_in_second = second_len - offset; - ptr::copy_nonoverlapping_memory(*other.ptr, - second_half.as_ptr().offset(offset as isize), - amount_in_second); + ptr::copy_nonoverlapping(*other.ptr, + second_half.as_ptr().offset(offset as isize), + amount_in_second); } } diff --git a/src/libcore/intrinsics.rs b/src/libcore/intrinsics.rs index b2ee9596387..e7af0be88a0 100644 --- a/src/libcore/intrinsics.rs +++ b/src/libcore/intrinsics.rs @@ -293,7 +293,7 @@ extern "rust-intrinsic" { /// } /// } /// ``` - #[unstable(feature = "core")] + #[stable(feature = "rust1", since = "1.0.0")] pub fn copy_nonoverlapping_memory(dst: *mut T, src: *const T, count: usize); /// Copies `count * size_of` bytes from `src` to `dst`. The source @@ -323,13 +323,12 @@ extern "rust-intrinsic" { /// } /// ``` /// - #[unstable(feature = "core")] + #[stable(feature = "rust1", since = "1.0.0")] pub fn copy_memory(dst: *mut T, src: *const T, count: usize); /// Invokes memset on the specified pointer, setting `count * size_of::()` /// bytes of memory starting at `dst` to `c`. 
- #[unstable(feature = "core", - reason = "uncertain about naming and semantics")] + #[stable(feature = "rust1", since = "1.0.0")] pub fn set_memory(dst: *mut T, val: u8, count: usize); /// Equivalent to the appropriate `llvm.memcpy.p0i8.0i8.*` intrinsic, with diff --git a/src/libcore/mem.rs b/src/libcore/mem.rs index 740997b7a24..2aa8ec0c548 100644 --- a/src/libcore/mem.rs +++ b/src/libcore/mem.rs @@ -203,9 +203,9 @@ pub fn swap(x: &mut T, y: &mut T) { let mut t: T = uninitialized(); // Perform the swap, `&mut` pointers never alias - ptr::copy_nonoverlapping_memory(&mut t, &*x, 1); - ptr::copy_nonoverlapping_memory(x, &*y, 1); - ptr::copy_nonoverlapping_memory(y, &t, 1); + ptr::copy_nonoverlapping(&mut t, &*x, 1); + ptr::copy_nonoverlapping(x, &*y, 1); + ptr::copy_nonoverlapping(y, &t, 1); // y and t now point to the same thing, but we need to completely forget `t` // because it's no longer relevant. diff --git a/src/libcore/ptr.rs b/src/libcore/ptr.rs index b44cc899787..0625c3c7d60 100644 --- a/src/libcore/ptr.rs +++ b/src/libcore/ptr.rs @@ -101,16 +101,28 @@ use cmp::Ordering::{self, Less, Equal, Greater}; // FIXME #19649: intrinsic docs don't render, so these have no docs :( -#[unstable(feature = "core")] -pub use intrinsics::copy_nonoverlapping_memory; +#[stable(feature = "rust1", since = "1.0.0")] +pub use intrinsics::copy_nonoverlapping_memory as copy_nonoverlapping; -#[unstable(feature = "core")] -pub use intrinsics::copy_memory; +#[stable(feature = "rust1", since = "1.0.0")] +pub use intrinsics::copy_memory as copy; -#[unstable(feature = "core", - reason = "uncertain about naming and semantics")] -pub use intrinsics::set_memory; +#[stable(feature = "rust1", since = "1.0.0")] +pub use intrinsics::set_memory as write_bytes; +extern "rust-intrinsic" { + #[unstable(feature = "core")] + #[deprecated(since = "1.0.0", reason = "renamed to `copy_nonoverlapping`")] + pub fn copy_nonoverlapping_memory(dst: *mut T, src: *const T, count: usize); + #[unstable(feature = "core")] + #[deprecated(since = "1.0.0", reason = "renamed to `copy`")] + pub fn copy_memory(dst: *mut T, src: *const T, count: usize); + + #[unstable(feature = "core", + reason = "uncertain about naming and semantics")] + #[deprecated(since = "1.0.0", reason = "renamed to `write_bytes`")] + pub fn set_memory(dst: *mut T, val: u8, count: usize); +} /// Creates a null raw pointer. /// @@ -150,8 +162,9 @@ pub fn null_mut() -> *mut T { 0 as *mut T } #[inline] #[unstable(feature = "core", reason = "may play a larger role in std::ptr future extensions")] +#[deprecated(since = "1.0.0", reason = "use `write_bytes` instead")] pub unsafe fn zero_memory(dst: *mut T, count: usize) { - set_memory(dst, 0, count); + write_bytes(dst, 0, count); } /// Swaps the values at two mutable locations of the same type, without @@ -169,9 +182,9 @@ pub unsafe fn swap(x: *mut T, y: *mut T) { let t: *mut T = &mut tmp; // Perform the swap - copy_nonoverlapping_memory(t, &*x, 1); - copy_memory(x, &*y, 1); // `x` and `y` may overlap - copy_nonoverlapping_memory(y, &*t, 1); + copy_nonoverlapping(t, &*x, 1); + copy(x, &*y, 1); // `x` and `y` may overlap + copy_nonoverlapping(y, &*t, 1); // y and t now point to the same thing, but we need to completely forget `tmp` // because it's no longer relevant. 
@@ -207,7 +220,7 @@ pub unsafe fn replace(dest: *mut T, mut src: T) -> T { #[stable(feature = "rust1", since = "1.0.0")] pub unsafe fn read(src: *const T) -> T { let mut tmp: T = mem::uninitialized(); - copy_nonoverlapping_memory(&mut tmp, src, 1); + copy_nonoverlapping(&mut tmp, src, 1); tmp } @@ -224,7 +237,7 @@ pub unsafe fn read_and_zero(dest: *mut T) -> T { let tmp = read(&*dest); // Now zero out `dest`: - zero_memory(dest, 1); + write_bytes(dest, 0, 1); tmp } @@ -248,9 +261,9 @@ pub unsafe fn write(dst: *mut T, src: T) { /// Methods on raw pointers #[stable(feature = "rust1", since = "1.0.0")] -pub trait PtrExt: Sized { +pub trait PtrExt { /// The type which is being pointed at - type Target; + type Target: ?Sized; /// Returns true if the pointer is null. #[stable(feature = "rust1", since = "1.0.0")] @@ -279,14 +292,14 @@ pub trait PtrExt: Sized { /// Otherwise `offset` invokes Undefined Behaviour, regardless of whether /// the pointer is used. #[stable(feature = "rust1", since = "1.0.0")] - unsafe fn offset(self, count: isize) -> Self; + unsafe fn offset(self, count: isize) -> Self where Self::Target: Sized; } /// Methods on mutable raw pointers #[stable(feature = "rust1", since = "1.0.0")] pub trait MutPtrExt { /// The type which is being pointed at - type Target; + type Target: ?Sized; /// Returns `None` if the pointer is null, or else returns a mutable /// reference to the value wrapped in `Some`. @@ -302,7 +315,7 @@ pub trait MutPtrExt { } #[stable(feature = "rust1", since = "1.0.0")] -impl PtrExt for *const T { +impl PtrExt for *const T { type Target = T; #[inline] @@ -311,7 +324,7 @@ impl PtrExt for *const T { #[inline] #[stable(feature = "rust1", since = "1.0.0")] - unsafe fn offset(self, count: isize) -> *const T { + unsafe fn offset(self, count: isize) -> *const T where T: Sized { intrinsics::offset(self, count) } @@ -329,7 +342,7 @@ impl PtrExt for *const T { } #[stable(feature = "rust1", since = "1.0.0")] -impl PtrExt for *mut T { +impl PtrExt for *mut T { type Target = T; #[inline] @@ -338,7 +351,7 @@ impl PtrExt for *mut T { #[inline] #[stable(feature = "rust1", since = "1.0.0")] - unsafe fn offset(self, count: isize) -> *mut T { + unsafe fn offset(self, count: isize) -> *mut T where T: Sized { intrinsics::offset(self, count) as *mut T } @@ -356,7 +369,7 @@ impl PtrExt for *mut T { } #[stable(feature = "rust1", since = "1.0.0")] -impl MutPtrExt for *mut T { +impl MutPtrExt for *mut T { type Target = T; #[inline] @@ -374,33 +387,25 @@ impl MutPtrExt for *mut T { // Equality for pointers #[stable(feature = "rust1", since = "1.0.0")] -impl PartialEq for *const T { +impl PartialEq for *const T { #[inline] - fn eq(&self, other: &*const T) -> bool { - *self == *other - } - #[inline] - fn ne(&self, other: &*const T) -> bool { !self.eq(other) } + fn eq(&self, other: &*const T) -> bool { *self == *other } } #[stable(feature = "rust1", since = "1.0.0")] -impl Eq for *const T {} +impl Eq for *const T {} #[stable(feature = "rust1", since = "1.0.0")] -impl PartialEq for *mut T { +impl PartialEq for *mut T { #[inline] - fn eq(&self, other: &*mut T) -> bool { - *self == *other - } - #[inline] - fn ne(&self, other: &*mut T) -> bool { !self.eq(other) } + fn eq(&self, other: &*mut T) -> bool { *self == *other } } #[stable(feature = "rust1", since = "1.0.0")] -impl Eq for *mut T {} +impl Eq for *mut T {} #[stable(feature = "rust1", since = "1.0.0")] -impl Clone for *const T { +impl Clone for *const T { #[inline] fn clone(&self) -> *const T { *self @@ -408,7 +413,7 @@ impl Clone for *const 
T { } #[stable(feature = "rust1", since = "1.0.0")] -impl Clone for *mut T { +impl Clone for *mut T { #[inline] fn clone(&self) -> *mut T { *self @@ -452,7 +457,7 @@ mod externfnpointers { // Comparison for pointers #[stable(feature = "rust1", since = "1.0.0")] -impl Ord for *const T { +impl Ord for *const T { #[inline] fn cmp(&self, other: &*const T) -> Ordering { if self < other { @@ -466,7 +471,7 @@ impl Ord for *const T { } #[stable(feature = "rust1", since = "1.0.0")] -impl PartialOrd for *const T { +impl PartialOrd for *const T { #[inline] fn partial_cmp(&self, other: &*const T) -> Option { Some(self.cmp(other)) @@ -486,7 +491,7 @@ impl PartialOrd for *const T { } #[stable(feature = "rust1", since = "1.0.0")] -impl Ord for *mut T { +impl Ord for *mut T { #[inline] fn cmp(&self, other: &*mut T) -> Ordering { if self < other { @@ -500,7 +505,7 @@ impl Ord for *mut T { } #[stable(feature = "rust1", since = "1.0.0")] -impl PartialOrd for *mut T { +impl PartialOrd for *mut T { #[inline] fn partial_cmp(&self, other: &*mut T) -> Option { Some(self.cmp(other)) @@ -527,8 +532,8 @@ impl PartialOrd for *mut T { /// modified without a unique path to the `Unique` reference. Useful /// for building abstractions like `Vec` or `Box`, which /// internally use raw pointers to manage the memory that they own. -#[unstable(feature = "core", reason = "recently added to this module")] -pub struct Unique { +#[unstable(feature = "unique")] +pub struct Unique { pointer: NonZero<*const T>, _marker: PhantomData, } @@ -537,39 +542,37 @@ pub struct Unique { /// reference is unaliased. Note that this aliasing invariant is /// unenforced by the type system; the abstraction using the /// `Unique` must enforce it. -#[unstable(feature = "core", reason = "recently added to this module")] +#[unstable(feature = "unique")] unsafe impl Send for Unique { } /// `Unique` pointers are `Sync` if `T` is `Sync` because the data they /// reference is unaliased. Note that this aliasing invariant is /// unenforced by the type system; the abstraction using the /// `Unique` must enforce it. -#[unstable(feature = "core", reason = "recently added to this module")] +#[unstable(feature = "unique")] unsafe impl Sync for Unique { } -impl Unique { +impl Unique { /// Create a new `Unique`. - #[unstable(feature = "core", - reason = "recently added to this module")] + #[unstable(feature = "unique")] pub unsafe fn new(ptr: *mut T) -> Unique { Unique { pointer: NonZero::new(ptr as *const T), _marker: PhantomData } } /// Dereference the content. - #[unstable(feature = "core", - reason = "recently added to this module")] + #[unstable(feature = "unique")] pub unsafe fn get(&self) -> &T { &**self.pointer } /// Mutably dereference the content. - #[unstable(feature = "core", - reason = "recently added to this module")] + #[unstable(feature = "unique")] pub unsafe fn get_mut(&mut self) -> &mut T { &mut ***self } } +#[unstable(feature = "unique")] impl Deref for Unique { type Target = *mut T; diff --git a/src/libcore/slice.rs b/src/libcore/slice.rs index 2debcaa5813..ae64a57a390 100644 --- a/src/libcore/slice.rs +++ b/src/libcore/slice.rs @@ -1500,7 +1500,7 @@ pub mod bytes { impl MutableByteVector for [u8] { #[inline] fn set_memory(&mut self, value: u8) { - unsafe { ptr::set_memory(self.as_mut_ptr(), value, self.len()) }; + unsafe { ptr::write_bytes(self.as_mut_ptr(), value, self.len()) }; } } @@ -1514,9 +1514,9 @@ pub mod bytes { // `dst` is unaliasable, so we know statically it doesn't overlap // with `src`. 
unsafe { - ptr::copy_nonoverlapping_memory(dst.as_mut_ptr(), - src.as_ptr(), - len_src); + ptr::copy_nonoverlapping(dst.as_mut_ptr(), + src.as_ptr(), + len_src); } } } diff --git a/src/libflate/lib.rs b/src/libflate/lib.rs index 24660b3f396..2ce52cdec25 100644 --- a/src/libflate/lib.rs +++ b/src/libflate/lib.rs @@ -27,6 +27,7 @@ #![feature(int_uint)] #![feature(libc)] #![feature(staged_api)] +#![feature(unique)] #[cfg(test)] #[macro_use] extern crate log; diff --git a/src/libstd/collections/hash/table.rs b/src/libstd/collections/hash/table.rs index 7513cb8a61c..4c03d8915eb 100644 --- a/src/libstd/collections/hash/table.rs +++ b/src/libstd/collections/hash/table.rs @@ -23,7 +23,7 @@ use num::{Int, UnsignedInt}; use ops::{Deref, DerefMut, Drop}; use option::Option; use option::Option::{Some, None}; -use ptr::{self, PtrExt, copy_nonoverlapping_memory, Unique, zero_memory}; +use ptr::{self, PtrExt, Unique}; use rt::heap::{allocate, deallocate, EMPTY}; use collections::hash_state::HashState; @@ -477,8 +477,8 @@ impl>> GapThenFull { pub fn shift(mut self) -> Option> { unsafe { *self.gap.raw.hash = mem::replace(&mut *self.full.raw.hash, EMPTY_BUCKET); - copy_nonoverlapping_memory(self.gap.raw.key, self.full.raw.key, 1); - copy_nonoverlapping_memory(self.gap.raw.val, self.full.raw.val, 1); + ptr::copy_nonoverlapping(self.gap.raw.key, self.full.raw.key, 1); + ptr::copy_nonoverlapping(self.gap.raw.val, self.full.raw.val, 1); } let FullBucket { raw: prev_raw, idx: prev_idx, .. } = self.full; @@ -637,7 +637,7 @@ impl RawTable { pub fn new(capacity: usize) -> RawTable { unsafe { let ret = RawTable::new_uninitialized(capacity); - zero_memory(*ret.hashes, capacity); + ptr::write_bytes(*ret.hashes, 0, capacity); ret } } diff --git a/src/libstd/io/buffered.rs b/src/libstd/io/buffered.rs index 9ef31978236..6458dfc5aa2 100644 --- a/src/libstd/io/buffered.rs +++ b/src/libstd/io/buffered.rs @@ -155,9 +155,9 @@ impl BufWriter { if written > 0 { // NB: would be better expressed as .remove(0..n) if it existed unsafe { - ptr::copy_memory(self.buf.as_mut_ptr(), - self.buf.as_ptr().offset(written as isize), - len - written); + ptr::copy(self.buf.as_mut_ptr(), + self.buf.as_ptr().offset(written as isize), + len - written); } } self.buf.truncate(len - written); diff --git a/src/libstd/lib.rs b/src/libstd/lib.rs index caaedeeb2fc..b5bdeb7f181 100644 --- a/src/libstd/lib.rs +++ b/src/libstd/lib.rs @@ -123,6 +123,7 @@ #![feature(unsafe_no_drop_flag)] #![feature(macro_reexport)] #![feature(hash)] +#![feature(unique)] #![cfg_attr(test, feature(test, rustc_private, env))] // Don't link to std. We are std.
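
The hunks above migrate the overlapping-copy call sites from `copy_memory` to `ptr::copy`, which keeps memmove semantics and, in this snapshot, the `(dst, src, count)` argument order (later releases swapped it to `(src, dst, count)`). A minimal sketch of the `Vec::insert`-style shift the patch touches; `insert_at` is an illustrative helper written against the signatures declared here, not code from this tree:

```rust
use std::ptr;

// Shift the tail of the vector up by one and write `element` into the hole,
// the same pattern Vec::insert uses in this patch. `copy` is the overlap-safe
// (memmove-like) variant, which is required because the source and
// destination ranges overlap.
fn insert_at<T>(v: &mut Vec<T>, index: usize, element: T) {
    let len = v.len();
    assert!(index <= len);
    v.reserve(1);
    unsafe {
        let p = v.as_mut_ptr().offset(index as isize);
        // Patch-era argument order: copy(dst, src, count).
        ptr::copy(p.offset(1), p, len - index);
        // Overwrite the stale duplicate of the old `index`th element
        // without reading or dropping it.
        ptr::write(p, element);
        v.set_len(len + 1);
    }
}

fn main() {
    let mut v = vec![1, 2, 4];
    insert_at(&mut v, 2, 3);
    assert_eq!(v, vec![1, 2, 3, 4]);
}
```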
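
`ptr::copy_nonoverlapping` is the memcpy-like sibling: the caller promises the two regions are disjoint, which is what the `Vec::from_raw_buf`, `Vec::append`, and `VecDeque::split_off` hunks rely on. A sketch of the `from_raw_buf` pattern under the same assumption about argument order as above; the free function here is illustrative only:

```rust
use std::ptr;

// Bitwise-copy `elts` elements out of `src` into a freshly allocated Vec,
// mirroring the Vec::from_raw_buf hunk. The destination buffer is brand new,
// so the two ranges cannot overlap and the memcpy-like variant applies.
// The caller must guarantee that `src` points at `elts` valid elements and
// that duplicating them bitwise is acceptable.
unsafe fn from_raw_buf<T>(src: *const T, elts: usize) -> Vec<T> {
    let mut dst = Vec::with_capacity(elts);
    // Patch-era argument order: copy_nonoverlapping(dst, src, count).
    ptr::copy_nonoverlapping(dst.as_mut_ptr(), src, elts);
    dst.set_len(elts);
    dst
}

fn main() {
    let a = [1u32, 2, 3, 4];
    let v = unsafe { from_raw_buf(a.as_ptr(), a.len()) };
    assert_eq!(v, vec![1, 2, 3, 4]);
}
```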
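
`write_bytes(dst, val, count)` absorbs both `set_memory` and the now-deprecated `zero_memory`; zeroing is simply the `val == 0` case, as the `RawTable::new` hunk shows. A small sketch, which should also build on later releases since this signature did not change:

```rust
use std::ptr;

fn main() {
    // write_bytes(dst, val, count) fills `count * size_of::<u64>()` bytes
    // with `val`; with `val == 0` it is the replacement for the deprecated
    // `zero_memory`, as in the RawTable::new hunk above.
    let mut hashes = vec![0xdead_beef_u64; 8];
    unsafe {
        ptr::write_bytes(hashes.as_mut_ptr(), 0, hashes.len());
    }
    assert!(hashes.iter().all(|&h| h == 0));
}
```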
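
Gating `Unique` behind its own `unique` feature matters mostly to owning containers such as `Box` and `Vec`, which wrap their allocation in it to record sole ownership and pick up the `Send`/`Sync` impls above. A sketch against the `Unique` API declared in this patch, on a nightly with the new gate; `MyBox` is illustrative, and `Box::into_raw`/`from_raw` are used only for brevity (they were stabilized later), so treat this as a sketch rather than code from this tree:

```rust
#![feature(unique)] // the new gate introduced by this patch; nightly only

use std::ptr::Unique;

// A stripped-down owning pointer in the style of Box/Vec. `Unique<T>`
// records that this is the only pointer to the allocation, which is what
// justifies the unsafe Send/Sync impls in the patch.
struct MyBox<T> {
    ptr: Unique<T>,
}

impl<T> MyBox<T> {
    fn new(value: T) -> MyBox<T> {
        // Move the value to the heap and take unique ownership of the raw
        // pointer (Box::into_raw is used here for brevity only).
        let raw = Box::into_raw(Box::new(value));
        MyBox { ptr: unsafe { Unique::new(raw) } }
    }

    fn get(&self) -> &T {
        // `Unique::get` is unsafe in this patch; the wrapper upholds the
        // aliasing invariant, so exposing a safe borrow is fine.
        unsafe { self.ptr.get() }
    }
}

impl<T> Drop for MyBox<T> {
    fn drop(&mut self) {
        // `Unique` derefs to the raw `*mut T`; rebuild the Box so the value
        // and its allocation are freed.
        unsafe { drop(Box::from_raw(*self.ptr)); }
    }
}

fn main() {
    let b = MyBox::new(42);
    assert_eq!(*b.get(), 42);
}
```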
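
The `T: ?Sized` relaxation means raw pointers to unsized types (`str`, slices, trait objects) keep `PartialEq`, `Ord`, `Clone`, and `is_null`, while `offset` gains a `Sized` bound because pointer arithmetic needs a known element size. A small demonstration, written so it also builds on later releases where these bounds live on the inherent pointer methods; on the tree this patch targets you would additionally `use std::ptr::PtrExt;`:

```rust
// Raw pointers to unsized types are fat pointers (data pointer + metadata);
// after this patch they still get comparison, cloning, and is_null, while
// offset remains restricted to Sized pointees.
fn main() {
    let s: &str = "stabilize ptr";
    let a: *const str = s;          // fat pointer to an unsized type
    let b: *const str = &s[0..4];   // same data pointer, different length

    assert!(!a.is_null());          // fine for ?Sized pointees
    assert!(a != b);                // PartialEq covers *const str; metadata differs
    let _also_a = a;                // Clone/Copy likewise

    // `offset` needs size_of::<T>(), hence the new `Sized` bound: pointer
    // arithmetic is done on the thin `*const u8` instead.
    let p: *const u8 = s.as_ptr();
    let _q = unsafe { p.offset(4) };
}
```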