Stabilize rc_raw feature, closes #37197

This commit is contained in:
Aaron Turon 2017-03-14 21:10:02 -07:00 committed by Alex Crichton
parent 48890d4971
commit a8f4a1bd98
11 changed files with 54 additions and 54 deletions
src
liballoc
libcollections
libcore
librustc_data_structures
libstd
collections/hash
lib.rs

@ -287,17 +287,15 @@ impl<T> Arc<T> {
/// # Examples
///
/// ```
/// #![feature(rc_raw)]
///
/// use std::sync::Arc;
///
/// let x = Arc::new(10);
/// let x_ptr = Arc::into_raw(x);
/// assert_eq!(unsafe { *x_ptr }, 10);
/// ```
#[unstable(feature = "rc_raw", issue = "37197")]
pub fn into_raw(this: Self) -> *mut T {
let ptr = unsafe { &mut (**this.ptr).data as *mut _ };
#[stable(feature = "rc_raw", since = "1.17.0")]
pub fn into_raw(this: Self) -> *const T {
let ptr = unsafe { &(**this.ptr).data as *const _ };
mem::forget(this);
ptr
}
@ -315,8 +313,6 @@ impl<T> Arc<T> {
/// # Examples
///
/// ```
/// #![feature(rc_raw)]
///
/// use std::sync::Arc;
///
/// let x = Arc::new(10);
@ -332,11 +328,11 @@ impl<T> Arc<T> {
///
/// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
/// ```
#[unstable(feature = "rc_raw", issue = "37197")]
pub unsafe fn from_raw(ptr: *mut T) -> Self {
#[stable(feature = "rc_raw", since = "1.17.0")]
pub unsafe fn from_raw(ptr: *const T) -> Self {
// To find the corresponding pointer to the `ArcInner` we need to subtract the offset of the
// `data` field from the pointer.
Arc { ptr: Shared::new((ptr as *mut u8).offset(-offset_of!(ArcInner<T>, data)) as *mut _) }
Arc { ptr: Shared::new((ptr as *const u8).offset(-offset_of!(ArcInner<T>, data)) as *const _) }
}
}
@ -448,7 +444,7 @@ impl<T: ?Sized> Arc<T> {
// Non-inlined part of `drop`.
#[inline(never)]
unsafe fn drop_slow(&mut self) {
let ptr = *self.ptr;
let ptr = self.ptr.as_mut_ptr();
// Destroy the data at this time, even though we may not free the box
// allocation itself (there may still be weak pointers lying around).
@ -624,7 +620,7 @@ impl<T: Clone> Arc<T> {
// As with `get_mut()`, the unsafety is ok because our reference was
// either unique to begin with, or became one upon cloning the contents.
unsafe {
let inner = &mut **this.ptr;
let inner = &mut *this.ptr.as_mut_ptr();
&mut inner.data
}
}
@ -667,7 +663,7 @@ impl<T: ?Sized> Arc<T> {
// the Arc itself to be `mut`, so we're returning the only possible
// reference to the inner data.
unsafe {
let inner = &mut **this.ptr;
let inner = &mut *this.ptr.as_mut_ptr();
Some(&mut inner.data)
}
} else {

@ -364,17 +364,15 @@ impl<T> Rc<T> {
/// # Examples
///
/// ```
/// #![feature(rc_raw)]
///
/// use std::rc::Rc;
///
/// let x = Rc::new(10);
/// let x_ptr = Rc::into_raw(x);
/// assert_eq!(unsafe { *x_ptr }, 10);
/// ```
#[unstable(feature = "rc_raw", issue = "37197")]
pub fn into_raw(this: Self) -> *mut T {
let ptr = unsafe { &mut (**this.ptr).value as *mut _ };
#[stable(feature = "rc_raw", since = "1.17.0")]
pub fn into_raw(this: Self) -> *const T {
let ptr = unsafe { &mut (*this.ptr.as_mut_ptr()).value as *const _ };
mem::forget(this);
ptr
}
@ -392,8 +390,6 @@ impl<T> Rc<T> {
/// # Examples
///
/// ```
/// #![feature(rc_raw)]
///
/// use std::rc::Rc;
///
/// let x = Rc::new(10);
@ -409,11 +405,11 @@ impl<T> Rc<T> {
///
/// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
/// ```
#[unstable(feature = "rc_raw", issue = "37197")]
pub unsafe fn from_raw(ptr: *mut T) -> Self {
#[stable(feature = "rc_raw", since = "1.17.0")]
pub unsafe fn from_raw(ptr: *const T) -> Self {
// To find the corresponding pointer to the `RcBox` we need to subtract the offset of the
// `value` field from the pointer.
Rc { ptr: Shared::new((ptr as *mut u8).offset(-offset_of!(RcBox<T>, value)) as *mut _) }
Rc { ptr: Shared::new((ptr as *const u8).offset(-offset_of!(RcBox<T>, value)) as *const _) }
}
}
@ -543,7 +539,7 @@ impl<T: ?Sized> Rc<T> {
#[stable(feature = "rc_unique", since = "1.4.0")]
pub fn get_mut(this: &mut Self) -> Option<&mut T> {
if Rc::is_unique(this) {
let inner = unsafe { &mut **this.ptr };
let inner = unsafe { &mut *this.ptr.as_mut_ptr() };
Some(&mut inner.value)
} else {
None
@ -627,7 +623,7 @@ impl<T: Clone> Rc<T> {
// reference count is guaranteed to be 1 at this point, and we required
// the `Rc<T>` itself to be `mut`, so we're returning the only possible
// reference to the inner value.
let inner = unsafe { &mut **this.ptr };
let inner = unsafe { &mut *this.ptr.as_mut_ptr() };
&mut inner.value
}
}
@ -673,7 +669,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
/// ```
fn drop(&mut self) {
unsafe {
let ptr = *self.ptr;
let ptr = self.ptr.as_mut_ptr();
self.dec_strong();
if self.strong() == 0 {

@ -133,10 +133,13 @@ mod std {
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)]
pub enum Bound<T> {
/// An inclusive bound.
#[stable(feature = "collections_bound", since = "1.17.0")]
Included(T),
/// An exclusive bound.
#[stable(feature = "collections_bound", since = "1.17.0")]
Excluded(T),
/// An infinite endpoint. Indicates that there is no bound in this direction.
#[stable(feature = "collections_bound", since = "1.17.0")]
Unbounded,
}

@ -142,7 +142,7 @@ impl<T> LinkedList<T> {
match self.head {
None => self.tail = node,
Some(head) => (**head).prev = node,
Some(head) => (*head.as_mut_ptr()).prev = node,
}
self.head = node;
@ -154,12 +154,12 @@ impl<T> LinkedList<T> {
#[inline]
fn pop_front_node(&mut self) -> Option<Box<Node<T>>> {
self.head.map(|node| unsafe {
let node = Box::from_raw(*node);
let node = Box::from_raw(node.as_mut_ptr());
self.head = node.next;
match self.head {
None => self.tail = None,
Some(head) => (**head).prev = None,
Some(head) => (*head.as_mut_ptr()).prev = None,
}
self.len -= 1;
@ -177,7 +177,7 @@ impl<T> LinkedList<T> {
match self.tail {
None => self.head = node,
Some(tail) => (**tail).next = node,
Some(tail) => (*tail.as_mut_ptr()).next = node,
}
self.tail = node;
@ -189,12 +189,12 @@ impl<T> LinkedList<T> {
#[inline]
fn pop_back_node(&mut self) -> Option<Box<Node<T>>> {
self.tail.map(|node| unsafe {
let node = Box::from_raw(*node);
let node = Box::from_raw(node.as_mut_ptr());
self.tail = node.prev;
match self.tail {
None => self.head = None,
Some(tail) => (**tail).next = None,
Some(tail) => (*tail.as_mut_ptr()).next = None,
}
self.len -= 1;
@ -269,8 +269,8 @@ impl<T> LinkedList<T> {
Some(tail) => {
if let Some(other_head) = other.head.take() {
unsafe {
(**tail).next = Some(other_head);
(**other_head).prev = Some(tail);
(*tail.as_mut_ptr()).next = Some(other_head);
(*other_head.as_mut_ptr()).prev = Some(tail);
}
self.tail = other.tail.take();
@ -484,7 +484,7 @@ impl<T> LinkedList<T> {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn front_mut(&mut self) -> Option<&mut T> {
self.head.map(|node| unsafe { &mut (**node).element })
self.head.map(|node| unsafe { &mut (*node.as_mut_ptr()).element })
}
/// Provides a reference to the back element, or `None` if the list is
@ -530,7 +530,7 @@ impl<T> LinkedList<T> {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn back_mut(&mut self) -> Option<&mut T> {
self.tail.map(|node| unsafe { &mut (**node).element })
self.tail.map(|node| unsafe { &mut (*node.as_mut_ptr()).element })
}
/// Adds an element first in the list.
@ -675,9 +675,9 @@ impl<T> LinkedList<T> {
let second_part_head;
unsafe {
second_part_head = (**split_node.unwrap()).next.take();
second_part_head = (*split_node.unwrap().as_mut_ptr()).next.take();
if let Some(head) = second_part_head {
(**head).prev = None;
(*head.as_mut_ptr()).prev = None;
}
}
@ -816,7 +816,7 @@ impl<'a, T> Iterator for IterMut<'a, T> {
None
} else {
self.head.map(|node| unsafe {
let node = &mut **node;
let node = &mut *node.as_mut_ptr();
self.len -= 1;
self.head = node.next;
&mut node.element
@ -838,7 +838,7 @@ impl<'a, T> DoubleEndedIterator for IterMut<'a, T> {
None
} else {
self.tail.map(|node| unsafe {
let node = &mut **node;
let node = &mut *node.as_mut_ptr();
self.len -= 1;
self.tail = node.prev;
&mut node.element
@ -896,8 +896,8 @@ impl<'a, T> IterMut<'a, T> {
element: element,
})));
(**prev).next = node;
(**head).prev = node;
(*prev.as_mut_ptr()).next = node;
(*head.as_mut_ptr()).prev = node;
self.list.len += 1;
},
@ -929,7 +929,7 @@ impl<'a, T> IterMut<'a, T> {
if self.len == 0 {
None
} else {
self.head.map(|node| unsafe { &mut (**node).element })
self.head.map(|node| unsafe { &mut (*node.as_mut_ptr()).element })
}
}
}

@ -2120,7 +2120,7 @@ unsafe impl<#[may_dangle] T> Drop for IntoIter<T> {
for _x in self.by_ref() {}
// RawVec handles deallocation
let _ = unsafe { RawVec::from_raw_parts(*self.buf, self.cap) };
let _ = unsafe { RawVec::from_raw_parts(self.buf.as_mut_ptr(), self.cap) };
}
}
@ -2185,7 +2185,7 @@ impl<'a, T> Drop for Drain<'a, T> {
if self.tail_len > 0 {
unsafe {
let source_vec = &mut **self.vec;
let source_vec = &mut *self.vec.as_mut_ptr();
// memmove back untouched tail, update to new length
let start = source_vec.len();
let tail = self.tail_start;

@ -2125,7 +2125,7 @@ impl<'a, T: 'a> Drop for Drain<'a, T> {
fn drop(&mut self) {
for _ in self.by_ref() {}
let source_deque = unsafe { &mut **self.deque };
let source_deque = unsafe { &mut *self.deque.as_mut_ptr() };
// T = source_deque_tail; H = source_deque_head; t = drain_tail; h = drain_head
//

@ -968,11 +968,19 @@ impl<T: ?Sized> Shared<T> {
/// # Safety
///
/// `ptr` must be non-null.
pub unsafe fn new(ptr: *mut T) -> Self {
pub unsafe fn new(ptr: *const T) -> Self {
Shared { pointer: NonZero::new(ptr), _marker: PhantomData }
}
}
#[unstable(feature = "shared", issue = "27730")]
impl<T: ?Sized> Shared<T> {
/// Acquires the underlying pointer as a `*mut` pointer.
pub unsafe fn as_mut_ptr(&self) -> *mut T {
**self as _
}
}
#[unstable(feature = "shared", issue = "27730")]
impl<T: ?Sized> Clone for Shared<T> {
fn clone(&self) -> Self {
@ -988,10 +996,10 @@ impl<T: ?Sized, U: ?Sized> CoerceUnsized<Shared<U>> for Shared<T> where T: Unsiz
#[unstable(feature = "shared", issue = "27730")]
impl<T: ?Sized> Deref for Shared<T> {
type Target = *mut T;
type Target = *const T;
#[inline]
fn deref(&self) -> &*mut T {
fn deref(&self) -> &*const T {
unsafe { mem::transmute(&*self.pointer) }
}
}

@ -248,7 +248,7 @@ impl<'a, A: Array> Drop for Drain<'a, A> {
if self.tail_len > 0 {
unsafe {
let source_array_vec = &mut **self.array_vec;
let source_array_vec = &mut *self.array_vec.as_mut_ptr();
// memmove back untouched tail, update to new length
let start = source_array_vec.len();
let tail = self.tail_start;
@ -317,4 +317,3 @@ impl<T> Default for ManuallyDrop<T> {
ManuallyDrop::new()
}
}

@ -27,7 +27,6 @@
#![feature(shared)]
#![feature(collections_range)]
#![feature(collections_bound)]
#![cfg_attr(stage0,feature(field_init_shorthand))]
#![feature(nonzero)]
#![feature(rustc_private)]

@ -1154,7 +1154,7 @@ impl<'a, K, V> Iterator for Drain<'a, K, V> {
fn next(&mut self) -> Option<(SafeHash, K, V)> {
self.iter.next().map(|bucket| {
unsafe {
(**self.table).size -= 1;
(*self.table.as_mut_ptr()).size -= 1;
let (k, v) = ptr::read(bucket.pair);
(SafeHash { hash: ptr::replace(bucket.hash, EMPTY_BUCKET) }, k, v)
}

@ -245,7 +245,6 @@
#![feature(char_escape_debug)]
#![feature(char_internals)]
#![feature(collections)]
#![feature(collections_bound)]
#![feature(collections_range)]
#![feature(compiler_builtins_lib)]
#![feature(const_fn)]