diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index dfa55848c90..41efa0468ac 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -153,8 +153,7 @@ use core::mem::{transmute, min_align_of, size_of, forget};
 use core::ops::{Deref, Drop};
 use core::option::Option;
 use core::option::Option::{Some, None};
-use core::ptr;
-use core::ptr::RawPtr;
+use core::ptr::{mod, NonZero, RawPtr};
 use core::result::Result;
 use core::result::Result::{Ok, Err};
 
@@ -174,7 +173,7 @@ struct RcBox<T> {
 pub struct Rc<T> {
     // FIXME #12808: strange names to try to avoid interfering with field accesses of the contained
     // type via Deref
-    _ptr: *mut RcBox<T>,
+    _ptr: NonZero<*mut RcBox<T>>,
     _nosend: marker::NoSend,
     _noshare: marker::NoSync
 }
@@ -196,11 +195,11 @@ impl<T> Rc<T> {
                 // there is an implicit weak pointer owned by all the strong pointers, which
                 // ensures that the weak destructor never frees the allocation while the strong
                 // destructor is running, even if the weak pointer is stored inside the strong one.
-                _ptr: transmute(box RcBox {
+                _ptr: NonZero(transmute(box RcBox {
                     value: value,
                     strong: Cell::new(1),
                     weak: Cell::new(1)
-                }),
+                })),
                 _nosend: marker::NoSend,
                 _noshare: marker::NoSync
             }
@@ -281,7 +280,8 @@ pub fn try_unwrap<T>(rc: Rc<T>) -> Result<T, Rc<T>> {
             let val = ptr::read(&*rc); // copy the contained object
             // destruct the box and skip our Drop
            // we can ignore the refcounts because we know we're unique
-            deallocate(rc._ptr as *mut u8, size_of::<RcBox<T>>(),
+            let NonZero(ptr) = rc._ptr;
+            deallocate(ptr as *mut u8, size_of::<RcBox<T>>(),
                        min_align_of::<RcBox<T>>());
             forget(rc);
             Ok(val)
@@ -311,7 +311,10 @@ pub fn try_unwrap<T>(rc: Rc<T>) -> Result<T, Rc<T>> {
 #[experimental]
 pub fn get_mut<'a, T>(rc: &'a mut Rc<T>) -> Option<&'a mut T> {
     if is_unique(rc) {
-        let inner = unsafe { &mut *rc._ptr };
+        let inner = unsafe {
+            let NonZero(ptr) = rc._ptr;
+            &mut *ptr
+        };
         Some(&mut inner.value)
     } else {
         None
@@ -343,7 +346,10 @@ impl<T: Clone> Rc<T> {
         // pointer that will ever be returned to T. Our reference count is guaranteed to be 1 at
         // this point, and we required the `Rc<T>` itself to be `mut`, so we're returning the only
         // possible reference to the inner value.
-        let inner = unsafe { &mut *self._ptr };
+        let inner = unsafe {
+            let NonZero(ptr) = self._ptr;
+            &mut *ptr
+        };
         &mut inner.value
     }
 }
@@ -391,7 +397,8 @@ impl<T> Drop for Rc<T> {
     /// ```
     fn drop(&mut self) {
         unsafe {
-            if !self._ptr.is_null() {
+            let NonZero(ptr) = self._ptr;
+            if !ptr.is_null() {
                 self.dec_strong();
                 if self.strong() == 0 {
                     ptr::read(&**self); // destroy the contained object
@@ -401,7 +408,7 @@ impl<T> Drop for Rc<T> {
 
                     self.dec_weak();
                     if self.weak() == 0 {
-                        deallocate(self._ptr as *mut u8, size_of::<RcBox<T>>(),
+                        deallocate(ptr as *mut u8, size_of::<RcBox<T>>(),
                                    min_align_of::<RcBox<T>>())
                     }
                 }
@@ -618,7 +625,7 @@ impl<T: fmt::Show> fmt::Show for Rc<T> {
 pub struct Weak<T> {
     // FIXME #12808: strange names to try to avoid interfering with
     // field accesses of the contained type via Deref
-    _ptr: *mut RcBox<T>,
+    _ptr: NonZero<*mut RcBox<T>>,
     _nosend: marker::NoSend,
     _noshare: marker::NoSync
 }
@@ -682,12 +689,13 @@ impl<T> Drop for Weak<T> {
     /// ```
     fn drop(&mut self) {
         unsafe {
-            if !self._ptr.is_null() {
+            let NonZero(ptr) = self._ptr;
+            if !ptr.is_null() {
                 self.dec_weak();
                 // the weak count starts at 1, and will only go to zero if all the strong pointers
                 // have disappeared.
                 if self.weak() == 0 {
-                    deallocate(self._ptr as *mut u8, size_of::<RcBox<T>>(),
+                    deallocate(ptr as *mut u8, size_of::<RcBox<T>>(),
                                min_align_of::<RcBox<T>>())
                 }
             }
@@ -742,12 +750,18 @@ trait RcBoxPtr<T> {
 
 impl<T> RcBoxPtr<T> for Rc<T> {
     #[inline(always)]
-    fn inner(&self) -> &RcBox<T> { unsafe { &(*self._ptr) } }
+    fn inner(&self) -> &RcBox<T> {
+        let NonZero(ptr) = self._ptr;
+        unsafe { &(*ptr) }
+    }
 }
 
 impl<T> RcBoxPtr<T> for Weak<T> {
     #[inline(always)]
-    fn inner(&self) -> &RcBox<T> { unsafe { &(*self._ptr) } }
+    fn inner(&self) -> &RcBox<T> {
+        let NonZero(ptr) = self._ptr;
+        unsafe { &(*ptr) }
+    }
 }
 
 #[cfg(test)]
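
The change is mechanical: the `_ptr` field of `Rc<T>` and `Weak<T>` now holds `NonZero<*mut RcBox<T>>` instead of a bare `*mut RcBox<T>`, the constructor wraps the freshly allocated pointer with `NonZero(...)`, and every use site destructures it back out with `let NonZero(ptr) = ...` before dereferencing or deallocating. The usual motivation for this wrapper is the null-pointer ("enum layout") optimization: the compiler may use the forbidden zero value to represent `None`, keeping `Option<Rc<T>>` pointer-sized. Below is a minimal, self-contained sketch of that wrap/destructure pattern in present-day Rust; `MyNonZero`, `MyRc`, and `MyRcBox` are hypothetical stand-ins, not this library's API, and the 2014-era syntax in the diff (`box`, `transmute`, `use core::ptr::{mod, ...}`) is deliberately avoided.

```rust
// Sketch of the NonZero wrap/destructure pattern the diff applies.
// All names here are illustrative stand-ins; the real 2014-era
// `core::ptr::NonZero` was likewise a Copy tuple struct, built with
// `NonZero(ptr)` and taken apart with `let NonZero(ptr) = ...`.
#[derive(Clone, Copy)]
struct MyNonZero<T>(T);

struct MyRcBox<T> {
    value: T,
    // strong/weak counts omitted; the sketch only shows the pointer handling.
}

struct MyRc<T> {
    // The field stores the wrapper, not a bare `*mut`, as in the patched `Rc<T>`.
    ptr: MyNonZero<*mut MyRcBox<T>>,
}

impl<T> MyRc<T> {
    fn new(value: T) -> MyRc<T> {
        // Allocate the box and wrap the raw pointer at construction time,
        // mirroring `_ptr: NonZero(transmute(box RcBox { ... }))`.
        let boxed = Box::new(MyRcBox { value });
        MyRc { ptr: MyNonZero(Box::into_raw(boxed)) }
    }

    fn value(&self) -> &T {
        // Every use site first destructures the wrapper back into a raw
        // pointer, exactly like `let NonZero(ptr) = self._ptr;` in the diff.
        let MyNonZero(ptr) = self.ptr;
        unsafe { &(*ptr).value }
    }
}

impl<T> Drop for MyRc<T> {
    fn drop(&mut self) {
        // Single-owner toy version: destructure and free the allocation.
        let MyNonZero(ptr) = self.ptr;
        unsafe { drop(Box::from_raw(ptr)); }
    }
}

fn main() {
    let rc = MyRc::new(String::from("shared"));
    println!("{}", rc.value()); // prints "shared"
}
```

Note that the diff keeps the `is_null()` checks in both `drop` implementations even though the stored pointer is now wrapped in `NonZero`; presumably this is because, in this era of Rust, a moved-from value could be zeroed out and still have its destructor run, so a null `_ptr` remained observable there.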