From 4af50548b9ed283acb62768624a8cd942eabe964 Mon Sep 17 00:00:00 2001 From: Luqman Aden <laden@csclub.uwaterloo.ca> Date: Thu, 4 Dec 2014 13:29:47 -0500 Subject: [PATCH] liballoc: Use NonZero in Arc. --- src/liballoc/arc.rs | 29 +++++++++++++++++------------ 1 file changed, 17 insertions(+), 12 deletions(-) diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs index 8d8bbb42932..290617535bb 100644 --- a/src/liballoc/arc.rs +++ b/src/liballoc/arc.rs @@ -79,8 +79,7 @@ use core::mem; use core::ops::{Drop, Deref}; use core::option::Option; use core::option::Option::{Some, None}; -use core::ptr::RawPtr; -use core::ptr; +use core::ptr::{mod, NonZero, RawPtr}; use heap::deallocate; /// An atomically reference counted wrapper for shared state. @@ -114,7 +113,7 @@ use heap::deallocate; pub struct Arc<T> { // FIXME #12808: strange name to try to avoid interfering with // field accesses of the contained type via Deref - _ptr: *mut ArcInner<T>, + _ptr: NonZero<*mut ArcInner<T>>, } unsafe impl<T: Sync + Send> Send for Arc<T> { } @@ -130,7 +129,7 @@ unsafe impl<T: Sync + Send> Sync for Arc<T> { } pub struct Weak<T> { // FIXME #12808: strange name to try to avoid interfering with // field accesses of the contained type via Deref - _ptr: *mut ArcInner<T>, + _ptr: NonZero<*mut ArcInner<T>>, } unsafe impl<T: Sync + Send> Send for Weak<T> { } @@ -165,7 +164,7 @@ impl<T: Sync + Send> Arc<T> { weak: atomic::AtomicUint::new(1), data: data, }; - Arc { _ptr: unsafe { mem::transmute(x) } } + Arc { _ptr: NonZero(unsafe { mem::transmute(x) }) } } /// Downgrades the `Arc` to a `Weak` reference. @@ -194,7 +193,8 @@ impl<T: Sync + Send> Arc<T> { // pointer is valid. Furthermore, we know that the `ArcInner` structure itself is `Sync` // because the inner data is `Sync` as well, so we're ok loaning out an immutable pointer // to these contents. - unsafe { &*self._ptr } + let NonZero(ptr) = self._ptr; + unsafe { &*ptr } } } @@ -281,7 +281,8 @@ impl<T: Send + Sync + Clone> Arc<T> { // pointer that will ever be returned to T. 
Our reference count is guaranteed to be 1 at // this point, and we required the Arc itself to be `mut`, so we're returning the only // possible reference to the inner data. - let inner = unsafe { &mut *self._ptr }; + let NonZero(ptr) = self._ptr; + let inner = unsafe { &mut *ptr }; &mut inner.data } } @@ -316,7 +317,8 @@ impl<T: Sync + Send> Drop for Arc<T> { fn drop(&mut self) { // This structure has #[unsafe_no_drop_flag], so this drop glue may run more than once (but // it is guaranteed to be zeroed after the first if it's run more than once) - if self._ptr.is_null() { return } + let NonZero(ptr) = self._ptr; + if ptr.is_null() { return } // Because `fetch_sub` is already atomic, we do not need to synchronize with other threads // unless we are going to delete the object. This same logic applies to the below @@ -346,7 +348,7 @@ impl<T: Sync + Send> Drop for Arc<T> { if self.inner().weak.fetch_sub(1, atomic::Release) == 1 { atomic::fence(atomic::Acquire); - unsafe { deallocate(self._ptr as *mut u8, size_of::<ArcInner<T>>(), + unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(), min_align_of::<ArcInner<T>>()) } } } @@ -386,7 +388,8 @@ impl<T: Sync + Send> Weak<T> { #[inline] fn inner(&self) -> &ArcInner<T> { // See comments above for why this is "safe" - unsafe { &*self._ptr } + let NonZero(ptr) = self._ptr; + unsafe { &*ptr } } } @@ -442,14 +445,16 @@ impl<T: Sync + Send> Drop for Weak<T> { /// } // implicit drop /// ``` fn drop(&mut self) { + let NonZero(ptr) = self._ptr; + // see comments above for why this check is here - if self._ptr.is_null() { return } + if ptr.is_null() { return } // If we find out that we were the last weak pointer, then its time to deallocate the data // entirely. See the discussion in Arc::drop() about the memory orderings if self.inner().weak.fetch_sub(1, atomic::Release) == 1 { atomic::fence(atomic::Acquire); - unsafe { deallocate(self._ptr as *mut u8, size_of::<ArcInner<T>>(), + unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(), min_align_of::<ArcInner<T>>()) } } }