Auto merge of #27837 - Gankro:weaknique, r=aturon

This prepares both for the FCP of #27718

Arc:

* Add previously omitted function `Arc::try_unwrap(Self) -> Result<T, Self>`
* Move `arc.downgrade()` to `Arc::downgrade(&Self)` per conventions.
* Deprecate `Arc::weak_count` and `Arc::strong_count` for raciness. It is almost
  impossible to correctly act on these results without a CAS loop on the actual
  fields.
* Rename `Arc::make_unique` to `Arc::make_mut` to avoid uniqueness terminology
  and to clarify its relation to `Arc::get_mut` (a usage sketch follows this list).
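For quick reference, a minimal usage sketch of the new `Arc` surface. It mirrors the doc examples in the diff below; the `arc_unique`/`arc_weak` gate names are the ones proposed in this PR and may still change during FCP:

```rust
#![feature(arc_unique, arc_weak)]
use std::sync::Arc;

fn main() {
    // try_unwrap succeeds only while this is the sole strong reference
    // (outstanding weak references don't block it).
    let x = Arc::new(3);
    assert_eq!(Arc::try_unwrap(x), Ok(3));

    // downgrade is now an associated function, per conventions.
    let five = Arc::new(5);
    let weak_five = Arc::downgrade(&five);
    assert!(weak_five.upgrade().is_some());

    // make_mut (formerly make_unique) only clones when it has to.
    let mut data = Arc::new(10);
    *Arc::make_mut(&mut data) += 1;
    assert_eq!(*data, 11);
}
```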
 

Rc:
* Add `Rc::would_unwrap(&Self) -> bool` to introspect whether `try_unwrap` would
  succeed, since `try_unwrap` is destructive (unlike `get_mut`); see the sketch
  after this list.
* Move `rc.downgrade()` to `Rc::downgrade(&Self)` per conventions.
* Deprecate `Rc::weak_count` and `Rc::strong_count` for questionable utility.
* Deprecate `Rc::is_unique` for questionable semantics (there are two kinds of 
  uniqueness with Weak pointers in play).
* Rename `rc.make_unique()` to `Rc::make_mut(&mut Self)` per conventions, to
  avoid uniqueness terminology, and to clarify the relation to `Rc::get_mut`.
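A corresponding sketch for the `Rc` changes (again mirroring the doc examples in the diff below; the `rc_unique`/`rc_weak`/`rc_would_unwrap` gate names are as proposed here):

```rust
#![feature(rc_unique, rc_weak, rc_would_unwrap)]
use std::rc::Rc;

fn main() {
    // would_unwrap lets you check before the destructive try_unwrap.
    let x = Rc::new(3);
    assert!(Rc::would_unwrap(&x));
    assert_eq!(Rc::try_unwrap(x), Ok(3));

    // downgrade is now an associated function, per conventions.
    let five = Rc::new(5);
    let weak_five = Rc::downgrade(&five);
    assert!(weak_five.upgrade().is_some());

    // make_mut (formerly make_unique) clones only if other Rcs exist.
    let mut data = Rc::new(10);
    let other = data.clone();
    *Rc::make_mut(&mut data) += 1; // clones; `other` is unaffected
    assert_eq!(*data, 11);
    assert_eq!(*other, 10);
}
```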

Notable omission:

* `Arc::would_unwrap` is not added because it would be racy and therefore wouldn't
  provide much actionable information.

(note: rc_would_unwrap is not proposed for FCP as it is truly experimental)


r? @aturon (careful attention should be paid to the new `Arc::try_unwrap`, but intuitively it should "just work" by virtue of being semantically equivalent to `Drop`).
bors 2015-08-21 05:04:32 +00:00
commit b1d07bbe70
7 changed files with 239 additions and 161 deletions


@ -136,9 +136,7 @@ impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}
/// Weak pointers will not keep the data inside of the `Arc` alive, and can be
/// used to break cycles between `Arc` pointers.
#[unsafe_no_drop_flag]
#[unstable(feature = "arc_weak",
reason = "Weak pointers may not belong in this module.",
issue = "27718")]
#[unstable(feature = "arc_weak", reason = "needs FCP", issue = "27718")]
pub struct Weak<T: ?Sized> {
// FIXME #12808: strange name to try to avoid interfering with
// field accesses of the contained type via Deref
@ -162,7 +160,7 @@ struct ArcInner<T: ?Sized> {
// the value usize::MAX acts as a sentinel for temporarily "locking" the
// ability to upgrade weak pointers or downgrade strong ones; this is used
// to avoid races in `make_unique` and `get_mut`.
// to avoid races in `make_mut` and `get_mut`.
weak: atomic::AtomicUsize,
data: T,
@ -193,6 +191,44 @@ impl<T> Arc<T> {
};
Arc { _ptr: unsafe { NonZero::new(Box::into_raw(x)) } }
}
/// Unwraps the contained value if the `Arc<T>` has only one strong reference.
/// This will succeed even if there are outstanding weak references.
///
/// Otherwise, an `Err` is returned with the same `Arc<T>`.
///
/// # Examples
///
/// ```
/// #![feature(arc_unique)]
/// use std::sync::Arc;
///
/// let x = Arc::new(3);
/// assert_eq!(Arc::try_unwrap(x), Ok(3));
///
/// let x = Arc::new(4);
/// let _y = x.clone();
/// assert_eq!(Arc::try_unwrap(x), Err(Arc::new(4)));
/// ```
#[inline]
#[unstable(feature = "arc_unique", reason = "needs FCP", issue = "27718")]
pub fn try_unwrap(this: Self) -> Result<T, Self> {
// See `drop` for why all these atomics are like this
if this.inner().strong.compare_and_swap(1, 0, Release) != 1 { return Err(this) }
atomic::fence(Acquire);
unsafe {
let ptr = *this._ptr;
let elem = ptr::read(&(*ptr).data);
// Make a weak pointer to clean up the implicit strong-weak reference
let _weak = Weak { _ptr: this._ptr };
mem::forget(this);
Ok(elem)
}
}
}
impl<T: ?Sized> Arc<T> {
@ -202,21 +238,18 @@ impl<T: ?Sized> Arc<T> {
///
/// ```
/// #![feature(arc_weak)]
///
/// use std::sync::Arc;
///
/// let five = Arc::new(5);
///
/// let weak_five = five.downgrade();
/// let weak_five = Arc::downgrade(&five);
/// ```
#[unstable(feature = "arc_weak",
reason = "Weak pointers may not belong in this module.",
issue = "27718")]
pub fn downgrade(&self) -> Weak<T> {
#[unstable(feature = "arc_weak", reason = "needs FCP", issue = "27718")]
pub fn downgrade(this: &Self) -> Weak<T> {
loop {
// This Relaxed is OK because we're checking the value in the CAS
// below.
let cur = self.inner().weak.load(Relaxed);
let cur = this.inner().weak.load(Relaxed);
// check if the weak counter is currently "locked"; if so, spin.
if cur == usize::MAX { continue }
@ -228,23 +261,23 @@ impl<T: ?Sized> Arc<T> {
// Unlike with Clone(), we need this to be an Acquire read to
// synchronize with the write coming from `is_unique`, so that the
// events prior to that write happen before this read.
if self.inner().weak.compare_and_swap(cur, cur + 1, Acquire) == cur {
return Weak { _ptr: self._ptr }
if this.inner().weak.compare_and_swap(cur, cur + 1, Acquire) == cur {
return Weak { _ptr: this._ptr }
}
}
}
/// Get the number of weak references to this value.
#[inline]
#[unstable(feature = "arc_counts", issue = "27718")]
pub fn weak_count(this: &Arc<T>) -> usize {
#[unstable(feature = "arc_counts", reason = "not clearly useful, and racy", issue = "27718")]
pub fn weak_count(this: &Self) -> usize {
this.inner().weak.load(SeqCst) - 1
}
/// Get the number of strong references to this value.
#[inline]
#[unstable(feature = "arc_counts", issue = "27718")]
pub fn strong_count(this: &Arc<T>) -> usize {
#[unstable(feature = "arc_counts", reason = "not clearly useful, and racy", issue = "27718")]
pub fn strong_count(this: &Self) -> usize {
this.inner().strong.load(SeqCst)
}
@ -332,27 +365,40 @@ impl<T: ?Sized> Deref for Arc<T> {
}
impl<T: Clone> Arc<T> {
/// Make a mutable reference from the given `Arc<T>`.
#[unstable(feature = "arc_unique", reason = "renamed to Arc::make_mut", issue = "27718")]
#[deprecated(since = "1.4.0", reason = "renamed to Arc::make_mut")]
pub fn make_unique(this: &mut Self) -> &mut T {
Arc::make_mut(this)
}
/// Make a mutable reference into the given `Arc<T>` by cloning the inner
/// data unless the `Arc<T>` has exactly one strong reference and no weak
/// references.
///
/// This is also referred to as a copy-on-write operation because the inner
/// data is cloned if the (strong) reference count is greater than one. If
/// we hold the only strong reference, any existing weak references will no
/// longer be upgradeable.
/// This is also referred to as a copy-on-write.
///
/// # Examples
///
/// ```
/// #![feature(arc_unique)]
///
/// use std::sync::Arc;
///
/// let mut five = Arc::new(5);
/// let mut data = Arc::new(5);
///
/// *Arc::make_mut(&mut data) += 1; // Won't clone anything
/// let mut other_data = data.clone(); // Won't clone inner data
/// *Arc::make_mut(&mut data) += 1; // Clones inner data
/// *Arc::make_mut(&mut data) += 1; // Won't clone anything
/// *Arc::make_mut(&mut other_data) *= 2; // Won't clone anything
///
/// // Note: data and other_data now point to different numbers
/// assert_eq!(*data, 8);
/// assert_eq!(*other_data, 12);
///
/// let mut_five = Arc::make_unique(&mut five);
/// ```
#[inline]
#[unstable(feature = "arc_unique", issue = "27718")]
pub fn make_unique(this: &mut Arc<T>) -> &mut T {
#[unstable(feature = "arc_unique", reason = "needs FCP", issue = "27718")]
pub fn make_mut(this: &mut Self) -> &mut T {
// Note that we hold both a strong reference and a weak reference.
// Thus, releasing our strong reference only will not, by itself, cause
// the memory to be deallocated.
@ -407,18 +453,14 @@ impl<T: Clone> Arc<T> {
}
impl<T: ?Sized> Arc<T> {
/// Returns a mutable reference to the contained value if the `Arc<T>` is unique.
///
/// Returns `None` if the `Arc<T>` is not unique.
/// Returns a mutable reference to the contained value if the `Arc<T>` has
/// one strong reference and no weak references.
///
/// # Examples
///
/// ```
/// #![feature(arc_unique, alloc)]
///
/// extern crate alloc;
/// # fn main() {
/// use alloc::arc::Arc;
/// #![feature(arc_unique)]
/// use std::sync::Arc;
///
/// let mut x = Arc::new(3);
/// *Arc::get_mut(&mut x).unwrap() = 4;
@ -426,11 +468,10 @@ impl<T: ?Sized> Arc<T> {
///
/// let _y = x.clone();
/// assert!(Arc::get_mut(&mut x).is_none());
/// # }
/// ```
#[inline]
#[unstable(feature = "arc_unique", issue = "27718")]
pub fn get_mut(this: &mut Arc<T>) -> Option<&mut T> {
#[unstable(feature = "arc_unique", reason = "needs FCP", issue = "27718")]
pub fn get_mut(this: &mut Self) -> Option<&mut T> {
if this.is_unique() {
// This unsafety is ok because we're guaranteed that the pointer
// returned is the *only* pointer that will ever be returned to T. Our
@ -542,9 +583,6 @@ impl<T: ?Sized> Drop for Arc<T> {
}
}
#[unstable(feature = "arc_weak",
reason = "Weak pointers may not belong in this module.",
issue = "27718")]
impl<T: ?Sized> Weak<T> {
/// Upgrades a weak reference to a strong reference.
///
@ -557,15 +595,15 @@ impl<T: ?Sized> Weak<T> {
///
/// ```
/// #![feature(arc_weak)]
///
/// use std::sync::Arc;
///
/// let five = Arc::new(5);
///
/// let weak_five = five.downgrade();
/// let weak_five = Arc::downgrade(&five);
///
/// let strong_five: Option<Arc<_>> = weak_five.upgrade();
/// ```
#[unstable(feature = "arc_weak", reason = "needs FCP", issue = "27718")]
pub fn upgrade(&self) -> Option<Arc<T>> {
// We use a CAS loop to increment the strong count instead of a
// fetch_add because once the count hits 0 it must never be above 0.
@ -591,9 +629,7 @@ impl<T: ?Sized> Weak<T> {
}
}
#[unstable(feature = "arc_weak",
reason = "Weak pointers may not belong in this module.",
issue = "27718")]
#[unstable(feature = "arc_weak", reason = "needs FCP", issue = "27718")]
impl<T: ?Sized> Clone for Weak<T> {
/// Makes a clone of the `Weak<T>`.
///
@ -603,10 +639,9 @@ impl<T: ?Sized> Clone for Weak<T> {
///
/// ```
/// #![feature(arc_weak)]
///
/// use std::sync::Arc;
///
/// let weak_five = Arc::new(5).downgrade();
/// let weak_five = Arc::downgrade(&Arc::new(5));
///
/// weak_five.clone();
/// ```
@ -637,12 +672,11 @@ impl<T: ?Sized> Drop for Weak<T> {
///
/// ```
/// #![feature(arc_weak)]
///
/// use std::sync::Arc;
///
/// {
/// let five = Arc::new(5);
/// let weak_five = five.downgrade();
/// let weak_five = Arc::downgrade(&five);
///
/// // stuff
///
@ -650,7 +684,7 @@ impl<T: ?Sized> Drop for Weak<T> {
/// }
/// {
/// let five = Arc::new(5);
/// let weak_five = five.downgrade();
/// let weak_five = Arc::downgrade(&five);
///
/// // stuff
///
@ -890,23 +924,35 @@ mod tests {
assert!(Arc::get_mut(&mut x).is_none());
drop(y);
assert!(Arc::get_mut(&mut x).is_some());
let _w = x.downgrade();
let _w = Arc::downgrade(&x);
assert!(Arc::get_mut(&mut x).is_none());
}
#[test]
fn test_cowarc_clone_make_unique() {
fn try_unwrap() {
let x = Arc::new(3);
assert_eq!(Arc::try_unwrap(x), Ok(3));
let x = Arc::new(4);
let _y = x.clone();
assert_eq!(Arc::try_unwrap(x), Err(Arc::new(4)));
let x = Arc::new(5);
let _w = Arc::downgrade(&x);
assert_eq!(Arc::try_unwrap(x), Ok(5));
}
#[test]
fn test_cowarc_clone_make_mut() {
let mut cow0 = Arc::new(75);
let mut cow1 = cow0.clone();
let mut cow2 = cow1.clone();
assert!(75 == *Arc::make_unique(&mut cow0));
assert!(75 == *Arc::make_unique(&mut cow1));
assert!(75 == *Arc::make_unique(&mut cow2));
assert!(75 == *Arc::make_mut(&mut cow0));
assert!(75 == *Arc::make_mut(&mut cow1));
assert!(75 == *Arc::make_mut(&mut cow2));
*Arc::make_unique(&mut cow0) += 1;
*Arc::make_unique(&mut cow1) += 2;
*Arc::make_unique(&mut cow2) += 3;
*Arc::make_mut(&mut cow0) += 1;
*Arc::make_mut(&mut cow1) += 2;
*Arc::make_mut(&mut cow2) += 3;
assert!(76 == *cow0);
assert!(77 == *cow1);
@ -928,8 +974,7 @@ mod tests {
assert!(75 == *cow1);
assert!(75 == *cow2);
*Arc::make_unique(&mut cow0) += 1;
*Arc::make_mut(&mut cow0) += 1;
assert!(76 == *cow0);
assert!(75 == *cow1);
assert!(75 == *cow2);
@ -944,12 +989,12 @@ mod tests {
#[test]
fn test_cowarc_clone_weak() {
let mut cow0 = Arc::new(75);
let cow1_weak = cow0.downgrade();
let cow1_weak = Arc::downgrade(&cow0);
assert!(75 == *cow0);
assert!(75 == *cow1_weak.upgrade().unwrap());
*Arc::make_unique(&mut cow0) += 1;
*Arc::make_mut(&mut cow0) += 1;
assert!(76 == *cow0);
assert!(cow1_weak.upgrade().is_none());
@ -958,14 +1003,14 @@ mod tests {
#[test]
fn test_live() {
let x = Arc::new(5);
let y = x.downgrade();
let y = Arc::downgrade(&x);
assert!(y.upgrade().is_some());
}
#[test]
fn test_dead() {
let x = Arc::new(5);
let y = x.downgrade();
let y = Arc::downgrade(&x);
drop(x);
assert!(y.upgrade().is_none());
}
@ -977,7 +1022,7 @@ mod tests {
}
let a = Arc::new(Cycle { x: Mutex::new(None) });
let b = a.clone().downgrade();
let b = Arc::downgrade(&a.clone());
*a.x.lock().unwrap() = Some(b);
// hopefully we don't double-free (or leak)...
@ -995,7 +1040,7 @@ mod tests {
fn drop_arc_weak() {
let mut canary = atomic::AtomicUsize::new(0);
let arc = Arc::new(Canary(&mut canary as *mut atomic::AtomicUsize));
let arc_weak = arc.downgrade();
let arc_weak = Arc::downgrade(&arc);
assert!(canary.load(Acquire) == 0);
drop(arc);
assert!(canary.load(Acquire) == 1);
@ -1006,7 +1051,7 @@ mod tests {
fn test_strong_count() {
let a = Arc::new(0u32);
assert!(Arc::strong_count(&a) == 1);
let w = a.downgrade();
let w = Arc::downgrade(&a);
assert!(Arc::strong_count(&a) == 1);
let b = w.upgrade().expect("");
assert!(Arc::strong_count(&b) == 2);
@ -1024,7 +1069,7 @@ mod tests {
let a = Arc::new(0u32);
assert!(Arc::strong_count(&a) == 1);
assert!(Arc::weak_count(&a) == 0);
let w = a.downgrade();
let w = Arc::downgrade(&a);
assert!(Arc::strong_count(&a) == 1);
assert!(Arc::weak_count(&a) == 1);
let x = w.clone();
@ -1036,7 +1081,7 @@ mod tests {
let c = a.clone();
assert!(Arc::strong_count(&a) == 2);
assert!(Arc::weak_count(&a) == 0);
let d = c.downgrade();
let d = Arc::downgrade(&c);
assert!(Arc::weak_count(&c) == 1);
assert!(Arc::strong_count(&c) == 2);
@ -1059,7 +1104,7 @@ mod tests {
fn test_unsized() {
let x: Arc<[i32]> = Arc::new([1, 2, 3]);
assert_eq!(format!("{:?}", x), "[1, 2, 3]");
let y = x.clone().downgrade();
let y = Arc::downgrade(&x.clone());
drop(x);
assert!(y.upgrade().is_none());
}


@ -8,6 +8,9 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// FIXME(27718): rc_counts stuff is useful internally, but was previously public
#![allow(deprecated)]
//! Thread-local reference-counted boxes (the `Rc<T>` type).
//!
//! The `Rc<T>` type provides shared ownership of an immutable value.
@ -126,8 +129,8 @@
//!
//! // Add the Gadgets to their Owner. To do this we mutably borrow from
//! // the RefCell holding the Owner's Gadgets.
//! gadget_owner.gadgets.borrow_mut().push(gadget1.clone().downgrade());
//! gadget_owner.gadgets.borrow_mut().push(gadget2.clone().downgrade());
//! gadget_owner.gadgets.borrow_mut().push(Rc::downgrade(&gadget1));
//! gadget_owner.gadgets.borrow_mut().push(Rc::downgrade(&gadget2));
//!
//! // Iterate over our Gadgets, printing their details out
//! for gadget_opt in gadget_owner.gadgets.borrow().iter() {
@ -161,7 +164,7 @@ use core::fmt;
use core::hash::{Hasher, Hash};
use core::intrinsics::{assume, drop_in_place, abort};
use core::marker::{self, Unsize};
use core::mem::{self, align_of, size_of, align_of_val, size_of_val, forget};
use core::mem::{self, align_of_val, size_of_val, forget};
use core::nonzero::NonZero;
use core::ops::{CoerceUnsized, Deref};
use core::ptr;
@ -218,10 +221,10 @@ impl<T> Rc<T> {
}
}
/// Unwraps the contained value if the `Rc<T>` is unique.
/// Unwraps the contained value if the `Rc<T>` has only one strong reference.
/// This will succeed even if there are outstanding weak references.
///
/// If the `Rc<T>` is not unique, an `Err` is returned with the same
/// `Rc<T>`.
/// Otherwise, an `Err` is returned with the same `Rc<T>`.
///
/// # Examples
///
@ -238,22 +241,32 @@ impl<T> Rc<T> {
/// assert_eq!(Rc::try_unwrap(x), Err(Rc::new(4)));
/// ```
#[inline]
#[unstable(feature = "rc_unique", issue = "27718")]
pub fn try_unwrap(rc: Rc<T>) -> Result<T, Rc<T>> {
if Rc::is_unique(&rc) {
#[unstable(feature = "rc_unique", reason= "needs FCP", issue = "27718")]
pub fn try_unwrap(this: Self) -> Result<T, Self> {
if Rc::would_unwrap(&this) {
unsafe {
let val = ptr::read(&*rc); // copy the contained object
// destruct the box and skip our Drop
// we can ignore the refcounts because we know we're unique
deallocate(*rc._ptr as *mut u8, size_of::<RcBox<T>>(),
align_of::<RcBox<T>>());
forget(rc);
let val = ptr::read(&*this); // copy the contained object
// Indicate to Weaks that they can't be promoted by decrementing
// the strong count, and then remove the implicit "strong weak"
// pointer while also handling drop logic by just crafting a
// fake Weak.
this.dec_strong();
let _weak = Weak { _ptr: this._ptr };
forget(this);
Ok(val)
}
} else {
Err(rc)
Err(this)
}
}
/// Checks if `Rc::try_unwrap` would return `Ok`.
#[unstable(feature = "rc_would_unwrap", reason = "just added for niche usecase",
issue = "27718")]
pub fn would_unwrap(this: &Self) -> bool {
Rc::strong_count(&this) == 1
}
}
impl<T: ?Sized> Rc<T> {
@ -268,25 +281,23 @@ impl<T: ?Sized> Rc<T> {
///
/// let five = Rc::new(5);
///
/// let weak_five = five.downgrade();
/// let weak_five = Rc::downgrade(&five);
/// ```
#[unstable(feature = "rc_weak",
reason = "Weak pointers may not belong in this module",
issue = "27718")]
pub fn downgrade(&self) -> Weak<T> {
self.inc_weak();
Weak { _ptr: self._ptr }
#[unstable(feature = "rc_weak", reason = "needs FCP", issue = "27718")]
pub fn downgrade(this: &Self) -> Weak<T> {
this.inc_weak();
Weak { _ptr: this._ptr }
}
/// Get the number of weak references to this value.
#[inline]
#[unstable(feature = "rc_counts", issue = "27718")]
pub fn weak_count(this: &Rc<T>) -> usize { this.weak() - 1 }
#[unstable(feature = "rc_counts", reason = "not clearly useful", issue = "27718")]
pub fn weak_count(this: &Self) -> usize { this.weak() - 1 }
/// Get the number of strong references to this value.
#[inline]
#[unstable(feature = "rc_counts", issue= "27718")]
pub fn strong_count(this: &Rc<T>) -> usize { this.strong() }
#[unstable(feature = "rc_counts", reason = "not clearly useful", issue = "27718")]
pub fn strong_count(this: &Self) -> usize { this.strong() }
/// Returns true if there are no other `Rc` or `Weak<T>` values that share
/// the same inner value.
@ -294,7 +305,7 @@ impl<T: ?Sized> Rc<T> {
/// # Examples
///
/// ```
/// #![feature(rc_unique)]
/// #![feature(rc_counts)]
///
/// use std::rc::Rc;
///
@ -303,13 +314,13 @@ impl<T: ?Sized> Rc<T> {
/// assert!(Rc::is_unique(&five));
/// ```
#[inline]
#[unstable(feature = "rc_unique", issue = "27718")]
pub fn is_unique(rc: &Rc<T>) -> bool {
Rc::weak_count(rc) == 0 && Rc::strong_count(rc) == 1
#[unstable(feature = "rc_counts", reason = "uniqueness has unclear meaning", issue = "27718")]
pub fn is_unique(this: &Self) -> bool {
Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1
}
/// Returns a mutable reference to the contained value if the `Rc<T>` is
/// unique.
/// Returns a mutable reference to the contained value if the `Rc<T>` has
/// one strong reference and no weak references.
///
/// Returns `None` if the `Rc<T>` is not unique.
///
@ -328,10 +339,10 @@ impl<T: ?Sized> Rc<T> {
/// assert!(Rc::get_mut(&mut x).is_none());
/// ```
#[inline]
#[unstable(feature = "rc_unique", issue = "27718")]
pub fn get_mut(rc: &mut Rc<T>) -> Option<&mut T> {
if Rc::is_unique(rc) {
let inner = unsafe { &mut **rc._ptr };
#[unstable(feature = "rc_unique", reason = "needs FCP", issue = "27718")]
pub fn get_mut(this: &mut Self) -> Option<&mut T> {
if Rc::is_unique(this) {
let inner = unsafe { &mut **this._ptr };
Some(&mut inner.value)
} else {
None
@ -340,34 +351,62 @@ impl<T: ?Sized> Rc<T> {
}
impl<T: Clone> Rc<T> {
/// Make a mutable reference from the given `Rc<T>`.
#[inline]
#[unstable(feature = "rc_unique", reason = "renamed to Rc::make_mut", issue = "27718")]
#[deprecated(since = "1.4.0", reason = "renamed to Rc::make_mut")]
pub fn make_unique(&mut self) -> &mut T {
Rc::make_mut(self)
}
/// Make a mutable reference into the given `Rc<T>` by cloning the inner
/// data unless the `Rc<T>` has exactly one strong reference and no weak
/// references.
///
/// This is also referred to as a copy-on-write operation because the inner
/// data is cloned if the reference count is greater than one.
/// This is also referred to as a copy-on-write.
///
/// # Examples
///
/// ```
/// #![feature(rc_unique)]
///
/// use std::rc::Rc;
///
/// let mut five = Rc::new(5);
/// let mut data = Rc::new(5);
///
/// *Rc::make_mut(&mut data) += 1; // Won't clone anything
/// let mut other_data = data.clone(); // Won't clone inner data
/// *Rc::make_mut(&mut data) += 1; // Clones inner data
/// *Rc::make_mut(&mut data) += 1; // Won't clone anything
/// *Rc::make_mut(&mut other_data) *= 2; // Won't clone anything
///
/// // Note: data and other_data now point to different numbers
/// assert_eq!(*data, 8);
/// assert_eq!(*other_data, 12);
///
/// let mut_five = five.make_unique();
/// ```
#[inline]
#[unstable(feature = "rc_unique", issue = "27718")]
pub fn make_unique(&mut self) -> &mut T {
if !Rc::is_unique(self) {
*self = Rc::new((**self).clone())
#[unstable(feature = "rc_unique", reason = "needs FCP", issue = "27718")]
pub fn make_mut(this: &mut Self) -> &mut T {
if Rc::strong_count(this) != 1 {
// Gotta clone the data, there are other Rcs
*this = Rc::new((**this).clone())
} else if Rc::weak_count(this) != 0 {
// Can just steal the data, all that's left is Weaks
unsafe {
let mut swap = Rc::new(ptr::read(&(**this._ptr).value));
mem::swap(this, &mut swap);
swap.dec_strong();
// Remove implicit strong-weak ref (no need to craft a fake
// Weak here -- we know other Weaks can clean up for us)
swap.dec_weak();
forget(swap);
}
}
// This unsafety is ok because we're guaranteed that the pointer
// returned is the *only* pointer that will ever be returned to T. Our
// reference count is guaranteed to be 1 at this point, and we required
// the `Rc<T>` itself to be `mut`, so we're returning the only possible
// reference to the inner value.
let inner = unsafe { &mut **self._ptr };
let inner = unsafe { &mut **this._ptr };
&mut inner.value
}
}
@ -413,7 +452,7 @@ impl<T: ?Sized> Drop for Rc<T> {
unsafe {
let ptr = *self._ptr;
if !(*(&ptr as *const _ as *const *const ())).is_null() &&
ptr as *const () as usize != mem::POST_DROP_USIZE {
ptr as *const () as usize != mem::POST_DROP_USIZE {
self.dec_strong();
if self.strong() == 0 {
// destroy the contained object
@ -653,9 +692,7 @@ impl<T> fmt::Pointer for Rc<T> {
///
/// See the [module level documentation](./index.html) for more.
#[unsafe_no_drop_flag]
#[unstable(feature = "rc_weak",
reason = "Weak pointers may not belong in this module.",
issue = "27718")]
#[unstable(feature = "rc_weak", reason = "needs FCP", issue = "27718")]
pub struct Weak<T: ?Sized> {
// FIXME #12808: strange names to try to avoid interfering with
// field accesses of the contained type via Deref
@ -667,11 +704,7 @@ impl<T: ?Sized> !marker::Sync for Weak<T> {}
impl<T: ?Sized+Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {}
#[unstable(feature = "rc_weak",
reason = "Weak pointers may not belong in this module.",
issue = "27718")]
impl<T: ?Sized> Weak<T> {
/// Upgrades a weak reference to a strong reference.
///
/// Upgrades the `Weak<T>` reference to an `Rc<T>`, if possible.
@ -688,10 +721,11 @@ impl<T: ?Sized> Weak<T> {
///
/// let five = Rc::new(5);
///
/// let weak_five = five.downgrade();
/// let weak_five = Rc::downgrade(&five);
///
/// let strong_five: Option<Rc<_>> = weak_five.upgrade();
/// ```
#[unstable(feature = "rc_weak", reason = "needs FCP", issue = "27718")]
pub fn upgrade(&self) -> Option<Rc<T>> {
if self.strong() == 0 {
None
@ -717,7 +751,7 @@ impl<T: ?Sized> Drop for Weak<T> {
///
/// {
/// let five = Rc::new(5);
/// let weak_five = five.downgrade();
/// let weak_five = Rc::downgrade(&five);
///
/// // stuff
///
@ -725,7 +759,7 @@ impl<T: ?Sized> Drop for Weak<T> {
/// }
/// {
/// let five = Rc::new(5);
/// let weak_five = five.downgrade();
/// let weak_five = Rc::downgrade(&five);
///
/// // stuff
///
@ -735,7 +769,7 @@ impl<T: ?Sized> Drop for Weak<T> {
unsafe {
let ptr = *self._ptr;
if !(*(&ptr as *const _ as *const *const ())).is_null() &&
ptr as *const () as usize != mem::POST_DROP_USIZE {
ptr as *const () as usize != mem::POST_DROP_USIZE {
self.dec_weak();
// the weak count starts at 1, and will only go to zero if all
// the strong pointers have disappeared.
@ -748,9 +782,7 @@ impl<T: ?Sized> Drop for Weak<T> {
}
}
#[unstable(feature = "rc_weak",
reason = "Weak pointers may not belong in this module.",
issue = "27718")]
#[unstable(feature = "rc_weak", reason = "needs FCP", issue = "27718")]
impl<T: ?Sized> Clone for Weak<T> {
/// Makes a clone of the `Weak<T>`.
@ -764,7 +796,7 @@ impl<T: ?Sized> Clone for Weak<T> {
///
/// use std::rc::Rc;
///
/// let weak_five = Rc::new(5).downgrade();
/// let weak_five = Rc::downgrade(&Rc::new(5));
///
/// weak_five.clone();
/// ```
@ -888,14 +920,14 @@ mod tests {
#[test]
fn test_live() {
let x = Rc::new(5);
let y = x.downgrade();
let y = Rc::downgrade(&x);
assert!(y.upgrade().is_some());
}
#[test]
fn test_dead() {
let x = Rc::new(5);
let y = x.downgrade();
let y = Rc::downgrade(&x);
drop(x);
assert!(y.upgrade().is_none());
}
@ -907,7 +939,7 @@ mod tests {
}
let a = Rc::new(Cycle { x: RefCell::new(None) });
let b = a.clone().downgrade();
let b = Rc::downgrade(&a.clone());
*a.x.borrow_mut() = Some(b);
// hopefully we don't double-free (or leak)...
@ -921,7 +953,7 @@ mod tests {
assert!(!Rc::is_unique(&x));
drop(y);
assert!(Rc::is_unique(&x));
let w = x.downgrade();
let w = Rc::downgrade(&x);
assert!(!Rc::is_unique(&x));
drop(w);
assert!(Rc::is_unique(&x));
@ -931,7 +963,7 @@ mod tests {
fn test_strong_count() {
let a = Rc::new(0u32);
assert!(Rc::strong_count(&a) == 1);
let w = a.downgrade();
let w = Rc::downgrade(&a);
assert!(Rc::strong_count(&a) == 1);
let b = w.upgrade().expect("upgrade of live rc failed");
assert!(Rc::strong_count(&b) == 2);
@ -949,7 +981,7 @@ mod tests {
let a = Rc::new(0u32);
assert!(Rc::strong_count(&a) == 1);
assert!(Rc::weak_count(&a) == 0);
let w = a.downgrade();
let w = Rc::downgrade(&a);
assert!(Rc::strong_count(&a) == 1);
assert!(Rc::weak_count(&a) == 1);
drop(w);
@ -969,8 +1001,8 @@ mod tests {
let _y = x.clone();
assert_eq!(Rc::try_unwrap(x), Err(Rc::new(4)));
let x = Rc::new(5);
let _w = x.downgrade();
assert_eq!(Rc::try_unwrap(x), Err(Rc::new(5)));
let _w = Rc::downgrade(&x);
assert_eq!(Rc::try_unwrap(x), Ok(5));
}
#[test]
@ -982,7 +1014,7 @@ mod tests {
assert!(Rc::get_mut(&mut x).is_none());
drop(y);
assert!(Rc::get_mut(&mut x).is_some());
let _w = x.downgrade();
let _w = Rc::downgrade(&x);
assert!(Rc::get_mut(&mut x).is_none());
}
@ -992,13 +1024,13 @@ mod tests {
let mut cow1 = cow0.clone();
let mut cow2 = cow1.clone();
assert!(75 == *cow0.make_unique());
assert!(75 == *cow1.make_unique());
assert!(75 == *cow2.make_unique());
assert!(75 == *Rc::make_mut(&mut cow0));
assert!(75 == *Rc::make_mut(&mut cow1));
assert!(75 == *Rc::make_mut(&mut cow2));
*cow0.make_unique() += 1;
*cow1.make_unique() += 2;
*cow2.make_unique() += 3;
*Rc::make_mut(&mut cow0) += 1;
*Rc::make_mut(&mut cow1) += 2;
*Rc::make_mut(&mut cow2) += 3;
assert!(76 == *cow0);
assert!(77 == *cow1);
@ -1020,7 +1052,7 @@ mod tests {
assert!(75 == *cow1);
assert!(75 == *cow2);
*cow0.make_unique() += 1;
*Rc::make_mut(&mut cow0) += 1;
assert!(76 == *cow0);
assert!(75 == *cow1);
@ -1036,12 +1068,12 @@ mod tests {
#[test]
fn test_cowrc_clone_weak() {
let mut cow0 = Rc::new(75);
let cow1_weak = cow0.downgrade();
let cow1_weak = Rc::downgrade(&cow0);
assert!(75 == *cow0);
assert!(75 == *cow1_weak.upgrade().unwrap());
*cow0.make_unique() += 1;
*Rc::make_mut(&mut cow0) += 1;
assert!(76 == *cow0);
assert!(cow1_weak.upgrade().is_none());


@ -258,7 +258,7 @@ impl<'a, 'b:'a, 'tcx:'b> GraphBuilder<'a, 'b, 'tcx> {
}
fn get_parent_link(&mut self, parent: &Rc<Module>, name: Name) -> ParentLink {
ModuleParentLink(parent.downgrade(), name)
ModuleParentLink(Rc::downgrade(parent), name)
}
/// Constructs the reduced graph for one item.
@ -390,7 +390,7 @@ impl<'a, 'b:'a, 'tcx:'b> GraphBuilder<'a, 'b, 'tcx> {
if let Some(crate_id) = self.session.cstore.find_extern_mod_stmt_cnum(item.id) {
let def_id = DefId { krate: crate_id, node: 0 };
self.external_exports.insert(def_id);
let parent_link = ModuleParentLink(parent.downgrade(), name);
let parent_link = ModuleParentLink(Rc::downgrade(parent), name);
let external_module = Rc::new(Module::new(parent_link,
Some(def_id),
NormalModuleKind,
@ -638,7 +638,7 @@ impl<'a, 'b:'a, 'tcx:'b> GraphBuilder<'a, 'b, 'tcx> {
block_id);
let new_module = Rc::new(Module::new(
BlockParentLink(parent.downgrade(), block_id),
BlockParentLink(Rc::downgrade(parent), block_id),
None,
AnonymousModuleKind,
false,


@ -21,11 +21,11 @@
#![feature(associated_consts)]
#![feature(borrow_state)]
#![feature(rc_weak)]
#![feature(rustc_diagnostic_macros)]
#![feature(rustc_private)]
#![feature(slice_splits)]
#![feature(staged_api)]
#![feature(rc_weak)]
#[macro_use] extern crate log;
#[macro_use] extern crate syntax;


@ -36,13 +36,13 @@
#![feature(path_relative_from)]
#![feature(quote)]
#![feature(rc_weak)]
#![feature(rustc_diagnostic_macros)]
#![feature(rustc_private)]
#![feature(staged_api)]
#![feature(unicode)]
#![feature(vec_push_all)]
#![feature(rc_weak)]
#![allow(trivial_casts)]


@ -109,7 +109,7 @@ pub fn namespace_for_item(cx: &CrateContext, def_id: ast::DefId) -> Rc<Namespace
let node = Rc::new(NamespaceTreeNode {
name: name,
scope: scope,
parent: parent_node.map(|parent| parent.downgrade()),
parent: parent_node.map(|parent| Rc::downgrade(&parent)),
});
debug_context(cx).namespace_map.borrow_mut()


@ -36,7 +36,7 @@ fn main() {
let b: Rc<Baz> = a.clone();
assert_eq!(b.get(), 42);
let c: Weak<i32> = a.downgrade();
let c: Weak<i32> = Rc::downgrade(&a);
let d: Weak<Baz> = c.clone();
let _c = b.clone();
@ -44,5 +44,6 @@ fn main() {
let a: Rc<RefCell<i32>> = Rc::new(RefCell::new(42));
let b: Rc<RefCell<Baz>> = a.clone();
assert_eq!(b.borrow().get(), 42);
let c: Weak<RefCell<Baz>> = a.downgrade();
// FIXME
let c: Weak<RefCell<Baz>> = Rc::downgrade(&a) as Weak<_>;
}