Rollup merge of #119158 - JohnTheCoolingFan:arc-weak-clone-pretty, r=cuviper
Clean up alloc::sync::Weak Clone implementation

Both return points (the tail and the early return) returned the same expression, and the only difference was whether inner was available. The atomic operations and checks on inner are therefore moved into the if let body, so the only return is at the tail. Original comments are preserved.
commit c67ab2e0b4
@@ -2917,20 +2917,17 @@ impl<T: ?Sized, A: Allocator + Clone> Clone for Weak<T, A> {
     /// ```
     #[inline]
     fn clone(&self) -> Weak<T, A> {
-        let inner = if let Some(inner) = self.inner() {
-            inner
-        } else {
-            return Weak { ptr: self.ptr, alloc: self.alloc.clone() };
-        };
-        // See comments in Arc::clone() for why this is relaxed. This can use a
-        // fetch_add (ignoring the lock) because the weak count is only locked
-        // where are *no other* weak pointers in existence. (So we can't be
-        // running this code in that case).
-        let old_size = inner.weak.fetch_add(1, Relaxed);
+        if let Some(inner) = self.inner() {
+            // See comments in Arc::clone() for why this is relaxed. This can use a
+            // fetch_add (ignoring the lock) because the weak count is only locked
+            // where are *no other* weak pointers in existence. (So we can't be
+            // running this code in that case).
+            let old_size = inner.weak.fetch_add(1, Relaxed);

-        // See comments in Arc::clone() for why we do this (for mem::forget).
-        if old_size > MAX_REFCOUNT {
-            abort();
+            // See comments in Arc::clone() for why we do this (for mem::forget).
+            if old_size > MAX_REFCOUNT {
+                abort();
+            }
         }

         Weak { ptr: self.ptr, alloc: self.alloc.clone() }
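For readers outside the alloc internals, the following is a minimal, self-contained sketch of the same refactoring pattern, not the real implementation. Handle, slot, clone_before and clone_after are hypothetical stand-ins for Weak, its inner() accessor and the two shapes of Clone::clone; the real code works on the private weak reference count and aborts on overflow, which this toy omits.

use std::sync::atomic::{AtomicUsize, Ordering::Relaxed};
use std::sync::Arc;

// Hypothetical stand-in for Weak<T>: slot is None when the handle dangles
// (no backing allocation), mirroring the self.inner() check in the diff.
struct Handle {
    slot: Option<Arc<AtomicUsize>>,
}

impl Handle {
    // Shape before the change: an early return that builds the same value
    // as the tail return.
    fn clone_before(&self) -> Handle {
        let counter = if let Some(counter) = self.slot.as_ref() {
            counter
        } else {
            return Handle { slot: self.slot.clone() };
        };
        counter.fetch_add(1, Relaxed);
        Handle { slot: self.slot.clone() }
    }

    // Shape after the change: the counter update moves into the if let body,
    // leaving a single return at the tail.
    fn clone_after(&self) -> Handle {
        if let Some(counter) = self.slot.as_ref() {
            counter.fetch_add(1, Relaxed);
        }
        Handle { slot: self.slot.clone() }
    }
}

fn main() {
    let live = Handle { slot: Some(Arc::new(AtomicUsize::new(1))) };
    let dangling = Handle { slot: None };

    // Both shapes behave identically for live and dangling handles.
    let _a = live.clone_before();
    let _b = live.clone_after();
    let _c = dangling.clone_before();
    let _d = dangling.clone_after();

    assert_eq!(live.slot.as_ref().unwrap().load(Relaxed), 3);
}

Both versions produce the same value on every path; the second simply avoids duplicating the construction of that value, which is the point of the commit.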