Rollup merge of #98233 - RalfJung:ref-alloc, r=thomcc

Remove accidental uses of `&A: Allocator`

Cc https://github.com/rust-lang/rust/issues/98232

Fixes https://github.com/rust-lang/rust/issues/98176 (for real this time)
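The footgun here is that `core` also implements `Allocator` for `&A`, so passing `&self.alloc` to a callee that is generic over the allocator still type-checks; it just instantiates the callee with `A = &TheAllocator` instead of the allocator type itself. A minimal sketch of the mistake (nightly `allocator_api`; the helper name is made up, not the actual btree code):

```rust
#![feature(allocator_api)]

use std::alloc::{Allocator, Global};

// Stand-in for an internal helper that is supposed to receive the
// collection's allocator *by value*.
fn dealloc_helper<A: Allocator>(_alloc: A) {}

fn main() {
    let alloc = Global;
    // Accidental: `&Global` is itself an `Allocator`, so this compiles,
    // but the helper is monomorphized for `&Global` rather than `Global`.
    dealloc_helper(&alloc);
    // Intended: pass the allocator by value, which is what the
    // `A: Allocator + Clone` bounds below are for.
    dealloc_helper(alloc.clone());
}
```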
Dylan DPC 2022-06-19 15:26:31 +02:00 committed by GitHub
commit 6a2a56da45
3 changed files with 10 additions and 10 deletions

View File

@@ -1644,11 +1644,11 @@ impl<K, V, A: Allocator + Clone> IntoIter<K, V, A> {
         &mut self,
     ) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV>> {
         if self.length == 0 {
-            self.range.deallocating_end(&self.alloc);
+            self.range.deallocating_end(self.alloc.clone());
             None
         } else {
             self.length -= 1;
-            Some(unsafe { self.range.deallocating_next_unchecked(&self.alloc) })
+            Some(unsafe { self.range.deallocating_next_unchecked(self.alloc.clone()) })
         }
     }
@@ -1658,11 +1658,11 @@ impl<K, V, A: Allocator + Clone> IntoIter<K, V, A> {
         &mut self,
     ) -> Option<Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV>> {
         if self.length == 0 {
-            self.range.deallocating_end(&self.alloc);
+            self.range.deallocating_end(self.alloc.clone());
             None
         } else {
             self.length -= 1;
-            Some(unsafe { self.range.deallocating_next_back_unchecked(&self.alloc) })
+            Some(unsafe { self.range.deallocating_next_back_unchecked(self.alloc.clone()) })
         }
     }
 }
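Both `IntoIter` hunks above make the same substitution: the iterator owns its allocator and must hand it to the node-freeing helpers by value, which is what the `A: Allocator + Clone` bound is for. A simplified stand-in for that pattern (names are illustrative, not the real btree internals):

```rust
#![feature(allocator_api)]

use std::alloc::{Allocator, Global};

// Simplified stand-in for `IntoIter<K, V, A>`: it owns the allocator and
// hands a clone of it to each node-freeing call, hence `A: Clone`.
struct DrainingIter<A: Allocator + Clone> {
    remaining: usize,
    alloc: A,
}

// Hypothetical helper standing in for the by-value deallocating helpers.
fn free_next_node<A: Allocator>(_alloc: A) {}

impl<A: Allocator + Clone> DrainingIter<A> {
    fn next_step(&mut self) {
        if self.remaining == 0 {
            // `self.alloc.clone()` instead of `&self.alloc`: the helper sees
            // the allocator type `A`, not the accidental `&A`.
            free_next_node(self.alloc.clone());
        } else {
            self.remaining -= 1;
            free_next_node(self.alloc.clone());
        }
    }
}

fn main() {
    let mut it = DrainingIter { remaining: 1, alloc: Global };
    it.next_step();
    it.next_step();
}
```

Cloning per call is typically cheap: allocator handles such as `Global` are zero-sized or trivially copyable.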
@@ -1849,7 +1849,7 @@ where
     type Item = (K, V);
 
     fn next(&mut self) -> Option<(K, V)> {
-        self.inner.next(&mut self.pred, &self.alloc)
+        self.inner.next(&mut self.pred, self.alloc.clone())
     }
 
     fn size_hint(&self) -> (usize, Option<usize>) {

View File

@@ -1320,7 +1320,7 @@ where
     fn next(&mut self) -> Option<T> {
         let pred = &mut self.pred;
         let mut mapped_pred = |k: &T, _v: &mut ()| pred(k);
-        self.inner.next(&mut mapped_pred, &self.alloc).map(|(k, _)| k)
+        self.inner.next(&mut mapped_pred, self.alloc.clone()).map(|(k, _)| k)
     }
 
     fn size_hint(&self) -> (usize, Option<usize>) {

View File

@@ -187,7 +187,7 @@ impl System {
             old_size => unsafe {
                 let new_ptr = self.alloc_impl(new_layout, zeroed)?;
                 ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_size);
-                Allocator::deallocate(&self, ptr, old_layout);
+                Allocator::deallocate(self, ptr, old_layout);
                 Ok(new_ptr)
             },
         }
@@ -254,7 +254,7 @@ unsafe impl Allocator for System {
         match new_layout.size() {
             // SAFETY: conditions must be upheld by the caller
             0 => unsafe {
-                Allocator::deallocate(&self, ptr, old_layout);
+                Allocator::deallocate(self, ptr, old_layout);
                 Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0))
             },
@@ -274,9 +274,9 @@ unsafe impl Allocator for System {
             // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
             // for `dealloc` must be upheld by the caller.
             new_size => unsafe {
-                let new_ptr = Allocator::allocate(&self, new_layout)?;
+                let new_ptr = Allocator::allocate(self, new_layout)?;
                 ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
-                Allocator::deallocate(&self, ptr, old_layout);
+                Allocator::deallocate(self, ptr, old_layout);
                 Ok(new_ptr)
             },
         }
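Both spellings compile because of the same `&A: Allocator` forwarding impl: inside these `&self` methods, `Allocator::deallocate(&self, …)` resolved with `Self = &System` (one extra reference level), while the fixed `deallocate(self, …)` resolves with `Self = System` directly. A small nightly sketch of the two resolutions (illustrative only, not part of the patch):

```rust
#![feature(allocator_api)]

use std::alloc::{Allocator, Layout, System};

fn main() {
    let layout = Layout::new::<u64>();

    // `Self = System`: the resolution the fixed code uses (receiver `&System`).
    let ptr = Allocator::allocate(&System, layout).unwrap();

    // `Self = &System`: what `Allocator::deallocate(&self, ...)` used to pick,
    // going through core's forwarding impl of `Allocator` for `&A`.
    unsafe { Allocator::deallocate(&&System, ptr.cast(), layout) };
}
```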