Auto merge of #113113 - Amanieu:box-vec-zst, r=Mark-Simulacrum
Eliminate ZST allocations in `Box` and `Vec`

This PR fixes two issues with `Box` and `RawVec` related to ZST allocations. Specifically, the `Allocator` trait requires that:

- if you allocate a zero-sized layout, you must later deallocate it, otherwise the allocator may leak memory;
- you cannot pass the allocator a ZST pointer that you haven't previously allocated.

These restrictions exist because an allocator implementation is allowed to allocate a non-zero amount of memory for a zero-sized allocation. For example, `malloc` in libc does this.

Currently, ZSTs are handled differently in `Box` and `Vec`:

- `Vec` never allocates when `T` is a ZST or the vector's capacity is 0.
- `Box` blindly passes everything on to the allocator, including ZSTs.

This causes problems due to the free conversions between `Box<[T]>` and `Vec<T>`: a ZST allocation can be leaked, or a dangling pointer can be passed to `deallocate`. This PR fixes both by changing `Box` not to allocate for zero-sized values and slices. It also fixes a bug in `RawVec::shrink` where shrinking to a capacity of zero did not actually free the backing memory.
Commit: cca3373706
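To make the hazard concrete, here is a minimal sketch (not from the PR; `MallocLike` is a hypothetical allocator) of a conforming `Allocator` that, like libc's `malloc`, backs even zero-sized requests with real memory. Both contract rules above fall out of it: a zero-sized `allocate` must be paired with a `deallocate`, and `deallocate` must never see a pointer the allocator didn't produce.

```rust
#![feature(allocator_api)] // nightly-only, like the APIs this PR touches

use std::alloc::{AllocError, Allocator, Global, Layout};
use std::ptr::NonNull;

/// Hypothetical allocator that hands out real heap memory even for
/// zero-sized layouts, which the `Allocator` contract explicitly permits.
struct MallocLike;

unsafe impl Allocator for MallocLike {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        // Round a zero-sized request up to one real byte, as `malloc(0)` may.
        let backing = Layout::from_size_align(layout.size().max(1), layout.align())
            .map_err(|_| AllocError)?;
        let ptr = Global.allocate(backing)?;
        // Report the requested size; the hidden byte is why the caller must
        // eventually call `deallocate`, even for a ZST.
        Ok(NonNull::slice_from_raw_parts(ptr.cast(), layout.size()))
    }

    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        // Freeing a pointer this allocator never produced would be UB here.
        let backing =
            Layout::from_size_align(layout.size().max(1), layout.align()).unwrap();
        unsafe { Global.deallocate(ptr, backing) };
    }
}

fn main() {
    let alloc = MallocLike;
    let zst = Layout::new::<()>();
    let p = alloc.allocate(zst).unwrap(); // really consumes a byte
    unsafe { alloc.deallocate(p.cast(), zst) }; // and must give it back
}
```

Against such an allocator, the pre-fix asymmetry bites in both directions: converting a never-allocated zero-capacity `Vec<T>` into a `Box<[T]>` makes the `Box`'s drop glue hand `deallocate` a dangling pointer, while converting a boxed ZST slice into a `Vec` forgets the allocator's backing byte and leaks it.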
`library/alloc/src/boxed.rs`:

```diff
@@ -157,12 +157,12 @@
 use core::iter::FusedIterator;
 use core::marker::Tuple;
 use core::marker::Unsize;
-use core::mem;
+use core::mem::{self, SizedTypeProperties};
 use core::ops::{
     CoerceUnsized, Deref, DerefMut, DispatchFromDyn, Generator, GeneratorState, Receiver,
 };
 use core::pin::Pin;
-use core::ptr::{self, Unique};
+use core::ptr::{self, NonNull, Unique};
 use core::task::{Context, Poll};

 #[cfg(not(no_global_oom_handling))]
```
```diff
@@ -479,8 +479,12 @@ pub fn try_new_uninit_in(alloc: A) -> Result<Box<mem::MaybeUninit<T>, A>, AllocError>
     where
         A: Allocator,
     {
-        let layout = Layout::new::<mem::MaybeUninit<T>>();
-        let ptr = alloc.allocate(layout)?.cast();
+        let ptr = if T::IS_ZST {
+            NonNull::dangling()
+        } else {
+            let layout = Layout::new::<mem::MaybeUninit<T>>();
+            alloc.allocate(layout)?.cast()
+        };
         unsafe { Ok(Box::from_raw_in(ptr.as_ptr(), alloc)) }
     }
```
```diff
@@ -549,8 +553,12 @@ pub fn try_new_zeroed_in(alloc: A) -> Result<Box<mem::MaybeUninit<T>, A>, AllocError>
     where
         A: Allocator,
     {
-        let layout = Layout::new::<mem::MaybeUninit<T>>();
-        let ptr = alloc.allocate_zeroed(layout)?.cast();
+        let ptr = if T::IS_ZST {
+            NonNull::dangling()
+        } else {
+            let layout = Layout::new::<mem::MaybeUninit<T>>();
+            alloc.allocate_zeroed(layout)?.cast()
+        };
         unsafe { Ok(Box::from_raw_in(ptr.as_ptr(), alloc)) }
     }
```
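As a hedged illustration of the new fast path (reusing the feature gate and imports from the sketch above; `NoAlloc` is hypothetical and not part of this PR), a boxed ZST can now be constructed through an allocator that refuses every request, because the allocator is never consulted:

```rust
/// Hypothetical allocator that refuses every request.
struct NoAlloc;

unsafe impl Allocator for NoAlloc {
    fn allocate(&self, _layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        Err(AllocError)
    }
    unsafe fn deallocate(&self, _ptr: NonNull<u8>, _layout: Layout) {
        unreachable!("nothing is ever allocated");
    }
}

fn main() {
    // `()` is a ZST: the `T::IS_ZST` branch above returns `NonNull::dangling()`
    // without consulting the allocator, so construction succeeds. Before this
    // PR the zero-sized `allocate` call would have returned `Err(AllocError)`.
    let boxed = Box::try_new_in((), NoAlloc).expect("ZSTs skip the allocator");
    drop(boxed); // the `Drop` change further down likewise skips `deallocate`
}
```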
```diff
@@ -675,14 +683,16 @@ pub fn new_zeroed_slice(len: usize) -> Box<[mem::MaybeUninit<T>]> {
     #[unstable(feature = "allocator_api", issue = "32838")]
     #[inline]
     pub fn try_new_uninit_slice(len: usize) -> Result<Box<[mem::MaybeUninit<T>]>, AllocError> {
-        unsafe {
-            let layout = match Layout::array::<mem::MaybeUninit<T>>(len) {
-                Ok(l) => l,
-                Err(_) => return Err(AllocError),
-            };
-            let ptr = Global.allocate(layout)?;
-            Ok(RawVec::from_raw_parts_in(ptr.as_mut_ptr() as *mut _, len, Global).into_box(len))
-        }
+        let ptr = if T::IS_ZST || len == 0 {
+            NonNull::dangling()
+        } else {
+            let layout = match Layout::array::<mem::MaybeUninit<T>>(len) {
+                Ok(l) => l,
+                Err(_) => return Err(AllocError),
+            };
+            Global.allocate(layout)?.cast()
+        };
+        unsafe { Ok(RawVec::from_raw_parts_in(ptr.as_ptr(), len, Global).into_box(len)) }
     }

     /// Constructs a new boxed slice with uninitialized contents, with the memory
```
```diff
@@ -707,14 +717,16 @@ pub fn try_new_uninit_slice(len: usize) -> Result<Box<[mem::MaybeUninit<T>]>, AllocError> {
     #[unstable(feature = "allocator_api", issue = "32838")]
     #[inline]
     pub fn try_new_zeroed_slice(len: usize) -> Result<Box<[mem::MaybeUninit<T>]>, AllocError> {
-        unsafe {
-            let layout = match Layout::array::<mem::MaybeUninit<T>>(len) {
-                Ok(l) => l,
-                Err(_) => return Err(AllocError),
-            };
-            let ptr = Global.allocate_zeroed(layout)?;
-            Ok(RawVec::from_raw_parts_in(ptr.as_mut_ptr() as *mut _, len, Global).into_box(len))
-        }
+        let ptr = if T::IS_ZST || len == 0 {
+            NonNull::dangling()
+        } else {
+            let layout = match Layout::array::<mem::MaybeUninit<T>>(len) {
+                Ok(l) => l,
+                Err(_) => return Err(AllocError),
+            };
+            Global.allocate_zeroed(layout)?.cast()
+        };
+        unsafe { Ok(RawVec::from_raw_parts_in(ptr.as_ptr(), len, Global).into_box(len)) }
     }
 }
```
Still in `boxed.rs`, `Box`'s drop glue now skips `deallocate` when the layout is zero-sized:

```diff
@@ -1219,7 +1231,9 @@ fn drop(&mut self) {

         unsafe {
             let layout = Layout::for_value_raw(ptr.as_ptr());
-            self.1.deallocate(From::from(ptr.cast()), layout)
+            if layout.size() != 0 {
+                self.1.deallocate(From::from(ptr.cast()), layout);
+            }
         }
     }
 }
```

Checking the runtime layout size rather than `T::IS_ZST` also covers unsized contents such as zero-length boxed slices, whose backing memory is never allocated under the new scheme.
`library/alloc/src/raw_vec.rs` — `RawVec::shrink` now deallocates outright when shrinking to capacity 0:

```diff
@@ -432,16 +432,26 @@ fn shrink(&mut self, cap: usize) -> Result<(), TryReserveError> {
         let (ptr, layout) = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) };
         // See current_memory() why this assert is here
         let _: () = const { assert!(mem::size_of::<T>() % mem::align_of::<T>() == 0) };
-        let ptr = unsafe {
-            // `Layout::array` cannot overflow here because it would have
-            // overflowed earlier when capacity was larger.
-            let new_size = mem::size_of::<T>().unchecked_mul(cap);
-            let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
-            self.alloc
-                .shrink(ptr, layout, new_layout)
-                .map_err(|_| AllocError { layout: new_layout, non_exhaustive: () })?
-        };
-        self.set_ptr_and_cap(ptr, cap);
+
+        // If shrinking to 0, deallocate the buffer. We don't reach this point
+        // for the T::IS_ZST case since current_memory() will have returned
+        // None.
+        if cap == 0 {
+            unsafe { self.alloc.deallocate(ptr, layout) };
+            self.ptr = Unique::dangling();
+            self.cap = 0;
+        } else {
+            let ptr = unsafe {
+                // `Layout::array` cannot overflow here because it would have
+                // overflowed earlier when capacity was larger.
+                let new_size = mem::size_of::<T>().unchecked_mul(cap);
+                let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
+                self.alloc
+                    .shrink(ptr, layout, new_layout)
+                    .map_err(|_| AllocError { layout: new_layout, non_exhaustive: () })?
+            };
+            self.set_ptr_and_cap(ptr, cap);
+        }
         Ok(())
     }
 }
```
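As a quick sketch of the fixed behavior (not a test from the PR), shrinking an empty vector that still owns a buffer now releases that buffer entirely:

```rust
fn main() {
    let mut v: Vec<u8> = Vec::with_capacity(100); // one real 100-byte buffer
    v.shrink_to_fit(); // len == 0, so this now takes the `cap == 0` branch
    assert_eq!(v.capacity(), 0);
    // Before the fix, the call above "shrank" the buffer to a zero-sized
    // allocation that nothing would ever deallocate; now it is freed outright.
}
```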
`library/alloc/tests/vec.rs` — a regression test with an allocator that tracks zero-sized allocations:

```diff
@@ -2498,3 +2498,68 @@ fn test_into_flattened_size_overflow() {
     let v = vec![[(); usize::MAX]; 2];
     let _ = v.into_flattened();
 }
+
+#[cfg(not(bootstrap))]
+#[test]
+fn test_box_zero_allocator() {
+    use core::{alloc::AllocError, cell::RefCell};
+    use std::collections::HashSet;
+
+    // Track ZST allocations and ensure that they all have a matching free.
+    struct ZstTracker {
+        state: RefCell<(HashSet<usize>, usize)>,
+    }
+    unsafe impl Allocator for ZstTracker {
+        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+            let ptr = if layout.size() == 0 {
+                let mut state = self.state.borrow_mut();
+                let addr = state.1;
+                assert!(state.0.insert(addr));
+                state.1 += 1;
+                std::println!("allocating {addr}");
+                std::ptr::invalid_mut(addr)
+            } else {
+                unsafe { std::alloc::alloc(layout) }
+            };
+            Ok(NonNull::slice_from_raw_parts(NonNull::new(ptr).ok_or(AllocError)?, layout.size()))
+        }
+
+        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+            if layout.size() == 0 {
+                let addr = ptr.as_ptr() as usize;
+                let mut state = self.state.borrow_mut();
+                std::println!("freeing {addr}");
+                assert!(state.0.remove(&addr), "ZST free that wasn't allocated");
+            } else {
+                unsafe { std::alloc::dealloc(ptr.as_ptr(), layout) }
+            }
+        }
+    }
+
+    // Start the state at 100 to avoid returning null pointers.
+    let alloc = ZstTracker { state: RefCell::new((HashSet::new(), 100)) };
+
+    // Ensure that unsizing retains the same behavior.
+    {
+        let b1: Box<[u8; 0], &ZstTracker> = Box::new_in([], &alloc);
+        let b2: Box<[u8], &ZstTracker> = b1.clone();
+        let _b3: Box<[u8], &ZstTracker> = b2.clone();
+    }
+
+    // Ensure that shrinking doesn't leak a ZST allocation.
+    {
+        let mut v1: Vec<u8, &ZstTracker> = Vec::with_capacity_in(100, &alloc);
+        v1.shrink_to_fit();
+    }
+
+    // Ensure that conversion to/from vec works.
+    {
+        let v1: Vec<(), &ZstTracker> = Vec::with_capacity_in(100, &alloc);
+        let _b1: Box<[()], &ZstTracker> = v1.into_boxed_slice();
+        let b2: Box<[()], &ZstTracker> = Box::new_in([(), (), ()], &alloc);
+        let _v2: Vec<(), &ZstTracker> = b2.into();
+    }
+
+    // Ensure all ZSTs have been freed.
+    assert!(alloc.state.borrow().0.is_empty());
+}
```
Finally, a UI test's expected `.stderr` output is updated for the ten lines added to `raw_vec.rs`:

```diff
@@ -1,2 +1,2 @@
-thread 'main' panicked at 'capacity overflow', library/alloc/src/raw_vec.rs:524:5
+thread 'main' panicked at 'capacity overflow', library/alloc/src/raw_vec.rs:534:5
 note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace
```