From d24be14276bc5e52c6ab42d29ab2ed717b2b583c Mon Sep 17 00:00:00 2001
From: Amanieu d'Antras
Date: Tue, 27 Jun 2023 23:33:44 +0100
Subject: [PATCH] Eliminate ZST allocations in `Box` and `Vec`

---
 library/alloc/src/boxed.rs                 | 44 ++++++++++-----
 library/alloc/src/raw_vec.rs               | 30 ++++++----
 library/alloc/tests/vec.rs                 | 65 ++++++++++++++++++++++
 tests/ui/hygiene/panic-location.run.stderr |  2 +-
 4 files changed, 115 insertions(+), 26 deletions(-)

diff --git a/library/alloc/src/boxed.rs b/library/alloc/src/boxed.rs
index 8ef2bac9282..8697a77db3b 100644
--- a/library/alloc/src/boxed.rs
+++ b/library/alloc/src/boxed.rs
@@ -157,12 +157,12 @@ use core::iter::FusedIterator;
 use core::marker::Tuple;
 use core::marker::Unsize;
-use core::mem;
+use core::mem::{self, SizedTypeProperties};
 use core::ops::{
     CoerceUnsized, Deref, DerefMut, DispatchFromDyn, Generator, GeneratorState, Receiver,
 };
 use core::pin::Pin;
-use core::ptr::{self, Unique};
+use core::ptr::{self, NonNull, Unique};
 use core::task::{Context, Poll};
 
 #[cfg(not(no_global_oom_handling))]
@@ -479,8 +479,12 @@ pub fn try_new_uninit_in(alloc: A) -> Result<Box<mem::MaybeUninit<T>, A>, AllocE
     where
         A: Allocator,
     {
-        let layout = Layout::new::<mem::MaybeUninit<T>>();
-        let ptr = alloc.allocate(layout)?.cast();
+        let ptr = if T::IS_ZST {
+            NonNull::dangling()
+        } else {
+            let layout = Layout::new::<mem::MaybeUninit<T>>();
+            alloc.allocate(layout)?.cast()
+        };
         unsafe { Ok(Box::from_raw_in(ptr.as_ptr(), alloc)) }
     }
 
@@ -549,8 +553,12 @@ pub fn try_new_zeroed_in(alloc: A) -> Result<Box<mem::MaybeUninit<T>, A>, AllocE
     where
         A: Allocator,
     {
-        let layout = Layout::new::<mem::MaybeUninit<T>>();
-        let ptr = alloc.allocate_zeroed(layout)?.cast();
+        let ptr = if T::IS_ZST {
+            NonNull::dangling()
+        } else {
+            let layout = Layout::new::<mem::MaybeUninit<T>>();
+            alloc.allocate_zeroed(layout)?.cast()
+        };
         unsafe { Ok(Box::from_raw_in(ptr.as_ptr(), alloc)) }
     }
 
@@ -675,14 +683,16 @@ pub fn new_zeroed_slice(len: usize) -> Box<[mem::MaybeUninit<T>]> {
     #[unstable(feature = "allocator_api", issue = "32838")]
     #[inline]
     pub fn try_new_uninit_slice(len: usize) -> Result<Box<[mem::MaybeUninit<T>]>, AllocError> {
-        unsafe {
+        let ptr = if T::IS_ZST || len == 0 {
+            NonNull::dangling()
+        } else {
             let layout = match Layout::array::<mem::MaybeUninit<T>>(len) {
                 Ok(l) => l,
                 Err(_) => return Err(AllocError),
             };
-            let ptr = Global.allocate(layout)?;
-            Ok(RawVec::from_raw_parts_in(ptr.as_mut_ptr() as *mut _, len, Global).into_box(len))
-        }
+            Global.allocate(layout)?.cast()
+        };
+        unsafe { Ok(RawVec::from_raw_parts_in(ptr.as_ptr(), len, Global).into_box(len)) }
     }
 
     /// Constructs a new boxed slice with uninitialized contents, with the memory
@@ -707,14 +717,16 @@ pub fn try_new_uninit_slice(len: usize) -> Result<Box<[mem::MaybeUninit<T>]>, Al
     #[unstable(feature = "allocator_api", issue = "32838")]
     #[inline]
     pub fn try_new_zeroed_slice(len: usize) -> Result<Box<[mem::MaybeUninit<T>]>, AllocError> {
-        unsafe {
+        let ptr = if T::IS_ZST || len == 0 {
+            NonNull::dangling()
+        } else {
             let layout = match Layout::array::<mem::MaybeUninit<T>>(len) {
                 Ok(l) => l,
                 Err(_) => return Err(AllocError),
            };
-            let ptr = Global.allocate_zeroed(layout)?;
-            Ok(RawVec::from_raw_parts_in(ptr.as_mut_ptr() as *mut _, len, Global).into_box(len))
-        }
+            Global.allocate_zeroed(layout)?.cast()
+        };
+        unsafe { Ok(RawVec::from_raw_parts_in(ptr.as_ptr(), len, Global).into_box(len)) }
     }
 }
 
@@ -1219,7 +1231,9 @@ fn drop(&mut self) {
 
         unsafe {
             let layout = Layout::for_value_raw(ptr.as_ptr());
-            self.1.deallocate(From::from(ptr.cast()), layout)
+            if layout.size() != 0 {
+                self.1.deallocate(From::from(ptr.cast()), layout);
+            }
         }
     }
 }
diff --git a/library/alloc/src/raw_vec.rs b/library/alloc/src/raw_vec.rs
index dfd30d99cf0..01b03de6acb 100644
--- a/library/alloc/src/raw_vec.rs
+++ b/library/alloc/src/raw_vec.rs
@@ -432,16 +432,26 @@ fn shrink(&mut self, cap: usize) -> Result<(), TryReserveError> {
         let (ptr, layout) = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) };
         // See current_memory() why this assert is here
         let _: () = const { assert!(mem::size_of::<T>() % mem::align_of::<T>() == 0) };
-        let ptr = unsafe {
-            // `Layout::array` cannot overflow here because it would have
-            // overflowed earlier when capacity was larger.
-            let new_size = mem::size_of::<T>().unchecked_mul(cap);
-            let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
-            self.alloc
-                .shrink(ptr, layout, new_layout)
-                .map_err(|_| AllocError { layout: new_layout, non_exhaustive: () })?
-        };
-        self.set_ptr_and_cap(ptr, cap);
+
+        // If shrinking to 0, deallocate the buffer. We don't reach this point
+        // for the T::IS_ZST case since current_memory() will have returned
+        // None.
+        if cap == 0 {
+            unsafe { self.alloc.deallocate(ptr, layout) };
+            self.ptr = Unique::dangling();
+            self.cap = 0;
+        } else {
+            let ptr = unsafe {
+                // `Layout::array` cannot overflow here because it would have
+                // overflowed earlier when capacity was larger.
+                let new_size = mem::size_of::<T>().unchecked_mul(cap);
+                let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
+                self.alloc
+                    .shrink(ptr, layout, new_layout)
+                    .map_err(|_| AllocError { layout: new_layout, non_exhaustive: () })?
+            };
+            self.set_ptr_and_cap(ptr, cap);
+        }
         Ok(())
     }
 }
diff --git a/library/alloc/tests/vec.rs b/library/alloc/tests/vec.rs
index 21824c8a17c..b721678eea6 100644
--- a/library/alloc/tests/vec.rs
+++ b/library/alloc/tests/vec.rs
@@ -2497,3 +2497,68 @@ fn test_into_flattened_size_overflow() {
     let v = vec![[(); usize::MAX]; 2];
     let _ = v.into_flattened();
 }
+
+#[cfg(not(bootstrap))]
+#[test]
+fn test_box_zero_allocator() {
+    use core::{alloc::AllocError, cell::RefCell};
+    use std::collections::HashSet;
+
+    // Track ZST allocations and ensure that they all have a matching free.
+    struct ZstTracker {
+        state: RefCell<(HashSet<usize>, usize)>,
+    }
+    unsafe impl Allocator for ZstTracker {
+        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+            let ptr = if layout.size() == 0 {
+                let mut state = self.state.borrow_mut();
+                let addr = state.1;
+                assert!(state.0.insert(addr));
+                state.1 += 1;
+                std::println!("allocating {addr}");
+                std::ptr::invalid_mut(addr)
+            } else {
+                unsafe { std::alloc::alloc(layout) }
+            };
+            Ok(NonNull::slice_from_raw_parts(NonNull::new(ptr).ok_or(AllocError)?, layout.size()))
+        }
+
+        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+            if layout.size() == 0 {
+                let addr = ptr.as_ptr() as usize;
+                let mut state = self.state.borrow_mut();
+                std::println!("freeing {addr}");
+                assert!(state.0.remove(&addr), "ZST free that wasn't allocated");
+            } else {
+                unsafe { std::alloc::dealloc(ptr.as_ptr(), layout) }
+            }
+        }
+    }
+
+    // Start the state at 100 to avoid returning null pointers.
+    let alloc = ZstTracker { state: RefCell::new((HashSet::new(), 100)) };
+
+    // Ensure that unsizing retains the same behavior.
+    {
+        let b1: Box<[u8; 0], &ZstTracker> = Box::new_in([], &alloc);
+        let b2: Box<[u8], &ZstTracker> = b1.clone();
+        let _b3: Box<[u8], &ZstTracker> = b2.clone();
+    }
+
+    // Ensure that shrinking doesn't leak a ZST allocation.
+    {
+        let mut v1: Vec<u8, &ZstTracker> = Vec::with_capacity_in(100, &alloc);
+        v1.shrink_to_fit();
+    }
+
+    // Ensure that conversion to/from vec works.
+    {
+        let v1: Vec<(), &ZstTracker> = Vec::with_capacity_in(100, &alloc);
+        let _b1: Box<[()], &ZstTracker> = v1.into_boxed_slice();
+        let b2: Box<[()], &ZstTracker> = Box::new_in([(), (), ()], &alloc);
+        let _v2: Vec<(), &ZstTracker> = b2.into();
+    }
+
+    // Ensure all ZSTs have been freed.
+    assert!(alloc.state.borrow().0.is_empty());
+}
diff --git a/tests/ui/hygiene/panic-location.run.stderr b/tests/ui/hygiene/panic-location.run.stderr
index a7252a40027..55923cc5f6f 100644
--- a/tests/ui/hygiene/panic-location.run.stderr
+++ b/tests/ui/hygiene/panic-location.run.stderr
@@ -1,2 +1,2 @@
-thread 'main' panicked at 'capacity overflow', library/alloc/src/raw_vec.rs:524:5
+thread 'main' panicked at 'capacity overflow', library/alloc/src/raw_vec.rs:534:5
 note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace
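Note (not part of the patch itself): the change above enforces one invariant in both `Box` and `RawVec`: a zero-sized layout is never passed to the allocator, `NonNull::dangling()` stands in for the allocation, and a pointer that was never allocated is never passed back to `deallocate`. The standalone sketch below restates that invariant against the stable `std::alloc` API rather than the unstable `Allocator` trait; the helper names `allocate_array` and `deallocate_array` are illustrative only and do not appear in the patch.

use std::alloc::{alloc, dealloc, handle_alloc_error, Layout};
use std::ptr::NonNull;

// Allocate storage for `len` values of `T`. For a zero-sized layout
// (T is a ZST or len == 0) no allocator call is made; a well-aligned
// dangling pointer is returned instead.
fn allocate_array<T>(len: usize) -> NonNull<T> {
    let layout = Layout::array::<T>(len).expect("layout overflow");
    if layout.size() == 0 {
        NonNull::dangling()
    } else {
        // SAFETY: `layout` has a non-zero size.
        let ptr = unsafe { alloc(layout) };
        NonNull::new(ptr.cast::<T>()).unwrap_or_else(|| handle_alloc_error(layout))
    }
}

// Free storage obtained from `allocate_array::<T>(len)`. A zero-sized
// layout was never allocated, so it must never be deallocated either.
//
// SAFETY: `ptr` and `len` must come from a matching `allocate_array` call.
unsafe fn deallocate_array<T>(ptr: NonNull<T>, len: usize) {
    let layout = Layout::array::<T>(len).expect("layout overflow");
    if layout.size() != 0 {
        dealloc(ptr.as_ptr().cast::<u8>(), layout);
    }
}

fn main() {
    // Zero-sized cases: an empty array of a sized type and a true ZST like `()`.
    let p0 = allocate_array::<u8>(0);
    unsafe { deallocate_array(p0, 0) };

    let p1 = allocate_array::<()>(100);
    unsafe { deallocate_array(p1, 100) };

    // A non-zero-sized request still goes through the global allocator.
    let p2 = allocate_array::<u64>(4);
    unsafe { deallocate_array(p2, 4) };
}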