Fix issues from review and unsoundness of RawVec::into_box
This commit is contained in:
parent
56cbf2f22a
commit
2526accdd3
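
This commit reworks the `AllocRef` trait around an owned `MemoryBlock` type: `alloc` now returns `Result<MemoryBlock, AllocErr>` instead of a `(NonNull<u8>, usize)` pair, `dealloc` consumes a `MemoryBlock`, and `grow`/`shrink` mutate a `&mut MemoryBlock` in place. It also fixes the unsoundness of `RawVec::into_box`: the buffer may hold excess capacity beyond what was requested, so the boxed slice is now built from an explicit `len` that the caller must guarantee is at most `self.capacity()`.

For orientation, a minimal sketch of the reworked allocation round trip, using only signatures that appear in this diff (it is illustrative and only compiles against this in-flight `allocator_api` branch):

```rust
#![feature(allocator_api)]
use std::alloc::{AllocErr, AllocInit, AllocRef, Global, Layout};

fn round_trip() -> Result<(), AllocErr> {
    let layout = Layout::new::<[u8; 16]>();
    // `alloc` couples the pointer with its layout in a `MemoryBlock`;
    // the returned block may be larger than requested.
    let memory = Global.alloc(layout, AllocInit::Zeroed)?;
    assert!(memory.size() >= layout.size());
    // `dealloc` takes the whole block, not a (pointer, layout) pair.
    unsafe { Global.dealloc(memory) };
    Ok(())
}
```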
@@ -4,7 +4,7 @@
 use core::intrinsics::{self, min_align_of_val, size_of_val};
 use core::ptr::{NonNull, Unique};
-use core::usize;
+use core::{mem, usize};

 #[stable(feature = "alloc_module", since = "1.28.0")]
 #[doc(inline)]
@@ -165,102 +165,96 @@ pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
 #[unstable(feature = "allocator_api", issue = "32838")]
 unsafe impl AllocRef for Global {
     #[inline]
-    fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<(NonNull<u8>, usize), AllocErr> {
-        let new_size = layout.size();
-        if new_size == 0 {
-            Ok((layout.dangling(), 0))
-        } else {
-            unsafe {
+    fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<MemoryBlock, AllocErr> {
+        unsafe {
+            if layout.size() == 0 {
+                Ok(MemoryBlock::new(layout.dangling(), layout))
+            } else {
                 let raw_ptr = match init {
                     AllocInit::Uninitialized => alloc(layout),
                     AllocInit::Zeroed => alloc_zeroed(layout),
                 };
                 let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?;
-                Ok((ptr, new_size))
+                Ok(MemoryBlock::new(ptr, layout))
             }
         }
     }

     #[inline]
-    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
-        if layout.size() != 0 {
-            dealloc(ptr.as_ptr(), layout)
+    unsafe fn dealloc(&mut self, memory: MemoryBlock) {
+        if memory.size() != 0 {
+            dealloc(memory.ptr().as_ptr(), memory.layout())
         }
     }

     #[inline]
     unsafe fn grow(
         &mut self,
-        ptr: NonNull<u8>,
-        layout: Layout,
+        memory: &mut MemoryBlock,
         new_size: usize,
         placement: ReallocPlacement,
         init: AllocInit,
-    ) -> Result<(NonNull<u8>, usize), AllocErr> {
-        let old_size = layout.size();
+    ) -> Result<(), AllocErr> {
+        let old_size = memory.size();
         debug_assert!(
             new_size >= old_size,
-            "`new_size` must be greater than or equal to `layout.size()`"
+            "`new_size` must be greater than or equal to `memory.size()`"
         );

         if old_size == new_size {
-            return Ok((ptr, new_size));
+            return Ok(());
         }

+        let new_layout = Layout::from_size_align_unchecked(new_size, memory.align());
         match placement {
-            ReallocPlacement::MayMove => {
-                if old_size == 0 {
-                    self.alloc(Layout::from_size_align_unchecked(new_size, layout.align()), init)
-                } else {
-                    // `realloc` probably checks for `new_size > old_size` or something similar.
-                    // `new_size` must be greater than or equal to `old_size` due to the safety constraint,
-                    // and `new_size` == `old_size` was caught before
-                    intrinsics::assume(new_size > old_size);
-                    let ptr =
-                        NonNull::new(realloc(ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)?;
-                    let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
-                    init.initialize_offset(ptr, new_layout, old_size);
-                    Ok((ptr, new_size))
-                }
-            }
+            ReallocPlacement::InPlace => return Err(AllocErr),
+            ReallocPlacement::MayMove if memory.size() == 0 => {
+                *memory = self.alloc(new_layout, init)?
+            }
+            ReallocPlacement::MayMove => {
+                // `realloc` probably checks for `new_size > old_size` or something similar.
+                intrinsics::assume(new_size > old_size);
+                let ptr = realloc(memory.ptr().as_ptr(), memory.layout(), new_size);
+                *memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_layout);
+                memory.init_offset(init, old_size);
+            }
-            ReallocPlacement::InPlace => Err(AllocErr),
         }
+        Ok(())
     }

     #[inline]
     unsafe fn shrink(
         &mut self,
-        ptr: NonNull<u8>,
-        layout: Layout,
+        memory: &mut MemoryBlock,
         new_size: usize,
         placement: ReallocPlacement,
-    ) -> Result<(NonNull<u8>, usize), AllocErr> {
-        let old_size = layout.size();
+    ) -> Result<(), AllocErr> {
+        let old_size = memory.size();
         debug_assert!(
             new_size <= old_size,
-            "`new_size` must be smaller than or equal to `layout.size()`"
+            "`new_size` must be smaller than or equal to `memory.size()`"
         );

         if old_size == new_size {
-            return Ok((ptr, new_size));
+            return Ok(());
         }

+        let new_layout = Layout::from_size_align_unchecked(new_size, memory.align());
         match placement {
-            ReallocPlacement::MayMove => {
-                let ptr = if new_size == 0 {
-                    self.dealloc(ptr, layout);
-                    layout.dangling()
-                } else {
-                    // `realloc` probably checks for `new_size > old_size` or something similar.
-                    // `new_size` must be smaller than or equal to `old_size` due to the safety constraint,
-                    // and `new_size` == `old_size` was caught before
-                    intrinsics::assume(new_size < old_size);
-                    NonNull::new(realloc(ptr.as_ptr(), layout, new_size)).ok_or(AllocErr)?
-                };
-                Ok((ptr, new_size))
-            }
+            ReallocPlacement::InPlace => return Err(AllocErr),
+            ReallocPlacement::MayMove if new_size == 0 => {
+                let new_memory = MemoryBlock::new(new_layout.dangling(), new_layout);
+                let old_memory = mem::replace(memory, new_memory);
+                self.dealloc(old_memory)
+            }
+            ReallocPlacement::MayMove => {
+                // `realloc` probably checks for `new_size < old_size` or something similar.
+                intrinsics::assume(new_size < old_size);
+                let ptr = realloc(memory.ptr().as_ptr(), memory.layout(), new_size);
+                *memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_layout);
+            }
-            ReallocPlacement::InPlace => Err(AllocErr),
         }
+        Ok(())
     }
 }

@@ -272,7 +266,7 @@ unsafe impl AllocRef for Global {
 unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
     let layout = Layout::from_size_align_unchecked(size, align);
     match Global.alloc(layout, AllocInit::Uninitialized) {
-        Ok((ptr, _)) => ptr.as_ptr(),
+        Ok(memory) => memory.ptr().as_ptr(),
         Err(_) => handle_alloc_error(layout),
     }
 }
@@ -288,7 +282,7 @@ pub(crate) unsafe fn box_free<T: ?Sized>(ptr: Unique<T>) {
     let size = size_of_val(ptr.as_ref());
     let align = min_align_of_val(ptr.as_ref());
     let layout = Layout::from_size_align_unchecked(size, align);
-    Global.dealloc(ptr.cast().into(), layout)
+    Global.dealloc(MemoryBlock::new(ptr.cast().into(), layout))
 }

 /// Abort on memory allocation error or failure.
@@ -8,17 +8,17 @@ use test::Bencher;
 fn allocate_zeroed() {
     unsafe {
         let layout = Layout::from_size_align(1024, 1).unwrap();
-        let (ptr, _) = Global
+        let memory = Global
             .alloc(layout.clone(), AllocInit::Zeroed)
             .unwrap_or_else(|_| handle_alloc_error(layout));

-        let mut i = ptr.cast::<u8>().as_ptr();
+        let mut i = memory.ptr().cast::<u8>().as_ptr();
         let end = i.add(layout.size());
         while i < end {
             assert_eq!(*i, 0);
             i = i.offset(1);
         }
-        Global.dealloc(ptr, layout);
+        Global.dealloc(memory);
     }
 }

@@ -143,7 +143,6 @@ use core::ops::{
 };
 use core::pin::Pin;
 use core::ptr::{self, NonNull, Unique};
-use core::slice;
 use core::task::{Context, Poll};

 use crate::alloc::{self, AllocInit, AllocRef, Global};
@@ -199,7 +198,7 @@ impl<T> Box<T> {
         let ptr = Global
             .alloc(layout, AllocInit::Uninitialized)
             .unwrap_or_else(|_| alloc::handle_alloc_error(layout))
-            .0
+            .ptr()
             .cast();
         unsafe { Box::from_raw(ptr.as_ptr()) }
     }
@@ -228,7 +227,7 @@ impl<T> Box<T> {
         let ptr = Global
             .alloc(layout, AllocInit::Zeroed)
             .unwrap_or_else(|_| alloc::handle_alloc_error(layout))
-            .0
+            .ptr()
             .cast();
         unsafe { Box::from_raw(ptr.as_ptr()) }
     }
@@ -265,13 +264,7 @@ impl<T> Box<[T]> {
     /// ```
     #[unstable(feature = "new_uninit", issue = "63291")]
     pub fn new_uninit_slice(len: usize) -> Box<[mem::MaybeUninit<T>]> {
-        let layout = alloc::Layout::array::<mem::MaybeUninit<T>>(len).unwrap();
-        let ptr = Global
-            .alloc(layout, AllocInit::Uninitialized)
-            .unwrap_or_else(|_| alloc::handle_alloc_error(layout))
-            .0
-            .cast();
-        unsafe { Box::from_raw(slice::from_raw_parts_mut(ptr.as_ptr(), len)) }
+        unsafe { RawVec::with_capacity(len).into_box(len) }
     }
 }

@@ -776,7 +769,7 @@ impl<T: Copy> From<&[T]> for Box<[T]> {
         let buf = RawVec::with_capacity(len);
         unsafe {
             ptr::copy_nonoverlapping(slice.as_ptr(), buf.ptr(), len);
-            buf.into_box().assume_init()
+            buf.into_box(slice.len()).assume_init()
         }
     }
 }

@@ -31,6 +31,7 @@
 // - A node of length `n` has `n` keys, `n` values, and (in an internal node) `n + 1` edges.
 //   This implies that even an empty internal node has at least one edge.

+use core::alloc::MemoryBlock;
 use core::cmp::Ordering;
 use core::marker::PhantomData;
 use core::mem::{self, MaybeUninit};
@@ -227,7 +228,10 @@ impl<K, V> Root<K, V> {
         }

         unsafe {
-            Global.dealloc(NonNull::from(top).cast(), Layout::new::<InternalNode<K, V>>());
+            Global.dealloc(MemoryBlock::new(
+                NonNull::from(top).cast(),
+                Layout::new::<InternalNode<K, V>>(),
+            ));
         }
     }
 }
@@ -392,14 +396,14 @@ impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
         let height = self.height;
         let node = self.node;
         let ret = self.ascend().ok();
-        Global.dealloc(
+        Global.dealloc(MemoryBlock::new(
             node.cast(),
             if height > 0 {
                 Layout::new::<InternalNode<K, V>>()
             } else {
                 Layout::new::<LeafNode<K, V>>()
             },
-        );
+        ));
         ret
     }
 }
@@ -1142,7 +1146,7 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::

         (*left_node.as_leaf_mut()).len += right_len as u16 + 1;

-        if self.node.height > 1 {
+        let layout = if self.node.height > 1 {
             ptr::copy_nonoverlapping(
                 right_node.cast_unchecked().as_internal().edges.as_ptr(),
                 left_node
@@ -1159,10 +1163,11 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::
                     .correct_parent_link();
             }

-            Global.dealloc(right_node.node.cast(), Layout::new::<InternalNode<K, V>>());
+            Layout::new::<InternalNode<K, V>>()
         } else {
-            Global.dealloc(right_node.node.cast(), Layout::new::<LeafNode<K, V>>());
-        }
+            Layout::new::<LeafNode<K, V>>()
+        };
+        Global.dealloc(MemoryBlock::new(right_node.node.cast(), layout));

         Handle::new_edge(self.node, self.idx)
     }

@@ -100,6 +100,7 @@
 #![feature(lang_items)]
 #![feature(libc)]
 #![cfg_attr(not(bootstrap), feature(negative_impls))]
+#![feature(new_uninit)]
 #![feature(nll)]
 #![feature(optin_builtin_traits)]
 #![feature(pattern)]

@@ -1,6 +1,7 @@
 #![unstable(feature = "raw_vec_internals", reason = "implementation detail", issue = "none")]
 #![doc(hidden)]

+use core::alloc::MemoryBlock;
 use core::cmp;
 use core::mem::{self, MaybeUninit};
 use core::ops::Drop;
@@ -24,6 +25,9 @@ mod tests;
 /// involved. This type is excellent for building your own data structures like Vec and VecDeque.
 /// In particular:
 ///
+/// * Produces `Unique::empty()` on zero-sized types.
+/// * Produces `Unique::empty()` on zero-length allocations.
+/// * Avoids freeing `Unique::empty()`.
 /// * Catches all overflows in capacity computations (promotes them to "capacity overflow" panics).
 /// * Guards against 32-bit systems allocating more than isize::MAX bytes.
 /// * Guards against overflowing your length.
@@ -44,38 +48,7 @@ mod tests;
 pub struct RawVec<T, A: AllocRef = Global> {
     ptr: Unique<T>,
     cap: usize,
-    a: A,
-}
-
-impl<T, A: AllocRef> RawVec<T, A> {
-    /// Like `new`, but parameterized over the choice of allocator for
-    /// the returned `RawVec`.
-    pub const fn new_in(a: A) -> Self {
-        // `cap: 0` means "unallocated". zero-sized allocations are handled by `AllocRef`
-        Self { ptr: Unique::empty(), cap: 0, a }
-    }
-
-    /// Like `with_capacity`, but parameterized over the choice of
-    /// allocator for the returned `RawVec`.
-    #[inline]
-    pub fn with_capacity_in(capacity: usize, a: A) -> Self {
-        Self::allocate_in(capacity, Uninitialized, a)
-    }
-
-    /// Like `with_capacity_zeroed`, but parameterized over the choice
-    /// of allocator for the returned `RawVec`.
-    #[inline]
-    pub fn with_capacity_zeroed_in(capacity: usize, a: A) -> Self {
-        Self::allocate_in(capacity, Zeroed, a)
-    }
-
-    fn allocate_in(capacity: usize, init: AllocInit, mut a: A) -> Self {
-        let layout = Layout::array::<T>(capacity).unwrap_or_else(|_| capacity_overflow());
-        alloc_guard(layout.size()).unwrap_or_else(|_| capacity_overflow());
-
-        let (ptr, excess) = a.alloc(layout, init).unwrap_or_else(|_| handle_alloc_error(layout));
-        Self { ptr: ptr.cast().into(), cap: Self::capacity_from_bytes(excess), a }
-    }
+    alloc: A,
 }

 impl<T> RawVec<T, Global> {
@@ -126,23 +99,7 @@ impl<T> RawVec<T, Global> {
     pub fn with_capacity_zeroed(capacity: usize) -> Self {
         Self::with_capacity_zeroed_in(capacity, Global)
     }
 }

-impl<T, A: AllocRef> RawVec<T, A> {
-    /// Reconstitutes a `RawVec` from a pointer, capacity, and allocator.
-    ///
-    /// # Undefined Behavior
-    ///
-    /// The `ptr` must be allocated (via the given allocator `a`), and with the given `capacity`.
-    /// The `capacity` cannot exceed `isize::MAX` (only a concern on 32-bit systems).
-    /// If the `ptr` and `capacity` come from a `RawVec` created via `a`, then this is guaranteed.
-    #[inline]
-    pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, a: A) -> Self {
-        Self { ptr: Unique::new_unchecked(ptr), cap: capacity, a }
-    }
-}
-
 impl<T> RawVec<T, Global> {
     /// Reconstitutes a `RawVec` from a pointer and capacity.
     ///
     /// # Undefined Behavior
@@ -166,6 +123,55 @@ impl<T> RawVec<T, Global> {
 }

 impl<T, A: AllocRef> RawVec<T, A> {
+    /// Like `new`, but parameterized over the choice of allocator for
+    /// the returned `RawVec`.
+    pub const fn new_in(alloc: A) -> Self {
+        // `cap: 0` means "unallocated". zero-sized types are ignored.
+        Self { ptr: Unique::empty(), cap: 0, alloc }
+    }
+
+    /// Like `with_capacity`, but parameterized over the choice of
+    /// allocator for the returned `RawVec`.
+    #[inline]
+    pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
+        Self::allocate_in(capacity, Uninitialized, alloc)
+    }
+
+    /// Like `with_capacity_zeroed`, but parameterized over the choice
+    /// of allocator for the returned `RawVec`.
+    #[inline]
+    pub fn with_capacity_zeroed_in(capacity: usize, alloc: A) -> Self {
+        Self::allocate_in(capacity, Zeroed, alloc)
+    }
+
+    fn allocate_in(capacity: usize, init: AllocInit, mut alloc: A) -> Self {
+        if mem::size_of::<T>() == 0 {
+            Self::new_in(alloc)
+        } else {
+            let layout = Layout::array::<T>(capacity).unwrap_or_else(|_| capacity_overflow());
+            alloc_guard(layout.size()).unwrap_or_else(|_| capacity_overflow());
+
+            let memory = alloc.alloc(layout, init).unwrap_or_else(|_| handle_alloc_error(layout));
+            Self {
+                ptr: memory.ptr().cast().into(),
+                cap: Self::capacity_from_bytes(memory.size()),
+                alloc,
+            }
+        }
+    }
+
+    /// Reconstitutes a `RawVec` from a pointer, capacity, and allocator.
+    ///
+    /// # Undefined Behavior
+    ///
+    /// The `ptr` must be allocated (via the given allocator `a`), and with the given `capacity`.
+    /// The `capacity` cannot exceed `isize::MAX` (only a concern on 32-bit systems).
+    /// If the `ptr` and `capacity` come from a `RawVec` created via `a`, then this is guaranteed.
+    #[inline]
+    pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, a: A) -> Self {
+        Self { ptr: Unique::new_unchecked(ptr), cap: capacity, alloc: a }
+    }
+
     /// Gets a raw pointer to the start of the allocation. Note that this is
     /// `Unique::empty()` if `capacity == 0` or `T` is zero-sized. In the former case, you must
     /// be careful.
@@ -183,16 +189,16 @@ impl<T, A: AllocRef> RawVec<T, A> {

     /// Returns a shared reference to the allocator backing this `RawVec`.
     pub fn alloc(&self) -> &A {
-        &self.a
+        &self.alloc
     }

     /// Returns a mutable reference to the allocator backing this `RawVec`.
     pub fn alloc_mut(&mut self) -> &mut A {
-        &mut self.a
+        &mut self.alloc
     }

-    fn current_layout(&self) -> Option<Layout> {
-        if self.cap == 0 {
+    fn current_memory(&self) -> Option<MemoryBlock> {
+        if mem::size_of::<T>() == 0 || self.cap == 0 {
             None
         } else {
             // We have an allocated chunk of memory, so we can bypass runtime
@@ -200,7 +206,8 @@ impl<T, A: AllocRef> RawVec<T, A> {
             unsafe {
                 let align = mem::align_of::<T>();
                 let size = mem::size_of::<T>() * self.cap;
-                Some(Layout::from_size_align_unchecked(size, align))
+                let layout = Layout::from_size_align_unchecked(size, align);
+                Some(MemoryBlock::new(self.ptr.cast().into(), layout))
             }
         }
     }
@@ -454,14 +461,19 @@ impl<T, A: AllocRef> RawVec<T, A> {
     /// Returns if the buffer needs to grow to fulfill the needed extra capacity.
     /// Mainly used to make inlining reserve-calls possible without inlining `grow`.
     fn needs_to_grow(&self, used_capacity: usize, needed_extra_capacity: usize) -> bool {
-        needed_extra_capacity > self.capacity().wrapping_sub(used_capacity)
+        mem::size_of::<T>() != 0
+            && needed_extra_capacity > self.capacity().wrapping_sub(used_capacity)
     }

     fn capacity_from_bytes(excess: usize) -> usize {
-        match mem::size_of::<T>() {
-            0 => usize::MAX,
-            elem_size => excess / elem_size,
-        }
+        debug_assert_ne!(mem::size_of::<T>(), 0);
+        excess / mem::size_of::<T>()
+    }
+
+    fn set_memory(&mut self, memory: MemoryBlock) {
+        self.ptr = memory.ptr().cast().into();
+        self.cap = Self::capacity_from_bytes(memory.size());
+        drop(memory);
     }

     /// Single method to handle all possibilities of growing the buffer.
@@ -471,9 +483,9 @@ impl<T, A: AllocRef> RawVec<T, A> {
         placement: ReallocPlacement,
         init: AllocInit,
     ) -> Result<(), TryReserveError> {
-        let elem_size = mem::size_of::<T>();
-        let new_layout = match strategy {
+        let layout = match strategy {
             Double => unsafe {
+                let elem_size = mem::size_of::<T>();
                 if elem_size == 0 {
                     // Since we return a capacity of `usize::MAX` when `elem_size` is
                     // 0, getting to here necessarily means the `RawVec` is overfull.
@@ -511,24 +523,24 @@ impl<T, A: AllocRef> RawVec<T, A> {
             }
         };

-        let allocation = if let Some(old_layout) = self.current_layout() {
-            debug_assert!(old_layout.align() == new_layout.align());
+        let memory = if let Some(mut memory) = self.current_memory() {
+            debug_assert_eq!(memory.align(), layout.align());
             unsafe {
-                self.a.grow(self.ptr.cast().into(), old_layout, new_layout.size(), placement, init)
-            }
+                self.alloc
+                    .grow(&mut memory, layout.size(), placement, init)
+                    .map_err(|_| AllocError { layout, non_exhaustive: () })?
+            };
+            memory
         } else {
             match placement {
-                MayMove => self.a.alloc(new_layout, init),
+                MayMove => self.alloc.alloc(layout, init),
                 InPlace => Err(AllocErr),
             }
+            .map_err(|_| AllocError { layout, non_exhaustive: () })?
         };

-        allocation
-            .map(|(ptr, excess)| {
-                self.ptr = ptr.cast().into();
-                self.cap = Self::capacity_from_bytes(excess);
-            })
-            .map_err(|_| TryReserveError::AllocError { layout: new_layout, non_exhaustive: () })
+        self.set_memory(memory);
+        Ok(())
     }

     fn shrink(
@@ -538,64 +550,52 @@ impl<T, A: AllocRef> RawVec<T, A> {
     ) -> Result<(), TryReserveError> {
         assert!(amount <= self.cap, "Tried to shrink to a larger capacity");

-        let elem_size = mem::size_of::<T>();
-        let old_layout =
-            if let Some(layout) = self.current_layout() { layout } else { return Ok(()) };
-        let old_ptr = self.ptr.cast().into();
-        let new_size = amount * elem_size;
+        let mut memory = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) };
+        let new_size = amount * mem::size_of::<T>();

-        let allocation = unsafe {
-            if amount == 0 && placement == MayMove {
-                self.dealloc_buffer();
-                Ok((old_layout.dangling(), 0))
-            } else {
-                self.a.shrink(old_ptr, old_layout, new_size, placement)
-            }
-        };
+        unsafe {
+            self.alloc.shrink(&mut memory, new_size, placement).map_err(|_| {
+                TryReserveError::AllocError {
+                    layout: Layout::from_size_align_unchecked(new_size, memory.align()),
+                    non_exhaustive: (),
+                }
+            })?;
+        }

-        allocation
-            .map(|(ptr, excess)| {
-                self.ptr = ptr.cast().into();
-                self.cap = Self::capacity_from_bytes(excess);
-            })
-            .map_err(|_| TryReserveError::AllocError {
-                layout: unsafe { Layout::from_size_align_unchecked(new_size, old_layout.align()) },
-                non_exhaustive: (),
-            })
+        self.set_memory(memory);
+        Ok(())
     }
 }

 impl<T> RawVec<T, Global> {
-    /// Converts the entire buffer into `Box<[T]>`.
+    /// Converts the entire buffer into `Box<[T]>` with the specified `len`.
     ///
     /// Note that this will correctly reconstitute any `cap` changes
     /// that may have been performed. (See description of type for details.)
-    pub fn into_box(self) -> Box<[MaybeUninit<T>]> {
-        unsafe {
-            // NOTE: not calling `capacity()` here; actually using the real `cap` field!
-            let slice = slice::from_raw_parts_mut(self.ptr() as *mut MaybeUninit<T>, self.cap);
-            let output = Box::from_raw(slice);
-            mem::forget(self);
-            output
-        }
-    }
-}
+    ///
+    /// # Safety
+    ///
+    /// * `len` must be smaller than or equal to `self.capacity()`
+    pub unsafe fn into_box(self, len: usize) -> Box<[MaybeUninit<T>]> {
+        debug_assert!(
+            len <= self.capacity(),
+            "`len` must be smaller than or equal to `self.capacity()`"
+        );

-impl<T, A: AllocRef> RawVec<T, A> {
-    /// Frees the memory owned by the `RawVec` *without* trying to drop its contents.
-    pub unsafe fn dealloc_buffer(&mut self) {
-        if let Some(layout) = self.current_layout() {
-            self.a.dealloc(self.ptr.cast().into(), layout);
-            self.ptr = Unique::empty();
-            self.cap = 0;
-        }
-    }
-}
+        // NOTE: not calling `capacity()` here; actually using the real `cap` field!
+        let slice = slice::from_raw_parts_mut(self.ptr() as *mut MaybeUninit<T>, len);
+        let output = Box::from_raw(slice);
+        mem::forget(self);
+        output
+    }
+}

 unsafe impl<#[may_dangle] T, A: AllocRef> Drop for RawVec<T, A> {
     /// Frees the memory owned by the `RawVec` *without* trying to drop its contents.
     fn drop(&mut self) {
-        unsafe { self.dealloc_buffer() }
+        if let Some(memory) = self.current_memory() {
+            unsafe { self.alloc.dealloc(memory) }
+        }
     }
 }
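
Note on the `into_box(len)` contract above: the allocator may over-allocate, so `cap` can exceed the requested capacity, and a boxed slice built from the raw `cap` field could claim more (uninitialized) elements than the caller ever wrote. The length is therefore passed explicitly and must not exceed `self.capacity()`. A sketch of the call pattern the new signature expects, restating what `Box::new_uninit_slice` does in this diff (`boxed_uninit` is a hypothetical name, not part of the commit):

```rust
// Inside liballoc, with `RawVec`, `Box`, and `MaybeUninit` in scope:
fn boxed_uninit<T>(len: usize) -> Box<[MaybeUninit<T>]> {
    // Sound per the documented safety requirement: `with_capacity(len)`
    // guarantees `capacity() >= len`.
    unsafe { RawVec::with_capacity(len).into_box(len) }
}
```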
@@ -1,5 +1,4 @@
 use super::*;
-use core::ptr::NonNull;

 #[test]
 fn allocator_param() {
@@ -13,6 +12,7 @@ fn allocator_param() {
     //
     // Instead, this just checks that the `RawVec` methods do at
     // least go through the Allocator API when it reserves
+    // storage.

     // A dumb allocator that consumes a fixed amount of fuel
@@ -21,11 +21,7 @@ fn allocator_param() {
         fuel: usize,
     }
     unsafe impl AllocRef for BoundedAlloc {
-        fn alloc(
-            &mut self,
-            layout: Layout,
-            init: AllocInit,
-        ) -> Result<(NonNull<u8>, usize), AllocErr> {
+        fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<MemoryBlock, AllocErr> {
             let size = layout.size();
             if size > self.fuel {
                 return Err(AllocErr);
@@ -38,16 +34,16 @@ fn allocator_param() {
                 err @ Err(_) => err,
             }
         }
-        unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
-            Global.dealloc(ptr, layout)
+        unsafe fn dealloc(&mut self, memory: MemoryBlock) {
+            Global.dealloc(memory)
         }
     }

     let a = BoundedAlloc { fuel: 500 };
     let mut v: RawVec<u8, _> = RawVec::with_capacity_in(50, a);
-    assert_eq!(v.a.fuel, 450);
+    assert_eq!(v.alloc.fuel, 450);
     v.reserve(50, 150); // (causes a realloc, thus using 50 + 150 = 200 units of fuel)
-    assert_eq!(v.a.fuel, 250);
+    assert_eq!(v.alloc.fuel, 250);
 }

 #[test]
@@ -234,6 +234,7 @@ use crate::boxed::Box;
 #[cfg(test)]
 use std::boxed::Box;

+use core::alloc::MemoryBlock;
 use core::any::Any;
 use core::array::LengthAtMost32;
 use core::borrow;
@@ -936,12 +937,12 @@ impl<T: ?Sized> Rc<T> {
         let layout = Layout::new::<RcBox<()>>().extend(value_layout).unwrap().0.pad_to_align();

         // Allocate for the layout.
-        let (mem, _) = Global
+        let mem = Global
             .alloc(layout, AllocInit::Uninitialized)
             .unwrap_or_else(|_| handle_alloc_error(layout));

         // Initialize the RcBox
-        let inner = mem_to_rcbox(mem.as_ptr());
+        let inner = mem_to_rcbox(mem.ptr().as_ptr());
         debug_assert_eq!(Layout::for_value(&*inner), layout);

         ptr::write(&mut (*inner).strong, Cell::new(1));
@@ -1031,7 +1032,7 @@ impl<T> Rc<[T]> {
                 let slice = from_raw_parts_mut(self.elems, self.n_elems);
                 ptr::drop_in_place(slice);

-                Global.dealloc(self.mem, self.layout);
+                Global.dealloc(MemoryBlock::new(self.mem, self.layout));
             }
         }
     }
@@ -1131,7 +1132,10 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
                 self.dec_weak();

                 if self.weak() == 0 {
-                    Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
+                    Global.dealloc(MemoryBlock::new(
+                        self.ptr.cast(),
+                        Layout::for_value(self.ptr.as_ref()),
+                    ));
                 }
             }
         }
@@ -1939,7 +1943,10 @@ impl<T: ?Sized> Drop for Weak<T> {
             // the strong pointers have disappeared.
             if inner.weak() == 0 {
                 unsafe {
-                    Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
+                    Global.dealloc(MemoryBlock::new(
+                        self.ptr.cast(),
+                        Layout::for_value(self.ptr.as_ref()),
+                    ));
                 }
             }
         }

@@ -6,6 +6,7 @@
 //!
 //! [arc]: struct.Arc.html

+use core::alloc::MemoryBlock;
 use core::any::Any;
 use core::array::LengthAtMost32;
 use core::borrow;
@@ -770,7 +771,7 @@ impl<T: ?Sized> Arc<T> {

         if self.inner().weak.fetch_sub(1, Release) == 1 {
             acquire!(self.inner().weak);
-            Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()))
+            Global.dealloc(MemoryBlock::new(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())))
         }
     }

@@ -814,12 +815,12 @@ impl<T: ?Sized> Arc<T> {
         // reference (see #54908).
         let layout = Layout::new::<ArcInner<()>>().extend(value_layout).unwrap().0.pad_to_align();

-        let (mem, _) = Global
+        let mem = Global
             .alloc(layout, AllocInit::Uninitialized)
             .unwrap_or_else(|_| handle_alloc_error(layout));

         // Initialize the ArcInner
-        let inner = mem_to_arcinner(mem.as_ptr());
+        let inner = mem_to_arcinner(mem.ptr().as_ptr());
         debug_assert_eq!(Layout::for_value(&*inner), layout);

         ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
@@ -909,7 +910,7 @@ impl<T> Arc<[T]> {
                 let slice = from_raw_parts_mut(self.elems, self.n_elems);
                 ptr::drop_in_place(slice);

-                Global.dealloc(self.mem.cast(), self.layout);
+                Global.dealloc(MemoryBlock::new(self.mem.cast(), self.layout));
             }
         }
     }
@@ -1734,7 +1735,12 @@ impl<T: ?Sized> Drop for Weak<T> {

         if inner.weak.fetch_sub(1, Release) == 1 {
             acquire!(inner.weak);
-            unsafe { Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) }
+            unsafe {
+                Global.dealloc(MemoryBlock::new(
+                    self.ptr.cast(),
+                    Layout::for_value(self.ptr.as_ref()),
+                ))
+            }
         }
     }
 }

@@ -1,4 +1,4 @@
-use std::alloc::{AllocInit, AllocRef, Global, Layout, System};
+use std::alloc::{AllocInit, AllocRef, Global, Layout, MemoryBlock, System};

 /// Issue #45955 and #62251.
 #[test]
@@ -26,7 +26,7 @@ fn check_overalign_requests<T: AllocRef>(mut allocator: T) {
                     AllocInit::Uninitialized,
                 )
                 .unwrap()
-                .0
+                .ptr()
             })
             .collect();
         for &ptr in &pointers {
@@ -39,7 +39,10 @@ fn check_overalign_requests<T: AllocRef>(mut allocator: T) {

         // Clean up
         for &ptr in &pointers {
-            allocator.dealloc(ptr, Layout::from_size_align(size, align).unwrap())
+            allocator.dealloc(MemoryBlock::new(
+                ptr,
+                Layout::from_size_align(size, align).unwrap(),
+            ))
         }
     }
 }

@@ -678,8 +678,9 @@ impl<T> Vec<T> {
         unsafe {
             self.shrink_to_fit();
             let buf = ptr::read(&self.buf);
+            let len = self.len();
             mem::forget(self);
-            buf.into_box().assume_init()
+            buf.into_box(len).assume_init()
         }
     }

@@ -11,7 +11,8 @@ pub use self::global::GlobalAlloc;
 pub use self::layout::{Layout, LayoutErr};

 use crate::fmt;
-use crate::ptr::{self, NonNull};
+use crate::mem;
+use crate::ptr::{self, NonNull, Unique};

 /// The `AllocErr` error indicates an allocation failure
 /// that may be due to resource exhaustion or to
@@ -41,49 +42,91 @@ pub enum AllocInit {
     Zeroed,
 }

-impl AllocInit {
-    /// Initialize the memory block referenced by `ptr` and specified by `Layout`.
-    ///
-    /// This behaves like calling [`AllocInit::initialize_offset(ptr, layout, 0)`][off].
-    ///
-    /// [off]: AllocInit::initialize_offset
+/// Represents a block of allocated memory returned by an allocator.
+#[derive(Debug)]
+#[unstable(feature = "allocator_api", issue = "32838")]
+#[must_use = "`MemoryBlock` should be passed to `AllocRef::dealloc`"]
+pub struct MemoryBlock {
+    ptr: Unique<u8>,
+    layout: Layout,
+}
+
+impl MemoryBlock {
+    /// Creates a new `MemoryBlock`.
+    ///
+    /// # Safety
+    ///
+    /// * `layout` must [*fit*] the block of memory referenced by `ptr`
+    /// * The block must be allocated with the same alignment as [`layout.align()`], and
+    /// * The provided [`layout.size()`] must fall in the range `min ..= max`, where:
+    ///   - `min` is the size requested when allocating the block, and
+    ///   - `max` is the size of the memory block.
+    #[inline]
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub const unsafe fn new(ptr: NonNull<u8>, layout: Layout) -> Self {
+        Self { ptr: Unique::new_unchecked(ptr.as_ptr()), layout }
+    }
+
+    /// Acquires the underlying `NonNull<u8>` pointer.
+    #[inline]
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub const fn ptr(&self) -> NonNull<u8> {
+        // SAFETY: Unique<T> is always non-null
+        unsafe { NonNull::new_unchecked(self.ptr.as_ptr()) }
+    }
+
+    /// Returns the layout describing the memory block.
+    #[inline]
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub const fn layout(&self) -> Layout {
+        self.layout
+    }
+
+    /// Returns the size of the memory block.
+    #[inline]
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub const fn size(&self) -> usize {
+        self.layout().size()
+    }
+
+    /// Returns the minimum alignment of the memory block.
+    #[inline]
+    #[unstable(feature = "allocator_api", issue = "32838")]
+    pub const fn align(&self) -> usize {
+        self.layout().align()
+    }
+
+    /// Initialize the memory block as specified by `init`.
+    ///
+    /// This behaves like calling [`MemoryBlock::init_offset(init, 0)`][off].
+    ///
+    /// [off]: MemoryBlock::init_offset
     ///
     /// [*fit*]: trait.AllocRef.html#memory-fitting
     #[inline]
     #[unstable(feature = "allocator_api", issue = "32838")]
-    pub unsafe fn initialize(self, ptr: NonNull<u8>, layout: Layout) {
-        self.initialize_offset(ptr, layout, 0)
+    pub fn init(&mut self, init: AllocInit) {
+        // SAFETY: 0 is always smaller or equal to the size
+        unsafe { self.init_offset(init, 0) }
     }

-    /// Initialize the memory block referenced by `ptr` and specified by `Layout` at the specified
-    /// `offset`.
+    /// Initialize the memory block as specified by `init` at the specified `offset`.
     ///
     /// This is a no-op for [`AllocInit::Uninitialized`] and writes zeroes for [`AllocInit::Zeroed`]
     /// at `ptr + offset` until `ptr + layout.size()`.
     ///
     /// # Safety
     ///
-    /// * `layout` must [*fit*] the block of memory referenced by `ptr`
-    ///
-    /// * `offset` must be smaller than or equal to `layout.size()`
+    /// * `offset` must be smaller than or equal to `size()`
     ///
     /// [*fit*]: trait.AllocRef.html#memory-fitting
     #[inline]
     #[unstable(feature = "allocator_api", issue = "32838")]
-    pub unsafe fn initialize_offset(self, ptr: NonNull<u8>, layout: Layout, offset: usize) {
-        debug_assert!(
-            offset <= layout.size(),
-            "`offset` must be smaller than or equal to `layout.size()`"
-        );
-        match self {
+    pub unsafe fn init_offset(&mut self, init: AllocInit, offset: usize) {
+        debug_assert!(offset <= self.size(), "`offset` must be smaller than or equal to `size()`");
+        match init {
             AllocInit::Uninitialized => (),
             AllocInit::Zeroed => {
-                let new_ptr = ptr.as_ptr().add(offset);
-                let size = layout.size() - offset;
-                ptr::write_bytes(new_ptr, 0, size);
+                self.ptr().as_ptr().add(offset).write_bytes(0, self.size() - offset)
             }
         }
     }
 }
@@ -116,70 +159,23 @@ pub enum ReallocPlacement {
 ///
 /// Unlike [`GlobalAlloc`][], zero-sized allocations are allowed in `AllocRef`. If an underlying
 /// allocator does not support this (like jemalloc) or return a null pointer (such as
-/// `libc::malloc`), this case must be caught. [`Layout::dangling()`][] then can be used to create
-/// an aligned `NonNull<u8>`.
-///
-/// ### Currently allocated memory
-///
-/// Some of the methods require that a memory block be *currently allocated* via an allocator. This
-/// means that:
-///
-/// * the starting address for that memory block was previously returned by [`alloc`], [`grow`], or
-///   [`shrink`], and
-///
-/// * the memory block has not been subsequently deallocated, where blocks are either deallocated
-///   directly by being passed to [`dealloc`] or were changed by being passed to [`grow`] or
-///   [`shrink`] that returns `Ok`. If `grow` or `shrink` have returned `Err`, the passed pointer
-///   remains valid.
-///
-/// [`alloc`]: AllocRef::alloc
-/// [`grow`]: AllocRef::grow
-/// [`shrink`]: AllocRef::shrink
-/// [`dealloc`]: AllocRef::dealloc
-///
-/// ### Memory fitting
-///
-/// Some of the methods require that a layout *fit* a memory block. What it means for a layout to
-/// "fit" a memory block means (or equivalently, for a memory block to "fit" a layout) is that the
-/// following conditions must hold:
-///
-/// * The block must be allocated with the same alignment as [`layout.align()`], and
-///
-/// * The provided [`layout.size()`] must fall in the range `min ..= max`, where:
-///   - `min` is the size of the layout most recently used to allocate the block, and
-///   - `max` is the latest actual size returned from [`alloc`], [`grow`], or [`shrink`].
-///
-/// [`layout.align()`]: Layout::align
-/// [`layout.size()`]: Layout::size
-///
-/// ### Notes
-///
-/// * if a layout `k` fits a memory block (denoted by `ptr`) currently allocated via an allocator
-///   `a`, then it is legal to use that layout to deallocate it, i.e.,
-///   [`a.dealloc(ptr, k);`][`dealloc`], and
-///
-/// * if an allocator does not support overallocating, it is fine to simply return
-///   [`layout.size()`] as the actual size.
+/// `libc::malloc`), this case must be caught.
 ///
 /// # Safety
 ///
-/// * Pointers returned from an allocator must point to valid memory and retain their validity until
-///   the instance and all of its clones are dropped,
+/// * Memory blocks returned from an allocator must point to valid memory and retain their validity
+///   until the instance and all of its clones are dropped, and
 ///
-/// * cloning or moving the allocator must not invalidate pointers returned from this allocator.
-///   A cloned allocator must behave like the same allocator, and
-///
-/// * any pointer to a memory block which is [*currently allocated*] may be passed to any other
-///   method of the allocator.
+/// * cloning or moving the allocator must not invalidate memory blocks returned from this
+///   allocator. A cloned allocator must behave like the same allocator.
 ///
 /// [*currently allocated*]: #currently-allocated-memory
 #[unstable(feature = "allocator_api", issue = "32838")]
 pub unsafe trait AllocRef {
-    /// On success, returns a pointer meeting the size and alignment guarantees of `layout` and the
-    /// actual size of the allocated block, which is greater than or equal to `layout.size()`.
+    /// On success, returns a memory block meeting the size and alignment guarantees of `layout`.
     ///
-    /// The returned block of storage is initialized as specified by [`init`], all the way up to
-    /// the returned `actual_size`.
+    /// The returned block may have a larger size than specified by `layout.size()` and is
+    /// initialized as specified by [`init`], all the way up to the returned size of the block.
     ///
     /// [`init`]: AllocInit
     ///
@@ -196,58 +192,32 @@ pub unsafe trait AllocRef {
     /// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar.
     ///
     /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
-    fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<(NonNull<u8>, usize), AllocErr>;
+    fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<MemoryBlock, AllocErr>;

-    /// Deallocates the memory referenced by `ptr`.
+    /// Deallocates the memory denoted by `memory`.
     ///
     /// # Safety
     ///
-    /// * `ptr` must denote a block of memory [*currently allocated*] via this allocator,
-    ///
-    /// * `layout` must [*fit*] that block of memory, and
-    ///
-    /// * the alignment of the `layout` must match the alignment used to allocate that block of
-    ///   memory.
-    ///
-    /// [*currently allocated*]: #currently-allocated-memory
-    /// [*fit*]: #memory-fitting
-    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout);
+    /// `memory` must be a memory block returned by this allocator.
+    unsafe fn dealloc(&mut self, memory: MemoryBlock);

-    /// Attempts to extend the allocation referenced by `ptr` to fit `new_size`.
-    ///
-    /// Returns a pointer and the actual size of the allocated block. The pointer is suitable for
-    /// holding data described by a new layout with `layout`’s alignment and a size given by
-    /// `new_size`. To accomplish this, the allocator may extend the allocation referenced by `ptr`
-    /// to fit the new layout.
-    ///
-    /// If this returns `Ok`, then ownership of the memory block referenced by `ptr` has been
-    /// transferred to this allocator. The memory may or may not have been freed, and should be
-    /// considered unusable (unless of course it was transferred back to the caller again via the
-    /// return value of this method).
-    ///
-    /// If this method returns `Err`, then ownership of the memory block has not been transferred to
-    /// this allocator, and the contents of the memory block are unaltered.
+    /// Attempts to extend the memory block.
     ///
     /// The behavior of how the allocator tries to grow the memory is specified by [`placement`].
-    /// The first `layout.size()` bytes of memory are preserved or copied as appropriate from `ptr`,
-    /// and the remaining bytes, from `layout.size()` to the returned actual size, are initialized
-    /// according to [`init`].
+    /// The first `memory.size()` bytes are preserved or copied as appropriate from `ptr`, and the
+    /// remaining bytes up to the new `memory.size()` are initialized according to [`init`].
     ///
     /// [`placement`]: ReallocPlacement
     /// [`init`]: AllocInit
     ///
     /// # Safety
     ///
-    /// * `ptr` must be [*currently allocated*] via this allocator,
-    ///
-    /// * `layout` must [*fit*] the `ptr`. (The `new_size` argument need not fit it.)
-    ///
-    // We can't require that `new_size` is strictly greater than `layout.size()` because of ZSTs.
+    /// * `memory` must be a memory block returned by this allocator.
+    // We can't require that `new_size` is strictly greater than `memory.size()` because of ZSTs.
     // An alternative would be
-    // * `new_size must be strictly greater than `layout.size()` or both are zero
-    /// * `new_size` must be greater than or equal to `layout.size()`
-    ///
-    /// * `new_size`, when rounded up to the nearest multiple of `layout.align()`, must not overflow
+    // * `new_size must be strictly greater than `memory.size()` or both are zero
+    /// * `new_size` must be greater than or equal to `memory.size()`
+    /// * `new_size`, when rounded up to the nearest multiple of `memory.align()`, must not overflow
     ///   (i.e., the rounded value must be less than `usize::MAX`).
     ///
     /// [*currently allocated*]: #currently-allocated-memory
@@ -268,64 +238,50 @@ pub unsafe trait AllocRef {
     /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
     unsafe fn grow(
         &mut self,
-        ptr: NonNull<u8>,
-        layout: Layout,
+        memory: &mut MemoryBlock,
         new_size: usize,
         placement: ReallocPlacement,
         init: AllocInit,
-    ) -> Result<(NonNull<u8>, usize), AllocErr> {
-        let old_size = layout.size();
-        debug_assert!(
-            new_size >= old_size,
-            "`new_size` must be greater than or equal to `layout.size()`"
-        );
-
-        if new_size == old_size {
-            return Ok((ptr, new_size));
-        }
-
+    ) -> Result<(), AllocErr> {
         match placement {
-            ReallocPlacement::MayMove => {
-                let (new_ptr, alloc_size) =
-                    self.alloc(Layout::from_size_align_unchecked(new_size, layout.align()), init)?;
-                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), old_size);
-                self.dealloc(ptr, layout);
-                Ok((new_ptr, alloc_size))
-            }
             ReallocPlacement::InPlace => Err(AllocErr),
+            ReallocPlacement::MayMove => {
+                let old_size = memory.size();
+                debug_assert!(
+                    new_size >= old_size,
+                    "`new_size` must be greater than or equal to `memory.size()`"
+                );
+
+                if new_size == old_size {
+                    return Ok(());
+                }
+
+                let new_layout = Layout::from_size_align_unchecked(new_size, memory.align());
+                let new_memory = self.alloc(new_layout, init)?;
+                ptr::copy_nonoverlapping(
+                    memory.ptr().as_ptr(),
+                    new_memory.ptr().as_ptr(),
+                    old_size,
+                );
+                self.dealloc(mem::replace(memory, new_memory));
+                Ok(())
+            }
         }
     }

-    /// Attempts to shrink the allocation referenced by `ptr` to fit `new_size`.
+    /// Attempts to shrink the memory block.
     ///
-    /// Returns a pointer and the actual size of the allocated block. The pointer is suitable for
-    /// holding data described by a new layout with `layout`’s alignment and a size given by
-    /// `new_size`. To accomplish this, the allocator may shrink the allocation referenced by `ptr`
-    /// to fit the new layout.
-    ///
-    /// The behavior on how the allocator tries to shrink the memory can be specified by
-    /// [`placement`].
-    ///
-    /// If this returns `Ok`, then ownership of the memory block referenced by `ptr` has been
-    /// transferred to this allocator. The memory may or may not have been freed, and should be
-    /// considered unusable unless it was transferred back to the caller again via the
-    /// return value of this method.
-    ///
-    /// If this method returns `Err`, then ownership of the memory block has not been transferred to
-    /// this allocator, and the contents of the memory block are unaltered.
+    /// The behavior of how the allocator tries to shrink the memory is specified by [`placement`].
    ///
     /// [`placement`]: ReallocPlacement
     ///
     /// # Safety
     ///
-    /// * `ptr` must be [*currently allocated*] via this allocator,
-    ///
-    /// * `layout` must [*fit*] the `ptr`. (The `new_size` argument need not fit it.)
-    ///
-    // We can't require that `new_size` is strictly smaller than `layout.size()` because of ZSTs.
+    /// * `memory` must be a memory block returned by this allocator.
+    // We can't require that `new_size` is strictly smaller than `memory.size()` because of ZSTs.
     // An alternative would be
-    // * `new_size must be strictly smaller than `layout.size()` or both are zero
-    /// * `new_size` must be smaller than or equal to `layout.size()`
+    // * `new_size must be strictly smaller than `memory.size()` or both are zero
+    /// * `new_size` must be smaller than or equal to `memory.size()`
     ///
     /// [*currently allocated*]: #currently-allocated-memory
     /// [*fit*]: #memory-fitting
@@ -333,7 +289,7 @@ pub unsafe trait AllocRef {
     /// # Errors
     ///
     /// Returns `Err` if the new layout does not meet the allocator's size and alignment
-    /// constraints of the allocator, or if shrinking otherwise fails.
+    /// constraints of the allocator, or if growing otherwise fails.
     ///
     /// Implementations are encouraged to return `Err` on memory exhaustion rather than panicking or
     /// aborting, but this is not a strict requirement. (Specifically: it is *legal* to implement
@@ -345,32 +301,33 @@ pub unsafe trait AllocRef {
     /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
     unsafe fn shrink(
         &mut self,
-        ptr: NonNull<u8>,
-        layout: Layout,
+        memory: &mut MemoryBlock,
         new_size: usize,
         placement: ReallocPlacement,
-    ) -> Result<(NonNull<u8>, usize), AllocErr> {
-        let old_size = layout.size();
-        debug_assert!(
-            new_size <= old_size,
-            "`new_size` must be smaller than or equal to `layout.size()`"
-        );
-
-        if new_size == old_size {
-            return Ok((ptr, new_size));
-        }
-
+    ) -> Result<(), AllocErr> {
         match placement {
-            ReallocPlacement::MayMove => {
-                let (new_ptr, alloc_size) = self.alloc(
-                    Layout::from_size_align_unchecked(new_size, layout.align()),
-                    AllocInit::Uninitialized,
-                )?;
-                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), new_size);
-                self.dealloc(ptr, layout);
-                Ok((new_ptr, alloc_size))
-            }
             ReallocPlacement::InPlace => Err(AllocErr),
+            ReallocPlacement::MayMove => {
+                let old_size = memory.size();
+                debug_assert!(
+                    new_size <= old_size,
+                    "`new_size` must be smaller than or equal to `layout.size()`"
+                );
+
+                if new_size == old_size {
+                    return Ok(());
+                }
+
+                let new_layout = Layout::from_size_align_unchecked(new_size, memory.align());
+                let new_memory = self.alloc(new_layout, AllocInit::Uninitialized)?;
+                ptr::copy_nonoverlapping(
+                    memory.ptr().as_ptr(),
+                    new_memory.ptr().as_ptr(),
+                    new_size,
+                );
+                self.dealloc(mem::replace(memory, new_memory));
+                Ok(())
+            }
         }
     }
 }

@@ -137,104 +137,98 @@ pub struct System;
 #[unstable(feature = "allocator_api", issue = "32838")]
 unsafe impl AllocRef for System {
     #[inline]
-    fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<(NonNull<u8>, usize), AllocErr> {
-        let new_size = layout.size();
-        if new_size == 0 {
-            Ok((layout.dangling(), 0))
-        } else {
-            unsafe {
+    fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<MemoryBlock, AllocErr> {
+        unsafe {
+            if layout.size() == 0 {
+                Ok(MemoryBlock::new(layout.dangling(), layout))
+            } else {
                 let raw_ptr = match init {
                     AllocInit::Uninitialized => GlobalAlloc::alloc(self, layout),
                     AllocInit::Zeroed => GlobalAlloc::alloc_zeroed(self, layout),
                 };
                 let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?;
-                Ok((ptr, new_size))
+                Ok(MemoryBlock::new(ptr, layout))
             }
         }
     }

     #[inline]
-    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
-        if layout.size() != 0 {
-            GlobalAlloc::dealloc(self, ptr.as_ptr(), layout)
+    unsafe fn dealloc(&mut self, memory: MemoryBlock) {
+        if memory.size() != 0 {
+            GlobalAlloc::dealloc(self, memory.ptr().as_ptr(), memory.layout())
         }
     }

     #[inline]
     unsafe fn grow(
         &mut self,
-        ptr: NonNull<u8>,
-        layout: Layout,
+        memory: &mut MemoryBlock,
         new_size: usize,
         placement: ReallocPlacement,
         init: AllocInit,
-    ) -> Result<(NonNull<u8>, usize), AllocErr> {
-        let old_size = layout.size();
+    ) -> Result<(), AllocErr> {
+        let old_size = memory.size();
         debug_assert!(
             new_size >= old_size,
-            "`new_size` must be greater than or equal to `layout.size()`"
+            "`new_size` must be greater than or equal to `memory.size()`"
         );

         if old_size == new_size {
-            return Ok((ptr, new_size));
+            return Ok(());
         }

+        let new_layout = Layout::from_size_align_unchecked(new_size, memory.align());
         match placement {
-            ReallocPlacement::MayMove => {
-                if old_size == 0 {
-                    self.alloc(Layout::from_size_align_unchecked(new_size, layout.align()), init)
-                } else {
-                    // `realloc` probably checks for `new_size > old_size` or something similar.
-                    // `new_size` must be greater than or equal to `old_size` due to the safety constraint,
-                    // and `new_size` == `old_size` was caught before
-                    intrinsics::assume(new_size > old_size);
-                    let ptr =
-                        NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size))
-                            .ok_or(AllocErr)?;
-                    let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
-                    init.initialize_offset(ptr, new_layout, old_size);
-                    Ok((ptr, new_size))
-                }
-            }
+            ReallocPlacement::InPlace => return Err(AllocErr),
+            ReallocPlacement::MayMove if memory.size() == 0 => {
+                *memory = self.alloc(new_layout, init)?
+            }
+            ReallocPlacement::MayMove => {
+                // `realloc` probably checks for `new_size > old_size` or something similar.
+                intrinsics::assume(new_size > old_size);
+                let ptr =
+                    GlobalAlloc::realloc(self, memory.ptr().as_ptr(), memory.layout(), new_size);
+                *memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_layout);
+                memory.init_offset(init, old_size);
+            }
-            ReallocPlacement::InPlace => Err(AllocErr),
         }
+        Ok(())
     }

     #[inline]
     unsafe fn shrink(
         &mut self,
-        ptr: NonNull<u8>,
-        layout: Layout,
+        memory: &mut MemoryBlock,
         new_size: usize,
         placement: ReallocPlacement,
-    ) -> Result<(NonNull<u8>, usize), AllocErr> {
-        let old_size = layout.size();
+    ) -> Result<(), AllocErr> {
+        let old_size = memory.size();
         debug_assert!(
             new_size <= old_size,
-            "`new_size` must be smaller than or equal to `layout.size()`"
+            "`new_size` must be smaller than or equal to `memory.size()`"
         );

         if old_size == new_size {
-            return Ok((ptr, new_size));
+            return Ok(());
         }

+        let new_layout = Layout::from_size_align_unchecked(new_size, memory.align());
         match placement {
-            ReallocPlacement::MayMove => {
-                let ptr = if new_size == 0 {
-                    self.dealloc(ptr, layout);
-                    layout.dangling()
-                } else {
-                    // `realloc` probably checks for `new_size > old_size` or something similar.
-                    // `new_size` must be smaller than or equal to `old_size` due to the safety constraint,
-                    // and `new_size` == `old_size` was caught before
-                    intrinsics::assume(new_size < old_size);
-                    NonNull::new(GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size))
-                        .ok_or(AllocErr)?
-                };
-                Ok((ptr, new_size))
-            }
+            ReallocPlacement::InPlace => return Err(AllocErr),
+            ReallocPlacement::MayMove if new_size == 0 => {
+                let new_memory = MemoryBlock::new(new_layout.dangling(), new_layout);
+                let old_memory = mem::replace(memory, new_memory);
+                self.dealloc(old_memory)
+            }
+            ReallocPlacement::MayMove => {
+                // `realloc` probably checks for `new_size < old_size` or something similar.
+                intrinsics::assume(new_size < old_size);
+                let ptr =
+                    GlobalAlloc::realloc(self, memory.ptr().as_ptr(), memory.layout(), new_size);
+                *memory = MemoryBlock::new(NonNull::new(ptr).ok_or(AllocErr)?, new_layout);
+            }
-            ReallocPlacement::InPlace => Err(AllocErr),
         }
+        Ok(())
     }
 }

@@ -37,10 +37,10 @@ fn main() {
     unsafe {
         let layout = Layout::from_size_align(4, 2).unwrap();

-        let (ptr, _) = Global.alloc(layout.clone(), AllocInit::Uninitialized).unwrap();
-        helper::work_with(&ptr);
+        let memory = Global.alloc(layout.clone(), AllocInit::Uninitialized).unwrap();
+        helper::work_with(&memory.ptr());
         assert_eq!(HITS.load(Ordering::SeqCst), n + 1);
-        Global.dealloc(ptr, layout.clone());
+        Global.dealloc(memory);
         assert_eq!(HITS.load(Ordering::SeqCst), n + 2);

         let s = String::with_capacity(10);
@@ -49,10 +49,10 @@ fn main() {
         drop(s);
         assert_eq!(HITS.load(Ordering::SeqCst), n + 4);

-        let (ptr, _) = System.alloc(layout.clone(), AllocInit::Uninitialized).unwrap();
+        let memory = System.alloc(layout.clone(), AllocInit::Uninitialized).unwrap();
         assert_eq!(HITS.load(Ordering::SeqCst), n + 4);
-        helper::work_with(&ptr);
-        System.dealloc(ptr, layout);
+        helper::work_with(&memory.ptr());
+        System.dealloc(memory);
         assert_eq!(HITS.load(Ordering::SeqCst), n + 4);
     }
 }

@@ -20,16 +20,16 @@ fn main() {
         let n = GLOBAL.0.load(Ordering::SeqCst);
         let layout = Layout::from_size_align(4, 2).unwrap();

-        let (ptr, _) = Global.alloc(layout.clone(), AllocInit::Uninitialized).unwrap();
-        helper::work_with(&ptr);
+        let memory = Global.alloc(layout.clone(), AllocInit::Uninitialized).unwrap();
+        helper::work_with(&memory.ptr());
         assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 1);
-        Global.dealloc(ptr, layout.clone());
+        Global.dealloc(memory);
         assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2);

-        let (ptr, _) = System.alloc(layout.clone(), AllocInit::Uninitialized).unwrap();
+        let memory = System.alloc(layout.clone(), AllocInit::Uninitialized).unwrap();
         assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2);
-        helper::work_with(&ptr);
-        System.dealloc(ptr, layout);
+        helper::work_with(&memory.ptr());
+        System.dealloc(memory);
         assert_eq!(GLOBAL.0.load(Ordering::SeqCst), n + 2);
     }
 }

@ -6,7 +6,9 @@
|
||||
|
||||
#![feature(allocator_api)]
|
||||
|
||||
use std::alloc::{handle_alloc_error, AllocInit, AllocRef, Global, Layout, ReallocPlacement};
|
||||
use std::alloc::{
|
||||
handle_alloc_error, AllocInit, AllocRef, Global, Layout, MemoryBlock, ReallocPlacement,
|
||||
};
|
||||
use std::ptr::{self, NonNull};
|
||||
|
||||
fn main() {
|
||||
@ -41,15 +43,15 @@ unsafe fn test_triangle() -> bool {
|
||||
println!("allocate({:?})", layout);
|
||||
}
|
||||
|
||||
let (ptr, _) = Global
|
||||
let memory = Global
|
||||
.alloc(layout, AllocInit::Uninitialized)
|
||||
.unwrap_or_else(|_| handle_alloc_error(layout));
|
||||
|
||||
if PRINT {
|
||||
println!("allocate({:?}) = {:?}", layout, ptr);
|
||||
println!("allocate({:?}) = {:?}", layout, memory.ptr());
|
||||
}
|
||||
|
||||
ptr.cast().as_ptr()
|
||||
memory.ptr().cast().as_ptr()
|
||||
}
|
||||
|
||||
unsafe fn deallocate(ptr: *mut u8, layout: Layout) {
|
||||
@ -57,7 +59,7 @@ unsafe fn test_triangle() -> bool {
|
||||
println!("deallocate({:?}, {:?}", ptr, layout);
|
||||
}
|
||||
|
||||
Global.dealloc(NonNull::new_unchecked(ptr), layout);
|
||||
Global.dealloc(MemoryBlock::new(NonNull::new_unchecked(ptr), layout));
|
||||
}
|
||||
|
||||
unsafe fn reallocate(ptr: *mut u8, old: Layout, new: Layout) -> *mut u8 {
|
||||
@ -65,28 +67,28 @@ unsafe fn test_triangle() -> bool {
|
||||
println!("reallocate({:?}, old={:?}, new={:?})", ptr, old, new);
|
||||
}
|
||||
|
||||
let allocation = if new.size() > old.size() {
|
||||
let mut memory = MemoryBlock::new(NonNull::new_unchecked(ptr), old);
|
||||
let result = if new.size() > old.size() {
|
||||
Global.grow(
|
||||
NonNull::new_unchecked(ptr),
|
||||
old,
|
||||
&mut memory,
|
||||
new.size(),
|
||||
ReallocPlacement::MayMove,
|
||||
AllocInit::Uninitialized,
|
||||
)
|
||||
} else if new.size() < old.size() {
|
||||
Global.shrink(NonNull::new_unchecked(ptr), old, new.size(), ReallocPlacement::MayMove)
|
||||
Global.shrink(&mut memory, new.size(), ReallocPlacement::MayMove)
|
||||
} else {
|
||||
return ptr;
|
||||
};
|
||||
|
||||
let (ptr, _) = allocation.unwrap_or_else(|_| {
|
||||
result.unwrap_or_else(|_| {
|
||||
handle_alloc_error(Layout::from_size_align_unchecked(new.size(), old.align()))
|
||||
});
|
||||
|
||||
if PRINT {
|
||||
println!("reallocate({:?}, old={:?}, new={:?}) = {:?}", ptr, old, new, ptr);
|
||||
println!("reallocate({:?}, old={:?}, new={:?}) = {:?}", ptr, old, new, memory.ptr());
|
||||
}
|
||||
ptr.cast().as_ptr()
|
||||
memory.ptr().cast().as_ptr()
|
||||
}
|
||||
|
||||
fn idx_to_size(i: usize) -> usize {
|
||||
|
@@ -4,7 +4,7 @@
 // pretty-expanded FIXME #23616
 #![feature(allocator_api)]

-use std::alloc::{handle_alloc_error, AllocInit, AllocRef, Global, Layout};
+use std::alloc::{handle_alloc_error, AllocInit, AllocRef, Global, Layout, MemoryBlock};
 use std::ptr::NonNull;

 struct arena(());
@@ -25,10 +25,10 @@ struct Ccx {
 fn alloc(_bcx: &arena) -> &Bcx<'_> {
     unsafe {
         let layout = Layout::new::<Bcx>();
-        let (ptr, _) = Global
+        let memory = Global
             .alloc(layout, AllocInit::Uninitialized)
             .unwrap_or_else(|_| handle_alloc_error(layout));
-        &*(ptr.as_ptr() as *const _)
+        &*(memory.ptr().as_ptr() as *const _)
     }
 }

@@ -40,7 +40,10 @@ fn g(fcx: &Fcx) {
     let bcx = Bcx { fcx };
     let bcx2 = h(&bcx);
     unsafe {
-        Global.dealloc(NonNull::new_unchecked(bcx2 as *const _ as *mut _), Layout::new::<Bcx>());
+        Global.dealloc(MemoryBlock::new(
+            NonNull::new_unchecked(bcx2 as *const _ as *mut _),
+            Layout::new::<Bcx>(),
+        ));
     }
 }