Constify Box<T, A> methods

woppopo 2021-12-23 22:03:12 +09:00
parent 69ac533527
commit eb4fc640b0
5 changed files with 224 additions and 32 deletions

library/alloc/src/alloc.rs

@@ -323,17 +323,21 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
#[cfg_attr(not(test), lang = "box_free")]
#[inline]
#[rustc_const_unstable(feature = "const_box", issue = "none")]
// This signature has to be the same as `Box`, otherwise an ICE will happen.
// When an additional parameter to `Box` is added (like `A: Allocator`), this has to be added here as
// well.
// For example if `Box` is changed to `struct Box<T: ?Sized, A: Allocator>(Unique<T>, A)`,
// this function has to be changed to `fn box_free<T: ?Sized, A: Allocator>(Unique<T>, A)` as well.
pub(crate) unsafe fn box_free<T: ?Sized, A: Allocator>(ptr: Unique<T>, alloc: A) {
pub(crate) const unsafe fn box_free<T: ?Sized, A: ~const Allocator + ~const Drop>(
ptr: Unique<T>,
alloc: A,
) {
unsafe {
let size = size_of_val(ptr.as_ref());
let align = min_align_of_val(ptr.as_ref());
let layout = Layout::from_size_align_unchecked(size, align);
alloc.deallocate(ptr.cast().into(), layout)
alloc.deallocate(From::from(ptr.cast()), layout)
}
}
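
The `~const Allocator + ~const Drop` bounds introduced above (and used throughout this commit) read as: the implementation must be a `const` impl when the function is evaluated at compile time, while any implementation is acceptable at runtime. Below is a minimal stand-alone sketch of that mechanism, written against the nightly feature gates this commit enables (`const_trait_impl`, `const_fn_trait_bound`); the trait and item names are invented for illustration, it is not part of the diff, and the exact gates and syntax have shifted on later nightlies.

#![feature(const_trait_impl, const_fn_trait_bound)]

trait Describe {
    fn describe(&self) -> usize;
}

struct Unit;

// A `const` impl: its methods are callable during compile-time evaluation.
impl const Describe for Unit {
    fn describe(&self) -> usize {
        0
    }
}

// `~const Describe` demands a `const` impl only when this function is itself
// evaluated at compile time; at runtime any impl satisfies the bound.
const fn describe_twice<T: ~const Describe>(x: &T) -> usize {
    x.describe() + x.describe()
}

const ZERO: usize = describe_twice(&Unit);

fn main() {
    assert_eq!(ZERO, 0);
}
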
@@ -361,13 +365,22 @@ pub(crate) unsafe fn box_free<T: ?Sized, A: Allocator>(ptr: Unique<T>, alloc: A)
/// [`set_alloc_error_hook`]: ../../std/alloc/fn.set_alloc_error_hook.html
/// [`take_alloc_error_hook`]: ../../std/alloc/fn.take_alloc_error_hook.html
#[stable(feature = "global_alloc", since = "1.28.0")]
#[rustc_const_unstable(feature = "const_box", issue = "none")]
#[cfg(all(not(no_global_oom_handling), not(test)))]
#[rustc_allocator_nounwind]
#[cold]
pub fn handle_alloc_error(layout: Layout) -> ! {
unsafe {
__rust_alloc_error_handler(layout.size(), layout.align());
pub const fn handle_alloc_error(layout: Layout) -> ! {
const fn ct_error(_: Layout) -> ! {
panic!("allocation failed");
}
fn rt_error(layout: Layout) -> ! {
unsafe {
__rust_alloc_error_handler(layout.size(), layout.align());
}
}
unsafe { core::intrinsics::const_eval_select((layout,), ct_error, rt_error) }
}
// For alloc test `std::alloc::handle_alloc_error` can be used directly.
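
For context, `core::intrinsics::const_eval_select` takes an argument tuple and two functions, calling the first during compile-time evaluation and the second at runtime; `handle_alloc_error` uses it above so that const evaluation gets a plain panic while runtime code still reaches `__rust_alloc_error_handler`. The following is a hedged, stand-alone sketch of the same dispatch pattern, not part of the commit: the function names are invented, it is nightly-only, and the intrinsic's gating has changed on later toolchains.

#![feature(core_intrinsics, const_eval_select)]

use core::alloc::Layout;
use core::intrinsics::const_eval_select;

const fn ct_oom(_layout: Layout) -> ! {
    // CTFE branch: the most we can do is abort const evaluation with a panic.
    panic!("allocation failed at compile time");
}

fn rt_oom(layout: Layout) -> ! {
    // Runtime branch: free to format, log, or call into the platform.
    panic!("allocation of {} bytes failed", layout.size());
}

pub const fn oom(layout: Layout) -> ! {
    // Both branches diverge, so callers cannot observe which one ran,
    // which is what makes the compile-time/runtime split legitimate here.
    unsafe { const_eval_select((layout,), ct_oom, rt_oom) }
}

fn main() {
    // `oom` diverges, so this only demonstrates that the pattern compiles.
}
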

library/alloc/src/boxed.rs

@@ -346,9 +346,13 @@ impl<T, A: Allocator> Box<T, A> {
/// ```
#[cfg(not(no_global_oom_handling))]
#[unstable(feature = "allocator_api", issue = "32838")]
#[rustc_const_unstable(feature = "const_box", issue = "none")]
#[must_use]
#[inline]
pub fn new_in(x: T, alloc: A) -> Self {
pub const fn new_in(x: T, alloc: A) -> Self
where
A: ~const Allocator + ~const Drop,
{
let mut boxed = Self::new_uninit_in(alloc);
unsafe {
boxed.as_mut_ptr().write(x);
@@ -372,8 +376,13 @@ pub fn new_in(x: T, alloc: A) -> Self {
/// # Ok::<(), std::alloc::AllocError>(())
/// ```
#[unstable(feature = "allocator_api", issue = "32838")]
#[rustc_const_unstable(feature = "const_box", issue = "none")]
#[inline]
pub fn try_new_in(x: T, alloc: A) -> Result<Self, AllocError> {
pub const fn try_new_in(x: T, alloc: A) -> Result<Self, AllocError>
where
T: ~const Drop,
A: ~const Allocator + ~const Drop,
{
let mut boxed = Self::try_new_uninit_in(alloc)?;
unsafe {
boxed.as_mut_ptr().write(x);
@@ -402,10 +411,14 @@ pub fn try_new_in(x: T, alloc: A) -> Result<Self, AllocError> {
/// assert_eq!(*five, 5)
/// ```
#[unstable(feature = "allocator_api", issue = "32838")]
#[rustc_const_unstable(feature = "const_box", issue = "none")]
#[cfg(not(no_global_oom_handling))]
#[must_use]
// #[unstable(feature = "new_uninit", issue = "63291")]
pub fn new_uninit_in(alloc: A) -> Box<mem::MaybeUninit<T>, A> {
pub const fn new_uninit_in(alloc: A) -> Box<mem::MaybeUninit<T>, A>
where
A: ~const Allocator + ~const Drop,
{
let layout = Layout::new::<mem::MaybeUninit<T>>();
// NOTE: Prefer match over unwrap_or_else since closure sometimes not inlineable.
// That would make code size bigger.
@@ -439,7 +452,11 @@ pub fn new_uninit_in(alloc: A) -> Box<mem::MaybeUninit<T>, A> {
/// ```
#[unstable(feature = "allocator_api", issue = "32838")]
// #[unstable(feature = "new_uninit", issue = "63291")]
pub fn try_new_uninit_in(alloc: A) -> Result<Box<mem::MaybeUninit<T>, A>, AllocError> {
#[rustc_const_unstable(feature = "const_box", issue = "none")]
pub const fn try_new_uninit_in(alloc: A) -> Result<Box<mem::MaybeUninit<T>, A>, AllocError>
where
A: ~const Allocator + ~const Drop,
{
let layout = Layout::new::<mem::MaybeUninit<T>>();
let ptr = alloc.allocate(layout)?.cast();
unsafe { Ok(Box::from_raw_in(ptr.as_ptr(), alloc)) }
@@ -466,10 +483,14 @@ pub fn try_new_uninit_in(alloc: A) -> Result<Box<mem::MaybeUninit<T>, A>, AllocE
///
/// [zeroed]: mem::MaybeUninit::zeroed
#[unstable(feature = "allocator_api", issue = "32838")]
#[rustc_const_unstable(feature = "const_box", issue = "none")]
#[cfg(not(no_global_oom_handling))]
// #[unstable(feature = "new_uninit", issue = "63291")]
#[must_use]
pub fn new_zeroed_in(alloc: A) -> Box<mem::MaybeUninit<T>, A> {
pub const fn new_zeroed_in(alloc: A) -> Box<mem::MaybeUninit<T>, A>
where
A: ~const Allocator + ~const Drop,
{
let layout = Layout::new::<mem::MaybeUninit<T>>();
// NOTE: Prefer match over unwrap_or_else since closure sometimes not inlineable.
// That would make code size bigger.
@@ -503,7 +524,11 @@ pub fn new_zeroed_in(alloc: A) -> Box<mem::MaybeUninit<T>, A> {
/// [zeroed]: mem::MaybeUninit::zeroed
#[unstable(feature = "allocator_api", issue = "32838")]
// #[unstable(feature = "new_uninit", issue = "63291")]
pub fn try_new_zeroed_in(alloc: A) -> Result<Box<mem::MaybeUninit<T>, A>, AllocError> {
#[rustc_const_unstable(feature = "const_box", issue = "none")]
pub const fn try_new_zeroed_in(alloc: A) -> Result<Box<mem::MaybeUninit<T>, A>, AllocError>
where
A: ~const Allocator + ~const Drop,
{
let layout = Layout::new::<mem::MaybeUninit<T>>();
let ptr = alloc.allocate_zeroed(layout)?.cast();
unsafe { Ok(Box::from_raw_in(ptr.as_ptr(), alloc)) }
@@ -513,20 +538,22 @@ pub fn try_new_zeroed_in(alloc: A) -> Result<Box<mem::MaybeUninit<T>, A>, AllocE
/// `x` will be pinned in memory and unable to be moved.
#[cfg(not(no_global_oom_handling))]
#[unstable(feature = "allocator_api", issue = "32838")]
#[rustc_const_unstable(feature = "const_box", issue = "none")]
#[must_use]
#[inline(always)]
pub fn pin_in(x: T, alloc: A) -> Pin<Self>
pub const fn pin_in(x: T, alloc: A) -> Pin<Self>
where
A: 'static,
A: 'static + ~const Allocator + ~const Drop,
{
Self::new_in(x, alloc).into()
Self::into_pin(Self::new_in(x, alloc))
}
/// Converts a `Box<T>` into a `Box<[T]>`
///
/// This conversion does not allocate on the heap and happens in place.
#[unstable(feature = "box_into_boxed_slice", issue = "71582")]
pub fn into_boxed_slice(boxed: Self) -> Box<[T], A> {
#[rustc_const_unstable(feature = "const_box", issue = "none")]
pub const fn into_boxed_slice(boxed: Self) -> Box<[T], A> {
let (raw, alloc) = Box::into_raw_with_allocator(boxed);
unsafe { Box::from_raw_in(raw as *mut [T; 1], alloc) }
}
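
As a side note, the (still unstable) conversion above is used like this on nightly; a small usage sketch, not part of the diff:

#![feature(box_into_boxed_slice)]

fn main() {
    let single: Box<u32> = Box::new(7);
    // In-place conversion: the one-element box is reinterpreted as `Box<[u32]>`.
    let slice: Box<[u32]> = Box::into_boxed_slice(single);
    assert_eq!(slice.len(), 1);
    assert_eq!(slice[0], 7);
}
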
@@ -543,8 +570,12 @@ pub fn into_boxed_slice(boxed: Self) -> Box<[T], A> {
/// assert_eq!(Box::into_inner(c), 5);
/// ```
#[unstable(feature = "box_into_inner", issue = "80437")]
#[rustc_const_unstable(feature = "const_box", issue = "none")]
#[inline]
pub fn into_inner(boxed: Self) -> T {
pub const fn into_inner(boxed: Self) -> T
where
Self: ~const Drop,
{
*boxed
}
}
@@ -758,8 +789,9 @@ impl<T, A: Allocator> Box<mem::MaybeUninit<T>, A> {
/// assert_eq!(*five, 5)
/// ```
#[unstable(feature = "new_uninit", issue = "63291")]
#[rustc_const_unstable(feature = "const_box", issue = "none")]
#[inline]
pub unsafe fn assume_init(self) -> Box<T, A> {
pub const unsafe fn assume_init(self) -> Box<T, A> {
let (raw, alloc) = Box::into_raw_with_allocator(self);
unsafe { Box::from_raw_in(raw as *mut T, alloc) }
}
@@ -792,8 +824,9 @@ pub unsafe fn assume_init(self) -> Box<T, A> {
/// }
/// ```
#[unstable(feature = "new_uninit", issue = "63291")]
#[rustc_const_unstable(feature = "const_box", issue = "none")]
#[inline]
pub fn write(mut boxed: Self, value: T) -> Box<T, A> {
pub const fn write(mut boxed: Self, value: T) -> Box<T, A> {
unsafe {
(*boxed).write(value);
boxed.assume_init()
@@ -938,8 +971,9 @@ impl<T: ?Sized, A: Allocator> Box<T, A> {
/// [memory layout]: self#memory-layout
/// [`Layout`]: crate::Layout
#[unstable(feature = "allocator_api", issue = "32838")]
#[rustc_const_unstable(feature = "const_box", issue = "none")]
#[inline]
pub unsafe fn from_raw_in(raw: *mut T, alloc: A) -> Self {
pub const unsafe fn from_raw_in(raw: *mut T, alloc: A) -> Self {
Box(unsafe { Unique::new_unchecked(raw) }, alloc)
}
@@ -1035,8 +1069,9 @@ pub fn into_raw(b: Self) -> *mut T {
///
/// [memory layout]: self#memory-layout
#[unstable(feature = "allocator_api", issue = "32838")]
#[rustc_const_unstable(feature = "const_box", issue = "none")]
#[inline]
pub fn into_raw_with_allocator(b: Self) -> (*mut T, A) {
pub const fn into_raw_with_allocator(b: Self) -> (*mut T, A) {
let (leaked, alloc) = Box::into_unique(b);
(leaked.as_ptr(), alloc)
}
@@ -1046,9 +1081,10 @@ pub fn into_raw_with_allocator(b: Self) -> (*mut T, A) {
issue = "none",
reason = "use `Box::leak(b).into()` or `Unique::from(Box::leak(b))` instead"
)]
#[rustc_const_unstable(feature = "const_box", issue = "none")]
#[inline]
#[doc(hidden)]
pub fn into_unique(b: Self) -> (Unique<T>, A) {
pub const fn into_unique(b: Self) -> (Unique<T>, A) {
// Box is recognized as a "unique pointer" by Stacked Borrows, but internally it is a
// raw pointer for the type system. Turning it directly into a raw pointer would not be
// recognized as "releasing" the unique pointer to permit aliased raw accesses,
@@ -1064,8 +1100,9 @@ pub fn into_unique(b: Self) -> (Unique<T>, A) {
/// to call it as `Box::allocator(&b)` instead of `b.allocator()`. This
/// is so that there is no conflict with a method on the inner type.
#[unstable(feature = "allocator_api", issue = "32838")]
#[rustc_const_unstable(feature = "const_box", issue = "none")]
#[inline]
pub fn allocator(b: &Self) -> &A {
pub const fn allocator(b: &Self) -> &A {
&b.1
}
@@ -1105,8 +1142,9 @@ pub fn allocator(b: &Self) -> &A {
/// assert_eq!(*static_ref, [4, 2, 3]);
/// ```
#[stable(feature = "box_leak", since = "1.26.0")]
#[rustc_const_unstable(feature = "const_box", issue = "none")]
#[inline]
pub fn leak<'a>(b: Self) -> &'a mut T
pub const fn leak<'a>(b: Self) -> &'a mut T
where
A: 'a,
{
@@ -1119,7 +1157,8 @@ pub fn leak<'a>(b: Self) -> &'a mut T
///
/// This is also available via [`From`].
#[unstable(feature = "box_into_pin", issue = "62370")]
pub fn into_pin(boxed: Self) -> Pin<Self>
#[rustc_const_unstable(feature = "const_box", issue = "none")]
pub const fn into_pin(boxed: Self) -> Pin<Self>
where
A: 'static,
{
@@ -1131,7 +1170,8 @@ pub fn into_pin(boxed: Self) -> Pin<Self>
}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Box<T, A> {
#[rustc_const_unstable(feature = "const_box", issue = "none")]
unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> const Drop for Box<T, A> {
fn drop(&mut self) {
// FIXME: Do nothing, drop is currently performed by compiler.
}
@@ -1341,7 +1381,8 @@ fn from(t: T) -> Self {
}
#[stable(feature = "pin", since = "1.33.0")]
impl<T: ?Sized, A: Allocator> From<Box<T, A>> for Pin<Box<T, A>>
#[rustc_const_unstable(feature = "const_box", issue = "none")]
impl<T: ?Sized, A: Allocator> const From<Box<T, A>> for Pin<Box<T, A>>
where
A: 'static,
{
@@ -1720,7 +1761,8 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> Deref for Box<T, A> {
#[rustc_const_unstable(feature = "const_box", issue = "none")]
impl<T: ?Sized, A: Allocator> const Deref for Box<T, A> {
type Target = T;
fn deref(&self) -> &T {
@@ -1729,7 +1771,8 @@ fn deref(&self) -> &T {
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> DerefMut for Box<T, A> {
#[rustc_const_unstable(feature = "const_box", issue = "none")]
impl<T: ?Sized, A: Allocator> const DerefMut for Box<T, A> {
fn deref_mut(&mut self) -> &mut T {
&mut **self
}
@@ -1908,7 +1951,8 @@ fn as_mut(&mut self) -> &mut T {
* could have a method to project a Pin<T> from it.
*/
#[stable(feature = "pin", since = "1.33.0")]
impl<T: ?Sized, A: Allocator> Unpin for Box<T, A> where A: 'static {}
#[rustc_const_unstable(feature = "const_box", issue = "none")]
impl<T: ?Sized, A: Allocator> const Unpin for Box<T, A> where A: 'static {}
#[unstable(feature = "generator_trait", issue = "43122")]
impl<G: ?Sized + Generator<R> + Unpin, R, A: Allocator> Generator<R> for Box<G, A>

library/alloc/src/lib.rs

@@ -97,8 +97,18 @@
#![feature(async_stream)]
#![feature(coerce_unsized)]
#![cfg_attr(not(no_global_oom_handling), feature(const_btree_new))]
#![feature(const_box)]
#![feature(const_cow_is_borrowed)]
#![feature(const_convert)]
#![feature(const_size_of_val)]
#![feature(const_align_of_val)]
#![feature(const_ptr_read)]
#![feature(const_maybe_uninit_write)]
#![feature(const_maybe_uninit_as_mut_ptr)]
#![feature(const_refs_to_cell)]
#![feature(core_intrinsics)]
#![feature(const_eval_select)]
#![feature(const_pin)]
#![feature(dispatch_from_dyn)]
#![feature(exact_size_is_empty)]
#![feature(extend_one)]
@@ -134,8 +144,13 @@
#![feature(box_syntax)]
#![feature(cfg_sanitize)]
#![feature(cfg_target_has_atomic)]
#![feature(const_deref)]
#![feature(const_fn_trait_bound)]
#![feature(const_mut_refs)]
#![feature(const_ptr_write)]
#![feature(const_precise_live_drops)]
#![feature(const_trait_impl)]
#![feature(const_try)]
#![cfg_attr(bootstrap, feature(destructuring_assignment))]
#![feature(dropck_eyepatch)]
#![feature(exclusive_range_pattern)]

library/alloc/tests/boxed.rs

@@ -1,6 +1,7 @@
use std::cell::Cell;
use std::mem::MaybeUninit;
use std::ptr::NonNull;
use core::alloc::{AllocError, Allocator, Layout};
use core::cell::Cell;
use core::mem::MaybeUninit;
use core::ptr::NonNull;
#[test]
fn uninitialized_zero_size_box() {
@@ -57,3 +58,110 @@ fn box_deref_lval() {
x.set(1000);
assert_eq!(x.get(), 1000);
}
pub struct ConstAllocator;
unsafe impl const Allocator for ConstAllocator {
fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
match layout.size() {
0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
_ => unsafe {
let ptr = core::intrinsics::const_allocate(layout.size(), layout.align());
Ok(NonNull::new_unchecked(ptr as *mut [u8; 0] as *mut [u8]))
},
}
}
unsafe fn deallocate(&self, _ptr: NonNull<u8>, layout: Layout) {
match layout.size() {
0 => { /* do nothing */ }
_ => { /* do nothing too */ }
}
}
fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
let ptr = self.allocate(layout)?;
if layout.size() > 0 {
unsafe {
ptr.as_mut_ptr().write_bytes(0, layout.size());
}
}
Ok(ptr)
}
unsafe fn grow(
&self,
ptr: NonNull<u8>,
old_layout: Layout,
new_layout: Layout,
) -> Result<NonNull<[u8]>, AllocError> {
debug_assert!(
new_layout.size() >= old_layout.size(),
"`new_layout.size()` must be greater than or equal to `old_layout.size()`"
);
let new_ptr = self.allocate(new_layout)?;
if new_layout.size() > 0 {
new_ptr.as_mut_ptr().copy_from_nonoverlapping(ptr.as_ptr(), old_layout.size());
self.deallocate(ptr, old_layout);
}
Ok(new_ptr)
}
unsafe fn grow_zeroed(
&self,
ptr: NonNull<u8>,
old_layout: Layout,
new_layout: Layout,
) -> Result<NonNull<[u8]>, AllocError> {
let new_ptr = self.grow(ptr, old_layout, new_layout)?;
if new_layout.size() > 0 {
let old_size = old_layout.size();
let new_size = new_layout.size();
let raw_ptr = new_ptr.as_mut_ptr();
raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
}
Ok(new_ptr)
}
unsafe fn shrink(
&self,
ptr: NonNull<u8>,
old_layout: Layout,
new_layout: Layout,
) -> Result<NonNull<[u8]>, AllocError> {
debug_assert!(
new_layout.size() <= old_layout.size(),
"`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
);
let new_ptr = self.allocate(new_layout)?;
if new_layout.size() > 0 {
new_ptr.as_mut_ptr().copy_from_nonoverlapping(ptr.as_ptr(), new_layout.size());
self.deallocate(ptr, old_layout);
}
Ok(new_ptr)
}
fn by_ref(&self) -> &Self
where
Self: Sized,
{
self
}
}
#[test]
fn const_box() {
const VALUE: u32 = {
let mut boxed = Box::new_in(1u32, ConstAllocator);
assert!(*boxed == 1);
*boxed = 42;
assert!(*boxed == 42);
*boxed
};
assert!(VALUE == 42);
}
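
A hedged follow-up to the test above: the fallible constructors constified in this commit should also evaluate at compile time with the same allocator. The block below is illustrative and not part of the commit; it reuses `ConstAllocator` from this test file and assumes the same nightly feature set.

#[test]
fn const_try_box() {
    const VALUE: u32 = {
        // `try_new_in` returns a Result, so failure is handled explicitly
        // instead of aborting inside the allocator.
        let boxed = match Box::try_new_in(7u32, ConstAllocator) {
            Ok(b) => b,
            Err(_) => panic!("compile-time allocation failed"),
        };
        *boxed
    };
    assert!(VALUE == 7);
}
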

library/alloc/tests/lib.rs

@@ -1,8 +1,19 @@
#![feature(allocator_api)]
#![feature(alloc_layout_extra)]
#![feature(assert_matches)]
#![feature(box_syntax)]
#![feature(cow_is_borrowed)]
#![feature(const_box)]
#![feature(const_convert)]
#![feature(const_cow_is_borrowed)]
#![feature(const_heap)]
#![feature(const_intrinsic_copy)]
#![feature(const_mut_refs)]
#![feature(const_nonnull_slice_from_raw_parts)]
#![feature(const_ptr_offset)]
#![feature(const_ptr_write)]
#![feature(const_try)]
#![feature(core_intrinsics)]
#![feature(drain_filter)]
#![feature(exact_size_is_empty)]
#![feature(new_uninit)]
@@ -26,6 +37,7 @@
#![feature(const_default_impls)]
#![feature(const_trait_impl)]
#![feature(const_str_from_utf8)]
#![feature(nonnull_slice_from_raw_parts)]
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};