Actually deprecate the Heap type

Simon Sapin 2018-04-03 21:15:06 +02:00
parent 88ebd2d752
commit e521b8b472
9 changed files with 47 additions and 46 deletions

View File

@@ -81,8 +81,12 @@ fn __rust_shrink_in_place(ptr: *mut u8,
#[unstable(feature = "allocator_api", issue = "32838")]
#[rustc_deprecated(since = "1.27.0", reason = "type renamed to `Global`")]
pub use self::Global as Heap;
pub type Heap = Global;
#[unstable(feature = "allocator_api", issue = "32838")]
#[rustc_deprecated(since = "1.27.0", reason = "type renamed to `Global`")]
#[allow(non_upper_case_globals)]
pub const Heap: Global = Global;
unsafe impl Alloc for Global {
#[inline]
@@ -268,7 +272,7 @@ mod tests {
extern crate test;
use self::test::Bencher;
use boxed::Box;
use heap::{Global, Alloc, Layout};
use alloc::{Global, Alloc, Layout};
#[test]
fn allocate_zeroed() {

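A note on the first hunk above: it keeps the old name usable in both the type and the value namespace while warning on use. The `pub use` re-export (which the deprecation attribute did not effectively cover, hence "actually deprecate") is replaced by a deprecated `type` alias plus a deprecated `const` of the same name. A minimal stand-alone sketch of that pattern, using the stable `#[deprecated]` attribute in place of the internal `#[rustc_deprecated]`; names otherwise as in the hunk:

pub struct Global;

#[deprecated(since = "1.27.0", note = "type renamed to `Global`")]
pub type Heap = Global;

#[deprecated(since = "1.27.0", note = "type renamed to `Global`")]
#[allow(non_upper_case_globals)]
pub const Heap: Global = Global; // types and values live in separate namespaces, so both can be named `Heap`

fn main() {
    #[allow(deprecated)]
    let _handle: Heap = Heap; // using either form of `Heap` warns without the `allow`
}
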
View File

@@ -21,7 +21,6 @@
use core::borrow;
use core::fmt;
use core::cmp::Ordering;
use core::heap::{Alloc, Layout};
use core::intrinsics::abort;
use core::mem::{self, align_of_val, size_of_val, uninitialized};
use core::ops::Deref;
@@ -32,7 +31,7 @@
use core::{isize, usize};
use core::convert::From;
use heap::{Heap, box_free};
use alloc::{Global, Alloc, Layout, box_free};
use boxed::Box;
use string::String;
use vec::Vec;
@@ -521,7 +520,7 @@ unsafe fn drop_slow(&mut self) {
if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
}
}
@@ -555,8 +554,8 @@ unsafe fn allocate_for_ptr(ptr: *const T) -> *mut ArcInner<T> {
let layout = Layout::for_value(&*fake_ptr);
let mem = Heap.alloc(layout)
.unwrap_or_else(|e| Heap.oom(e));
let mem = Global.alloc(layout)
.unwrap_or_else(|e| Global.oom(e));
// Initialize the real ArcInner
let inner = set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>;
@@ -640,7 +639,7 @@ fn drop(&mut self) {
let slice = from_raw_parts_mut(self.elems, self.n_elems);
ptr::drop_in_place(slice);
Heap.dealloc(self.mem, self.layout.clone());
Global.dealloc(self.mem, self.layout.clone());
}
}
}
@@ -1161,7 +1160,7 @@ fn drop(&mut self) {
if self.inner().weak.fetch_sub(1, Release) == 1 {
atomic::fence(Acquire);
unsafe {
Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr))
}
}
}

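The `Arc` hunks above follow one pattern throughout: allocate through the zero-sized `Global` handle, abort via `oom` on failure, and free with a `Layout` recomputed from the value; the same shape recurs in the `btree` and `rc` hunks below. A rough sketch of that pattern, assuming the 2018-era unstable `allocator_api` signatures visible in this diff (`Alloc::alloc(Layout) -> Result<*mut u8, AllocErr>`, `oom(e) -> !`); later toolchains reshaped this API, so this is illustrative only:

#![feature(allocator_api)]

use std::alloc::{Alloc, Global, Layout};

// Allocate room for one `T` through `Global`, aborting on failure,
// as `allocate_for_ptr` does above.
unsafe fn alloc_one<T>() -> *mut T {
    let layout = Layout::new::<T>();
    let mem = Global.alloc(layout).unwrap_or_else(|e| Global.oom(e));
    mem as *mut T
}

// Free it again with the matching layout, as the `Drop` impls above do.
unsafe fn dealloc_one<T>(ptr: *mut T) {
    Global.dealloc(ptr as *mut u8, Layout::new::<T>());
}
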
View File

@@ -41,14 +41,13 @@
// - A node of length `n` has `n` keys, `n` values, and (in an internal node) `n + 1` edges.
// This implies that even an empty internal node has at least one edge.
use core::heap::{Alloc, Layout};
use core::marker::PhantomData;
use core::mem;
use core::ptr::{self, Unique, NonNull};
use core::slice;
use alloc::{Global, Alloc, Layout};
use boxed::Box;
use heap::Heap;
const B: usize = 6;
pub const MIN_LEN: usize = B - 1;
@@ -250,7 +249,7 @@ pub fn pop_level(&mut self) {
self.as_mut().as_leaf_mut().parent = ptr::null();
unsafe {
Heap.dealloc(top, Layout::new::<InternalNode<K, V>>());
Global.dealloc(top, Layout::new::<InternalNode<K, V>>());
}
}
}
@@ -436,7 +435,7 @@ pub unsafe fn deallocate_and_ascend(self) -> Option<
> {
let ptr = self.as_leaf() as *const LeafNode<K, V> as *const u8 as *mut u8;
let ret = self.ascend().ok();
Heap.dealloc(ptr, Layout::new::<LeafNode<K, V>>());
Global.dealloc(ptr, Layout::new::<LeafNode<K, V>>());
ret
}
}
@@ -457,7 +456,7 @@ pub unsafe fn deallocate_and_ascend(self) -> Option<
> {
let ptr = self.as_internal() as *const InternalNode<K, V> as *const u8 as *mut u8;
let ret = self.ascend().ok();
Heap.dealloc(ptr, Layout::new::<InternalNode<K, V>>());
Global.dealloc(ptr, Layout::new::<InternalNode<K, V>>());
ret
}
}
@@ -1239,12 +1238,12 @@ pub fn merge(mut self)
).correct_parent_link();
}
Heap.dealloc(
Global.dealloc(
right_node.node.as_ptr() as *mut u8,
Layout::new::<InternalNode<K, V>>(),
);
} else {
Heap.dealloc(
Global.dealloc(
right_node.node.as_ptr() as *mut u8,
Layout::new::<LeafNode<K, V>>(),
);

View File

@@ -8,13 +8,12 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use alloc::{Alloc, Layout, Global};
use core::cmp;
use core::heap::{Alloc, Layout};
use core::mem;
use core::ops::Drop;
use core::ptr::{self, Unique};
use core::slice;
use heap::Heap;
use super::boxed::Box;
use super::allocator::CollectionAllocErr;
use super::allocator::CollectionAllocErr::*;
@@ -47,7 +46,7 @@
/// field. This allows zero-sized types to not be special-cased by consumers of
/// this type.
#[allow(missing_debug_implementations)]
pub struct RawVec<T, A: Alloc = Heap> {
pub struct RawVec<T, A: Alloc = Global> {
ptr: Unique<T>,
cap: usize,
a: A,
@@ -114,14 +113,14 @@ fn allocate_in(cap: usize, zeroed: bool, mut a: A) -> Self {
}
}
impl<T> RawVec<T, Heap> {
impl<T> RawVec<T, Global> {
/// Creates the biggest possible RawVec (on the system heap)
/// without allocating. If T has positive size, then this makes a
/// RawVec with capacity 0. If T has 0 size, then it makes a
/// RawVec with capacity `usize::MAX`. Useful for implementing
/// delayed allocation.
pub fn new() -> Self {
Self::new_in(Heap)
Self::new_in(Global)
}
/// Creates a RawVec (on the system heap) with exactly the
@@ -141,13 +140,13 @@ pub fn new() -> Self {
/// Aborts on OOM
#[inline]
pub fn with_capacity(cap: usize) -> Self {
RawVec::allocate_in(cap, false, Heap)
RawVec::allocate_in(cap, false, Global)
}
/// Like `with_capacity` but guarantees the buffer is zeroed.
#[inline]
pub fn with_capacity_zeroed(cap: usize) -> Self {
RawVec::allocate_in(cap, true, Heap)
RawVec::allocate_in(cap, true, Global)
}
}
@@ -168,7 +167,7 @@ pub unsafe fn from_raw_parts_in(ptr: *mut T, cap: usize, a: A) -> Self {
}
}
impl<T> RawVec<T, Heap> {
impl<T> RawVec<T, Global> {
/// Reconstitutes a RawVec from a pointer, capacity.
///
/// # Undefined Behavior
@@ -180,7 +179,7 @@ pub unsafe fn from_raw_parts(ptr: *mut T, cap: usize) -> Self {
RawVec {
ptr: Unique::new_unchecked(ptr),
cap,
a: Heap,
a: Global,
}
}
@@ -678,7 +677,7 @@ pub fn shrink_to_fit(&mut self, amount: usize) {
}
}
impl<T> RawVec<T, Heap> {
impl<T> RawVec<T, Global> {
/// Converts the entire buffer into `Box<[T]>`.
///
/// While it is not *strictly* Undefined Behavior to call
@@ -763,13 +762,13 @@ unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
if size > self.fuel {
return Err(AllocErr::Unsupported { details: "fuel exhausted" });
}
match Heap.alloc(layout) {
match Global.alloc(layout) {
ok @ Ok(_) => { self.fuel -= size; ok }
err @ Err(_) => err,
}
}
unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
Heap.dealloc(ptr, layout)
Global.dealloc(ptr, layout)
}
}

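`RawVec` is the one place above where `Global` becomes a default type parameter rather than just a value: the allocator-generic impls stay on `RawVec<T, A: Alloc>`, while the convenience constructors live on the `RawVec<T, Global>` impls. A compressed sketch of that split under the era's unstable `Alloc` trait; the container, field, and method names here are illustrative stand-ins (e.g. `NonNull` instead of the internal `Unique`):

#![feature(allocator_api)]

use std::alloc::{Alloc, Global};
use std::ptr::NonNull;

// Container generic over an allocator, defaulting to `Global`,
// as `RawVec<T, A: Alloc = Global>` does above.
#[allow(dead_code)]
struct RawBuf<T, A: Alloc = Global> {
    ptr: NonNull<T>,
    cap: usize,
    a: A,
}

impl<T, A: Alloc> RawBuf<T, A> {
    // Allocator-generic constructor, mirroring `RawVec::new_in`.
    fn new_in(a: A) -> Self {
        RawBuf { ptr: NonNull::dangling(), cap: 0, a }
    }
}

impl<T> RawBuf<T, Global> {
    // `Global`-only convenience constructor, mirroring `RawVec::new`.
    fn new() -> Self {
        Self::new_in(Global)
    }
}

fn main() {
    let _buf: RawBuf<u32> = RawBuf::new(); // the default parameter makes this `RawBuf<u32, Global>`
}
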
View File

@@ -250,7 +250,6 @@
use core::cmp::Ordering;
use core::fmt;
use core::hash::{Hash, Hasher};
use core::heap::{Alloc, Layout};
use core::intrinsics::abort;
use core::marker;
use core::marker::{Unsize, PhantomData};
@@ -260,7 +259,7 @@
use core::ptr::{self, NonNull};
use core::convert::From;
use heap::{Heap, box_free};
use alloc::{Global, Alloc, Layout, box_free};
use string::String;
use vec::Vec;
@@ -668,8 +667,8 @@ unsafe fn allocate_for_ptr(ptr: *const T) -> *mut RcBox<T> {
let layout = Layout::for_value(&*fake_ptr);
let mem = Heap.alloc(layout)
.unwrap_or_else(|e| Heap.oom(e));
let mem = Global.alloc(layout)
.unwrap_or_else(|e| Global.oom(e));
// Initialize the real RcBox
let inner = set_data_ptr(ptr as *mut T, mem) as *mut RcBox<T>;
@@ -752,7 +751,7 @@ fn drop(&mut self) {
let slice = from_raw_parts_mut(self.elems, self.n_elems);
ptr::drop_in_place(slice);
Heap.dealloc(self.mem, self.layout.clone());
Global.dealloc(self.mem, self.layout.clone());
}
}
}
@@ -847,7 +846,7 @@ fn drop(&mut self) {
self.dec_weak();
if self.weak() == 0 {
Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr));
Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr));
}
}
}
@@ -1273,7 +1272,7 @@ fn drop(&mut self) {
// the weak count starts at 1, and will only go to zero if all
// the strong pointers have disappeared.
if self.weak() == 0 {
Heap.dealloc(ptr as *mut u8, Layout::for_value(&*ptr));
Global.dealloc(ptr as *mut u8, Layout::for_value(&*ptr));
}
}
}

View File

@@ -9,7 +9,7 @@
// except according to those terms.
use alloc_system::System;
use std::heap::{Heap, Alloc, Layout};
use std::alloc::{Global, Alloc, Layout};
/// https://github.com/rust-lang/rust/issues/45955
///
@@ -22,7 +22,7 @@ fn alloc_system_overaligned_request() {
#[test]
fn std_heap_overaligned_request() {
check_overalign_requests(Heap)
check_overalign_requests(Global)
}
fn check_overalign_requests<T: Alloc>(mut allocator: T) {

View File

@@ -12,7 +12,8 @@
#![unstable(issue = "32838", feature = "allocator_api")]
#[doc(inline)] pub use alloc_crate::alloc::Heap;
#[doc(inline)] #[allow(deprecated)] pub use alloc_crate::alloc::Heap;
#[doc(inline)] pub use alloc_crate::alloc::Global;
#[doc(inline)] pub use alloc_system::System;
#[doc(inline)] pub use core::alloc::*;

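With the `#[allow(deprecated)]` re-export kept above, downstream nightly code can continue to import `Heap` from this module during the transition, but every use of it now warns. A small sketch of the caller side for the same era (`allocator_api` feature; assumes the module is reachable as `std::alloc`, as the overalign test hunk above uses):

#![feature(allocator_api)]

use std::alloc::Global;
#[allow(deprecated)]
use std::alloc::Heap; // still resolves, but is now just a deprecated alias for `Global`

fn main() {
    let _new = Global;
    #[allow(deprecated)]
    let _old = Heap; // the deprecated const: the same zero-sized handle as `Global`
}
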
View File

@@ -11,13 +11,13 @@
use self::Entry::*;
use self::VacantEntryState::*;
use alloc::{Global, Alloc, CollectionAllocErr};
use cell::Cell;
use borrow::Borrow;
use cmp::max;
use fmt::{self, Debug};
#[allow(deprecated)]
use hash::{Hash, Hasher, BuildHasher, SipHasher13};
use heap::{Heap, Alloc, CollectionAllocErr};
use iter::{FromIterator, FusedIterator};
use mem::{self, replace};
use ops::{Deref, Index};
@@ -784,7 +784,7 @@ fn raw_capacity(&self) -> usize {
pub fn reserve(&mut self, additional: usize) {
match self.try_reserve(additional) {
Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
Err(CollectionAllocErr::AllocErr(e)) => Heap.oom(e),
Err(CollectionAllocErr::AllocErr(e)) => Global.oom(e),
Ok(()) => { /* yay */ }
}
}

View File

@@ -8,9 +8,9 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use alloc::{Global, Alloc, Layout, CollectionAllocErr};
use cmp;
use hash::{BuildHasher, Hash, Hasher};
use heap::{Heap, Alloc, Layout, CollectionAllocErr};
use marker;
use mem::{align_of, size_of, needs_drop};
use mem;
@@ -754,7 +754,7 @@ unsafe fn try_new_uninitialized(capacity: usize) -> Result<RawTable<K, V>, Colle
return Err(CollectionAllocErr::CapacityOverflow);
}
let buffer = Heap.alloc(Layout::from_size_align(size, alignment)
let buffer = Global.alloc(Layout::from_size_align(size, alignment)
.ok_or(CollectionAllocErr::CapacityOverflow)?)?;
let hashes = buffer as *mut HashUint;
@ -772,7 +772,7 @@ unsafe fn try_new_uninitialized(capacity: usize) -> Result<RawTable<K, V>, Colle
unsafe fn new_uninitialized(capacity: usize) -> RawTable<K, V> {
match Self::try_new_uninitialized(capacity) {
Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
Err(CollectionAllocErr::AllocErr(e)) => Heap.oom(e),
Err(CollectionAllocErr::AllocErr(e)) => Global.oom(e),
Ok(table) => { table }
}
}
@@ -811,7 +811,7 @@ pub fn try_new(capacity: usize) -> Result<RawTable<K, V>, CollectionAllocErr> {
pub fn new(capacity: usize) -> RawTable<K, V> {
match Self::try_new(capacity) {
Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
Err(CollectionAllocErr::AllocErr(e)) => Heap.oom(e),
Err(CollectionAllocErr::AllocErr(e)) => Global.oom(e),
Ok(table) => { table }
}
}
@@ -1185,8 +1185,8 @@ fn drop(&mut self) {
debug_assert!(!oflo, "should be impossible");
unsafe {
Heap.dealloc(self.hashes.ptr() as *mut u8,
Layout::from_size_align(size, align).unwrap());
Global.dealloc(self.hashes.ptr() as *mut u8,
Layout::from_size_align(size, align).unwrap());
// Remember how everything was allocated out of one buffer
// during initialization? We only need one call to free here.
}
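Both hashtable files keep the same fallible/infallible split after the rename: the `try_*` constructors surface `CollectionAllocErr`, while the infallible wrappers map `CapacityOverflow` to a panic and hand `AllocErr` to `Global.oom`. A condensed sketch of that policy with hypothetical `try_make`/`make` helpers and a local stand-in for the unstable `CollectionAllocErr`, again assuming the era's `Alloc` signatures (`Layout::from_size_align` returning `Option`, `alloc` returning `Result<*mut u8, AllocErr>`):

#![feature(allocator_api)]

use std::alloc::{Alloc, AllocErr, Global, Layout};

// Local stand-in for the unstable `CollectionAllocErr` used above.
enum CollectionAllocErr {
    CapacityOverflow,
    AllocErr(AllocErr),
}

// Fallible path: report both layout overflow and allocator failure to the caller.
unsafe fn try_make(bytes: usize) -> Result<*mut u8, CollectionAllocErr> {
    let layout = Layout::from_size_align(bytes, 8)
        .ok_or(CollectionAllocErr::CapacityOverflow)?;
    Global.alloc(layout).map_err(CollectionAllocErr::AllocErr)
}

// Infallible wrapper: the same policy as `RawTable::new` and `HashMap::reserve` above.
unsafe fn make(bytes: usize) -> *mut u8 {
    match try_make(bytes) {
        Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
        Err(CollectionAllocErr::AllocErr(e)) => Global.oom(e),
        Ok(ptr) => ptr,
    }
}
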