// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

#![unstable(feature = "allocator_api",
            reason = "the precise API and guarantees it provides may be tweaked \
                      slightly, especially to possibly take into account the \
                      types being stored to make room for a future \
                      tracing garbage collector",
            issue = "32838")]

use core::intrinsics::{min_align_of_val, size_of_val};
use core::mem::{self, ManuallyDrop};
use core::usize;

pub use allocator::*;

#[doc(hidden)]
pub mod __core {
    pub use core::*;
}

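// The shim functions declared below are not defined anywhere in this crate;
// they are presumably generated by the compiler to forward to whichever
// allocator crate (e.g. alloc_system or alloc_jemalloc) ends up linked into
// the final program, so only the declarations live here.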
extern "Rust" {
    #[allocator]
    fn __rust_alloc(size: usize, align: usize, err: *mut u8) -> *mut u8;
    #[cold]
    fn __rust_oom(err: *const u8) -> !;
    fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);
    fn __rust_usable_size(layout: *const u8,
                          min: *mut usize,
                          max: *mut usize);
    fn __rust_realloc(ptr: *mut u8,
                      old_size: usize,
                      old_align: usize,
                      new_size: usize,
                      new_align: usize,
                      err: *mut u8) -> *mut u8;
    fn __rust_alloc_zeroed(size: usize, align: usize, err: *mut u8) -> *mut u8;
    fn __rust_alloc_excess(size: usize,
                           align: usize,
                           excess: *mut usize,
                           err: *mut u8) -> *mut u8;
    fn __rust_realloc_excess(ptr: *mut u8,
                             old_size: usize,
                             old_align: usize,
                             new_size: usize,
                             new_align: usize,
                             excess: *mut usize,
                             err: *mut u8) -> *mut u8;
    fn __rust_grow_in_place(ptr: *mut u8,
                            old_size: usize,
                            old_align: usize,
                            new_size: usize,
                            new_align: usize) -> u8;
    fn __rust_shrink_in_place(ptr: *mut u8,
                              old_size: usize,
                              old_align: usize,
                              new_size: usize,
                              new_align: usize) -> u8;
}

#[derive(Copy, Clone, Default, Debug)]
pub struct Heap;
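// Each fallible shim above takes a trailing `err: *mut u8` out-pointer. The
// methods below hand it a pointer to an uninitialized `AllocErr` (wrapped in
// `ManuallyDrop` so nothing gets dropped if the shim never writes it) and
// read the error back only when the returned pointer is null, i.e. on failure.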
unsafe impl Alloc for Heap {
    #[inline]
    unsafe fn alloc(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
        let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
        let ptr = __rust_alloc(layout.size(),
                               layout.align(),
                               &mut *err as *mut AllocErr as *mut u8);
        if ptr.is_null() {
            Err(ManuallyDrop::into_inner(err))
        } else {
            Ok(ptr)
        }
    }

    #[inline]
    #[cold]
    fn oom(&mut self, err: AllocErr) -> ! {
        unsafe {
            __rust_oom(&err as *const AllocErr as *const u8)
        }
    }

    #[inline]
    unsafe fn dealloc(&mut self, ptr: *mut u8, layout: Layout) {
        __rust_dealloc(ptr, layout.size(), layout.align())
    }

    #[inline]
    fn usable_size(&self, layout: &Layout) -> (usize, usize) {
        let mut min = 0;
        let mut max = 0;
        unsafe {
            __rust_usable_size(layout as *const Layout as *const u8,
                               &mut min,
                               &mut max);
        }
        (min, max)
    }

    #[inline]
    unsafe fn realloc(&mut self,
                      ptr: *mut u8,
                      layout: Layout,
                      new_layout: Layout)
                      -> Result<*mut u8, AllocErr>
    {
        let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
        let ptr = __rust_realloc(ptr,
                                 layout.size(),
                                 layout.align(),
                                 new_layout.size(),
                                 new_layout.align(),
                                 &mut *err as *mut AllocErr as *mut u8);
        if ptr.is_null() {
            Err(ManuallyDrop::into_inner(err))
        } else {
            mem::forget(err);
            Ok(ptr)
        }
    }

    #[inline]
    unsafe fn alloc_zeroed(&mut self, layout: Layout) -> Result<*mut u8, AllocErr> {
        let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
        let ptr = __rust_alloc_zeroed(layout.size(),
                                      layout.align(),
                                      &mut *err as *mut AllocErr as *mut u8);
        if ptr.is_null() {
            Err(ManuallyDrop::into_inner(err))
        } else {
            Ok(ptr)
        }
    }

    #[inline]
    unsafe fn alloc_excess(&mut self, layout: Layout) -> Result<Excess, AllocErr> {
        let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
        let mut size = 0;
        let ptr = __rust_alloc_excess(layout.size(),
                                      layout.align(),
                                      &mut size,
                                      &mut *err as *mut AllocErr as *mut u8);
        if ptr.is_null() {
            Err(ManuallyDrop::into_inner(err))
        } else {
            Ok(Excess(ptr, size))
        }
    }

    #[inline]
    unsafe fn realloc_excess(&mut self,
                             ptr: *mut u8,
                             layout: Layout,
                             new_layout: Layout) -> Result<Excess, AllocErr> {
        let mut err = ManuallyDrop::new(mem::uninitialized::<AllocErr>());
        let mut size = 0;
        let ptr = __rust_realloc_excess(ptr,
                                        layout.size(),
                                        layout.align(),
                                        new_layout.size(),
                                        new_layout.align(),
                                        &mut size,
                                        &mut *err as *mut AllocErr as *mut u8);
        if ptr.is_null() {
            Err(ManuallyDrop::into_inner(err))
        } else {
            Ok(Excess(ptr, size))
        }
    }

    #[inline]
    unsafe fn grow_in_place(&mut self,
                            ptr: *mut u8,
                            layout: Layout,
                            new_layout: Layout)
                            -> Result<(), CannotReallocInPlace>
    {
        debug_assert!(new_layout.size() >= layout.size());
        debug_assert!(new_layout.align() == layout.align());
        let ret = __rust_grow_in_place(ptr,
                                       layout.size(),
                                       layout.align(),
                                       new_layout.size(),
                                       new_layout.align());
        if ret != 0 {
            Ok(())
        } else {
            Err(CannotReallocInPlace)
        }
    }

    #[inline]
    unsafe fn shrink_in_place(&mut self,
                              ptr: *mut u8,
                              layout: Layout,
                              new_layout: Layout) -> Result<(), CannotReallocInPlace> {
        debug_assert!(new_layout.size() <= layout.size());
        debug_assert!(new_layout.align() == layout.align());
        let ret = __rust_shrink_in_place(ptr,
                                         layout.size(),
                                         layout.align(),
                                         new_layout.size(),
                                         new_layout.align());
        if ret != 0 {
            Ok(())
        } else {
            Err(CannotReallocInPlace)
        }
    }
}

/// An arbitrary non-null address to represent zero-size allocations.
///
/// This preserves the non-null invariant for types like `Box<T>`. The address
/// may overlap with non-zero-size memory allocations.
#[rustc_deprecated(since = "1.19", reason = "Use Unique/Shared::empty() instead")]
#[unstable(feature = "heap_api", issue = "27700")]
pub const EMPTY: *mut () = 1 as *mut ();

/// The allocator for unique pointers.
// This function must not unwind. If it does, MIR trans will fail.
#[cfg(not(test))]
#[lang = "exchange_malloc"]
#[inline]
unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
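    // Zero-size allocations never reach the allocator: `align` itself serves
    // as a non-null, suitably aligned dummy address (the same idea as the
    // deprecated `EMPTY` constant above).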
    if size == 0 {
        align as *mut u8
    } else {
        let layout = Layout::from_size_align_unchecked(size, align);
        Heap.alloc(layout).unwrap_or_else(|err| {
            Heap.oom(err)
        })
    }
}

#[cfg_attr(not(test), lang = "box_free")]
#[inline]
pub(crate) unsafe fn box_free<T: ?Sized>(ptr: *mut T) {
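    // The compiler calls this lang item to release the `Box`'s backing
    // allocation once the boxed value itself has been dropped.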
    let size = size_of_val(&*ptr);
    let align = min_align_of_val(&*ptr);
    // We do not allocate for Box<T> when T is ZST, so deallocation is also not necessary.
    if size != 0 {
        let layout = Layout::from_size_align_unchecked(size, align);
        Heap.dealloc(ptr as *mut u8, layout);
    }
}

#[cfg(test)]
mod tests {
    extern crate test;
    use self::test::Bencher;
    use boxed::Box;
    use heap::{Heap, Alloc, Layout};

    #[test]
    fn allocate_zeroed() {
        unsafe {
            let layout = Layout::from_size_align(1024, 1).unwrap();
            let ptr = Heap.alloc_zeroed(layout.clone())
                .unwrap_or_else(|e| Heap.oom(e));

            let end = ptr.offset(layout.size() as isize);
            let mut i = ptr;
            while i < end {
                assert_eq!(*i, 0);
                i = i.offset(1);
            }
            Heap.dealloc(ptr, layout);
        }
    }

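    // A minimal smoke-test sketch exercising `alloc`, `realloc` and `dealloc`
    // together on the same `Heap` handle; the 128/256-byte sizes and 8-byte
    // alignment are arbitrary choices.
    #[test]
    fn allocate_realloc_dealloc() {
        unsafe {
            let layout = Layout::from_size_align(128, 8).unwrap();
            let new_layout = Layout::from_size_align(256, 8).unwrap();
            let ptr = Heap.alloc(layout.clone()).unwrap_or_else(|e| Heap.oom(e));
            let ptr = Heap.realloc(ptr, layout, new_layout.clone())
                .unwrap_or_else(|e| Heap.oom(e));
            Heap.dealloc(ptr, new_layout);
        }
    }
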
    #[bench]
    fn alloc_owned_small(b: &mut Bencher) {
        b.iter(|| {
            let _: Box<_> = box 10;
        })
    }
}