rust/src/libarena/lib.rs

// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The arena, a fast but limited type of allocator.
//!
//! Arenas are a type of allocator that destroy the objects within, all at
//! once, once the arena itself is destroyed. They do not support deallocation
//! of individual objects while the arena itself is still alive. The benefit
//! of an arena is very fast allocation; just a pointer bump.
//!
//! This crate has two arenas implemented: `TypedArena`, which is a simpler
//! arena but can only hold objects of a single type, and `Arena`, which is a
//! more complex, slower arena which can hold objects of any type.
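//!
//! A quick sketch of how the two allocators are used (each `alloc` method
//! below hands back a reference that is valid for as long as the arena):
//!
//! ```
//! use arena::{Arena, TypedArena};
//!
//! // `TypedArena` takes values of a single type directly.
//! let typed: TypedArena<i32> = TypedArena::new();
//! let x = typed.alloc(42);
//! *x += 1;
//!
//! // `Arena` takes an initializer closure and can hold any type.
//! let untyped = Arena::new();
//! let s = untyped.alloc(|| "hello".to_string());
//! s.push_str(" world");
//! ```
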
// Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)
#![cfg_attr(stage0, feature(custom_attribute))]
#![crate_name = "arena"]
#![unstable(feature = "rustc_private")]
#![staged_api]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
       html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
       html_root_url = "http://doc.rust-lang.org/nightly/")]
#![feature(alloc)]
#![feature(box_syntax)]
#![feature(core)]
#![feature(staged_api)]
#![feature(unboxed_closures)]
#![cfg_attr(test, feature(test))]
extern crate alloc;

use std::cell::{Cell, RefCell};
use std::cmp;
use std::intrinsics;
use std::marker;
use std::mem;
use std::ptr;
use std::rc::Rc;
use std::rt::heap::{allocate, deallocate};

// The way arena uses arrays is really deeply awful. The arrays are
// allocated, and have capacities reserved, but the fill for the array
// will always stay at 0.
#[derive(Clone, PartialEq)]
struct Chunk {
    data: Rc<RefCell<Vec<u8>>>,
    fill: Cell<usize>,
    is_copy: Cell<bool>,
}

impl Chunk {
    fn capacity(&self) -> usize {
        self.data.borrow().capacity()
    }

    unsafe fn as_ptr(&self) -> *const u8 {
        self.data.borrow().as_ptr()
    }
}

/// A slower reflection-based arena that can allocate objects of any type.
///
/// This arena uses `Vec<u8>` as a backing store to allocate objects from. For
/// each allocated object, the arena stores a pointer to the type descriptor
/// followed by the object (potentially with alignment padding after each
/// element). When the arena is destroyed, it iterates through all of its
/// chunks, and uses the tydesc information to trace through the objects,
/// calling the destructors on them. One subtle point that needs to be
/// addressed is how to handle panics while running the user provided
/// initializer function. It is important to not run the destructor on
/// uninitialized objects, but how to detect them is somewhat subtle. Since
/// `alloc()` can be invoked recursively, it is not sufficient to simply exclude
/// the most recent object. To solve this without requiring extra space, we
/// use the low order bit of the tydesc pointer to encode whether the object
/// it describes has been fully initialized.
///
/// As an optimization, objects with destructors are stored in different chunks
/// than objects without destructors. This reduces overhead when initializing
/// plain-old-data (`Copy` types) and means we don't need to waste time running
/// their destructors.
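///
/// A short sketch of allocating through the closure-based `alloc` defined
/// below (the closure runs once and its result is stored in the arena):
///
/// ```
/// use arena::Arena;
///
/// let arena = Arena::new();
/// let strings = arena.alloc(|| vec!["hello".to_string()]);
/// strings.push("world".to_string());
/// ```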
pub struct Arena<'longer_than_self> {
    // The head is separated out from the list as an unbenchmarked
    // microoptimization, to avoid needing to case on the list to access the
    // head.
    head: RefCell<Chunk>,
    copy_head: RefCell<Chunk>,
    chunks: RefCell<Vec<Chunk>>,
    _marker: marker::PhantomData<*mut &'longer_than_self()>,
}

impl<'a> Arena<'a> {
    /// Allocates a new Arena with 32 bytes preallocated.
    pub fn new() -> Arena<'a> {
        Arena::new_with_size(32)
    }

    /// Allocates a new Arena with `initial_size` bytes preallocated.
    pub fn new_with_size(initial_size: usize) -> Arena<'a> {
        Arena {
            head: RefCell::new(chunk(initial_size, false)),
            copy_head: RefCell::new(chunk(initial_size, true)),
            chunks: RefCell::new(Vec::new()),
            _marker: marker::PhantomData,
        }
    }
}

fn chunk(size: usize, is_copy: bool) -> Chunk {
    Chunk {
        data: Rc::new(RefCell::new(Vec::with_capacity(size))),
        fill: Cell::new(0),
        is_copy: Cell::new(is_copy),
    }
}

impl<'longer_than_self> Drop for Arena<'longer_than_self> {
    fn drop(&mut self) {
        unsafe {
            destroy_chunk(&*self.head.borrow());
            for chunk in self.chunks.borrow().iter() {
                if !chunk.is_copy.get() {
                    destroy_chunk(chunk);
                }
            }
        }
    }
}
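
// Rounds `base` up to the next multiple of `align` (alignments are always
// powers of two, which is what makes the mask trick below valid).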
#[inline]
fn round_up(base: usize, align: usize) -> usize {
    (base.checked_add(align - 1)).unwrap() & !(align - 1)
}

// Walk down a chunk, running the destructors for any objects stored
// in it.
unsafe fn destroy_chunk(chunk: &Chunk) {
    let mut idx = 0;
    let buf = chunk.as_ptr();
    let fill = chunk.fill.get();

    while idx < fill {
        let tydesc_data: *const usize = mem::transmute(buf.offset(idx as isize));
        let (tydesc, is_done) = un_bitpack_tydesc_ptr(*tydesc_data);
        let (size, align) = ((*tydesc).size, (*tydesc).align);

        let after_tydesc = idx + mem::size_of::<*const TyDesc>();

        let start = round_up(after_tydesc, align);

        //debug!("freeing object: idx = {}, size = {}, align = {}, done = {}",
        //       start, size, align, is_done);
        if is_done {
            ((*tydesc).drop_glue)(buf.offset(start as isize) as *const i8);
        }

        // Find where the next tydesc lives
        idx = round_up(start + size, mem::align_of::<*const TyDesc>());
    }
}

// We encode whether the object a tydesc describes has been
// initialized in the arena in the low bit of the tydesc pointer. This
// is necessary in order to properly do cleanup if a panic occurs
// during an initializer.
#[inline]
fn bitpack_tydesc_ptr(p: *const TyDesc, is_done: bool) -> usize {
    p as usize | (is_done as usize)
}
#[inline]
fn un_bitpack_tydesc_ptr(p: usize) -> (*const TyDesc, bool) {
    ((p & !1) as *const TyDesc, p & 1 == 1)
}

// HACK(eddyb) TyDesc replacement using a trait object vtable.
// This could be replaced in the future with a custom DST layout,
// or `&'static (drop_glue, size, align)` created by a `const fn`.
struct TyDesc {
    drop_glue: fn(*const i8),
    size: usize,
    align: usize
}
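
// The three fields above mirror the leading entries of a trait object vtable
// in this version of rustc (drop glue, size, alignment); that layout
// assumption is what lets `get_tydesc` below reinterpret a vtable pointer as
// a `*const TyDesc`.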
trait AllTypes { fn dummy(&self) { } }

impl<T:?Sized> AllTypes for T { }

unsafe fn get_tydesc<T>() -> *const TyDesc {
    use std::raw::TraitObject;

    let ptr = &*(1 as *const T);

    // Can use any trait that is implemented for all types.
    let obj = mem::transmute::<&AllTypes, TraitObject>(ptr);
    obj.vtable as *const TyDesc
}

impl<'longer_than_self> Arena<'longer_than_self> {
    fn chunk_size(&self) -> usize {
        self.copy_head.borrow().capacity()
    }

    // Functions for the POD part of the arena
    fn alloc_copy_grow(&self, n_bytes: usize, align: usize) -> *const u8 {
        // Allocate a new chunk.
        let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
        self.chunks.borrow_mut().push(self.copy_head.borrow().clone());
        *self.copy_head.borrow_mut() =
            chunk((new_min_chunk_size + 1).next_power_of_two(), true);
        return self.alloc_copy_inner(n_bytes, align);
    }

    #[inline]
    fn alloc_copy_inner(&self, n_bytes: usize, align: usize) -> *const u8 {
        let start = round_up(self.copy_head.borrow().fill.get(), align);

        let end = start + n_bytes;
        if end > self.chunk_size() {
            return self.alloc_copy_grow(n_bytes, align);
        }

        let copy_head = self.copy_head.borrow();
        copy_head.fill.set(end);

        unsafe {
            copy_head.as_ptr().offset(start as isize)
        }
    }

    #[inline]
    fn alloc_copy<T, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
        unsafe {
            let ptr = self.alloc_copy_inner(mem::size_of::<T>(),
                                            mem::min_align_of::<T>());
            let ptr = ptr as *mut T;
            ptr::write(&mut (*ptr), op());
            return &mut *ptr;
        }
    }

    // Functions for the non-POD part of the arena
    fn alloc_noncopy_grow(&self, n_bytes: usize,
                          align: usize) -> (*const u8, *const u8) {
        // Allocate a new chunk.
        let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
        self.chunks.borrow_mut().push(self.head.borrow().clone());
        *self.head.borrow_mut() =
            chunk((new_min_chunk_size + 1).next_power_of_two(), false);
        return self.alloc_noncopy_inner(n_bytes, align);
    }
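
    // Layout of a single non-POD allocation inside a chunk (the low bit of
    // the tydesc word doubles as the "fully initialized" flag, see
    // `bitpack_tydesc_ptr` above):
    //
    //     [ tydesc pointer | done bit ][ padding to `align` ][ object of `n_bytes` ]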
    #[inline]
    fn alloc_noncopy_inner(&self, n_bytes: usize,
                           align: usize) -> (*const u8, *const u8) {
        // Be careful to not maintain any `head` borrows active, because
        // `alloc_noncopy_grow` borrows it mutably.
        let (start, end, tydesc_start, head_capacity) = {
            let head = self.head.borrow();
            let fill = head.fill.get();

            let tydesc_start = fill;
            let after_tydesc = fill + mem::size_of::<*const TyDesc>();
            let start = round_up(after_tydesc, align);
            let end = start + n_bytes;

            (start, end, tydesc_start, head.capacity())
        };

        if end > head_capacity {
            return self.alloc_noncopy_grow(n_bytes, align);
        }

        let head = self.head.borrow();
        head.fill.set(round_up(end, mem::align_of::<*const TyDesc>()));

        unsafe {
            let buf = head.as_ptr();
            return (buf.offset(tydesc_start as isize), buf.offset(start as isize));
        }
    }

    #[inline]
    fn alloc_noncopy<T, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
        unsafe {
            let tydesc = get_tydesc::<T>();
            let (ty_ptr, ptr) =
                self.alloc_noncopy_inner(mem::size_of::<T>(),
                                         mem::min_align_of::<T>());
            let ty_ptr = ty_ptr as *mut usize;
            let ptr = ptr as *mut T;
            // Write in our tydesc along with a bit indicating that it
            // has *not* been initialized yet.
            *ty_ptr = mem::transmute(tydesc);
            // Actually initialize it
            ptr::write(&mut(*ptr), op());
            // Now that we are done, update the tydesc to indicate that
            // the object is there.
            *ty_ptr = bitpack_tydesc_ptr(tydesc, true);
            return &mut *ptr;
        }
    }

    /// Allocates a new item in the arena, using `op` to initialize the value,
    /// and returns a reference to it.
    #[inline]
    pub fn alloc<T:'longer_than_self, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
        unsafe {
            if intrinsics::needs_drop::<T>() {
                self.alloc_noncopy(op)
            } else {
                self.alloc_copy(op)
            }
        }
    }
}

#[test]
fn test_arena_destructors() {
    let arena = Arena::new();
    for i in 0..10 {
        // Arena allocate something with drop glue to make sure it
        // doesn't leak.
        arena.alloc(|| Rc::new(i));
        // Allocate something with funny size and alignment, to keep
        // things interesting.
        arena.alloc(|| [0u8, 1u8, 2u8]);
    }
}

#[test]
#[should_panic]
fn test_arena_destructors_fail() {
    let arena = Arena::new();
    // Put some stuff in the arena.
    for i in 0..10 {
        // Arena allocate something with drop glue to make sure it
        // doesn't leak.
        arena.alloc(|| { Rc::new(i) });
        // Allocate something with funny size and alignment, to keep
        // things interesting.
        arena.alloc(|| { [0u8, 1, 2] });
    }
    // Now, panic while allocating
    arena.alloc::<Rc<i32>, _>(|| {
        panic!();
    });
}

/// A faster arena that can hold objects of only one type.
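///
/// A short sketch of typical usage (`alloc` takes the value directly and
/// returns a reference that lives as long as the arena):
///
/// ```
/// use arena::TypedArena;
///
/// let arena: TypedArena<u32> = TypedArena::new();
/// let x = arena.alloc(7);
/// assert_eq!(*x, 7);
/// ```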
pub struct TypedArena<T> {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*const T>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*const T>,

    /// A pointer to the first arena segment.
    first: RefCell<*mut TypedArenaChunk<T>>,

    /// Marker indicating that dropping the arena causes its owned
    /// instances of `T` to be dropped.
    _own: marker::PhantomData<T>,
}

struct TypedArenaChunk<T> {
    marker: marker::PhantomData<T>,

    /// Pointer to the next arena segment.
    next: *mut TypedArenaChunk<T>,

    /// The number of elements that this chunk can hold.
    capacity: usize,

    // Objects follow here, suitably aligned.
}

fn calculate_size<T>(capacity: usize) -> usize {
    let mut size = mem::size_of::<TypedArenaChunk<T>>();
    size = round_up(size, mem::min_align_of::<T>());
    let elem_size = mem::size_of::<T>();
    let elems_size = elem_size.checked_mul(capacity).unwrap();
    size = size.checked_add(elems_size).unwrap();
    size
}

impl<T> TypedArenaChunk<T> {
    #[inline]
    unsafe fn new(next: *mut TypedArenaChunk<T>, capacity: usize)
                  -> *mut TypedArenaChunk<T> {
        let size = calculate_size::<T>(capacity);
        let chunk = allocate(size, mem::min_align_of::<TypedArenaChunk<T>>())
                    as *mut TypedArenaChunk<T>;
        if chunk.is_null() { alloc::oom() }
        (*chunk).next = next;
        (*chunk).capacity = capacity;
        chunk
    }

    /// Destroys this arena chunk, running the destructors of the `len`
    /// objects stored in it (when `T` actually needs dropping) and then
    /// destroying the next chunk in the list.
    #[inline]
    unsafe fn destroy(&mut self, len: usize) {
        // Destroy all the allocated objects.
        if intrinsics::needs_drop::<T>() {
            let mut start = self.start();
            for _ in 0..len {
                ptr::read(start as *const T); // run the destructor on the pointer
                start = start.offset(mem::size_of::<T>() as isize)
            }
        }

        // Destroy the next chunk.
        let next = self.next;
        let size = calculate_size::<T>(self.capacity);
        let self_ptr: *mut TypedArenaChunk<T> = self;
        deallocate(self_ptr as *mut u8, size,
                   mem::min_align_of::<TypedArenaChunk<T>>());
        if !next.is_null() {
            let capacity = (*next).capacity;
            (*next).destroy(capacity);
        }
    }

    // Returns a pointer to the first allocated object.
    #[inline]
    fn start(&self) -> *const u8 {
        let this: *const TypedArenaChunk<T> = self;
        unsafe {
            mem::transmute(round_up(this.offset(1) as usize,
                                    mem::min_align_of::<T>()))
        }
    }

    // Returns a pointer to the end of the allocated space.
    #[inline]
    fn end(&self) -> *const u8 {
        unsafe {
            let size = mem::size_of::<T>().checked_mul(self.capacity).unwrap();
            self.start().offset(size as isize)
        }
    }
}

impl<T> TypedArena<T> {
    /// Creates a new `TypedArena` with preallocated space for eight objects.
    #[inline]
    pub fn new() -> TypedArena<T> {
        TypedArena::with_capacity(8)
    }

    /// Creates a new `TypedArena` with preallocated space for the given number of
    /// objects.
    #[inline]
    pub fn with_capacity(capacity: usize) -> TypedArena<T> {
        unsafe {
            let chunk = TypedArenaChunk::<T>::new(ptr::null_mut(), capacity);
            TypedArena {
                ptr: Cell::new((*chunk).start() as *const T),
                end: Cell::new((*chunk).end() as *const T),
                first: RefCell::new(chunk),
                _own: marker::PhantomData,
            }
        }
    }

    /// Allocates an object in the `TypedArena`, returning a reference to it.
    #[inline]
    pub fn alloc(&self, object: T) -> &mut T {
        if self.ptr == self.end {
            self.grow()
        }

        let ptr: &mut T = unsafe {
            let ptr: &mut T = mem::transmute(self.ptr.clone());
            ptr::write(ptr, object);
            self.ptr.set(self.ptr.get().offset(1));
            ptr
        };

        ptr
    }

    /// Grows the arena.
    #[inline(never)]
    fn grow(&self) {
        unsafe {
            let chunk = *self.first.borrow_mut();
            let new_capacity = (*chunk).capacity.checked_mul(2).unwrap();
            let chunk = TypedArenaChunk::<T>::new(chunk, new_capacity);
            self.ptr.set((*chunk).start() as *const T);
            self.end.set((*chunk).end() as *const T);
            *self.first.borrow_mut() = chunk
        }
    }
}

impl<T> Drop for TypedArena<T> {
    fn drop(&mut self) {
        unsafe {
            // Determine how much was filled.
            let start = self.first.borrow().as_ref().unwrap().start() as usize;
            let end = self.ptr.get() as usize;
            let diff = (end - start) / mem::size_of::<T>();

            // Pass that to the `destroy` method.
            (**self.first.borrow_mut()).destroy(diff)
        }
    }
}

#[cfg(test)]
mod tests {
    extern crate test;
    use self::test::Bencher;
    use super::{Arena, TypedArena};

    #[allow(dead_code)]
    struct Point {
        x: i32,
        y: i32,
        z: i32,
    }

    #[test]
    fn test_arena_alloc_nested() {
        struct Inner { value: u8 }
        struct Outer<'a> { inner: &'a Inner }
        enum EI<'e> { I(Inner), O(Outer<'e>) }

        struct Wrap<'a>(TypedArena<EI<'a>>);

        impl<'a> Wrap<'a> {
            fn alloc_inner<F:Fn() -> Inner>(&self, f: F) -> &Inner {
                let r: &EI = self.0.alloc(EI::I(f()));
                if let &EI::I(ref i) = r {
                    i
                } else {
                    panic!("mismatch");
                }
            }
            fn alloc_outer<F:Fn() -> Outer<'a>>(&self, f: F) -> &Outer {
                let r: &EI = self.0.alloc(EI::O(f()));
                if let &EI::O(ref o) = r {
                    o
                } else {
                    panic!("mismatch");
                }
            }
        }

        let arena = Wrap(TypedArena::new());

        let result = arena.alloc_outer(|| Outer {
            inner: arena.alloc_inner(|| Inner { value: 10 }) });

        assert_eq!(result.inner.value, 10);
    }

    #[test]
    pub fn test_copy() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(Point {
                x: 1,
                y: 2,
                z: 3,
            });
        }
    }

    #[bench]
    pub fn bench_copy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| {
            arena.alloc(Point {
                x: 1,
                y: 2,
                z: 3,
            })
        })
    }

    #[bench]
    pub fn bench_copy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            let _: Box<_> = box Point {
                x: 1,
                y: 2,
                z: 3,
            };
        })
    }

    #[bench]
    pub fn bench_copy_old_arena(b: &mut Bencher) {
        let arena = Arena::new();
        b.iter(|| {
            arena.alloc(|| {
                Point {
                    x: 1,
                    y: 2,
                    z: 3,
                }
            })
        })
    }

    #[allow(dead_code)]
    struct Noncopy {
        string: String,
        array: Vec<i32>,
    }

    #[test]
    pub fn test_noncopy() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec!( 1, 2, 3, 4, 5 ),
            });
        }
    }

    #[bench]
    pub fn bench_noncopy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec!( 1, 2, 3, 4, 5 ),
            })
        })
    }

    #[bench]
    pub fn bench_noncopy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            let _: Box<_> = box Noncopy {
                string: "hello world".to_string(),
                array: vec!( 1, 2, 3, 4, 5 ),
            };
        })
    }

    #[bench]
    pub fn bench_noncopy_old_arena(b: &mut Bencher) {
        let arena = Arena::new();
        b.iter(|| {
            arena.alloc(|| Noncopy {
                string: "hello world".to_string(),
                array: vec!( 1, 2, 3, 4, 5 ),
            })
        })
    }
}