// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! The arena, a fast but limited type of allocator.
//!
//! Arenas are a type of allocator that destroy the objects within, all at
//! once, once the arena itself is destroyed. They do not support deallocation
//! of individual objects while the arena itself is still alive. The benefit
//! of an arena is very fast allocation; just a pointer bump.
//!
//! This crate has two arenas implemented: `TypedArena`, which is a simpler
//! arena but can only hold objects of a single type, and `Arena`, which is a
//! more complex, slower arena which can hold objects of any type.
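//!
//! A minimal usage sketch (illustrative; both arenas are defined below):
//!
//! ```
//! use arena::TypedArena;
//!
//! let arena = TypedArena::new();
//! let x: &mut i32 = arena.alloc(42);
//! *x += 1;
//! assert_eq!(*x, 43);
//! // Everything allocated here is dropped together when `arena` goes
//! // out of scope.
//! ```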

// Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)
#![cfg_attr(stage0, feature(custom_attribute))]
#![crate_name = "arena"]
#![unstable(feature = "rustc_private")]
#![staged_api]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
       html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
       html_root_url = "http://doc.rust-lang.org/nightly/")]

#![feature(alloc)]
#![feature(box_syntax)]
#![feature(core_intrinsics)]
#![feature(ptr_as_ref)]
#![feature(raw)]
#![feature(staged_api)]
#![feature(unboxed_closures)]
#![cfg_attr(test, feature(test))]

extern crate alloc;

use std::cell::{Cell, RefCell};
use std::cmp;
use std::intrinsics;
use std::marker;
use std::mem;
use std::ptr;
use std::rc::Rc;
use std::rt::heap::{allocate, deallocate};

// The way arena uses arrays is really deeply awful. The arrays are
// allocated, and have capacities reserved, but the fill for the array
// will always stay at 0.
#[derive(Clone, PartialEq)]
struct Chunk {
    data: Rc<RefCell<Vec<u8>>>,
    fill: Cell<usize>,
    is_copy: Cell<bool>,
}

impl Chunk {
    fn capacity(&self) -> usize {
        self.data.borrow().capacity()
    }

    unsafe fn as_ptr(&self) -> *const u8 {
        self.data.borrow().as_ptr()
    }
}

/// A slower reflection-based arena that can allocate objects of any type.
///
/// This arena uses `Vec<u8>` as a backing store to allocate objects from. For
/// each allocated object, the arena stores a pointer to the type descriptor
/// followed by the object (potentially with alignment padding after each
/// element). When the arena is destroyed, it iterates through all of its
/// chunks, and uses the tydesc information to trace through the objects,
/// calling the destructors on them. One subtle point that needs to be
/// addressed is how to handle panics while running the user-provided
/// initializer function. It is important not to run the destructor on
/// uninitialized objects, but how to detect them is somewhat subtle. Since
/// `alloc()` can be invoked recursively, it is not sufficient to simply exclude
/// the most recent object. To solve this without requiring extra space, we
/// use the low order bit of the tydesc pointer to encode whether the object
/// it describes has been fully initialized.
///
/// As an optimization, objects with destructors are stored in different chunks
/// than objects without destructors. This reduces overhead when initializing
/// plain-old-data (`Copy` types) and means we don't need to waste time running
/// their destructors.
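///
/// A short usage sketch (illustrative): a single `Arena` can hold values of
/// several different types at once.
///
/// ```
/// use arena::Arena;
///
/// let arena = Arena::new();
/// let x = arena.alloc(|| 5);
/// let strings = arena.alloc(|| vec!["many".to_string(), "types".to_string()]);
/// assert_eq!(*x, 5);
/// assert_eq!(strings.len(), 2);
/// ```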
pub struct Arena<'longer_than_self> {
    // The head is separated out from the list as an unbenchmarked
    // microoptimization, to avoid needing to case on the list to access the
    // head.
    head: RefCell<Chunk>,
    copy_head: RefCell<Chunk>,
    chunks: RefCell<Vec<Chunk>>,
    _marker: marker::PhantomData<*mut &'longer_than_self ()>,
}

impl<'a> Arena<'a> {
    /// Allocates a new Arena with 32 bytes preallocated.
    pub fn new() -> Arena<'a> {
        Arena::new_with_size(32)
    }

    /// Allocates a new Arena with `initial_size` bytes preallocated.
    pub fn new_with_size(initial_size: usize) -> Arena<'a> {
        Arena {
            head: RefCell::new(chunk(initial_size, false)),
            copy_head: RefCell::new(chunk(initial_size, true)),
            chunks: RefCell::new(Vec::new()),
            _marker: marker::PhantomData,
        }
    }
}

fn chunk(size: usize, is_copy: bool) -> Chunk {
    Chunk {
        data: Rc::new(RefCell::new(Vec::with_capacity(size))),
        fill: Cell::new(0),
        is_copy: Cell::new(is_copy),
    }
}

impl<'longer_than_self> Drop for Arena<'longer_than_self> {
    fn drop(&mut self) {
        unsafe {
            destroy_chunk(&*self.head.borrow());
            for chunk in self.chunks.borrow().iter() {
                if !chunk.is_copy.get() {
                    destroy_chunk(chunk);
                }
            }
        }
    }
}

#[inline]
fn round_up(base: usize, align: usize) -> usize {
    (base.checked_add(align - 1)).unwrap() & !(align - 1)
}
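
// For example, `round_up(13, 8)` is 16 and `round_up(16, 8)` stays 16; note
// that `align` must be a power of two for the mask arithmetic above to be
// correct.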

// Walk down a chunk, running the destructors for any objects stored
// in it.
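//
// Conceptually, a noncopy chunk is laid out as a sequence of
// (tydesc, object) records, each padded to the required alignment:
//
//     [tydesc|pad|object][pad][tydesc|pad|object] ... up to `fill`
//
// where the low bit of each stored tydesc pointer records whether the object
// after it was fully initialized (see `bitpack_tydesc_ptr` below).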
unsafe fn destroy_chunk(chunk: &Chunk) {
    let mut idx = 0;
    let buf = chunk.as_ptr();
    let fill = chunk.fill.get();

    while idx < fill {
        let tydesc_data: *const usize = mem::transmute(buf.offset(idx as isize));
        let (tydesc, is_done) = un_bitpack_tydesc_ptr(*tydesc_data);
        let (size, align) = ((*tydesc).size, (*tydesc).align);

        let after_tydesc = idx + mem::size_of::<*const TyDesc>();

        let start = round_up(after_tydesc, align);

        //debug!("freeing object: idx = {}, size = {}, align = {}, done = {}",
        //       start, size, align, is_done);

        if is_done {
            ((*tydesc).drop_glue)(buf.offset(start as isize) as *const i8);
        }

        // Find where the next tydesc lives
        idx = round_up(start + size, mem::align_of::<*const TyDesc>());
    }
}

// We encode whether the object a tydesc describes has been
// initialized in the arena in the low bit of the tydesc pointer. This
// is necessary in order to properly do cleanup if a panic occurs
// during an initializer.
#[inline]
fn bitpack_tydesc_ptr(p: *const TyDesc, is_done: bool) -> usize {
    p as usize | (is_done as usize)
}
#[inline]
fn un_bitpack_tydesc_ptr(p: usize) -> (*const TyDesc, bool) {
    ((p & !1) as *const TyDesc, p & 1 == 1)
}
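
// A quick sanity check of the round trip (a sketch: `0x1000` stands in for a
// real tydesc address, which always has its low bit clear).
#[test]
fn test_bitpack_tydesc_ptr_round_trip() {
    let p = 0x1000 as *const TyDesc;
    assert_eq!(un_bitpack_tydesc_ptr(bitpack_tydesc_ptr(p, true)), (p, true));
    assert_eq!(un_bitpack_tydesc_ptr(bitpack_tydesc_ptr(p, false)), (p, false));
}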

// HACK(eddyb) TyDesc replacement using a trait object vtable.
// This could be replaced in the future with a custom DST layout,
// or `&'static (drop_glue, size, align)` created by a `const fn`.
struct TyDesc {
    drop_glue: fn(*const i8),
    size: usize,
    align: usize,
}

trait AllTypes { fn dummy(&self) { } }

impl<T: ?Sized> AllTypes for T { }

unsafe fn get_tydesc<T>() -> *const TyDesc {
    use std::raw::TraitObject;

    let ptr = &*(1 as *const T);

    // Can use any trait that is implemented for all types.
    let obj = mem::transmute::<&AllTypes, TraitObject>(ptr);
    obj.vtable as *const TyDesc
}

impl<'longer_than_self> Arena<'longer_than_self> {
    fn chunk_size(&self) -> usize {
        self.copy_head.borrow().capacity()
    }

    // Functions for the POD part of the arena
    fn alloc_copy_grow(&self, n_bytes: usize, align: usize) -> *const u8 {
        // Allocate a new chunk.
        let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
        self.chunks.borrow_mut().push(self.copy_head.borrow().clone());

        *self.copy_head.borrow_mut() =
            chunk((new_min_chunk_size + 1).next_power_of_two(), true);

        return self.alloc_copy_inner(n_bytes, align);
    }

    #[inline]
    fn alloc_copy_inner(&self, n_bytes: usize, align: usize) -> *const u8 {
        let start = round_up(self.copy_head.borrow().fill.get(), align);

        let end = start + n_bytes;
        if end > self.chunk_size() {
            return self.alloc_copy_grow(n_bytes, align);
        }

        let copy_head = self.copy_head.borrow();
        copy_head.fill.set(end);

        unsafe {
            copy_head.as_ptr().offset(start as isize)
        }
    }

    #[inline]
    fn alloc_copy<T, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
        unsafe {
            let ptr = self.alloc_copy_inner(mem::size_of::<T>(),
                                            mem::min_align_of::<T>());
            let ptr = ptr as *mut T;
            ptr::write(&mut (*ptr), op());
            return &mut *ptr;
        }
    }

    // Functions for the non-POD part of the arena
    fn alloc_noncopy_grow(&self, n_bytes: usize,
                          align: usize) -> (*const u8, *const u8) {
        // Allocate a new chunk.
        let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
        self.chunks.borrow_mut().push(self.head.borrow().clone());

        *self.head.borrow_mut() =
            chunk((new_min_chunk_size + 1).next_power_of_two(), false);

        return self.alloc_noncopy_inner(n_bytes, align);
    }

    #[inline]
    fn alloc_noncopy_inner(&self, n_bytes: usize,
                           align: usize) -> (*const u8, *const u8) {
        // Be careful not to keep any `head` borrows active, because
        // `alloc_noncopy_grow` borrows it mutably.
        let (start, end, tydesc_start, head_capacity) = {
            let head = self.head.borrow();
            let fill = head.fill.get();

            let tydesc_start = fill;
            let after_tydesc = fill + mem::size_of::<*const TyDesc>();
            let start = round_up(after_tydesc, align);
            let end = start + n_bytes;

            (start, end, tydesc_start, head.capacity())
        };

        if end > head_capacity {
            return self.alloc_noncopy_grow(n_bytes, align);
        }

        let head = self.head.borrow();
        head.fill.set(round_up(end, mem::align_of::<*const TyDesc>()));

        unsafe {
            let buf = head.as_ptr();
            return (buf.offset(tydesc_start as isize), buf.offset(start as isize));
        }
    }

    #[inline]
    fn alloc_noncopy<T, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
        unsafe {
            let tydesc = get_tydesc::<T>();
            let (ty_ptr, ptr) =
                self.alloc_noncopy_inner(mem::size_of::<T>(),
                                         mem::min_align_of::<T>());
            let ty_ptr = ty_ptr as *mut usize;
            let ptr = ptr as *mut T;
            // Write in our tydesc along with a bit indicating that it
            // has *not* been initialized yet.
            *ty_ptr = mem::transmute(tydesc);
            // Actually initialize it
            ptr::write(&mut(*ptr), op());
            // Now that we are done, update the tydesc to indicate that
            // the object is there.
            *ty_ptr = bitpack_tydesc_ptr(tydesc, true);

            return &mut *ptr;
        }
    }

    /// Allocates a new item in the arena, using `op` to initialize the value,
    /// and returns a reference to it.
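    ///
    /// A minimal sketch of the call shape (illustrative): the value is
    /// constructed by the closure directly into arena-owned memory.
    ///
    /// ```
    /// use arena::Arena;
    ///
    /// let arena = Arena::new();
    /// let point = arena.alloc(|| (1, 2, 3));
    /// assert_eq!(*point, (1, 2, 3));
    /// ```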
    #[inline]
    pub fn alloc<T: 'longer_than_self, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
        unsafe {
            if intrinsics::needs_drop::<T>() {
                self.alloc_noncopy(op)
            } else {
                self.alloc_copy(op)
            }
        }
    }
}

#[test]
fn test_arena_destructors() {
    let arena = Arena::new();
    for i in 0..10 {
        // Arena allocate something with drop glue to make sure it
        // doesn't leak.
        arena.alloc(|| Rc::new(i));
        // Allocate something with funny size and alignment, to keep
        // things interesting.
        arena.alloc(|| [0u8, 1u8, 2u8]);
    }
}

#[test]
#[should_panic]
fn test_arena_destructors_fail() {
    let arena = Arena::new();
    // Put some stuff in the arena.
    for i in 0..10 {
        // Arena allocate something with drop glue to make sure it
        // doesn't leak.
        arena.alloc(|| { Rc::new(i) });
        // Allocate something with funny size and alignment, to keep
        // things interesting.
        arena.alloc(|| { [0u8, 1, 2] });
    }
    // Now, panic while allocating
    arena.alloc::<Rc<i32>, _>(|| {
        panic!();
    });
}

/// A faster arena that can hold objects of only one type.
pub struct TypedArena<T> {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*const T>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*const T>,

    /// A pointer to the first arena segment.
    first: RefCell<*mut TypedArenaChunk<T>>,

    /// Marker indicating that dropping the arena causes its owned
    /// instances of `T` to be dropped.
    _own: marker::PhantomData<T>,
}

struct TypedArenaChunk<T> {
    marker: marker::PhantomData<T>,

    /// Pointer to the next arena segment.
    next: *mut TypedArenaChunk<T>,

    /// The number of elements that this chunk can hold.
    capacity: usize,

    // Objects follow here, suitably aligned.
}

fn calculate_size<T>(capacity: usize) -> usize {
    let mut size = mem::size_of::<TypedArenaChunk<T>>();
    size = round_up(size, mem::min_align_of::<T>());
    let elem_size = mem::size_of::<T>();
    let elems_size = elem_size.checked_mul(capacity).unwrap();
    size = size.checked_add(elems_size).unwrap();
    size
}
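
// As a worked example (figures assume a typical 64-bit target):
// `calculate_size::<u64>(8)` is a 16-byte header (`next` plus `capacity`),
// which is already 8-byte aligned, plus 8 * 8 bytes of elements, for 80
// bytes in total.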

impl<T> TypedArenaChunk<T> {
    #[inline]
    unsafe fn new(next: *mut TypedArenaChunk<T>, capacity: usize)
                  -> *mut TypedArenaChunk<T> {
        let size = calculate_size::<T>(capacity);
        let chunk = allocate(size, mem::min_align_of::<TypedArenaChunk<T>>())
                        as *mut TypedArenaChunk<T>;
        if chunk.is_null() { alloc::oom() }
        (*chunk).next = next;
        (*chunk).capacity = capacity;
        chunk
    }

    /// Destroys this arena chunk: runs the destructors of the first `len`
    /// objects it holds (when `T` has drop glue), then frees the chunk's
    /// memory and recursively destroys the next chunk, which is always full.
    #[inline]
    unsafe fn destroy(&mut self, len: usize) {
        // Destroy all the allocated objects.
        if intrinsics::needs_drop::<T>() {
            let mut start = self.start();
            for _ in 0..len {
                ptr::read(start as *const T); // run the destructor on the pointer
                start = start.offset(mem::size_of::<T>() as isize)
            }
        }

        // Destroy the next chunk.
        let next = self.next;
        let size = calculate_size::<T>(self.capacity);
        let self_ptr: *mut TypedArenaChunk<T> = self;
        deallocate(self_ptr as *mut u8, size,
                   mem::min_align_of::<TypedArenaChunk<T>>());
        if !next.is_null() {
            let capacity = (*next).capacity;
            (*next).destroy(capacity);
        }
    }

    // Returns a pointer to the first allocated object.
    #[inline]
    fn start(&self) -> *const u8 {
        let this: *const TypedArenaChunk<T> = self;
        unsafe {
            mem::transmute(round_up(this.offset(1) as usize,
                                    mem::min_align_of::<T>()))
        }
    }

    // Returns a pointer to the end of the allocated space.
    #[inline]
    fn end(&self) -> *const u8 {
        unsafe {
            let size = mem::size_of::<T>().checked_mul(self.capacity).unwrap();
            self.start().offset(size as isize)
        }
    }
}

impl<T> TypedArena<T> {
    /// Creates a new `TypedArena` with preallocated space for eight objects.
    #[inline]
    pub fn new() -> TypedArena<T> {
        TypedArena::with_capacity(8)
    }

    /// Creates a new `TypedArena` with preallocated space for the given number of
    /// objects.
    #[inline]
    pub fn with_capacity(capacity: usize) -> TypedArena<T> {
        unsafe {
            let chunk = TypedArenaChunk::<T>::new(ptr::null_mut(), capacity);
            TypedArena {
                ptr: Cell::new((*chunk).start() as *const T),
                end: Cell::new((*chunk).end() as *const T),
                first: RefCell::new(chunk),
                _own: marker::PhantomData,
            }
        }
    }

    /// Allocates an object in the `TypedArena`, returning a reference to it.
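    ///
    /// A short usage sketch (illustrative):
    ///
    /// ```
    /// use arena::TypedArena;
    ///
    /// let arena = TypedArena::new();
    /// let v = arena.alloc(vec![1, 2, 3]);
    /// v.push(4);
    /// assert_eq!(v.len(), 4);
    /// ```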
    #[inline]
    pub fn alloc(&self, object: T) -> &mut T {
        if self.ptr == self.end {
            self.grow()
        }

        let ptr: &mut T = unsafe {
            let ptr: &mut T = mem::transmute(self.ptr.clone());
            ptr::write(ptr, object);
            self.ptr.set(self.ptr.get().offset(1));
            ptr
        };

        ptr
    }

    /// Grows the arena.
    #[inline(never)]
    fn grow(&self) {
        unsafe {
            let chunk = *self.first.borrow_mut();
            let new_capacity = (*chunk).capacity.checked_mul(2).unwrap();
            let chunk = TypedArenaChunk::<T>::new(chunk, new_capacity);
            self.ptr.set((*chunk).start() as *const T);
            self.end.set((*chunk).end() as *const T);
            *self.first.borrow_mut() = chunk
        }
    }
}

impl<T> Drop for TypedArena<T> {
    fn drop(&mut self) {
        unsafe {
            // Determine how much was filled.
            let start = self.first.borrow().as_ref().unwrap().start() as usize;
            let end = self.ptr.get() as usize;
            let diff = (end - start) / mem::size_of::<T>();

            // Pass that to the `destroy` method.
            (**self.first.borrow_mut()).destroy(diff)
        }
    }
}

#[cfg(test)]
mod tests {
    extern crate test;
    use self::test::Bencher;
    use super::{Arena, TypedArena};

    #[allow(dead_code)]
    struct Point {
        x: i32,
        y: i32,
        z: i32,
    }

    #[test]
    fn test_arena_alloc_nested() {
        struct Inner { value: u8 }
        struct Outer<'a> { inner: &'a Inner }
        enum EI<'e> { I(Inner), O(Outer<'e>) }

        struct Wrap<'a>(TypedArena<EI<'a>>);

        impl<'a> Wrap<'a> {
            fn alloc_inner<F: Fn() -> Inner>(&self, f: F) -> &Inner {
                let r: &EI = self.0.alloc(EI::I(f()));
                if let &EI::I(ref i) = r {
                    i
                } else {
                    panic!("mismatch");
                }
            }
            fn alloc_outer<F: Fn() -> Outer<'a>>(&self, f: F) -> &Outer {
                let r: &EI = self.0.alloc(EI::O(f()));
                if let &EI::O(ref o) = r {
                    o
                } else {
                    panic!("mismatch");
                }
            }
        }

        let arena = Wrap(TypedArena::new());

        let result = arena.alloc_outer(|| Outer {
            inner: arena.alloc_inner(|| Inner { value: 10 }) });

        assert_eq!(result.inner.value, 10);
    }

    #[test]
    pub fn test_copy() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(Point {
                x: 1,
                y: 2,
                z: 3,
            });
        }
    }

    #[bench]
    pub fn bench_copy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| {
            arena.alloc(Point {
                x: 1,
                y: 2,
                z: 3,
            })
        })
    }

    #[bench]
    pub fn bench_copy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            let _: Box<_> = box Point {
                x: 1,
                y: 2,
                z: 3,
            };
        })
    }

    #[bench]
    pub fn bench_copy_old_arena(b: &mut Bencher) {
        let arena = Arena::new();
        b.iter(|| {
            arena.alloc(|| {
                Point {
                    x: 1,
                    y: 2,
                    z: 3,
                }
            })
        })
    }

    #[allow(dead_code)]
    struct Noncopy {
        string: String,
        array: Vec<i32>,
    }

    #[test]
    pub fn test_noncopy() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            });
        }
    }

    #[bench]
    pub fn bench_noncopy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            })
        })
    }

    #[bench]
    pub fn bench_noncopy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            let _: Box<_> = box Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            };
        })
    }

    #[bench]
    pub fn bench_noncopy_old_arena(b: &mut Bencher) {
        let arena = Arena::new();
        b.iter(|| {
            arena.alloc(|| Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            })
        })
    }
}