From 2674b2ca985f73ce389e2550f8df69927aeabc00 Mon Sep 17 00:00:00 2001 From: Piotr Czarnecki Date: Wed, 12 Aug 2015 05:53:58 +0200 Subject: [PATCH 01/10] Implement in-place growth for RawVec --- src/liballoc/raw_vec.rs | 118 ++++++++++++++++++++++++++++++++++++---- 1 file changed, 107 insertions(+), 11 deletions(-) diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs index 92f35c08a7d..52bd62f7a66 100644 --- a/src/liballoc/raw_vec.rs +++ b/src/liballoc/raw_vec.rs @@ -240,6 +240,47 @@ impl RawVec { } } + /// Attempts to double the size of the type's backing allocation in place. This is common + /// enough to want to do that it's easiest to just have a dedicated method. Slightly + /// more efficient logic can be provided for this than the general case. + /// + /// Returns true if the reallocation attempt has succeeded, or false otherwise. + /// + /// # Panics + /// + /// * Panics if T is zero-sized on the assumption that you managed to exhaust + /// all `usize::MAX` slots in your imaginary buffer. + /// * Panics on 32-bit platforms if the requested capacity exceeds + /// `isize::MAX` bytes. + #[inline(never)] + #[cold] + pub fn double_in_place(&mut self) -> bool { + unsafe { + let elem_size = mem::size_of::(); + let align = mem::align_of::(); + + // since we set the capacity to usize::MAX when elem_size is + // 0, getting to here necessarily means the RawVec is overfull. + assert!(elem_size != 0, "capacity overflow"); + + // Since we guarantee that we never allocate more than isize::MAX bytes, + // `elem_size * self.cap <= isize::MAX` as a precondition, so this can't overflow + let new_cap = 2 * self.cap; + let new_alloc_size = new_cap * elem_size; + + alloc_guard(new_alloc_size); + let size = heap::reallocate_inplace(self.ptr() as *mut _, + self.cap * elem_size, + new_alloc_size, + align); + if size >= new_alloc_size { + // We can't directly divide `size`. + self.cap = new_cap; + } + size >= new_alloc_size + } + } + /// Ensures that the buffer contains at least enough space to hold /// `used_cap + needed_extra_cap` elements. If it doesn't already, /// will reallocate the minimum possible amount of memory necessary. @@ -300,6 +341,22 @@ impl RawVec { } } + /// Calculates the buffer's new size given that it'll hold `used_cap + + /// needed_extra_cap` elements. This logic is used in amortized reserve methods. + /// Returns `(new_capacity, new_alloc_size)`. + fn amortized_new_size(&self, used_cap: usize, needed_extra_cap: usize) -> (usize, usize) { + let elem_size = mem::size_of::(); + // Nothing we can really do about these checks :( + let required_cap = used_cap.checked_add(needed_extra_cap) + .expect("capacity overflow"); + // Cannot overflow, because `cap <= isize::MAX`, and type of `cap` is `usize`. + let double_cap = self.cap * 2; + // `double_cap` guarantees exponential growth. + let new_cap = cmp::max(double_cap, required_cap); + let new_alloc_size = new_cap.checked_mul(elem_size).expect("capacity overflow"); + (new_cap, new_alloc_size) + } + /// Ensures that the buffer contains at least enough space to hold /// `used_cap + needed_extra_cap` elements. If it doesn't already have /// enough capacity, will reallocate enough space plus comfortable slack @@ -360,17 +417,7 @@ impl RawVec { return; } - // Nothing we can really do about these checks :( - let required_cap = used_cap.checked_add(needed_extra_cap) - .expect("capacity overflow"); - - // Cannot overflow, because `cap <= isize::MAX`, and type of `cap` is `usize`. 
- let double_cap = self.cap * 2; - - // `double_cap` guarantees exponential growth. - let new_cap = cmp::max(double_cap, required_cap); - - let new_alloc_size = new_cap.checked_mul(elem_size).expect("capacity overflow"); + let (new_cap, new_alloc_size) = self.amortized_new_size(used_cap, needed_extra_cap); // FIXME: may crash and burn on over-reserve alloc_guard(new_alloc_size); @@ -393,6 +440,55 @@ impl RawVec { } } + /// Attempts to ensure that the buffer contains at least enough space to hold + /// `used_cap + needed_extra_cap` elements. If it doesn't already have + /// enough capacity, will reallocate in place enough space plus comfortable slack + /// space to get amortized `O(1)` behaviour. Will limit this behaviour + /// if it would needlessly cause itself to panic. + /// + /// If `used_cap` exceeds `self.cap()`, this may fail to actually allocate + /// the requested space. This is not really unsafe, but the unsafe + /// code *you* write that relies on the behaviour of this function may break. + /// + /// Returns true if the reallocation attempt has succeeded, or false otherwise. + /// + /// # Panics + /// + /// * Panics if the requested capacity exceeds `usize::MAX` bytes. + /// * Panics on 32-bit platforms if the requested capacity exceeds + /// `isize::MAX` bytes. + pub fn reserve_in_place(&mut self, used_cap: usize, needed_extra_cap: usize) -> bool { + unsafe { + let elem_size = mem::size_of::(); + let align = mem::align_of::(); + + // NOTE: we don't early branch on ZSTs here because we want this + // to actually catch "asking for more than usize::MAX" in that case. + // If we make it past the first branch then we are guaranteed to + // panic. + + // Don't actually need any more capacity. If the current `cap` is 0, we can't + // reallocate in place. + // Wrapping in case they give a bad `used_cap` + if self.cap().wrapping_sub(used_cap) >= needed_extra_cap || self.cap == 0 { + return false; + } + + let (_, new_alloc_size) = self.amortized_new_size(used_cap, needed_extra_cap); + // FIXME: may crash and burn on over-reserve + alloc_guard(new_alloc_size); + + let size = heap::reallocate_inplace(self.ptr() as *mut _, + self.cap * elem_size, + new_alloc_size, + align); + if size >= new_alloc_size { + self.cap = new_alloc_size / elem_size; + } + size >= new_alloc_size + } + } + /// Shrinks the allocation down to the specified amount. If the given amount /// is 0, actually completely deallocates. /// From 803e9ae67b770d8500c4ab5862e988d29118356a Mon Sep 17 00:00:00 2001 From: Piotr Czarnecki Date: Wed, 12 Aug 2015 05:52:37 +0200 Subject: [PATCH 02/10] Improve TypedArena's chunk allocation scheme Closes #17931 Fixes #18037 --- src/libarena/lib.rs | 165 ++++++++++++++++++++++++-------------------- 1 file changed, 90 insertions(+), 75 deletions(-) diff --git a/src/libarena/lib.rs b/src/libarena/lib.rs index b5107e411e8..bb6521254a3 100644 --- a/src/libarena/lib.rs +++ b/src/libarena/lib.rs @@ -31,9 +31,10 @@ #![feature(alloc)] #![feature(box_syntax)] #![feature(core_intrinsics)] +#![feature(drop_in_place)] +#![feature(raw)] #![feature(heap_api)] #![feature(oom)] -#![feature(ptr_as_ref)] #![feature(raw)] #![feature(staged_api)] #![feature(dropck_parametricity)] @@ -47,9 +48,13 @@ use std::intrinsics; use std::marker; use std::mem; use std::ptr; +use std::raw; +use std::raw::Repr; use std::rc::Rc; +use std::slice; -use alloc::heap::{allocate, deallocate}; +use alloc::heap; +use alloc::raw_vec::RawVec; // The way arena uses arrays is really deeply awful. 
The arrays are // allocated, and have capacities reserved, but the fill for the array @@ -366,14 +371,14 @@ fn test_arena_destructors_fail() { /// A faster arena that can hold objects of only one type. pub struct TypedArena { /// A pointer to the next object to be allocated. - ptr: Cell<*const T>, + ptr: Cell<*mut T>, /// A pointer to the end of the allocated area. When this pointer is /// reached, a new chunk is allocated. - end: Cell<*const T>, + end: Cell<*mut T>, - /// A pointer to the first arena segment. - first: RefCell<*mut TypedArenaChunk>, + /// A vector arena segments. + chunks: RefCell>>, /// Marker indicating that dropping the arena causes its owned /// instances of `T` to be dropped. @@ -381,88 +386,60 @@ pub struct TypedArena { } struct TypedArenaChunk { - marker: marker::PhantomData, - /// Pointer to the next arena segment. - next: *mut TypedArenaChunk, - - /// The number of elements that this chunk can hold. - capacity: usize, - - // Objects follow here, suitably aligned. -} - -fn calculate_size(capacity: usize) -> usize { - let mut size = mem::size_of::>(); - size = round_up(size, mem::align_of::()); - let elem_size = mem::size_of::(); - let elems_size = elem_size.checked_mul(capacity).unwrap(); - size = size.checked_add(elems_size).unwrap(); - size + storage: RawVec, } impl TypedArenaChunk { #[inline] - unsafe fn new(next: *mut TypedArenaChunk, capacity: usize) -> *mut TypedArenaChunk { - let size = calculate_size::(capacity); - let chunk = - allocate(size, mem::align_of::>()) as *mut TypedArenaChunk; - if chunk.is_null() { - alloc::oom() - } - (*chunk).next = next; - (*chunk).capacity = capacity; - chunk + unsafe fn new(capacity: usize) -> TypedArenaChunk { + TypedArenaChunk { storage: RawVec::with_capacity(capacity) } } - /// Destroys this arena chunk. If the type descriptor is supplied, the - /// drop glue is called; otherwise, drop glue is not called. + /// Destroys this arena chunk. #[inline] unsafe fn destroy(&mut self, len: usize) { - // Destroy all the allocated objects. + // The branch on needs_drop() is an -O1 performance optimization. + // Without the branch, dropping TypedArena takes linear time. if intrinsics::needs_drop::() { let mut start = self.start(); + // Destroy all allocated objects. for _ in 0..len { - ptr::read(start as *const T); // run the destructor on the pointer - start = start.offset(mem::size_of::() as isize) + ptr::drop_in_place(start); + start = start.offset(1); } } - - // Destroy the next chunk. - let next = self.next; - let size = calculate_size::(self.capacity); - let self_ptr: *mut TypedArenaChunk = self; - deallocate(self_ptr as *mut u8, - size, - mem::align_of::>()); - if !next.is_null() { - let capacity = (*next).capacity; - (*next).destroy(capacity); - } } // Returns a pointer to the first allocated object. #[inline] - fn start(&self) -> *const u8 { - let this: *const TypedArenaChunk = self; - unsafe { round_up(this.offset(1) as usize, mem::align_of::()) as *const u8 } + fn start(&self) -> *mut T { + self.storage.ptr() } // Returns a pointer to the end of the allocated space. #[inline] - fn end(&self) -> *const u8 { + fn end(&self) -> *mut T { unsafe { - let size = mem::size_of::().checked_mul(self.capacity).unwrap(); - self.start().offset(size as isize) + if mem::size_of::() == 0 { + // A pointer as large as possible for zero-sized elements. 
+ !0 as *mut T + } else { + self.start().offset(self.storage.cap() as isize) + } } } } +const PAGE: usize = 4096; + impl TypedArena { - /// Creates a new `TypedArena` with preallocated space for eight objects. + /// Creates a new `TypedArena` with preallocated space for many objects. #[inline] pub fn new() -> TypedArena { - TypedArena::with_capacity(8) + // Reserve at least one page. + let elem_size = cmp::max(1, mem::size_of::()); + TypedArena::with_capacity(PAGE / elem_size) } /// Creates a new `TypedArena` with preallocated space for the given number of @@ -470,11 +447,11 @@ impl TypedArena { #[inline] pub fn with_capacity(capacity: usize) -> TypedArena { unsafe { - let chunk = TypedArenaChunk::::new(ptr::null_mut(), capacity); + let chunk = TypedArenaChunk::::new(cmp::max(1, capacity)); TypedArena { - ptr: Cell::new((*chunk).start() as *const T), - end: Cell::new((*chunk).end() as *const T), - first: RefCell::new(chunk), + ptr: Cell::new(chunk.start()), + end: Cell::new(chunk.end()), + chunks: RefCell::new(vec![chunk]), _own: marker::PhantomData, } } @@ -488,23 +465,39 @@ impl TypedArena { } unsafe { - let ptr: &mut T = &mut *(self.ptr.get() as *mut T); - ptr::write(ptr, object); - self.ptr.set(self.ptr.get().offset(1)); - ptr + if mem::size_of::() == 0 { + self.ptr.set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1) as *mut T); + let ptr = heap::EMPTY as *mut T; + // Don't drop the object. This `write` is equivalent to `forget`. + ptr::write(ptr, object); + &mut *ptr + } else { + let ptr = self.ptr.get(); + // Advance the pointer. + self.ptr.set(self.ptr.get().offset(1)); + // Write into uninitialized memory. + ptr::write(ptr, object); + &mut *ptr + } } } /// Grows the arena. #[inline(never)] + #[cold] fn grow(&self) { unsafe { - let chunk = *self.first.borrow_mut(); - let new_capacity = (*chunk).capacity.checked_mul(2).unwrap(); - let chunk = TypedArenaChunk::::new(chunk, new_capacity); - self.ptr.set((*chunk).start() as *const T); - self.end.set((*chunk).end() as *const T); - *self.first.borrow_mut() = chunk + let mut chunks = self.chunks.borrow_mut(); + let prev_capacity = chunks.last().unwrap().storage.cap(); + let new_capacity = prev_capacity.checked_mul(2).unwrap(); + if chunks.last_mut().unwrap().storage.double_in_place() { + self.end.set(chunks.last().unwrap().end()); + } else { + let chunk = TypedArenaChunk::::new(new_capacity); + self.ptr.set(chunk.start()); + self.end.set(chunk.end()); + chunks.push(chunk); + } } } } @@ -514,12 +507,26 @@ impl Drop for TypedArena { fn drop(&mut self) { unsafe { // Determine how much was filled. - let start = self.first.borrow().as_ref().unwrap().start() as usize; + let mut chunks_borrow = self.chunks.borrow_mut(); + let mut last_chunk = chunks_borrow.pop().unwrap(); + let start = last_chunk.start() as usize; let end = self.ptr.get() as usize; - let diff = (end - start) / mem::size_of::(); + let diff = if mem::size_of::() == 0 { + // Avoid division by zero. + end - start + } else { + (end - start) / mem::size_of::() + }; // Pass that to the `destroy` method. - (**self.first.borrow_mut()).destroy(diff) + last_chunk.destroy(diff); + // Destroy this chunk. 
+ let _: RawVec = mem::transmute(last_chunk); + + for chunk in chunks_borrow.iter_mut() { + let cap = chunk.storage.cap(); + chunk.destroy(cap); + } } } } @@ -657,4 +664,12 @@ mod tests { }) }) } + + #[test] + pub fn test_zero_sized() { + let arena = TypedArena::new(); + for _ in 0..100000 { + arena.alloc(()); + } + } } From d42693a52b9486a9ba7ad059c49cae21ab593c00 Mon Sep 17 00:00:00 2001 From: Piotr Czarnecki Date: Thu, 13 Aug 2015 18:48:34 +0200 Subject: [PATCH 03/10] TypedArena implements Send --- src/libarena/lib.rs | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/src/libarena/lib.rs b/src/libarena/lib.rs index bb6521254a3..851a864a73b 100644 --- a/src/libarena/lib.rs +++ b/src/libarena/lib.rs @@ -45,7 +45,7 @@ extern crate alloc; use std::cell::{Cell, RefCell}; use std::cmp; use std::intrinsics; -use std::marker; +use std::marker::{PhantomData, Send}; use std::mem; use std::ptr; use std::raw; @@ -103,7 +103,7 @@ pub struct Arena<'longer_than_self> { head: RefCell, copy_head: RefCell, chunks: RefCell>, - _marker: marker::PhantomData<*mut &'longer_than_self ()>, + _marker: PhantomData<*mut &'longer_than_self ()>, } impl<'a> Arena<'a> { @@ -118,7 +118,7 @@ impl<'a> Arena<'a> { head: RefCell::new(chunk(initial_size, false)), copy_head: RefCell::new(chunk(initial_size, true)), chunks: RefCell::new(Vec::new()), - _marker: marker::PhantomData, + _marker: PhantomData, } } } @@ -382,7 +382,7 @@ pub struct TypedArena { /// Marker indicating that dropping the arena causes its owned /// instances of `T` to be dropped. - _own: marker::PhantomData, + _own: PhantomData, } struct TypedArenaChunk { @@ -452,7 +452,7 @@ impl TypedArena { ptr: Cell::new(chunk.start()), end: Cell::new(chunk.end()), chunks: RefCell::new(vec![chunk]), - _own: marker::PhantomData, + _own: PhantomData, } } } @@ -531,6 +531,8 @@ impl Drop for TypedArena { } } +unsafe impl Send for TypedArena {} + #[cfg(test)] mod tests { extern crate test; From 0d3160c1f1467b82e791b9902165a7054554cb38 Mon Sep 17 00:00:00 2001 From: Piotr Czarnecki Date: Wed, 12 Aug 2015 09:18:41 +0200 Subject: [PATCH 04/10] Rework Arena structure Implements in-place growth. Removes the use of Rc within Arena. Closes #22847 --- src/libarena/lib.rs | 70 +++++++++++++++++++++++---------------------- 1 file changed, 36 insertions(+), 34 deletions(-) diff --git a/src/libarena/lib.rs b/src/libarena/lib.rs index 851a864a73b..532979ddeb0 100644 --- a/src/libarena/lib.rs +++ b/src/libarena/lib.rs @@ -29,13 +29,11 @@ test(no_crate_inject, attr(deny(warnings))))] #![feature(alloc)] -#![feature(box_syntax)] #![feature(core_intrinsics)] #![feature(drop_in_place)] +#![feature(heap_api)] #![feature(raw)] #![feature(heap_api)] -#![feature(oom)] -#![feature(raw)] #![feature(staged_api)] #![feature(dropck_parametricity)] #![cfg_attr(test, feature(test))] @@ -48,10 +46,6 @@ use std::intrinsics; use std::marker::{PhantomData, Send}; use std::mem; use std::ptr; -use std::raw; -use std::raw::Repr; -use std::rc::Rc; -use std::slice; use alloc::heap; use alloc::raw_vec::RawVec; @@ -59,20 +53,27 @@ use alloc::raw_vec::RawVec; // The way arena uses arrays is really deeply awful. The arrays are // allocated, and have capacities reserved, but the fill for the array // will always stay at 0. 
-#[derive(Clone, PartialEq)] struct Chunk { - data: Rc>>, + data: RawVec, fill: Cell, is_copy: Cell, } impl Chunk { + fn new(size: usize, is_copy: bool) -> Chunk { + Chunk { + data: RawVec::with_capacity(size), + fill: Cell::new(0), + is_copy: Cell::new(is_copy), + } + } + fn capacity(&self) -> usize { - self.data.borrow().capacity() + self.data.cap() } unsafe fn as_ptr(&self) -> *const u8 { - self.data.borrow().as_ptr() + self.data.ptr() } } @@ -115,22 +116,14 @@ impl<'a> Arena<'a> { /// Allocates a new Arena with `initial_size` bytes preallocated. pub fn new_with_size(initial_size: usize) -> Arena<'a> { Arena { - head: RefCell::new(chunk(initial_size, false)), - copy_head: RefCell::new(chunk(initial_size, true)), + head: RefCell::new(Chunk::new(initial_size, false)), + copy_head: RefCell::new(Chunk::new(initial_size, true)), chunks: RefCell::new(Vec::new()), _marker: PhantomData, } } } -fn chunk(size: usize, is_copy: bool) -> Chunk { - Chunk { - data: Rc::new(RefCell::new(Vec::with_capacity(size))), - fill: Cell::new(0), - is_copy: Cell::new(is_copy), - } -} - impl<'longer_than_self> Drop for Arena<'longer_than_self> { fn drop(&mut self) { unsafe { @@ -165,8 +158,6 @@ unsafe fn destroy_chunk(chunk: &Chunk) { let start = round_up(after_tydesc, align); - // debug!("freeing object: idx = {}, size = {}, align = {}, done = {}", - // start, size, align, is_done); if is_done { ((*tydesc).drop_glue)(buf.offset(start as isize) as *const i8); } @@ -215,17 +206,20 @@ unsafe fn get_tydesc() -> *const TyDesc { } impl<'longer_than_self> Arena<'longer_than_self> { + #[inline] fn chunk_size(&self) -> usize { self.copy_head.borrow().capacity() } // Functions for the POD part of the arena + #[cold] fn alloc_copy_grow(&self, n_bytes: usize, align: usize) -> *const u8 { // Allocate a new chunk. let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size()); - self.chunks.borrow_mut().push(self.copy_head.borrow().clone()); - - *self.copy_head.borrow_mut() = chunk((new_min_chunk_size + 1).next_power_of_two(), true); + let new_chunk = Chunk::new((new_min_chunk_size + 1).next_power_of_two(), true); + let mut copy_head = self.copy_head.borrow_mut(); + let old_chunk = mem::replace(&mut *copy_head, new_chunk); + self.chunks.borrow_mut().push(old_chunk); self.alloc_copy_inner(n_bytes, align) } @@ -233,10 +227,13 @@ impl<'longer_than_self> Arena<'longer_than_self> { #[inline] fn alloc_copy_inner(&self, n_bytes: usize, align: usize) -> *const u8 { let start = round_up(self.copy_head.borrow().fill.get(), align); + let chunk_size = self.chunk_size(); let end = start + n_bytes; - if end > self.chunk_size() { - return self.alloc_copy_grow(n_bytes, align); + if end > chunk_size { + if !self.copy_head.borrow_mut().data.reserve_in_place(start, n_bytes) { + return self.alloc_copy_grow(n_bytes, align); + } } let copy_head = self.copy_head.borrow(); @@ -261,9 +258,10 @@ impl<'longer_than_self> Arena<'longer_than_self> { fn alloc_noncopy_grow(&self, n_bytes: usize, align: usize) -> (*const u8, *const u8) { // Allocate a new chunk. 
let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size()); - self.chunks.borrow_mut().push(self.head.borrow().clone()); - - *self.head.borrow_mut() = chunk((new_min_chunk_size + 1).next_power_of_two(), false); + let new_chunk = Chunk::new((new_min_chunk_size + 1).next_power_of_two(), false); + let mut head = self.head.borrow_mut(); + let old_chunk = mem::replace(&mut *head, new_chunk); + self.chunks.borrow_mut().push(old_chunk); self.alloc_noncopy_inner(n_bytes, align) } @@ -606,7 +604,11 @@ mod tests { #[bench] pub fn bench_copy_nonarena(b: &mut Bencher) { b.iter(|| { - let _: Box<_> = box Point { x: 1, y: 2, z: 3 }; + let _: Box<_> = Box::new(Point { + x: 1, + y: 2, + z: 3 + }); }) } @@ -647,10 +649,10 @@ mod tests { #[bench] pub fn bench_noncopy_nonarena(b: &mut Bencher) { b.iter(|| { - let _: Box<_> = box Noncopy { + let _: Box<_> = Box::new(Noncopy { string: "hello world".to_string(), array: vec![1, 2, 3, 4, 5], - }; + }); }) } From 5f1b1ec8fed0b4e79f3b00fac6698213d15b6027 Mon Sep 17 00:00:00 2001 From: Piotr Czarnecki Date: Wed, 12 Aug 2015 09:50:52 +0200 Subject: [PATCH 05/10] Rework Arena code --- src/libarena/lib.rs | 161 ++++++++++++++++++++++---------------------- 1 file changed, 79 insertions(+), 82 deletions(-) diff --git a/src/libarena/lib.rs b/src/libarena/lib.rs index 532979ddeb0..3197a9e72bd 100644 --- a/src/libarena/lib.rs +++ b/src/libarena/lib.rs @@ -50,12 +50,11 @@ use std::ptr; use alloc::heap; use alloc::raw_vec::RawVec; -// The way arena uses arrays is really deeply awful. The arrays are -// allocated, and have capacities reserved, but the fill for the array -// will always stay at 0. struct Chunk { data: RawVec, + /// Index of the first unused byte. fill: Cell, + /// Indicates whether objects with destructors are stored in this chunk. is_copy: Cell, } @@ -75,12 +74,37 @@ impl Chunk { unsafe fn as_ptr(&self) -> *const u8 { self.data.ptr() } + + // Walk down a chunk, running the destructors for any objects stored + // in it. + unsafe fn destroy(&self) { + let mut idx = 0; + let buf = self.as_ptr(); + let fill = self.fill.get(); + + while idx < fill { + let tydesc_data = buf.offset(idx as isize) as *const usize; + let (tydesc, is_done) = un_bitpack_tydesc_ptr(*tydesc_data); + let (size, align) = ((*tydesc).size, (*tydesc).align); + + let after_tydesc = idx + mem::size_of::<*const TyDesc>(); + + let start = round_up(after_tydesc, align); + + if is_done { + ((*tydesc).drop_glue)(buf.offset(start as isize) as *const i8); + } + + // Find where the next tydesc lives + idx = round_up(start + size, mem::align_of::<*const TyDesc>()); + } + } } /// A slower reflection-based arena that can allocate objects of any type. /// -/// This arena uses `Vec` as a backing store to allocate objects from. For -/// each allocated object, the arena stores a pointer to the type descriptor +/// This arena uses `RawVec` as a backing store to allocate objects from. +/// For each allocated object, the arena stores a pointer to the type descriptor /// followed by the object (potentially with alignment padding after each /// element). 
When the arena is destroyed, it iterates through all of its /// chunks, and uses the tydesc information to trace through the objects, @@ -127,10 +151,10 @@ impl<'a> Arena<'a> { impl<'longer_than_self> Drop for Arena<'longer_than_self> { fn drop(&mut self) { unsafe { - destroy_chunk(&*self.head.borrow()); + self.head.borrow().destroy(); for chunk in self.chunks.borrow().iter() { if !chunk.is_copy.get() { - destroy_chunk(chunk); + chunk.destroy(); } } } @@ -142,31 +166,6 @@ fn round_up(base: usize, align: usize) -> usize { (base.checked_add(align - 1)).unwrap() & !(align - 1) } -// Walk down a chunk, running the destructors for any objects stored -// in it. -unsafe fn destroy_chunk(chunk: &Chunk) { - let mut idx = 0; - let buf = chunk.as_ptr(); - let fill = chunk.fill.get(); - - while idx < fill { - let tydesc_data = buf.offset(idx as isize) as *const usize; - let (tydesc, is_done) = un_bitpack_tydesc_ptr(*tydesc_data); - let (size, align) = ((*tydesc).size, (*tydesc).align); - - let after_tydesc = idx + mem::size_of::<*const TyDesc>(); - - let start = round_up(after_tydesc, align); - - if is_done { - ((*tydesc).drop_glue)(buf.offset(start as isize) as *const i8); - } - - // Find where the next tydesc lives - idx = round_up(start + size, mem::align_of::<*const TyDesc>()); - } -} - // We encode whether the object a tydesc describes has been // initialized in the arena in the low bit of the tydesc pointer. This // is necessary in order to properly do cleanup if a panic occurs @@ -183,6 +182,9 @@ fn un_bitpack_tydesc_ptr(p: usize) -> (*const TyDesc, bool) { // HACK(eddyb) TyDesc replacement using a trait object vtable. // This could be replaced in the future with a custom DST layout, // or `&'static (drop_glue, size, align)` created by a `const fn`. +// Requirements: +// * rvalue promotion (issue #1056) +// * mem::{size_of, align_of} must be const fns struct TyDesc { drop_glue: fn(*const i8), size: usize, @@ -198,7 +200,7 @@ impl AllTypes for T {} unsafe fn get_tydesc() -> *const TyDesc { use std::raw::TraitObject; - let ptr = &*(1 as *const T); + let ptr = &*(heap::EMPTY as *const T); // Can use any trait that is implemented for all types. let obj = mem::transmute::<&AllTypes, TraitObject>(ptr); @@ -206,37 +208,44 @@ unsafe fn get_tydesc() -> *const TyDesc { } impl<'longer_than_self> Arena<'longer_than_self> { - #[inline] - fn chunk_size(&self) -> usize { - self.copy_head.borrow().capacity() - } - - // Functions for the POD part of the arena + // Grows a given chunk and returns `false`, or replaces it with a bigger + // chunk and returns `true`. + // This method is shared by both parts of the arena. #[cold] - fn alloc_copy_grow(&self, n_bytes: usize, align: usize) -> *const u8 { - // Allocate a new chunk. - let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size()); - let new_chunk = Chunk::new((new_min_chunk_size + 1).next_power_of_two(), true); - let mut copy_head = self.copy_head.borrow_mut(); - let old_chunk = mem::replace(&mut *copy_head, new_chunk); - self.chunks.borrow_mut().push(old_chunk); - - self.alloc_copy_inner(n_bytes, align) + fn alloc_grow(&self, head: &mut Chunk, used_cap: usize, n_bytes: usize) -> bool { + if head.data.reserve_in_place(used_cap, n_bytes) { + // In-place reallocation succeeded. + false + } else { + // Allocate a new chunk. 
+ let new_min_chunk_size = cmp::max(n_bytes, head.capacity()); + let new_chunk = Chunk::new((new_min_chunk_size + 1).next_power_of_two(), false); + let old_chunk = mem::replace(head, new_chunk); + if old_chunk.fill.get() != 0 { + self.chunks.borrow_mut().push(old_chunk); + } + true + } } + // Functions for the copyable part of the arena. + #[inline] fn alloc_copy_inner(&self, n_bytes: usize, align: usize) -> *const u8 { - let start = round_up(self.copy_head.borrow().fill.get(), align); - let chunk_size = self.chunk_size(); + let mut copy_head = self.copy_head.borrow_mut(); + let fill = copy_head.fill.get(); + let mut start = round_up(fill, align); + let mut end = start + n_bytes; - let end = start + n_bytes; - if end > chunk_size { - if !self.copy_head.borrow_mut().data.reserve_in_place(start, n_bytes) { - return self.alloc_copy_grow(n_bytes, align); + if end > copy_head.capacity() { + if self.alloc_grow(&mut *copy_head, fill, end - fill) { + // Continuing with a newly allocated chunk + start = 0; + end = n_bytes; + copy_head.is_copy.set(true); } } - let copy_head = self.copy_head.borrow(); copy_head.fill.set(end); unsafe { copy_head.as_ptr().offset(start as isize) } @@ -254,40 +263,28 @@ impl<'longer_than_self> Arena<'longer_than_self> { } } - // Functions for the non-POD part of the arena - fn alloc_noncopy_grow(&self, n_bytes: usize, align: usize) -> (*const u8, *const u8) { - // Allocate a new chunk. - let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size()); - let new_chunk = Chunk::new((new_min_chunk_size + 1).next_power_of_two(), false); - let mut head = self.head.borrow_mut(); - let old_chunk = mem::replace(&mut *head, new_chunk); - self.chunks.borrow_mut().push(old_chunk); - - self.alloc_noncopy_inner(n_bytes, align) - } + // Functions for the non-copyable part of the arena. #[inline] fn alloc_noncopy_inner(&self, n_bytes: usize, align: usize) -> (*const u8, *const u8) { - // Be careful to not maintain any `head` borrows active, because - // `alloc_noncopy_grow` borrows it mutably. 
- let (start, end, tydesc_start, head_capacity) = { - let head = self.head.borrow(); - let fill = head.fill.get(); + let mut head = self.head.borrow_mut(); + let fill = head.fill.get(); - let tydesc_start = fill; - let after_tydesc = fill + mem::size_of::<*const TyDesc>(); - let start = round_up(after_tydesc, align); - let end = start + n_bytes; + let mut tydesc_start = fill; + let after_tydesc = fill + mem::size_of::<*const TyDesc>(); + let mut start = round_up(after_tydesc, align); + let mut end = round_up(start + n_bytes, mem::align_of::<*const TyDesc>()); - (start, end, tydesc_start, head.capacity()) - }; - - if end > head_capacity { - return self.alloc_noncopy_grow(n_bytes, align); + if end > head.capacity() { + if self.alloc_grow(&mut *head, tydesc_start, end - tydesc_start) { + // Continuing with a newly allocated chunk + tydesc_start = 0; + start = round_up(mem::size_of::<*const TyDesc>(), align); + end = round_up(start + n_bytes, mem::align_of::<*const TyDesc>()); + } } - let head = self.head.borrow(); - head.fill.set(round_up(end, mem::align_of::<*const TyDesc>())); + head.fill.set(end); unsafe { let buf = head.as_ptr(); From 7899699a4af9c4a286daa148bf17be0a0b740756 Mon Sep 17 00:00:00 2001 From: Piotr Czarnecki Date: Thu, 13 Aug 2015 18:10:19 +0200 Subject: [PATCH 06/10] Implement fn alloc_bytes for Arena and fn clear for both arena types Closes #18471 Closes #18261 --- src/libarena/lib.rs | 161 +++++++++++++++++++++++++++++++++++++++----- 1 file changed, 143 insertions(+), 18 deletions(-) diff --git a/src/libarena/lib.rs b/src/libarena/lib.rs index 3197a9e72bd..8c42fcfb81f 100644 --- a/src/libarena/lib.rs +++ b/src/libarena/lib.rs @@ -46,6 +46,7 @@ use std::intrinsics; use std::marker::{PhantomData, Send}; use std::mem; use std::ptr; +use std::slice; use alloc::heap; use alloc::raw_vec::RawVec; @@ -122,9 +123,8 @@ impl Chunk { /// plain-old-data (`Copy` types) and means we don't need to waste time running /// their destructors. pub struct Arena<'longer_than_self> { - // The head is separated out from the list as a unbenchmarked - // microoptimization, to avoid needing to case on the list to access the - // head. + // The heads are separated out from the list as a unbenchmarked + // microoptimization, to avoid needing to case on the list to access a head. head: RefCell, copy_head: RefCell, chunks: RefCell>, @@ -329,6 +329,37 @@ impl<'longer_than_self> Arena<'longer_than_self> { } } } + + /// Allocates a slice of bytes of requested length. The bytes are not guaranteed to be zero + /// if the arena has previously been cleared. + /// + /// # Panics + /// + /// Panics if the requested length is too large and causes overflow. + pub fn alloc_bytes(&self, len: usize) -> &mut [u8] { + unsafe { + // Check for overflow. + self.copy_head.borrow().fill.get().checked_add(len).expect("length overflow"); + let ptr = self.alloc_copy_inner(len, 1); + intrinsics::assume(!ptr.is_null()); + slice::from_raw_parts_mut(ptr as *mut _, len) + } + } + + /// Clears the arena. Deallocates all but the longest chunk which may be reused. + pub fn clear(&mut self) { + unsafe { + self.head.borrow().destroy(); + self.head.borrow().fill.set(0); + self.copy_head.borrow().fill.set(0); + for chunk in self.chunks.borrow().iter() { + if !chunk.is_copy.get() { + chunk.destroy(); + } + } + self.chunks.borrow_mut().clear(); + } + } } #[test] @@ -495,6 +526,45 @@ impl TypedArena { } } } + /// Clears the arena. Deallocates all but the longest chunk which may be reused. 
+ pub fn clear(&mut self) { + unsafe { + // Clear the last chunk, which is partially filled. + let mut chunks_borrow = self.chunks.borrow_mut(); + let last_idx = chunks_borrow.len() - 1; + self.clear_last_chunk(&mut chunks_borrow[last_idx]); + // If `T` is ZST, code below has no effect. + for mut chunk in chunks_borrow.drain(..last_idx) { + let cap = chunk.storage.cap(); + chunk.destroy(cap); + } + } + } + + // Drops the contents of the last chunk. The last chunk is partially empty, unlike all other + // chunks. + fn clear_last_chunk(&self, last_chunk: &mut TypedArenaChunk) { + // Determine how much was filled. + let start = last_chunk.start() as usize; + // We obtain the value of the pointer to the first uninitialized element. + let end = self.ptr.get() as usize; + // We then calculate the number of elements to be dropped in the last chunk, + // which is the filled area's length. + let diff = if mem::size_of::() == 0 { + // `T` is ZST. It can't have a drop flag, so the value here doesn't matter. We get + // the number of zero-sized values in the last and only chunk, just out of caution. + // Recall that `end` was incremented for each allocated value. + end - start + } else { + (end - start) / mem::size_of::() + }; + // Pass that to the `destroy` method. + unsafe { + last_chunk.destroy(diff); + } + // Reset the chunk. + self.ptr.set(last_chunk.start()); + } } impl Drop for TypedArena { @@ -504,24 +574,14 @@ impl Drop for TypedArena { // Determine how much was filled. let mut chunks_borrow = self.chunks.borrow_mut(); let mut last_chunk = chunks_borrow.pop().unwrap(); - let start = last_chunk.start() as usize; - let end = self.ptr.get() as usize; - let diff = if mem::size_of::() == 0 { - // Avoid division by zero. - end - start - } else { - (end - start) / mem::size_of::() - }; - - // Pass that to the `destroy` method. - last_chunk.destroy(diff); - // Destroy this chunk. - let _: RawVec = mem::transmute(last_chunk); - + // Drop the contents of the last chunk. + self.clear_last_chunk(&mut last_chunk); + // The last chunk will be dropped. Destroy all other chunks. for chunk in chunks_borrow.iter_mut() { let cap = chunk.storage.cap(); chunk.destroy(cap); } + // RawVec handles deallocation of `last_chunk` and `self.chunks`. 
} } } @@ -533,6 +593,7 @@ mod tests { extern crate test; use self::test::Bencher; use super::{Arena, TypedArena}; + use std::rc::Rc; #[allow(dead_code)] struct Point { @@ -667,10 +728,74 @@ mod tests { } #[test] - pub fn test_zero_sized() { + pub fn test_typed_arena_zero_sized() { let arena = TypedArena::new(); for _ in 0..100000 { arena.alloc(()); } } + + #[test] + pub fn test_arena_zero_sized() { + let arena = Arena::new(); + for _ in 0..1000 { + for _ in 0..100 { + arena.alloc(|| ()); + } + arena.alloc(|| Point { + x: 1, + y: 2, + z: 3, + }); + } + } + + #[test] + pub fn test_typed_arena_clear() { + let mut arena = TypedArena::new(); + for _ in 0..10 { + arena.clear(); + for _ in 0..10000 { + arena.alloc(Point { + x: 1, + y: 2, + z: 3, + }); + } + } + } + + #[test] + pub fn test_arena_clear() { + let mut arena = Arena::new(); + for _ in 0..10 { + arena.clear(); + for _ in 0..10000 { + arena.alloc(|| Point { + x: 1, + y: 2, + z: 3, + }); + arena.alloc(|| Noncopy { + string: "hello world".to_string(), + array: vec![], + }); + } + } + } + + #[test] + pub fn test_arena_alloc_bytes() { + let arena = Arena::new(); + for i in 0..10000 { + arena.alloc(|| Point { + x: 1, + y: 2, + z: 3, + }); + for byte in arena.alloc_bytes(i % 42).iter_mut() { + *byte = i as u8; + } + } + } } From 72a5bb73c1056dcb34fad4af138258603bb36d18 Mon Sep 17 00:00:00 2001 From: Piotr Czarnecki Date: Fri, 14 Aug 2015 14:20:09 +0200 Subject: [PATCH 07/10] Move tests around --- src/libarena/lib.rs | 130 ++++++++++++++++++++++---------------------- 1 file changed, 64 insertions(+), 66 deletions(-) diff --git a/src/libarena/lib.rs b/src/libarena/lib.rs index 8c42fcfb81f..fe88d51470d 100644 --- a/src/libarena/lib.rs +++ b/src/libarena/lib.rs @@ -362,38 +362,6 @@ impl<'longer_than_self> Arena<'longer_than_self> { } } -#[test] -fn test_arena_destructors() { - let arena = Arena::new(); - for i in 0..10 { - // Arena allocate something with drop glue to make sure it - // doesn't leak. - arena.alloc(|| Rc::new(i)); - // Allocate something with funny size and alignment, to keep - // things interesting. - arena.alloc(|| [0u8, 1u8, 2u8]); - } -} - -#[test] -#[should_panic] -fn test_arena_destructors_fail() { - let arena = Arena::new(); - // Put some stuff in the arena. - for i in 0..10 { - // Arena allocate something with drop glue to make sure it - // doesn't leak. - arena.alloc(|| Rc::new(i)); - // Allocate something with funny size and alignment, to keep - // things interesting. - arena.alloc(|| [0u8, 1, 2]); - } - // Now, panic while allocating - arena.alloc::, _>(|| { - panic!(); - }); -} - /// A faster arena that can hold objects of only one type. pub struct TypedArena { /// A pointer to the next object to be allocated. 
@@ -693,40 +661,6 @@ mod tests { } } - #[bench] - pub fn bench_noncopy(b: &mut Bencher) { - let arena = TypedArena::new(); - b.iter(|| { - arena.alloc(Noncopy { - string: "hello world".to_string(), - array: vec![1, 2, 3, 4, 5], - }) - }) - } - - #[bench] - pub fn bench_noncopy_nonarena(b: &mut Bencher) { - b.iter(|| { - let _: Box<_> = Box::new(Noncopy { - string: "hello world".to_string(), - array: vec![1, 2, 3, 4, 5], - }); - }) - } - - #[bench] - pub fn bench_noncopy_old_arena(b: &mut Bencher) { - let arena = Arena::new(); - b.iter(|| { - arena.alloc(|| { - Noncopy { - string: "hello world".to_string(), - array: vec![1, 2, 3, 4, 5], - } - }) - }) - } - #[test] pub fn test_typed_arena_zero_sized() { let arena = TypedArena::new(); @@ -798,4 +732,68 @@ mod tests { } } } + + #[test] + fn test_arena_destructors() { + let arena = Arena::new(); + for i in 0..10 { + // Arena allocate something with drop glue to make sure it + // doesn't leak. + arena.alloc(|| Rc::new(i)); + // Allocate something with funny size and alignment, to keep + // things interesting. + arena.alloc(|| [0u8, 1u8, 2u8]); + } + } + + #[test] + #[should_panic] + fn test_arena_destructors_fail() { + let arena = Arena::new(); + // Put some stuff in the arena. + for i in 0..10 { + // Arena allocate something with drop glue to make sure it + // doesn't leak. + arena.alloc(|| { Rc::new(i) }); + // Allocate something with funny size and alignment, to keep + // things interesting. + arena.alloc(|| { [0u8, 1, 2] }); + } + // Now, panic while allocating + arena.alloc::, _>(|| { + panic!(); + }); + } + + #[bench] + pub fn bench_noncopy(b: &mut Bencher) { + let arena = TypedArena::new(); + b.iter(|| { + arena.alloc(Noncopy { + string: "hello world".to_string(), + array: vec!( 1, 2, 3, 4, 5 ), + }) + }) + } + + #[bench] + pub fn bench_noncopy_nonarena(b: &mut Bencher) { + b.iter(|| { + let _: Box<_> = Box::new(Noncopy { + string: "hello world".to_string(), + array: vec!( 1, 2, 3, 4, 5 ), + }); + }) + } + + #[bench] + pub fn bench_noncopy_old_arena(b: &mut Bencher) { + let arena = Arena::new(); + b.iter(|| { + arena.alloc(|| Noncopy { + string: "hello world".to_string(), + array: vec!( 1, 2, 3, 4, 5 ), + }) + }) + } } From 100a4698defac5d060ecb89c30d9c5f0dc770c15 Mon Sep 17 00:00:00 2001 From: Piotr Czarnecki Date: Thu, 3 Dec 2015 08:06:16 +0100 Subject: [PATCH 08/10] Ran rustfmt --- src/libarena/lib.rs | 54 ++++++++++++++++----------------------------- 1 file changed, 19 insertions(+), 35 deletions(-) diff --git a/src/libarena/lib.rs b/src/libarena/lib.rs index fe88d51470d..8aafc115a29 100644 --- a/src/libarena/lib.rs +++ b/src/libarena/lib.rs @@ -630,11 +630,7 @@ mod tests { #[bench] pub fn bench_copy_nonarena(b: &mut Bencher) { b.iter(|| { - let _: Box<_> = Box::new(Point { - x: 1, - y: 2, - z: 3 - }); + let _: Box<_> = Box::new(Point { x: 1, y: 2, z: 3 }); }) } @@ -676,11 +672,7 @@ mod tests { for _ in 0..100 { arena.alloc(|| ()); } - arena.alloc(|| Point { - x: 1, - y: 2, - z: 3, - }); + arena.alloc(|| Point { x: 1, y: 2, z: 3 }); } } @@ -690,11 +682,7 @@ mod tests { for _ in 0..10 { arena.clear(); for _ in 0..10000 { - arena.alloc(Point { - x: 1, - y: 2, - z: 3, - }); + arena.alloc(Point { x: 1, y: 2, z: 3 }); } } } @@ -705,14 +693,12 @@ mod tests { for _ in 0..10 { arena.clear(); for _ in 0..10000 { - arena.alloc(|| Point { - x: 1, - y: 2, - z: 3, - }); - arena.alloc(|| Noncopy { - string: "hello world".to_string(), - array: vec![], + arena.alloc(|| Point { x: 1, y: 2, z: 3 }); + arena.alloc(|| { + Noncopy { + string: "hello 
world".to_string(), + array: vec![], + } }); } } @@ -722,11 +708,7 @@ mod tests { pub fn test_arena_alloc_bytes() { let arena = Arena::new(); for i in 0..10000 { - arena.alloc(|| Point { - x: 1, - y: 2, - z: 3, - }); + arena.alloc(|| Point { x: 1, y: 2, z: 3 }); for byte in arena.alloc_bytes(i % 42).iter_mut() { *byte = i as u8; } @@ -754,10 +736,10 @@ mod tests { for i in 0..10 { // Arena allocate something with drop glue to make sure it // doesn't leak. - arena.alloc(|| { Rc::new(i) }); + arena.alloc(|| Rc::new(i)); // Allocate something with funny size and alignment, to keep // things interesting. - arena.alloc(|| { [0u8, 1, 2] }); + arena.alloc(|| [0u8, 1, 2]); } // Now, panic while allocating arena.alloc::, _>(|| { @@ -771,7 +753,7 @@ mod tests { b.iter(|| { arena.alloc(Noncopy { string: "hello world".to_string(), - array: vec!( 1, 2, 3, 4, 5 ), + array: vec![1, 2, 3, 4, 5], }) }) } @@ -781,7 +763,7 @@ mod tests { b.iter(|| { let _: Box<_> = Box::new(Noncopy { string: "hello world".to_string(), - array: vec!( 1, 2, 3, 4, 5 ), + array: vec![1, 2, 3, 4, 5], }); }) } @@ -790,9 +772,11 @@ mod tests { pub fn bench_noncopy_old_arena(b: &mut Bencher) { let arena = Arena::new(); b.iter(|| { - arena.alloc(|| Noncopy { - string: "hello world".to_string(), - array: vec!( 1, 2, 3, 4, 5 ), + arena.alloc(|| { + Noncopy { + string: "hello world".to_string(), + array: vec![1, 2, 3, 4, 5], + } }) }) } From 388e6afa1db74637169dcb65b1720c5e2f73c1d5 Mon Sep 17 00:00:00 2001 From: Piotr Czarnecki Date: Tue, 5 Jan 2016 11:02:58 +0100 Subject: [PATCH 09/10] Add tests for drops --- src/libarena/lib.rs | 127 +++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 126 insertions(+), 1 deletion(-) diff --git a/src/libarena/lib.rs b/src/libarena/lib.rs index 8aafc115a29..32053e048a9 100644 --- a/src/libarena/lib.rs +++ b/src/libarena/lib.rs @@ -561,9 +561,11 @@ mod tests { extern crate test; use self::test::Bencher; use super::{Arena, TypedArena}; + use std::cell::Cell; use std::rc::Rc; #[allow(dead_code)] + #[derive(Debug, Eq, PartialEq)] struct Point { x: i32, y: i32, @@ -668,11 +670,16 @@ mod tests { #[test] pub fn test_arena_zero_sized() { let arena = Arena::new(); + let mut points = vec![]; for _ in 0..1000 { for _ in 0..100 { arena.alloc(|| ()); } - arena.alloc(|| Point { x: 1, y: 2, z: 3 }); + let point = arena.alloc(|| Point { x: 1, y: 2, z: 3 }); + points.push(point); + } + for point in &points { + assert_eq!(**point, Point { x: 1, y: 2, z: 3 }); } } @@ -747,6 +754,124 @@ mod tests { }); } + // Drop tests + + struct DropCounter<'a> { + count: &'a Cell, + } + + impl<'a> Drop for DropCounter<'a> { + fn drop(&mut self) { + self.count.set(self.count.get() + 1); + } + } + + #[test] + fn test_arena_drop_count() { + let counter = Cell::new(0); + { + let arena = Arena::new(); + for _ in 0..100 { + // Allocate something with drop glue to make sure it doesn't leak. + arena.alloc(|| DropCounter { count: &counter }); + // Allocate something with funny size and alignment, to keep + // things interesting. + arena.alloc(|| [0u8, 1u8, 2u8]); + } + // dropping + }; + assert_eq!(counter.get(), 100); + } + + #[test] + fn test_arena_drop_on_clear() { + let counter = Cell::new(0); + for i in 0..10 { + let mut arena = Arena::new(); + for _ in 0..100 { + // Allocate something with drop glue to make sure it doesn't leak. + arena.alloc(|| DropCounter { count: &counter }); + // Allocate something with funny size and alignment, to keep + // things interesting. 
+ arena.alloc(|| [0u8, 1u8, 2u8]); + } + arena.clear(); + assert_eq!(counter.get(), i * 100 + 100); + } + } + + #[test] + fn test_typed_arena_drop_count() { + let counter = Cell::new(0); + { + let arena: TypedArena = TypedArena::new(); + for _ in 0..100 { + // Allocate something with drop glue to make sure it doesn't leak. + arena.alloc(DropCounter { count: &counter }); + } + }; + assert_eq!(counter.get(), 100); + } + + #[test] + fn test_typed_arena_drop_on_clear() { + let counter = Cell::new(0); + let mut arena: TypedArena = TypedArena::new(); + for i in 0..10 { + for _ in 0..100 { + // Allocate something with drop glue to make sure it doesn't leak. + arena.alloc(DropCounter { count: &counter }); + } + arena.clear(); + assert_eq!(counter.get(), i * 100 + 100); + } + } + + thread_local! { + static DROP_COUNTER: Cell = Cell::new(0) + } + + struct SmallDroppable; + + impl Drop for SmallDroppable { + fn drop(&mut self) { + DROP_COUNTER.with(|c| c.set(c.get() + 1)); + } + } + + #[test] + fn test_arena_drop_small_count() { + DROP_COUNTER.with(|c| c.set(0)); + { + let arena = Arena::new(); + for _ in 0..10 { + for _ in 0..10 { + // Allocate something with drop glue to make sure it doesn't leak. + arena.alloc(|| SmallDroppable); + } + // Allocate something with funny size and alignment, to keep + // things interesting. + arena.alloc(|| [0u8, 1u8, 2u8]); + } + // dropping + }; + assert_eq!(DROP_COUNTER.with(|c| c.get()), 100); + } + + #[test] + fn test_typed_arena_drop_small_count() { + DROP_COUNTER.with(|c| c.set(0)); + { + let arena: TypedArena = TypedArena::new(); + for _ in 0..100 { + // Allocate something with drop glue to make sure it doesn't leak. + arena.alloc(SmallDroppable); + } + // dropping + }; + assert_eq!(DROP_COUNTER.with(|c| c.get()), 100); + } + #[bench] pub fn bench_noncopy(b: &mut Bencher) { let arena = TypedArena::new(); From e2ccc4f744b93f89666fe4c8828905297bb76178 Mon Sep 17 00:00:00 2001 From: Piotr Czarnecki Date: Wed, 6 Jan 2016 18:07:21 +0100 Subject: [PATCH 10/10] Deprecate reflection-based Arena --- src/libarena/lib.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/libarena/lib.rs b/src/libarena/lib.rs index 32053e048a9..cd2093984e6 100644 --- a/src/libarena/lib.rs +++ b/src/libarena/lib.rs @@ -38,6 +38,8 @@ #![feature(dropck_parametricity)] #![cfg_attr(test, feature(test))] +#![allow(deprecated)] + extern crate alloc; use std::cell::{Cell, RefCell}; @@ -122,6 +124,10 @@ impl Chunk { /// than objects without destructors. This reduces overhead when initializing /// plain-old-data (`Copy` types) and means we don't need to waste time running /// their destructors. +#[unstable(feature = "rustc_private", + reason = "Private to rustc", issue = "0")] +#[rustc_deprecated(since = "1.6.0-dev", reason = +"The reflection-based arena is superseded by the any-arena crate")] pub struct Arena<'longer_than_self> { // The heads are separated out from the list as a unbenchmarked // microoptimization, to avoid needing to case on the list to access a head.
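
The snippet below is not part of the patch series; it is a rough usage sketch of the reworked TypedArena, put together from the tests and doc comments added above. It assumes a nightly toolchain of this era, with libarena available as the unstable `arena` crate, and the `Point` struct is a hypothetical element type mirroring the one used in the tests.

    #![feature(rustc_private)]

    extern crate arena;

    use arena::TypedArena;

    #[derive(Debug, Eq, PartialEq)]
    struct Point {
        x: i32,
        y: i32,
        z: i32,
    }

    fn main() {
        // The arena now starts with roughly one 4096-byte page of capacity and,
        // on overflow, first tries RawVec::double_in_place; only if in-place
        // growth fails does it push a new, doubled chunk onto `chunks`.
        let arena: TypedArena<Point> = TypedArena::new();
        let p: &Point = arena.alloc(Point { x: 1, y: 2, z: 3 });
        assert_eq!(*p, Point { x: 1, y: 2, z: 3 });

        // Zero-sized types never touch the heap: the bump pointer is advanced
        // only so the arena knows how many values to drop later.
        let zst_arena: TypedArena<()> = TypedArena::new();
        for _ in 0..100_000 {
            zst_arena.alloc(());
        }

        // `clear` (patch 06) runs destructors but keeps the longest (most
        // recent) chunk allocated, so the arena can be refilled without
        // reallocating.
        let mut reusable: TypedArena<Point> = TypedArena::new();
        for _ in 0..10 {
            reusable.clear();
            for _ in 0..10_000 {
                reusable.alloc(Point { x: 1, y: 2, z: 3 });
            }
        }
    }

When the in-place growth path succeeds, only `end` is updated and allocation continues in the same chunk, so a partially filled chunk is not abandoned for a fresh one; that is the main payoff of the `double_in_place` and `reserve_in_place` additions to RawVec in patch 01.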