Auto merge of #29026 - steveklabnik:rollup, r=steveklabnik

- Successful merges: #28988, #28989, #28990, #28997, #29007, #29015
- Failed merges: #28906
bors 2015-10-14 00:06:32 +00:00
commit c0dc2cb81d
7 changed files with 124 additions and 114 deletions


@@ -1898,7 +1898,7 @@ for pop in search(&data_file, &city) {
 In this piece of code, we take `file` (which has the type
 `Option<String>`), and convert it to a type that `search` can use, in
-this case, `&Option<AsRef<Path>>`. Do do this, we take a reference of
+this case, `&Option<AsRef<Path>>`. To do this, we take a reference of
 file, and map `Path::new` onto it. In this case, `as_ref()` converts
 the `Option<String>` into an `Option<&str>`, and from there, we can
 execute `Path::new` to the content of the optional, and return the
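
The conversion described in that paragraph is worth seeing end to end. Below is a minimal standalone sketch, not part of the commit: the `search` signature is assumed for illustration (the book's real `search` reads a CSV file), but the `as_ref().map(Path::new)` step is exactly the one the text describes.

    use std::path::Path;

    // Hypothetical stand-in for the book's `search`: all it needs is
    // something viewable as a Path, wrapped in an Option.
    fn search<P: AsRef<Path>>(file_path: &Option<P>, city: &str) {
        if let Some(p) = file_path.as_ref() {
            println!("searching for {} in {}", city, p.as_ref().display());
        }
    }

    fn main() {
        let file: Option<String> = Some("data/population.csv".to_string());
        // `as_ref()` turns `&Option<String>` into `Option<&String>`, and
        // `map(Path::new)` converts the contents, yielding `Option<&Path>`.
        let path = file.as_ref().map(Path::new);
        search(&path, "Paris");
    }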


@@ -44,7 +44,7 @@ own iterator involves implementing the `Iterator` trait. While doing that is
 outside of the scope of this guide, Rust provides a number of useful iterators
 to accomplish various tasks. But first, a few notes about limitations of ranges.

-Ranges are very primitive, and we often can use better alternatives. Consider
+Ranges are very primitive, and we often can use better alternatives. Consider the
 following Rust anti-pattern: using ranges to emulate a C-style `for` loop. Let's
 suppose you needed to iterate over the contents of a vector. You may be tempted
 to write this:
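
The snippet that follows "to write this:" is cut off by the diff context. For reference, the anti-pattern and the idiomatic alternative look like this (a sketch consistent with the surrounding text, not necessarily the book's exact listing):

    fn main() {
        let nums = vec![1, 2, 3];

        // Anti-pattern: emulate a C-style loop by indexing with a range.
        for i in 0..nums.len() {
            println!("{}", nums[i]);
        }

        // Idiomatic: iterate over the elements directly, no indexing.
        for num in &nums {
            println!("{}", num);
        }
    }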


@@ -1,6 +1,6 @@
 % Lifetimes

-This guide is one of three presenting Rust's ownership system. This is one of
+This guide is three of three presenting Rust's ownership system. This is one of
 Rust's most unique and compelling features, with which Rust developers should
 become quite acquainted. Ownership is how Rust achieves its largest goal,
 memory safety. There are a few distinct concepts, each with its own chapter:


@@ -1,6 +1,6 @@
 % References and Borrowing

-This guide is one of three presenting Rust's ownership system. This is one of
+This guide is two of three presenting Rust's ownership system. This is one of
 Rust's most unique and compelling features, with which Rust developers should
 become quite acquainted. Ownership is how Rust achieves its largest goal,
 memory safety. There are a few distinct concepts, each with its own


@@ -43,8 +43,7 @@ use libc::{c_int, c_void, size_t};
 extern {
     fn je_mallocx(size: size_t, flags: c_int) -> *mut c_void;
     fn je_rallocx(ptr: *mut c_void, size: size_t, flags: c_int) -> *mut c_void;
-    fn je_xallocx(ptr: *mut c_void, size: size_t, extra: size_t,
-                  flags: c_int) -> size_t;
+    fn je_xallocx(ptr: *mut c_void, size: size_t, extra: size_t, flags: c_int) -> size_t;
     fn je_sdallocx(ptr: *mut c_void, size: size_t, flags: c_int);
     fn je_nallocx(size: size_t, flags: c_int) -> size_t;
 }

@@ -63,40 +62,52 @@ const MIN_ALIGN: usize = 8;
 const MIN_ALIGN: usize = 16;

 // MALLOCX_ALIGN(a) macro
-fn mallocx_align(a: usize) -> c_int { a.trailing_zeros() as c_int }
+fn mallocx_align(a: usize) -> c_int {
+    a.trailing_zeros() as c_int
+}

 fn align_to_flags(align: usize) -> c_int {
-    if align <= MIN_ALIGN { 0 } else { mallocx_align(align) }
+    if align <= MIN_ALIGN {
+        0
+    } else {
+        mallocx_align(align)
+    }
 }

 #[no_mangle]
-pub extern fn __rust_allocate(size: usize, align: usize) -> *mut u8 {
+pub extern "C" fn __rust_allocate(size: usize, align: usize) -> *mut u8 {
     let flags = align_to_flags(align);
     unsafe { je_mallocx(size as size_t, flags) as *mut u8 }
 }

 #[no_mangle]
-pub extern fn __rust_reallocate(ptr: *mut u8, _old_size: usize, size: usize,
-                                align: usize) -> *mut u8 {
+pub extern "C" fn __rust_reallocate(ptr: *mut u8,
+                                    _old_size: usize,
+                                    size: usize,
+                                    align: usize)
+                                    -> *mut u8 {
     let flags = align_to_flags(align);
     unsafe { je_rallocx(ptr as *mut c_void, size as size_t, flags) as *mut u8 }
 }

 #[no_mangle]
-pub extern fn __rust_reallocate_inplace(ptr: *mut u8, _old_size: usize,
-                                        size: usize, align: usize) -> usize {
+pub extern "C" fn __rust_reallocate_inplace(ptr: *mut u8,
+                                            _old_size: usize,
+                                            size: usize,
+                                            align: usize)
+                                            -> usize {
     let flags = align_to_flags(align);
     unsafe { je_xallocx(ptr as *mut c_void, size as size_t, 0, flags) as usize }
 }

 #[no_mangle]
-pub extern fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize) {
+pub extern "C" fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize) {
     let flags = align_to_flags(align);
     unsafe { je_sdallocx(ptr as *mut c_void, old_size as size_t, flags) }
 }

 #[no_mangle]
-pub extern fn __rust_usable_size(size: usize, align: usize) -> usize {
+pub extern "C" fn __rust_usable_size(size: usize, align: usize) -> usize {
     let flags = align_to_flags(align);
     unsafe { je_nallocx(size as size_t, flags) as usize }
 }
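
Aside from the explicit `extern "C"` ABI annotations, the changes in this file are pure rustfmt reflows; the flag computation itself is untouched. What it does: jemalloc's `MALLOCX_ALIGN(a)` macro encodes an alignment as log2(a), which for a power of two is exactly the number of trailing zero bits. A standalone sketch of that computation (with `MIN_ALIGN` passed as a parameter here purely for illustration):

    fn mallocx_align(a: usize) -> i32 {
        // log2 of a power of two == count of trailing zero bits
        a.trailing_zeros() as i32
    }

    fn align_to_flags(align: usize, min_align: usize) -> i32 {
        // Alignments the allocator already guarantees need no flag at all.
        if align <= min_align { 0 } else { mallocx_align(align) }
    }

    fn main() {
        assert_eq!(align_to_flags(8, 16), 0);     // covered by MIN_ALIGN
        assert_eq!(align_to_flags(64, 16), 6);    // 64 == 2^6
        assert_eq!(align_to_flags(4096, 16), 12); // page alignment: 2^12
    }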


@@ -39,29 +39,35 @@ const MIN_ALIGN: usize = 8;
 const MIN_ALIGN: usize = 16;

 #[no_mangle]
-pub extern fn __rust_allocate(size: usize, align: usize) -> *mut u8 {
+pub extern "C" fn __rust_allocate(size: usize, align: usize) -> *mut u8 {
     unsafe { imp::allocate(size, align) }
 }

 #[no_mangle]
-pub extern fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize) {
+pub extern "C" fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize) {
     unsafe { imp::deallocate(ptr, old_size, align) }
 }

 #[no_mangle]
-pub extern fn __rust_reallocate(ptr: *mut u8, old_size: usize, size: usize,
-                                align: usize) -> *mut u8 {
+pub extern "C" fn __rust_reallocate(ptr: *mut u8,
+                                    old_size: usize,
+                                    size: usize,
+                                    align: usize)
+                                    -> *mut u8 {
     unsafe { imp::reallocate(ptr, old_size, size, align) }
 }

 #[no_mangle]
-pub extern fn __rust_reallocate_inplace(ptr: *mut u8, old_size: usize,
-                                        size: usize, align: usize) -> usize {
+pub extern "C" fn __rust_reallocate_inplace(ptr: *mut u8,
+                                            old_size: usize,
+                                            size: usize,
+                                            align: usize)
+                                            -> usize {
     unsafe { imp::reallocate_inplace(ptr, old_size, size, align) }
 }

 #[no_mangle]
-pub extern fn __rust_usable_size(size: usize, align: usize) -> usize {
+pub extern "C" fn __rust_usable_size(size: usize, align: usize) -> usize {
     imp::usable_size(size, align)
 }

@@ -80,7 +86,8 @@ mod imp {
         #[cfg(not(target_os = "android"))]
         fn posix_memalign(memptr: *mut *mut libc::c_void,
                           align: libc::size_t,
-                          size: libc::size_t) -> libc::c_int;
+                          size: libc::size_t)
+                          -> libc::c_int;
     }

     pub unsafe fn allocate(size: usize, align: usize) -> *mut u8 {

@@ -94,9 +101,7 @@ mod imp {
     #[cfg(not(target_os = "android"))]
     unsafe fn more_aligned_malloc(size: usize, align: usize) -> *mut u8 {
         let mut out = ptr::null_mut();
-        let ret = posix_memalign(&mut out,
-                                 align as libc::size_t,
-                                 size as libc::size_t);
+        let ret = posix_memalign(&mut out, align as libc::size_t, size as libc::size_t);
         if ret != 0 {
             ptr::null_mut()
         } else {

@@ -107,8 +112,7 @@ mod imp {
         }
     }

-    pub unsafe fn reallocate(ptr: *mut u8, old_size: usize, size: usize,
-                             align: usize) -> *mut u8 {
+    pub unsafe fn reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8 {
         if align <= MIN_ALIGN {
             libc::realloc(ptr as *mut libc::c_void, size as libc::size_t) as *mut u8
         } else {

@@ -119,8 +123,11 @@ mod imp {
         }
     }

-    pub unsafe fn reallocate_inplace(_ptr: *mut u8, old_size: usize, _size: usize,
-                                     _align: usize) -> usize {
+    pub unsafe fn reallocate_inplace(_ptr: *mut u8,
+                                     old_size: usize,
+                                     _size: usize,
+                                     _align: usize)
+                                     -> usize {
         old_size
     }

@@ -141,8 +148,7 @@ mod imp {
     extern "system" {
         fn GetProcessHeap() -> HANDLE;
         fn HeapAlloc(hHeap: HANDLE, dwFlags: DWORD, dwBytes: SIZE_T) -> LPVOID;
-        fn HeapReAlloc(hHeap: HANDLE, dwFlags: DWORD, lpMem: LPVOID,
-                       dwBytes: SIZE_T) -> LPVOID;
+        fn HeapReAlloc(hHeap: HANDLE, dwFlags: DWORD, lpMem: LPVOID, dwBytes: SIZE_T) -> LPVOID;
         fn HeapFree(hHeap: HANDLE, dwFlags: DWORD, lpMem: LPVOID) -> BOOL;
     }

@@ -165,32 +171,45 @@ mod imp {
         if align <= MIN_ALIGN {
             HeapAlloc(GetProcessHeap(), 0, size as SIZE_T) as *mut u8
         } else {
-            let ptr = HeapAlloc(GetProcessHeap(), 0,
-                                (size + align) as SIZE_T) as *mut u8;
-            if ptr.is_null() { return ptr }
+            let ptr = HeapAlloc(GetProcessHeap(), 0, (size + align) as SIZE_T) as *mut u8;
+            if ptr.is_null() {
+                return ptr
+            }
             align_ptr(ptr, align)
         }
     }

-    pub unsafe fn reallocate(ptr: *mut u8, _old_size: usize, size: usize,
-                             align: usize) -> *mut u8 {
+    pub unsafe fn reallocate(ptr: *mut u8, _old_size: usize, size: usize, align: usize) -> *mut u8 {
         if align <= MIN_ALIGN {
             HeapReAlloc(GetProcessHeap(), 0, ptr as LPVOID, size as SIZE_T) as *mut u8
         } else {
             let header = get_header(ptr);
-            let new = HeapReAlloc(GetProcessHeap(), 0, header.0 as LPVOID,
+            let new = HeapReAlloc(GetProcessHeap(),
+                                  0,
+                                  header.0 as LPVOID,
                                   (size + align) as SIZE_T) as *mut u8;
-            if new.is_null() { return new }
+            if new.is_null() {
+                return new
+            }
             align_ptr(new, align)
         }
     }

-    pub unsafe fn reallocate_inplace(ptr: *mut u8, old_size: usize, size: usize,
-                                     align: usize) -> usize {
+    pub unsafe fn reallocate_inplace(ptr: *mut u8,
+                                     old_size: usize,
+                                     size: usize,
+                                     align: usize)
+                                     -> usize {
         if align <= MIN_ALIGN {
-            let new = HeapReAlloc(GetProcessHeap(), HEAP_REALLOC_IN_PLACE_ONLY,
-                                  ptr as LPVOID, size as SIZE_T) as *mut u8;
-            if new.is_null() { old_size } else { size }
+            let new = HeapReAlloc(GetProcessHeap(),
+                                  HEAP_REALLOC_IN_PLACE_ONLY,
+                                  ptr as LPVOID,
+                                  size as SIZE_T) as *mut u8;
+            if new.is_null() {
+                old_size
+            } else {
+                size
+            }
         } else {
             old_size
         }
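
In the Windows branch above, over-aligned requests are served by over-allocating by `align` and rounding the pointer up; the diff also shows (via `get_header` and `header.0`) that the original unaligned pointer is stashed so it can later be handed back to `HeapReAlloc`. The body of `align_ptr` is not part of this diff, but the usual round-up arithmetic such a helper relies on looks like this (a sketch, not the actual implementation):

    fn align_up(addr: usize, align: usize) -> usize {
        // Requires `align` to be a power of two.
        (addr + align - 1) & !(align - 1)
    }

    fn main() {
        assert_eq!(align_up(0x1001, 16), 0x1010);
        assert_eq!(align_up(0x1000, 16), 0x1000); // already aligned
        assert_eq!(align_up(7, 8), 8);
    }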


@@ -105,7 +105,7 @@ pub struct Arena<'longer_than_self> {
     head: RefCell<Chunk>,
     copy_head: RefCell<Chunk>,
     chunks: RefCell<Vec<Chunk>>,
-    _marker: marker::PhantomData<*mut &'longer_than_self()>,
+    _marker: marker::PhantomData<*mut &'longer_than_self ()>,
 }

 impl<'a> Arena<'a> {

@@ -197,7 +197,7 @@ fn un_bitpack_tydesc_ptr(p: usize) -> (*const TyDesc, bool) {
 struct TyDesc {
     drop_glue: fn(*const i8),
     size: usize,
-    align: usize
+    align: usize,
 }

 trait AllTypes { fn dummy(&self) { } }

@@ -224,8 +224,7 @@ impl<'longer_than_self> Arena<'longer_than_self> {
         let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
         self.chunks.borrow_mut().push(self.copy_head.borrow().clone());
-        *self.copy_head.borrow_mut() =
-            chunk((new_min_chunk_size + 1).next_power_of_two(), true);
+        *self.copy_head.borrow_mut() = chunk((new_min_chunk_size + 1).next_power_of_two(), true);

         self.alloc_copy_inner(n_bytes, align)
     }

@@ -242,16 +241,13 @@ impl<'longer_than_self> Arena<'longer_than_self> {
         let copy_head = self.copy_head.borrow();
         copy_head.fill.set(end);

-        unsafe {
-            copy_head.as_ptr().offset(start as isize)
-        }
+        unsafe { copy_head.as_ptr().offset(start as isize) }
     }

     #[inline]
     fn alloc_copy<T, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
         unsafe {
-            let ptr = self.alloc_copy_inner(mem::size_of::<T>(),
-                                            mem::align_of::<T>());
+            let ptr = self.alloc_copy_inner(mem::size_of::<T>(), mem::align_of::<T>());
             let ptr = ptr as *mut T;
             ptr::write(&mut (*ptr), op());
             &mut *ptr

@@ -259,21 +255,18 @@ impl<'longer_than_self> Arena<'longer_than_self> {
     }

     // Functions for the non-POD part of the arena
-    fn alloc_noncopy_grow(&self, n_bytes: usize,
-                          align: usize) -> (*const u8, *const u8) {
+    fn alloc_noncopy_grow(&self, n_bytes: usize, align: usize) -> (*const u8, *const u8) {
         // Allocate a new chunk.
         let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
         self.chunks.borrow_mut().push(self.head.borrow().clone());
-        *self.head.borrow_mut() =
-            chunk((new_min_chunk_size + 1).next_power_of_two(), false);
+        *self.head.borrow_mut() = chunk((new_min_chunk_size + 1).next_power_of_two(), false);

         self.alloc_noncopy_inner(n_bytes, align)
     }

     #[inline]
-    fn alloc_noncopy_inner(&self, n_bytes: usize,
-                           align: usize) -> (*const u8, *const u8) {
+    fn alloc_noncopy_inner(&self, n_bytes: usize, align: usize) -> (*const u8, *const u8) {
         // Be careful to not maintain any `head` borrows active, because
         // `alloc_noncopy_grow` borrows it mutably.
         let (start, end, tydesc_start, head_capacity) = {

@@ -297,7 +290,8 @@ impl<'longer_than_self> Arena<'longer_than_self> {

         unsafe {
             let buf = head.as_ptr();
-            (buf.offset(tydesc_start as isize), buf.offset(start as isize))
+            (buf.offset(tydesc_start as isize),
+             buf.offset(start as isize))
         }
     }

@@ -305,16 +299,14 @@ impl<'longer_than_self> Arena<'longer_than_self> {
     fn alloc_noncopy<T, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
         unsafe {
             let tydesc = get_tydesc::<T>();
-            let (ty_ptr, ptr) =
-                self.alloc_noncopy_inner(mem::size_of::<T>(),
-                                         mem::align_of::<T>());
+            let (ty_ptr, ptr) = self.alloc_noncopy_inner(mem::size_of::<T>(), mem::align_of::<T>());
             let ty_ptr = ty_ptr as *mut usize;
             let ptr = ptr as *mut T;
             // Write in our tydesc along with a bit indicating that it
             // has *not* been initialized yet.
             *ty_ptr = bitpack_tydesc_ptr(tydesc, false);
             // Actually initialize it
-            ptr::write(&mut(*ptr), op());
+            ptr::write(&mut (*ptr), op());
             // Now that we are done, update the tydesc to indicate that
             // the object is there.
             *ty_ptr = bitpack_tydesc_ptr(tydesc, true);

@@ -358,10 +350,10 @@ fn test_arena_destructors_fail() {
     for i in 0..10 {
         // Arena allocate something with drop glue to make sure it
         // doesn't leak.
-        arena.alloc(|| { Rc::new(i) });
+        arena.alloc(|| Rc::new(i));
         // Allocate something with funny size and alignment, to keep
         // things interesting.
-        arena.alloc(|| { [0u8, 1, 2] });
+        arena.alloc(|| [0u8, 1, 2]);
     }
     // Now, panic while allocating
     arena.alloc::<Rc<i32>, _>(|| {

@@ -409,12 +401,13 @@ fn calculate_size<T>(capacity: usize) -> usize {
 impl<T> TypedArenaChunk<T> {
     #[inline]
-    unsafe fn new(next: *mut TypedArenaChunk<T>, capacity: usize)
-                  -> *mut TypedArenaChunk<T> {
+    unsafe fn new(next: *mut TypedArenaChunk<T>, capacity: usize) -> *mut TypedArenaChunk<T> {
         let size = calculate_size::<T>(capacity);
-        let chunk = allocate(size, mem::align_of::<TypedArenaChunk<T>>())
-                        as *mut TypedArenaChunk<T>;
-        if chunk.is_null() { alloc::oom() }
+        let chunk =
+            allocate(size, mem::align_of::<TypedArenaChunk<T>>()) as *mut TypedArenaChunk<T>;
+        if chunk.is_null() {
+            alloc::oom()
+        }
         (*chunk).next = next;
         (*chunk).capacity = capacity;
         chunk

@@ -437,7 +430,8 @@ impl<T> TypedArenaChunk<T> {
         let next = self.next;
         let size = calculate_size::<T>(self.capacity);
         let self_ptr: *mut TypedArenaChunk<T> = self;
-        deallocate(self_ptr as *mut u8, size,
+        deallocate(self_ptr as *mut u8,
+                   size,
                    mem::align_of::<TypedArenaChunk<T>>());
         if !next.is_null() {
             let capacity = (*next).capacity;

@@ -449,9 +443,7 @@ impl<T> TypedArenaChunk<T> {
     #[inline]
     fn start(&self) -> *const u8 {
         let this: *const TypedArenaChunk<T> = self;
-        unsafe {
-            round_up(this.offset(1) as usize, mem::align_of::<T>()) as *const u8
-        }
+        unsafe { round_up(this.offset(1) as usize, mem::align_of::<T>()) as *const u8 }
     }

     // Returns a pointer to the end of the allocated space.

@@ -545,14 +537,21 @@ mod tests {
     #[test]
     fn test_arena_alloc_nested() {
-        struct Inner { value: u8 }
-        struct Outer<'a> { inner: &'a Inner }
-        enum EI<'e> { I(Inner), O(Outer<'e>) }
+        struct Inner {
+            value: u8,
+        }
+        struct Outer<'a> {
+            inner: &'a Inner,
+        }
+        enum EI<'e> {
+            I(Inner),
+            O(Outer<'e>),
+        }

         struct Wrap<'a>(TypedArena<EI<'a>>);

         impl<'a> Wrap<'a> {
-            fn alloc_inner<F:Fn() -> Inner>(&self, f: F) -> &Inner {
+            fn alloc_inner<F: Fn() -> Inner>(&self, f: F) -> &Inner {
                 let r: &EI = self.0.alloc(EI::I(f()));
                 if let &EI::I(ref i) = r {
                     i

@@ -560,7 +559,7 @@ mod tests {
                     panic!("mismatch");
                 }
             }
-            fn alloc_outer<F:Fn() -> Outer<'a>>(&self, f: F) -> &Outer {
+            fn alloc_outer<F: Fn() -> Outer<'a>>(&self, f: F) -> &Outer {
                 let r: &EI = self.0.alloc(EI::O(f()));
                 if let &EI::O(ref o) = r {
                     o

@@ -572,8 +571,9 @@ mod tests {
         let arena = Wrap(TypedArena::new());

-        let result = arena.alloc_outer(|| Outer {
-            inner: arena.alloc_inner(|| Inner { value: 10 }) });
+        let result = arena.alloc_outer(|| {
+            Outer { inner: arena.alloc_inner(|| Inner { value: 10 }) }
+        });

         assert_eq!(result.inner.value, 10);
     }

@@ -582,49 +582,27 @@ mod tests {
     pub fn test_copy() {
         let arena = TypedArena::new();
         for _ in 0..100000 {
-            arena.alloc(Point {
-                x: 1,
-                y: 2,
-                z: 3,
-            });
+            arena.alloc(Point { x: 1, y: 2, z: 3 });
         }
     }

     #[bench]
     pub fn bench_copy(b: &mut Bencher) {
         let arena = TypedArena::new();
-        b.iter(|| {
-            arena.alloc(Point {
-                x: 1,
-                y: 2,
-                z: 3,
-            })
-        })
+        b.iter(|| arena.alloc(Point { x: 1, y: 2, z: 3 }))
     }

     #[bench]
     pub fn bench_copy_nonarena(b: &mut Bencher) {
         b.iter(|| {
-            let _: Box<_> = box Point {
-                x: 1,
-                y: 2,
-                z: 3,
-            };
+            let _: Box<_> = box Point { x: 1, y: 2, z: 3 };
         })
     }

     #[bench]
     pub fn bench_copy_old_arena(b: &mut Bencher) {
         let arena = Arena::new();
-        b.iter(|| {
-            arena.alloc(|| {
-                Point {
-                    x: 1,
-                    y: 2,
-                    z: 3,
-                }
-            })
-        })
+        b.iter(|| arena.alloc(|| Point { x: 1, y: 2, z: 3 }))
     }

@@ -639,7 +617,7 @@ mod tests {
         for _ in 0..100000 {
             arena.alloc(Noncopy {
                 string: "hello world".to_string(),
-                array: vec!( 1, 2, 3, 4, 5 ),
+                array: vec!(1, 2, 3, 4, 5),
             });
         }
     }

@@ -650,7 +628,7 @@ mod tests {
         b.iter(|| {
             arena.alloc(Noncopy {
                 string: "hello world".to_string(),
-                array: vec!( 1, 2, 3, 4, 5 ),
+                array: vec!(1, 2, 3, 4, 5),
             })
         })
     }

@@ -660,7 +638,7 @@ mod tests {
         b.iter(|| {
             let _: Box<_> = box Noncopy {
                 string: "hello world".to_string(),
-                array: vec!( 1, 2, 3, 4, 5 ),
+                array: vec!(1, 2, 3, 4, 5),
             };
         })
     }

@@ -669,9 +647,11 @@ mod tests {
     pub fn bench_noncopy_old_arena(b: &mut Bencher) {
         let arena = Arena::new();
         b.iter(|| {
-            arena.alloc(|| Noncopy {
-                string: "hello world".to_string(),
-                array: vec!( 1, 2, 3, 4, 5 ),
+            arena.alloc(|| {
+                Noncopy {
+                    string: "hello world".to_string(),
+                    array: vec!(1, 2, 3, 4, 5),
+                }
             })
         })
     }
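
One detail worth noting in the arena hunks: `bitpack_tydesc_ptr` / `un_bitpack_tydesc_ptr` pack an "initialized yet?" flag into the low bit of the `TyDesc` pointer, which is free because `TyDesc` is more than 1-byte aligned. A sketch of that scheme, with plain `usize` standing in for the real pointer type:

    fn bitpack(p: usize, is_done: bool) -> usize {
        // The low bit of any 2-byte-aligned address is zero, so it can hold the flag.
        p | (is_done as usize)
    }

    fn un_bitpack(p: usize) -> (usize, bool) {
        // Mask the flag back out to recover the original address.
        (p & !1, p & 1 == 1)
    }

    fn main() {
        let addr = 0x7f00_1000_usize; // even address, low bit free
        assert_eq!(un_bitpack(bitpack(addr, true)), (addr, true));
        assert_eq!(un_bitpack(bitpack(addr, false)), (addr, false));
    }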