rustfmt: liballoc, liballoc_*, libarena
commit 1f1a1e6595
parent e24fffef8a
@@ -131,11 +131,12 @@ pub struct Arc<T: ?Sized> {
 }

 #[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<T: ?Sized + Sync + Send> Send for Arc<T> { }
+unsafe impl<T: ?Sized + Sync + Send> Send for Arc<T> {}
 #[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<T: ?Sized + Sync + Send> Sync for Arc<T> { }
+unsafe impl<T: ?Sized + Sync + Send> Sync for Arc<T> {}

-#[cfg(not(stage0))] // remove cfg after new snapshot
+// remove cfg after new snapshot
+#[cfg(not(stage0))]
 #[unstable(feature = "coerce_unsized", issue = "27732")]
 impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}

@@ -152,11 +153,12 @@ pub struct Weak<T: ?Sized> {
 }

 #[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<T: ?Sized + Sync + Send> Send for Weak<T> { }
+unsafe impl<T: ?Sized + Sync + Send> Send for Weak<T> {}
 #[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<T: ?Sized + Sync + Send> Sync for Weak<T> { }
+unsafe impl<T: ?Sized + Sync + Send> Sync for Weak<T> {}

-#[cfg(not(stage0))] // remove cfg after new snapshot
+// remove cfg after new snapshot
+#[cfg(not(stage0))]
 #[unstable(feature = "coerce_unsized", issue = "27732")]
 impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {}

@ -226,7 +228,7 @@ pub fn new(data: T) -> Arc<T> {
|
||||
pub fn try_unwrap(this: Self) -> Result<T, Self> {
|
||||
// See `drop` for why all these atomics are like this
|
||||
if this.inner().strong.compare_and_swap(1, 0, Release) != 1 {
|
||||
return Err(this)
|
||||
return Err(this);
|
||||
}
|
||||
|
||||
atomic::fence(Acquire);
|
||||
@@ -265,7 +267,7 @@ pub fn downgrade(this: &Self) -> Weak<T> {

             // check if the weak counter is currently "locked"; if so, spin.
             if cur == usize::MAX {
-                continue
+                continue;
             }

             // NOTE: this code currently ignores the possibility of overflow
@@ -276,7 +278,7 @@ pub fn downgrade(this: &Self) -> Weak<T> {
             // synchronize with the write coming from `is_unique`, so that the
             // events prior to that write happen before this read.
             if this.inner().weak.compare_and_swap(cur, cur + 1, Acquire) == cur {
-                return Weak { _ptr: this._ptr }
+                return Weak { _ptr: this._ptr };
             }
         }
     }
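
Editorial note, not part of the commit: the loop above treats `usize::MAX` in the weak count as a lock bit taken by `is_unique`. A minimal sketch of that spin-then-CAS shape, written against today's `compare_exchange_weak` (the `compare_and_swap` used here was later deprecated); `WEAK_LOCKED` and `bump_weak` are illustrative names only:

    use std::sync::atomic::{AtomicUsize, Ordering::{Acquire, Relaxed}};

    const WEAK_LOCKED: usize = usize::MAX; // sentinel: counter is "locked"

    fn bump_weak(weak: &AtomicUsize) {
        loop {
            let cur = weak.load(Relaxed);
            if cur == WEAK_LOCKED {
                std::hint::spin_loop(); // the holder releases it shortly
                continue;
            }
            // Acquire on success synchronizes with the unlocking write,
            // mirroring the comment in the hunk above.
            if weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed).is_ok() {
                return;
            }
        }
    }
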
@@ -568,14 +570,14 @@ fn drop(&mut self) {
         let ptr = *self._ptr;
         // if ptr.is_null() { return }
         if ptr as *mut u8 as usize == 0 || ptr as *mut u8 as usize == mem::POST_DROP_USIZE {
-            return
+            return;
         }

         // Because `fetch_sub` is already atomic, we do not need to synchronize
         // with other threads unless we are going to delete the object. This
         // same logic applies to the below `fetch_sub` to the `weak` count.
         if self.inner().strong.fetch_sub(1, Release) != 1 {
-            return
+            return;
         }

         // This fence is needed to prevent reordering of use of the data and
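
The two comments in this hunk compress the whole Arc drop protocol: decrement with Release so earlier uses of the data cannot be reordered past the decrement, and only the thread that drops the count to zero pays for an Acquire fence before freeing. A hedged standalone sketch of that shape (illustrative names and modern API, not the commit's code):

    use std::sync::atomic::{fence, AtomicUsize, Ordering::{Acquire, Release}};

    fn release_strong(strong: &AtomicUsize, destroy: impl FnOnce()) {
        // Release: our accesses to the shared data happen-before the
        // decrement that other threads will observe.
        if strong.fetch_sub(1, Release) != 1 {
            return; // other owners remain; nothing more to synchronize
        }
        // Acquire: pairs with every other owner's Release decrement, so
        // all of their accesses complete before we destroy the data.
        fence(Acquire);
        destroy();
    }
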
@@ -634,13 +636,13 @@ pub fn upgrade(&self) -> Option<Arc<T>> {
             // confirmed via the CAS below.
             let n = inner.strong.load(Relaxed);
             if n == 0 {
-                return None
+                return None;
             }

             // Relaxed is valid for the same reason it is on Arc's Clone impl
             let old = inner.strong.compare_and_swap(n, n + 1, Relaxed);
             if old == n {
-                return Some(Arc { _ptr: self._ptr })
+                return Some(Arc { _ptr: self._ptr });
             }
         }
     }
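
`upgrade` only increments the strong count if it is still nonzero, and Relaxed suffices because the weak reference itself keeps the allocation alive. The same check-then-increment loop, sketched with `compare_exchange_weak` (a hypothetical helper, not the commit's code):

    use std::sync::atomic::{AtomicUsize, Ordering::Relaxed};

    fn try_upgrade(strong: &AtomicUsize) -> bool {
        let mut n = strong.load(Relaxed);
        loop {
            if n == 0 {
                return false; // the last Arc is gone; the value was dropped
            }
            match strong.compare_exchange_weak(n, n + 1, Relaxed, Relaxed) {
                Ok(_) => return true,      // we now hold a strong reference
                Err(actual) => n = actual, // raced with another thread; retry
            }
        }
    }
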
@@ -682,7 +684,7 @@ fn clone(&self) -> Weak<T> {
             }
         }

-        return Weak { _ptr: self._ptr }
+        return Weak { _ptr: self._ptr };
     }
 }

@@ -718,7 +720,7 @@ fn drop(&mut self) {

         // see comments above for why this check is here
         if ptr as *mut u8 as usize == 0 || ptr as *mut u8 as usize == mem::POST_DROP_USIZE {
-            return
+            return;
         }

         // If we find out that we were the last weak pointer, then its time to
@@ -928,8 +930,7 @@ mod tests {

     struct Canary(*mut atomic::AtomicUsize);

-    impl Drop for Canary
-    {
+    impl Drop for Canary {
         fn drop(&mut self) {
             unsafe {
                 match *self {
@@ -943,7 +944,7 @@ fn drop(&mut self) {

     #[test]
     fn manually_share_arc() {
-        let v = vec!(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
+        let v = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
         let arc_v = Arc::new(v);

         let (tx, rx) = channel();
@@ -88,8 +88,7 @@
 #[unstable(feature = "box_heap",
            reason = "may be renamed; uncertain about custom allocator design",
            issue = "27779")]
-pub const HEAP: ExchangeHeapSingleton =
-    ExchangeHeapSingleton { _force_singleton: () };
+pub const HEAP: ExchangeHeapSingleton = ExchangeHeapSingleton { _force_singleton: () };

 /// This the singleton type used solely for `boxed::HEAP`.
 #[unstable(feature = "box_heap",
@@ -238,7 +237,7 @@ pub fn new(x: T) -> Box<T> {
     }
 }

-impl<T : ?Sized> Box<T> {
+impl<T: ?Sized> Box<T> {
     /// Constructs a box from the raw pointer.
     ///
     /// After this function call, pointer is owned by resulting box.
@@ -535,8 +534,7 @@ pub trait FnBox<A> {
 }

 #[unstable(feature = "fnbox", reason = "Newly introduced", issue = "0")]
-impl<A,F> FnBox<A> for F
-    where F: FnOnce<A>
+impl<A, F> FnBox<A> for F where F: FnOnce<A>
 {
     type Output = F::Output;

@@ -546,7 +544,7 @@ fn call_box(self: Box<F>, args: A) -> F::Output {
 }

 #[unstable(feature = "fnbox", reason = "Newly introduced", issue = "0")]
-impl<'a,A,R> FnOnce<A> for Box<FnBox<A,Output=R>+'a> {
+impl<'a, A, R> FnOnce<A> for Box<FnBox<A, Output = R> + 'a> {
     type Output = R;

     extern "rust-call" fn call_once(self, args: A) -> R {
@@ -555,7 +553,7 @@ extern "rust-call" fn call_once(self, args: A) -> R {
 }

 #[unstable(feature = "fnbox", reason = "Newly introduced", issue = "0")]
-impl<'a,A,R> FnOnce<A> for Box<FnBox<A,Output=R>+Send+'a> {
+impl<'a, A, R> FnOnce<A> for Box<FnBox<A, Output = R> + Send + 'a> {
     type Output = R;

     extern "rust-call" fn call_once(self, args: A) -> R {
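
An editorial aside on why these impls exist: at the time, a boxed `FnOnce` closure could not be called by value, so `FnBox::call_box` took `self: Box<F>` and the `FnOnce` impls above forwarded to it. On modern Rust the indirection is unnecessary; a small illustration (not part of the commit):

    // Box<dyn FnOnce()> has been directly callable since Rust 1.35,
    // which is exactly the ability FnBox simulated through call_box.
    fn run(task: Box<dyn FnOnce() -> i32>) -> i32 {
        task() // moves the closure out of the box and invokes it
    }

    fn main() {
        let x = 40;
        assert_eq!(run(Box::new(move || x + 2)), 42);
    }
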
@@ -564,7 +562,7 @@ extern "rust-call" fn call_once(self, args: A) -> R {
 }

 #[unstable(feature = "coerce_unsized", issue = "27732")]
-impl<T: ?Sized+Unsize<U>, U: ?Sized> CoerceUnsized<Box<U>> for Box<T> {}
+impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Box<U>> for Box<T> {}

 #[stable(feature = "box_slice_clone", since = "1.3.0")]
 impl<T: Clone> Clone for Box<[T]> {
@@ -74,8 +74,7 @@ fn test_show() {

 #[test]
 fn deref() {
-    fn homura<T: Deref<Target = i32>>(_: T) {
-    }
+    fn homura<T: Deref<Target = i32>>(_: T) {}
     homura(Box::new(765));
 }

@@ -18,7 +18,7 @@
 use core::{isize, usize};

 #[allow(improper_ctypes)]
-extern {
+extern "C" {
     #[allocator]
     fn __rust_allocate(size: usize, align: usize) -> *mut u8;
     fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize);
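
These `__rust_*` symbols were the unstable allocator hooks of the era; their stable descendant is the `GlobalAlloc` trait. As a point of comparison only (modern stable API, not this commit's interface), a pass-through allocator looks like:

    use std::alloc::{GlobalAlloc, Layout, System};

    struct Passthrough;

    unsafe impl GlobalAlloc for Passthrough {
        unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
            System.alloc(layout) // delegate to the system allocator
        }
        unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
            System.dealloc(ptr, layout)
        }
    }

    #[global_allocator]
    static ALLOC: Passthrough = Passthrough;
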
@@ -196,9 +196,10 @@ impl<T: ?Sized> !marker::Send for Rc<T> {}
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized> !marker::Sync for Rc<T> {}

-#[cfg(not(stage0))] // remove cfg after new snapshot
+// remove cfg after new snapshot
+#[cfg(not(stage0))]
 #[unstable(feature = "coerce_unsized", issue = "27732")]
-impl<T: ?Sized+Unsize<U>, U: ?Sized> CoerceUnsized<Rc<U>> for Rc<T> {}
+impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Rc<U>> for Rc<T> {}

 impl<T> Rc<T> {
     /// Constructs a new `Rc<T>`.
@@ -482,7 +483,6 @@ fn drop(&mut self) {

 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized> Clone for Rc<T> {
-
     /// Makes a clone of the `Rc<T>`.
     ///
     /// When you clone an `Rc<T>`, it will create another pointer to the data and
@@ -678,21 +678,21 @@ fn cmp(&self, other: &Rc<T>) -> Ordering {
 }

 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized+Hash> Hash for Rc<T> {
+impl<T: ?Sized + Hash> Hash for Rc<T> {
     fn hash<H: Hasher>(&self, state: &mut H) {
         (**self).hash(state);
     }
 }

 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized+fmt::Display> fmt::Display for Rc<T> {
+impl<T: ?Sized + fmt::Display> fmt::Display for Rc<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         fmt::Display::fmt(&**self, f)
     }
 }

 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized+fmt::Debug> fmt::Debug for Rc<T> {
+impl<T: ?Sized + fmt::Debug> fmt::Debug for Rc<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         fmt::Debug::fmt(&**self, f)
     }
@@ -731,9 +731,10 @@ impl<T: ?Sized> !marker::Send for Weak<T> {}
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T: ?Sized> !marker::Sync for Weak<T> {}

-#[cfg(not(stage0))] // remove cfg after new snapshot
+// remove cfg after new snapshot
+#[cfg(not(stage0))]
 #[unstable(feature = "coerce_unsized", issue = "27732")]
-impl<T: ?Sized+Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {}
+impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {}

 impl<T: ?Sized> Weak<T> {
     /// Upgrades a weak reference to a strong reference.
@@ -810,7 +811,6 @@ fn drop(&mut self) {

 #[stable(feature = "rc_weak", since = "1.4.0")]
 impl<T: ?Sized> Clone for Weak<T> {
-
     /// Makes a clone of the `Weak<T>`.
     ///
     /// This increases the weak reference count.
@@ -832,7 +832,7 @@ fn clone(&self) -> Weak<T> {
 }

 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: ?Sized+fmt::Debug> fmt::Debug for Weak<T> {
+impl<T: ?Sized + fmt::Debug> fmt::Debug for Weak<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         write!(f, "(Weak)")
     }
@@ -41,7 +41,7 @@
                not(target_os = "android"),
                not(target_env = "musl")),
            link(name = "pthread"))]
-extern {
+extern "C" {
     fn je_mallocx(size: size_t, flags: c_int) -> *mut c_void;
     fn je_rallocx(ptr: *mut c_void, size: size_t, flags: c_int) -> *mut c_void;
     fn je_xallocx(ptr: *mut c_void, size: size_t, extra: size_t, flags: c_int) -> size_t;
@@ -79,7 +79,7 @@ mod imp {
     use libc;
     use MIN_ALIGN;

-    extern {
+    extern "C" {
         // Apparently android doesn't have posix_memalign
         #[cfg(target_os = "android")]
         fn memalign(align: libc::size_t, size: libc::size_t) -> *mut libc::c_void;
@@ -180,7 +180,7 @@ pub unsafe fn allocate(size: usize, align: usize) -> *mut u8 {
         } else {
             let ptr = HeapAlloc(GetProcessHeap(), 0, (size + align) as SIZE_T) as *mut u8;
             if ptr.is_null() {
-                return ptr
+                return ptr;
             }
             align_ptr(ptr, align)
         }
@@ -196,7 +196,7 @@ pub unsafe fn reallocate(ptr: *mut u8, _old_size: usize, size: usize, align: usi
                                      header.0 as LPVOID,
                                      (size + align) as SIZE_T) as *mut u8;
             if new.is_null() {
-                return new
+                return new;
             }
             align_ptr(new, align)
         }
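
Both hunks above come from the Windows fallback path: `HeapAlloc` cannot be asked for an alignment, so the code over-allocates by `align`, stashes the original pointer just below the aligned address (the `Header`), and returns the aligned pointer. The rounding itself is the usual power-of-two trick; a hedged standalone sketch (illustrative helper, not the commit's `align_ptr`):

    // Round `addr` up to the next multiple of `align` (a power of two).
    fn align_up(addr: usize, align: usize) -> usize {
        debug_assert!(align.is_power_of_two());
        (addr + align - 1) & !(align - 1)
    }

    // e.g. align_up(13, 8) == 16, and align_up(16, 8) == 16.
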
@@ -168,8 +168,8 @@ unsafe fn destroy_chunk(chunk: &Chunk) {

         let start = round_up(after_tydesc, align);

-        //debug!("freeing object: idx = {}, size = {}, align = {}, done = {}",
-        //       start, size, align, is_done);
+        // debug!("freeing object: idx = {}, size = {}, align = {}, done = {}",
+        //        start, size, align, is_done);
         if is_done {
             ((*tydesc).drop_glue)(buf.offset(start as isize) as *const i8);
         }
@@ -201,8 +201,11 @@ struct TyDesc {
     align: usize,
 }

-trait AllTypes { fn dummy(&self) { } }
-impl<T:?Sized> AllTypes for T { }
+trait AllTypes {
+    fn dummy(&self) {}
+}
+
+impl<T: ?Sized> AllTypes for T {}

 unsafe fn get_tydesc<T>() -> *const TyDesc {
     use std::raw::TraitObject;
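
The `AllTypes` blanket trait exists only so that every `T` has a vtable from which `get_tydesc` can recover size, alignment, and drop glue via `std::raw::TraitObject` (long since removed from the language). The same type-erased-drop idea survives on stable Rust as a plain function pointer; a sketch with illustrative names, not the commit's code:

    use std::ptr;

    // One erased allocation plus the knowledge of how to drop it.
    struct Slot {
        data: *mut u8,
        drop_fn: unsafe fn(*mut u8),
    }

    unsafe fn drop_erased<T>(p: *mut u8) {
        ptr::drop_in_place(p as *mut T);
    }

    fn erase<T>(value: Box<T>) -> Slot {
        Slot {
            data: Box::into_raw(value) as *mut u8,
            drop_fn: drop_erased::<T>, // monomorphized drop glue, erased
        }
    }
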
@@ -624,7 +627,7 @@ pub fn test_noncopy() {
     for _ in 0..100000 {
         arena.alloc(Noncopy {
             string: "hello world".to_string(),
-            array: vec!(1, 2, 3, 4, 5),
+            array: vec![1, 2, 3, 4, 5],
         });
     }
 }
@@ -635,7 +638,7 @@ pub fn bench_noncopy(b: &mut Bencher) {
     b.iter(|| {
         arena.alloc(Noncopy {
             string: "hello world".to_string(),
-            array: vec!(1, 2, 3, 4, 5),
+            array: vec![1, 2, 3, 4, 5],
         })
     })
 }
@@ -645,7 +648,7 @@ pub fn bench_noncopy_nonarena(b: &mut Bencher) {
     b.iter(|| {
         let _: Box<_> = box Noncopy {
             string: "hello world".to_string(),
-            array: vec!(1, 2, 3, 4, 5),
+            array: vec![1, 2, 3, 4, 5],
         };
     })
 }
@@ -657,7 +660,7 @@ pub fn bench_noncopy_old_arena(b: &mut Bencher) {
         arena.alloc(|| {
             Noncopy {
                 string: "hello world".to_string(),
-                array: vec!(1, 2, 3, 4, 5),
+                array: vec![1, 2, 3, 4, 5],
             }
         })
     })