// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// FIXME: #13996: mark the `allocate` and `reallocate` return value as `noalias`

#[cfg(not(test))] use core::raw;
#[cfg(stage0, not(test))] use util;

/// Returns a pointer to `size` bytes of memory.
///
/// Behavior is undefined if the requested size is 0 or the alignment is not a
/// power of 2. The alignment must be no larger than the largest supported page
/// size on the platform.
#[inline]
pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 {
    imp::allocate(size, align)
}

/// Extends or shrinks the allocation referenced by `ptr` to `size` bytes of
/// memory.
///
/// Behavior is undefined if the requested size is 0 or the alignment is not a
/// power of 2. The alignment must be no larger than the largest supported page
/// size on the platform.
///
/// The `old_size` and `align` parameters are the parameters that were used to
/// create the allocation referenced by `ptr`. The `old_size` parameter may also
/// be the value returned by `usable_size` for the requested size.
#[inline]
pub unsafe fn reallocate(ptr: *mut u8, size: uint, align: uint,
                         old_size: uint) -> *mut u8 {
    imp::reallocate(ptr, size, align, old_size)
}

/// Extends or shrinks the allocation referenced by `ptr` to `size` bytes of
/// memory in-place.
///
/// Returns true if successful, otherwise false if the allocation was not
/// altered.
///
/// Behavior is undefined if the requested size is 0 or the alignment is not a
/// power of 2. The alignment must be no larger than the largest supported page
/// size on the platform.
///
/// The `old_size` and `align` parameters are the parameters that were used to
/// create the allocation referenced by `ptr`. The `old_size` parameter may be
/// any value in `range_inclusive(requested_size, usable_size)`.
#[inline]
pub unsafe fn reallocate_inplace(ptr: *mut u8, size: uint, align: uint,
                                 old_size: uint) -> bool {
    imp::reallocate_inplace(ptr, size, align, old_size)
}

/// Deallocates the memory referenced by `ptr`.
///
/// The `ptr` parameter must not be null.
///
/// The `size` and `align` parameters are the parameters that were used to
/// create the allocation referenced by `ptr`. The `size` parameter may also be
/// the value returned by `usable_size` for the requested size.
#[inline]
pub unsafe fn deallocate(ptr: *mut u8, size: uint, align: uint) {
    imp::deallocate(ptr, size, align)
}

/// Returns the usable size of an allocation created with the specified `size`
/// and `align`.
#[inline]
pub fn usable_size(size: uint, align: uint) -> uint {
    imp::usable_size(size, align)
}

/// Prints implementation-defined allocator statistics.
///
/// These statistics may be inconsistent if other threads use the allocator
/// during the call.
#[unstable]
pub fn stats_print() {
    imp::stats_print();
}

// The compiler never calls `exchange_free` for zero-size `Box` allocations,
// so they can all point to this `static`. It would be incorrect to use a null
// pointer, due to enums assuming types like unique pointers are never null.
pub static mut EMPTY: uint = 12345;
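
// A minimal usage sketch of the public API above: a round trip through
// `allocate`, `reallocate` and `deallocate`. The sizes and the 8-byte
// alignment are arbitrary example values; the documented contract only
// requires a non-zero size and a power-of-two alignment no larger than the
// platform's largest supported page size.
#[cfg(test)]
mod usage_sketch {
    use super::{allocate, reallocate, deallocate};

    #[test]
    fn allocate_reallocate_deallocate_roundtrip() {
        unsafe {
            // Request 64 bytes with 8-byte alignment and write through the
            // returned pointer.
            let ptr = allocate(64, 8);
            *ptr = 0xAA;

            // Grow to 128 bytes, passing the same alignment and the
            // previously requested size as `old_size`. Contents are
            // preserved up to the old size.
            let ptr = reallocate(ptr, 128, 8, 64);
            assert_eq!(*ptr, 0xAA);

            // Free with the parameters the allocation now has.
            deallocate(ptr, 128, 8);
        }
    }
}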
/// The allocator for unique pointers.
#[cfg(not(test))]
#[lang="exchange_malloc"]
#[inline]
unsafe fn exchange_malloc(size: uint, align: uint) -> *mut u8 {
    if size == 0 {
        &EMPTY as *const uint as *mut u8
    } else {
        allocate(size, align)
    }
}

#[cfg(not(test))]
#[lang="exchange_free"]
#[inline]
unsafe fn exchange_free(ptr: *mut u8, size: uint, align: uint) {
    deallocate(ptr, size, align);
}

// FIXME: #7496
#[cfg(stage0, not(test))]
#[lang="closure_exchange_malloc"]
#[inline]
#[allow(deprecated)]
unsafe fn closure_exchange_malloc(drop_glue: fn(*mut u8), size: uint,
                                  align: uint) -> *mut u8 {
    let total_size = util::get_box_size(size, align);
    let p = allocate(total_size, 8);

    let alloc = p as *mut raw::Box<()>;
    (*alloc).drop_glue = drop_glue;

    alloc as *mut u8
}

// FIXME: #7496
#[cfg(not(stage0), not(test))]
#[lang="closure_exchange_malloc"]
#[inline]
#[allow(deprecated)]
unsafe fn closure_exchange_malloc(drop_glue: fn(*mut u8), size: uint,
                                  align: uint) -> *mut u8 {
    let p = allocate(size, align);

    let alloc = p as *mut raw::Box<()>;
    (*alloc).drop_glue = drop_glue;

    alloc as *mut u8
}

// The minimum alignment guaranteed by the architecture. This value is used to
// add fast paths for low alignment values. In practice, the alignment is a
// constant at the call site and the branch will be optimized out.
#[cfg(target_arch = "arm")]
#[cfg(target_arch = "mips")]
#[cfg(target_arch = "mipsel")]
static MIN_ALIGN: uint = 8;
#[cfg(target_arch = "x86")]
#[cfg(target_arch = "x86_64")]
static MIN_ALIGN: uint = 16;

#[cfg(jemalloc)]
mod imp {
    use core::option::{None, Option};
    use core::ptr::{RawPtr, mut_null, null};
    use core::num::Int;
    use libc::{c_char, c_int, c_void, size_t};
    use super::MIN_ALIGN;

    #[link(name = "jemalloc", kind = "static")]
    #[cfg(not(test))]
    extern {}

    extern {
        fn je_mallocx(size: size_t, flags: c_int) -> *mut c_void;
        fn je_rallocx(ptr: *mut c_void, size: size_t, flags: c_int) -> *mut c_void;
        fn je_xallocx(ptr: *mut c_void, size: size_t, extra: size_t,
                      flags: c_int) -> size_t;
        #[cfg(stage0)]
        fn je_dallocx(ptr: *mut c_void, flags: c_int);
        #[cfg(not(stage0))]
        fn je_sdallocx(ptr: *mut c_void, size: size_t, flags: c_int);
        fn je_nallocx(size: size_t, flags: c_int) -> size_t;
        fn je_malloc_stats_print(write_cb: Option<extern "C" fn(cbopaque: *mut c_void,
                                                                *const c_char)>,
                                 cbopaque: *mut c_void,
                                 opts: *const c_char);
    }

    // -lpthread needs to occur after -ljemalloc, the earlier argument isn't enough
    #[cfg(not(windows), not(target_os = "android"))]
    #[link(name = "pthread")]
    extern {}

    // MALLOCX_ALIGN(a) macro
    #[inline(always)]
    fn mallocx_align(a: uint) -> c_int { a.trailing_zeros() as c_int }

    #[inline(always)]
    fn align_to_flags(align: uint) -> c_int {
        if align <= MIN_ALIGN { 0 } else { mallocx_align(align) }
    }

    #[inline]
    pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 {
        let flags = align_to_flags(align);
        let ptr = je_mallocx(size as size_t, flags) as *mut u8;
        if ptr.is_null() {
            ::oom()
        }
        ptr
    }

    #[inline]
    pub unsafe fn reallocate(ptr: *mut u8, size: uint, align: uint,
                             _old_size: uint) -> *mut u8 {
        let flags = align_to_flags(align);
        let ptr = je_rallocx(ptr as *mut c_void, size as size_t, flags) as *mut u8;
        if ptr.is_null() {
            ::oom()
        }
        ptr
    }

    #[inline]
    pub unsafe fn reallocate_inplace(ptr: *mut u8, size: uint, align: uint,
                                     _old_size: uint) -> bool {
        let flags = align_to_flags(align);
        // `je_xallocx` resizes in place and returns the resulting real size;
        // report success only if it matches the requested size.
        je_xallocx(ptr as *mut c_void, size as size_t, 0, flags) == size as size_t
    }

    #[inline]
    #[cfg(stage0)]
    pub unsafe fn deallocate(ptr: *mut u8, _size: uint, align: uint) {
        let flags = align_to_flags(align);
        je_dallocx(ptr as *mut c_void, flags)
    }

    #[inline]
    #[cfg(not(stage0))]
    pub unsafe fn deallocate(ptr: *mut u8, size: uint, align: uint) {
        let flags = align_to_flags(align);
        je_sdallocx(ptr as *mut c_void, size as size_t, flags)
    }

    #[inline]
    pub fn usable_size(size: uint, align: uint) -> uint {
        let flags = align_to_flags(align);
        unsafe { je_nallocx(size as size_t, flags) as uint }
    }

    pub fn stats_print() {
        unsafe {
            je_malloc_stats_print(None, mut_null(), null())
        }
    }
}
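
// A minimal sketch of the flag encoding the jemalloc backend above relies on:
// MALLOCX_ALIGN packs lg(align) into the flags word, and for a power-of-two
// alignment the trailing-zero count used by `mallocx_align` is exactly that
// base-2 logarithm. The `lg` helper below is hypothetical, written out only
// to make the equivalence concrete.
#[cfg(test)]
mod align_flags_sketch {
    #[test]
    fn trailing_zeros_is_lg_for_powers_of_two() {
        // lg(a) by repeated halving; agrees with `a.trailing_zeros()`
        // whenever `a` is a power of two.
        fn lg(mut a: uint) -> uint {
            let mut n = 0;
            while a > 1 {
                a >>= 1;
                n += 1;
            }
            n
        }
        assert_eq!(lg(8), 3);     // MALLOCX_ALIGN(8)    == 3
        assert_eq!(lg(16), 4);    // MALLOCX_ALIGN(16)   == 4
        assert_eq!(lg(4096), 12); // MALLOCX_ALIGN(4096) == 12
    }
}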
#[cfg(not(jemalloc), unix)]
mod imp {
    use core::cmp;
    use core::mem;
    use core::ptr;
    use libc;
    use libc_heap;
    use super::MIN_ALIGN;

    extern {
        fn posix_memalign(memptr: *mut *mut libc::c_void,
                          align: libc::size_t,
                          size: libc::size_t) -> libc::c_int;
    }

    #[inline]
    pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 {
        if align <= MIN_ALIGN {
            libc_heap::malloc_raw(size)
        } else {
            let mut out = 0 as *mut libc::c_void;
            let ret = posix_memalign(&mut out,
                                     align as libc::size_t,
                                     size as libc::size_t);
            if ret != 0 {
                ::oom();
            }
            out as *mut u8
        }
    }

    #[inline]
    pub unsafe fn reallocate(ptr: *mut u8, size: uint, align: uint,
                             old_size: uint) -> *mut u8 {
        if align <= MIN_ALIGN {
            libc_heap::realloc_raw(ptr, size)
        } else {
            // No aligned realloc on this path: allocate fresh memory, copy
            // the smaller of the old and new sizes, then free the original.
            let new_ptr = allocate(size, align);
            ptr::copy_memory(new_ptr, ptr as *const u8, cmp::min(size, old_size));
            deallocate(ptr, old_size, align);
            new_ptr
        }
    }

    #[inline]
    pub unsafe fn reallocate_inplace(_ptr: *mut u8, _size: uint, _align: uint,
                                     _old_size: uint) -> bool {
        false
    }

    #[inline]
    pub unsafe fn deallocate(ptr: *mut u8, _size: uint, _align: uint) {
        libc::free(ptr as *mut libc::c_void)
    }

    #[inline]
    pub fn usable_size(size: uint, _align: uint) -> uint {
        size
    }

    pub fn stats_print() {}
}

#[cfg(not(jemalloc), windows)]
mod imp {
    use libc::{c_void, size_t};
    use libc;
    use libc_heap;
    use core::ptr::RawPtr;
    use super::MIN_ALIGN;

    extern {
        fn _aligned_malloc(size: size_t, align: size_t) -> *mut c_void;
        fn _aligned_realloc(block: *mut c_void, size: size_t,
                            align: size_t) -> *mut c_void;
        fn _aligned_free(ptr: *mut c_void);
    }

    #[inline]
    pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 {
        if align <= MIN_ALIGN {
            libc_heap::malloc_raw(size)
        } else {
            let ptr = _aligned_malloc(size as size_t, align as size_t);
            if ptr.is_null() {
                ::oom();
            }
            ptr as *mut u8
        }
    }

    #[inline]
    pub unsafe fn reallocate(ptr: *mut u8, size: uint, align: uint,
                             _old_size: uint) -> *mut u8 {
        if align <= MIN_ALIGN {
            libc_heap::realloc_raw(ptr, size)
        } else {
            let ptr = _aligned_realloc(ptr as *mut c_void, size as size_t,
                                       align as size_t);
            if ptr.is_null() {
                ::oom();
            }
            ptr as *mut u8
        }
    }

    #[inline]
    pub unsafe fn reallocate_inplace(_ptr: *mut u8, _size: uint, _align: uint,
                                     _old_size: uint) -> bool {
        false
    }

    #[inline]
    pub unsafe fn deallocate(ptr: *mut u8, _size: uint, align: uint) {
        // Memory from `_aligned_malloc` must go back through `_aligned_free`;
        // only the low-alignment fast path uses the plain CRT heap.
        if align <= MIN_ALIGN {
            libc::free(ptr as *mut libc::c_void)
        } else {
            _aligned_free(ptr as *mut c_void)
        }
    }

    #[inline]
    pub fn usable_size(size: uint, _align: uint) -> uint {
        size
    }

    pub fn stats_print() {}
}

#[cfg(test)]
mod bench {
    extern crate test;
    use self::test::Bencher;

    #[bench]
    fn alloc_owned_small(b: &mut Bencher) {
        b.iter(|| {
            box 10i
        })
    }
}
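
// A minimal sketch of the zero-size convention established by `EMPTY` and
// `exchange_malloc` above: zero-byte requests are answered with the address
// of `EMPTY`, which is non-null but never actually allocated, so no
// corresponding `exchange_free` call is ever made for it.
#[cfg(test)]
mod zero_size_sketch {
    #[test]
    fn empty_sentinel_is_non_null() {
        // Taking the address of the `static mut` requires `unsafe`; the
        // result is a stable, non-null pointer suitable for zero-size values.
        let p = unsafe { &super::EMPTY as *const uint as *mut u8 };
        assert!(p as uint != 0);
    }
}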