From 4cd932f94e76046500e180bc941e36a2a17cade8 Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Fri, 13 Jun 2014 23:23:31 -0700 Subject: [PATCH] alloc: Allow disabling jemalloc --- configure | 2 + mk/rt.mk | 6 ++ mk/target.mk | 1 + src/liballoc/heap.rs | 247 ++++++++++++++++++++++++++++++++++--------- 4 files changed, 208 insertions(+), 48 deletions(-) diff --git a/configure b/configure index 3a306085c77..bdcfecd8ce9 100755 --- a/configure +++ b/configure @@ -421,6 +421,7 @@ opt llvm-static-stdcpp 0 "statically link to libstdc++ for LLVM" opt rpath 1 "build rpaths into rustc itself" opt nightly 0 "build nightly packages" opt verify-install 1 "verify installed binaries work" +opt jemalloc 1 "build liballoc with jemalloc" valopt prefix "/usr/local" "set installation prefix" valopt local-rust-root "/usr/local" "set prefix for local rust binary" valopt llvm-root "" "set LLVM root" @@ -1167,6 +1168,7 @@ putvar CFG_MANDIR putvar CFG_DISABLE_INJECT_STD_VERSION putvar CFG_JEMALLOC_ROOT putvar CFG_LIBUV_ROOT +putvar CFG_DISABLE_JEMALLOC # Avoid spurious warnings from clang by feeding it original source on # ccache-miss rather than preprocessed input. 
diff --git a/mk/rt.mk b/mk/rt.mk index 7fc82ef6269..49b9d2aad8d 100644 --- a/mk/rt.mk +++ b/mk/rt.mk @@ -306,6 +306,8 @@ $$(JEMALLOC_LOCAL_$(1)): $$(JEMALLOC_DEPS) $$(MKFILE_DEPS) EXTRA_CFLAGS="$$(CFG_CFLAGS_$(1)) $$(CFG_JEMALLOC_CFLAGS_$(1)) -g1" $$(Q)$$(MAKE) -C "$$(JEMALLOC_BUILD_DIR_$(1))" build_lib_static +ifeq ($$(CFG_DISABLE_JEMALLOC),) +RUSTFLAGS_alloc := --cfg jemalloc ifeq ($(1),$$(CFG_BUILD)) ifneq ($$(CFG_JEMALLOC_ROOT),) $$(JEMALLOC_LIB_$(1)): $$(CFG_JEMALLOC_ROOT)/libjemalloc_pic.a @@ -319,6 +321,10 @@ else $$(JEMALLOC_LIB_$(1)): $$(JEMALLOC_LOCAL_$(1)) $$(Q)cp $$< $$@ endif +else +$$(JEMALLOC_LIB_$(1)): $$(MKFILE_DEPS) + $$(Q)touch $$@ +endif ################################################################################ # compiler-rt diff --git a/mk/target.mk b/mk/target.mk index f29a7a1348e..0f63ef9a430 100644 --- a/mk/target.mk +++ b/mk/target.mk @@ -84,6 +84,7 @@ $$(TLIB$(1)_T_$(2)_H_$(3))/stamp.$(4): \ -L "$$(RT_OUTPUT_DIR_$(2))" \ -L "$$(LLVM_LIBDIR_$(2))" \ -L "$$(dir $$(LLVM_STDCPP_LOCATION_$(2)))" \ + $$(RUSTFLAGS_$(4)) \ --out-dir $$(@D) $$< @touch $$@ $$(call LIST_ALL_OLD_GLOB_MATCHES,\ diff --git a/src/liballoc/heap.rs b/src/liballoc/heap.rs index 34a6dfca69b..0e7445e737c 100644 --- a/src/liballoc/heap.rs +++ b/src/liballoc/heap.rs @@ -12,36 +12,9 @@ // FIXME: #13996: mark the `allocate` and `reallocate` return value as `noalias` // and `nonnull` -use core::intrinsics::{abort, cttz32}; -use core::option::{None, Option}; -use core::ptr::{RawPtr, mut_null, null}; -use libc::{c_char, c_int, c_void, size_t}; - #[cfg(not(test))] use core::raw; #[cfg(not(test))] use util; -#[link(name = "jemalloc", kind = "static")] -extern { - fn je_mallocx(size: size_t, flags: c_int) -> *mut c_void; - fn je_rallocx(ptr: *mut c_void, size: size_t, flags: c_int) -> *mut c_void; - fn je_xallocx(ptr: *mut c_void, size: size_t, extra: size_t, - flags: c_int) -> size_t; - fn je_dallocx(ptr: *mut c_void, flags: c_int); - fn je_nallocx(size: size_t, flags: c_int) -> 
size_t; - fn je_malloc_stats_print(write_cb: Option, - cbopaque: *mut c_void, - opts: *c_char); -} - -// -lpthread needs to occur after -ljemalloc, the earlier argument isn't enough -#[cfg(not(windows), not(target_os = "android"))] -#[link(name = "pthread")] -extern {} - -// MALLOCX_ALIGN(a) macro -#[inline(always)] -fn mallocx_align(a: uint) -> c_int { unsafe { cttz32(a as u32) as c_int } } - /// Return a pointer to `size` bytes of memory. /// /// Behavior is undefined if the requested size is 0 or the alignment is not a @@ -49,11 +22,7 @@ fn mallocx_align(a: uint) -> c_int { unsafe { cttz32(a as u32) as c_int } } /// size on the platform. #[inline] pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 { - let ptr = je_mallocx(size as size_t, mallocx_align(align)) as *mut u8; - if ptr.is_null() { - abort() - } - ptr + imp::allocate(size, align) } /// Extend or shrink the allocation referenced by `ptr` to `size` bytes of @@ -67,15 +36,9 @@ pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 { /// create the allocation referenced by `ptr`. The `old_size` parameter may also /// be the value returned by `usable_size` for the requested size. #[inline] -#[allow(unused_variable)] // for the parameter names in the documentation pub unsafe fn reallocate(ptr: *mut u8, size: uint, align: uint, old_size: uint) -> *mut u8 { - let ptr = je_rallocx(ptr as *mut c_void, size as size_t, - mallocx_align(align)) as *mut u8; - if ptr.is_null() { - abort() - } - ptr + imp::reallocate(ptr, size, align, old_size) } /// Extend or shrink the allocation referenced by `ptr` to `size` bytes of @@ -92,11 +55,9 @@ pub unsafe fn reallocate(ptr: *mut u8, size: uint, align: uint, /// create the allocation referenced by `ptr`. The `old_size` parameter may be /// any value in range_inclusive(requested_size, usable_size). 
#[inline] -#[allow(unused_variable)] // for the parameter names in the documentation pub unsafe fn reallocate_inplace(ptr: *mut u8, size: uint, align: uint, old_size: uint) -> bool { - je_xallocx(ptr as *mut c_void, size as size_t, 0, - mallocx_align(align)) == size as size_t + imp::reallocate_inplace(ptr, size, align, old_size) } /// Deallocate the memory referenced by `ptr`. @@ -107,16 +68,15 @@ pub unsafe fn reallocate_inplace(ptr: *mut u8, size: uint, align: uint, /// create the allocation referenced by `ptr`. The `size` parameter may also be /// the value returned by `usable_size` for the requested size. #[inline] -#[allow(unused_variable)] // for the parameter names in the documentation pub unsafe fn deallocate(ptr: *mut u8, size: uint, align: uint) { - je_dallocx(ptr as *mut c_void, mallocx_align(align)) + imp::deallocate(ptr, size, align) } /// Return the usable size of an allocation created with the specified the /// `size` and `align`. #[inline] pub fn usable_size(size: uint, align: uint) -> uint { - unsafe { je_nallocx(size as size_t, mallocx_align(align)) as uint } + imp::usable_size(size, align) } /// Print implementation-defined allocator statistics. @@ -125,9 +85,7 @@ pub fn usable_size(size: uint, align: uint) -> uint { /// during the call. 
#[unstable] pub fn stats_print() { - unsafe { - je_malloc_stats_print(None, mut_null(), null()) - } + imp::stats_print(); } // The compiler never calls `exchange_free` on ~ZeroSizeType, so zero-size @@ -170,6 +128,199 @@ unsafe fn closure_exchange_malloc(drop_glue: fn(*mut u8), size: uint, alloc as *mut u8 } +#[cfg(jemalloc)] +mod imp { + use core::intrinsics::abort; + use core::option::{None, Option}; + use core::ptr::{RawPtr, mut_null, null}; + use core::num::Bitwise; + use libc::{c_char, c_int, c_void, size_t}; + + #[link(name = "jemalloc", kind = "static")] + extern { + fn je_mallocx(size: size_t, flags: c_int) -> *mut c_void; + fn je_rallocx(ptr: *mut c_void, size: size_t, + flags: c_int) -> *mut c_void; + fn je_xallocx(ptr: *mut c_void, size: size_t, extra: size_t, + flags: c_int) -> size_t; + fn je_dallocx(ptr: *mut c_void, flags: c_int); + fn je_nallocx(size: size_t, flags: c_int) -> size_t; + fn je_malloc_stats_print(write_cb: Option, + cbopaque: *mut c_void, + opts: *c_char); + } + + // -lpthread needs to occur after -ljemalloc, the earlier argument isn't enough + #[cfg(not(windows), not(target_os = "android"))] + #[link(name = "pthread")] + extern {} + + // MALLOCX_ALIGN(a) macro + #[inline(always)] + fn mallocx_align(a: uint) -> c_int { a.trailing_zeros() as c_int } + + #[inline] + pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 { + let ptr = je_mallocx(size as size_t, mallocx_align(align)) as *mut u8; + if ptr.is_null() { + abort() + } + ptr + } + + #[inline] + pub unsafe fn reallocate(ptr: *mut u8, size: uint, align: uint, + _old_size: uint) -> *mut u8 { + let ptr = je_rallocx(ptr as *mut c_void, size as size_t, + mallocx_align(align)) as *mut u8; + if ptr.is_null() { + abort() + } + ptr + } + + #[inline] + pub unsafe fn reallocate_inplace(ptr: *mut u8, size: uint, align: uint, + _old_size: uint) -> bool { + je_xallocx(ptr as *mut c_void, size as size_t, 0, + mallocx_align(align)) == size as size_t + } + + #[inline] + pub unsafe fn 
deallocate(ptr: *mut u8, _size: uint, align: uint) { + je_dallocx(ptr as *mut c_void, mallocx_align(align)) + } + + #[inline] + pub fn usable_size(size: uint, align: uint) -> uint { + unsafe { je_nallocx(size as size_t, mallocx_align(align)) as uint } + } + + pub fn stats_print() { + unsafe { + je_malloc_stats_print(None, mut_null(), null()) + } + } +} + +#[cfg(not(jemalloc), unix)] +mod imp { + use core::mem; + use core::ptr; + use libc; + use libc_heap; + + extern { + fn posix_memalign(memptr: *mut *mut libc::c_void, + align: libc::size_t, + size: libc::size_t) -> libc::c_int; + } + + #[inline] + pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 { + // The posix_memalign manpage states + // + // alignment [...] must be a power of two and a multiple of + // sizeof(void *) + // + // The `align` parameter to this function is the *minimum* alignment for + // a block of memory, so we special case everything under `*uint` to + // just pass it to malloc, which is guaranteed to align to at least the + // size of `*uint`. 
+ if align < mem::size_of::<*uint>() { + libc_heap::malloc_raw(size) + } else { + let mut out = 0 as *mut libc::c_void; + let ret = posix_memalign(&mut out, + align as libc::size_t, + size as libc::size_t); + if ret != 0 { + ::oom(); + } + out as *mut u8 + } + } + + #[inline] + pub unsafe fn reallocate(ptr: *mut u8, size: uint, align: uint, + old_size: uint) -> *mut u8 { + let new_ptr = allocate(size, align); + ptr::copy_memory(new_ptr, ptr as *u8, old_size); + deallocate(ptr, old_size, align); + return new_ptr; + } + + #[inline] + pub unsafe fn reallocate_inplace(_ptr: *mut u8, _size: uint, _align: uint, + _old_size: uint) -> bool { + false + } + + #[inline] + pub unsafe fn deallocate(ptr: *mut u8, _size: uint, _align: uint) { + libc::free(ptr as *mut libc::c_void) + } + + #[inline] + pub fn usable_size(size: uint, _align: uint) -> uint { + size + } + + pub fn stats_print() { + } +} + +#[cfg(not(jemalloc), windows)] +mod imp { + use libc::{c_void, size_t}; + use core::ptr::RawPtr; + + extern { + fn _aligned_malloc(size: size_t, align: size_t) -> *mut c_void; + fn _aligned_realloc(block: *mut c_void, size: size_t, + align: size_t) -> *mut c_void; + fn _aligned_free(ptr: *mut c_void); + } + + #[inline] + pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 { + let ptr = _aligned_malloc(size as size_t, align as size_t); + if ptr.is_null() { + ::oom(); + } + ptr as *mut u8 + } + + #[inline] + pub unsafe fn reallocate(ptr: *mut u8, size: uint, align: uint, + _old_size: uint) -> *mut u8 { + let ptr = _aligned_realloc(ptr as *mut c_void, size as size_t, + align as size_t); + if ptr.is_null() { + ::oom(); + } + ptr as *mut u8 + } + + #[inline] + pub unsafe fn reallocate_inplace(_ptr: *mut u8, _size: uint, _align: uint, + _old_size: uint) -> bool { + false + } + + #[inline] + pub unsafe fn deallocate(ptr: *mut u8, _size: uint, _align: uint) { + _aligned_free(ptr as *mut c_void) + } + + #[inline] + pub fn usable_size(size: uint, _align: uint) -> uint { + size + } + + 
pub fn stats_print() {} +} + #[cfg(test)] mod bench { extern crate test; -- 2.44.0