From: Francis Ricci
Date: Tue, 11 Apr 2017 20:05:02 +0000 (+0000)
Subject: Implement standalone lsan interceptors for OS X
X-Git-Tag: llvmorg-5.0.0-rc1~8022
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=03b2a8e47e3c780920e89b0b0e05e960a2007e4f;p=platform%2Fupstream%2Fllvm.git

Implement standalone lsan interceptors for OS X

Summary: Mimics the existing tsan and asan implementations of Darwin interception.

Reviewers: kubamracek, kcc, glider

Subscribers: llvm-commits, mgorny

Differential Revision: https://reviews.llvm.org/D31889

llvm-svn: 299979
---
diff --git a/compiler-rt/lib/lsan/CMakeLists.txt b/compiler-rt/lib/lsan/CMakeLists.txt
index e9db8ac..55c825f 100644
--- a/compiler-rt/lib/lsan/CMakeLists.txt
+++ b/compiler-rt/lib/lsan/CMakeLists.txt
@@ -13,6 +13,7 @@ set(LSAN_SOURCES
   lsan_allocator.cc
   lsan_linux.cc
   lsan_interceptors.cc
+  lsan_malloc_mac.cc
   lsan_preinit.cc
   lsan_thread.cc)
diff --git a/compiler-rt/lib/lsan/lsan.cc b/compiler-rt/lib/lsan/lsan.cc
index c7c3429..6c4767d 100644
--- a/compiler-rt/lib/lsan/lsan.cc
+++ b/compiler-rt/lib/lsan/lsan.cc
@@ -76,6 +76,7 @@ extern "C" void __lsan_init() {
   InitializeFlags();
   InitCommonLsan();
   InitializeAllocator();
+  ReplaceSystemMalloc();
   InitTlsSize();
   InitializeInterceptors();
   InitializeThreadRegistry();
diff --git a/compiler-rt/lib/lsan/lsan.h b/compiler-rt/lib/lsan/lsan.h
index ec5eb93..1061d2f 100644
--- a/compiler-rt/lib/lsan/lsan.h
+++ b/compiler-rt/lib/lsan/lsan.h
@@ -41,6 +41,13 @@ namespace __lsan {
 
 void InitializeInterceptors();
+void ReplaceSystemMalloc();
+
+#define ENSURE_LSAN_INITED do { \
+  CHECK(!lsan_init_is_running); \
+  if (!lsan_inited) \
+    __lsan_init(); \
+} while (0)
 
 }  // namespace __lsan
diff --git a/compiler-rt/lib/lsan/lsan_allocator.cc b/compiler-rt/lib/lsan/lsan_allocator.cc
index 9c8acc1..011979e 100644
--- a/compiler-rt/lib/lsan/lsan_allocator.cc
+++ b/compiler-rt/lib/lsan/lsan_allocator.cc
@@ -117,6 +117,37 @@ uptr GetMallocUsableSize(const void *p) {
   return m->requested_size;
 }
 
+void *lsan_memalign(uptr alignment, uptr size, const StackTrace &stack) {
+  return Allocate(stack, size, alignment, kAlwaysClearMemory);
+}
+
+void *lsan_malloc(uptr size, const StackTrace &stack) {
+  return Allocate(stack, size, 1, kAlwaysClearMemory);
+}
+
+void lsan_free(void *p) {
+  Deallocate(p);
+}
+
+void *lsan_realloc(void *p, uptr size, const StackTrace &stack) {
+  return Reallocate(stack, p, size, 1);
+}
+
+void *lsan_calloc(uptr nmemb, uptr size, const StackTrace &stack) {
+  size *= nmemb;
+  return Allocate(stack, size, 1, true);
+}
+
+void *lsan_valloc(uptr size, const StackTrace &stack) {
+  if (size == 0)
+    size = GetPageSizeCached();
+  return Allocate(stack, size, GetPageSizeCached(), kAlwaysClearMemory);
+}
+
+uptr lsan_mz_size(const void *p) {
+  return GetMallocUsableSize(p);
+}
+
 ///// Interface to the common LSan module. /////
 
 void LockAllocator() {
diff --git a/compiler-rt/lib/lsan/lsan_allocator.h b/compiler-rt/lib/lsan/lsan_allocator.h
index bd19211..e5def17 100644
--- a/compiler-rt/lib/lsan/lsan_allocator.h
+++ b/compiler-rt/lib/lsan/lsan_allocator.h
@@ -36,6 +36,8 @@ void GetAllocatorCacheRange(uptr *begin, uptr *end);
 void AllocatorThreadFinish();
 void InitializeAllocator();
 
+const bool kAlwaysClearMemory = true;
+
 struct ChunkMetadata {
   u8 allocated : 8;  // Must be first.
  ChunkTag tag : 2;
@@ -72,6 +74,15 @@ typedef SizeClassAllocator64<AP64> PrimaryAllocator;
 typedef SizeClassAllocatorLocalCache<PrimaryAllocator> AllocatorCache;
 
 AllocatorCache *GetAllocatorCache();
+
+void *lsan_memalign(uptr alignment, uptr size, const StackTrace &stack);
+void *lsan_malloc(uptr size, const StackTrace &stack);
+void lsan_free(void *p);
+void *lsan_realloc(void *p, uptr size, const StackTrace &stack);
+void *lsan_calloc(uptr nmemb, uptr size, const StackTrace &stack);
+void *lsan_valloc(uptr size, const StackTrace &stack);
+uptr lsan_mz_size(const void *p);
+
 }  // namespace __lsan
 
 #endif  // LSAN_ALLOCATOR_H
diff --git a/compiler-rt/lib/lsan/lsan_interceptors.cc b/compiler-rt/lib/lsan/lsan_interceptors.cc
index 876b39d..2cc6fad 100644
--- a/compiler-rt/lib/lsan/lsan_interceptors.cc
+++ b/compiler-rt/lib/lsan/lsan_interceptors.cc
@@ -39,29 +39,22 @@ int pthread_key_create(unsigned *key, void (*destructor)(void* v));
 int pthread_setspecific(unsigned key, const void *v);
 }
 
-#define ENSURE_LSAN_INITED do { \
-  CHECK(!lsan_init_is_running); \
-  if (!lsan_inited) \
-    __lsan_init(); \
-} while (0)
-
 ///// Malloc/free interceptors. /////
 
-const bool kAlwaysClearMemory = true;
-
 namespace std {
   struct nothrow_t;
 }
 
+#if !SANITIZER_MAC
 INTERCEPTOR(void*, malloc, uptr size) {
   ENSURE_LSAN_INITED;
   GET_STACK_TRACE_MALLOC;
-  return Allocate(stack, size, 1, kAlwaysClearMemory);
+  return lsan_malloc(size, stack);
 }
 
 INTERCEPTOR(void, free, void *p) {
   ENSURE_LSAN_INITED;
-  Deallocate(p);
+  lsan_free(p);
 }
 
 INTERCEPTOR(void*, calloc, uptr nmemb, uptr size) {
@@ -79,28 +72,42 @@ INTERCEPTOR(void*, calloc, uptr nmemb, uptr size) {
   if (CallocShouldReturnNullDueToOverflow(size, nmemb)) return nullptr;
   ENSURE_LSAN_INITED;
   GET_STACK_TRACE_MALLOC;
-  size *= nmemb;
-  return Allocate(stack, size, 1, true);
+  return lsan_calloc(nmemb, size, stack);
 }
 
 INTERCEPTOR(void*, realloc, void *q, uptr size) {
   ENSURE_LSAN_INITED;
   GET_STACK_TRACE_MALLOC;
-  return Reallocate(stack, q, size, 1);
+  return lsan_realloc(q, size, stack);
 }
 
+INTERCEPTOR(int, posix_memalign, void **memptr, uptr alignment, uptr size) {
+  ENSURE_LSAN_INITED;
+  GET_STACK_TRACE_MALLOC;
+  *memptr = lsan_memalign(alignment, size, stack);
+  // FIXME: Return ENOMEM if user requested more than max alloc size.
+  return 0;
+}
+
+INTERCEPTOR(void*, valloc, uptr size) {
+  ENSURE_LSAN_INITED;
+  GET_STACK_TRACE_MALLOC;
+  return lsan_valloc(size, stack);
+}
+#endif
+
 #if SANITIZER_INTERCEPT_MEMALIGN
 INTERCEPTOR(void*, memalign, uptr alignment, uptr size) {
   ENSURE_LSAN_INITED;
   GET_STACK_TRACE_MALLOC;
-  return Allocate(stack, size, alignment, kAlwaysClearMemory);
+  return lsan_memalign(alignment, size, stack);
 }
 #define LSAN_MAYBE_INTERCEPT_MEMALIGN INTERCEPT_FUNCTION(memalign)
 
 INTERCEPTOR(void *, __libc_memalign, uptr alignment, uptr size) {
   ENSURE_LSAN_INITED;
   GET_STACK_TRACE_MALLOC;
-  void *res = Allocate(stack, size, alignment, kAlwaysClearMemory);
+  void *res = lsan_memalign(alignment, size, stack);
   DTLS_on_libc_memalign(res, size);
   return res;
 }
@@ -114,29 +121,13 @@ INTERCEPTOR(void *, __libc_memalign, uptr alignment, uptr size) {
 INTERCEPTOR(void*, aligned_alloc, uptr alignment, uptr size) {
   ENSURE_LSAN_INITED;
   GET_STACK_TRACE_MALLOC;
-  return Allocate(stack, size, alignment, kAlwaysClearMemory);
+  return lsan_memalign(alignment, size, stack);
 }
 #define LSAN_MAYBE_INTERCEPT_ALIGNED_ALLOC INTERCEPT_FUNCTION(aligned_alloc)
 #else
 #define LSAN_MAYBE_INTERCEPT_ALIGNED_ALLOC
 #endif
 
-INTERCEPTOR(int, posix_memalign, void **memptr, uptr alignment, uptr size) {
-  ENSURE_LSAN_INITED;
-  GET_STACK_TRACE_MALLOC;
-  *memptr = Allocate(stack, size, alignment, kAlwaysClearMemory);
-  // FIXME: Return ENOMEM if user requested more than max alloc size.
-  return 0;
-}
-
-INTERCEPTOR(void*, valloc, uptr size) {
-  ENSURE_LSAN_INITED;
-  GET_STACK_TRACE_MALLOC;
-  if (size == 0)
-    size = GetPageSizeCached();
-  return Allocate(stack, size, GetPageSizeCached(), kAlwaysClearMemory);
-}
-
 #if SANITIZER_INTERCEPT_MALLOC_USABLE_SIZE
 INTERCEPTOR(uptr, malloc_usable_size, void *ptr) {
   ENSURE_LSAN_INITED;
diff --git a/compiler-rt/lib/lsan/lsan_linux.cc b/compiler-rt/lib/lsan/lsan_linux.cc
index a60f741..c9749c7 100644
--- a/compiler-rt/lib/lsan/lsan_linux.cc
+++ b/compiler-rt/lib/lsan/lsan_linux.cc
@@ -26,6 +26,8 @@ void SetCurrentThread(u32 tid) { current_thread_tid = tid; }
 static THREADLOCAL AllocatorCache allocator_cache;
 AllocatorCache *GetAllocatorCache() { return &allocator_cache; }
 
+void ReplaceSystemMalloc() {}
+
 }  // namespace __lsan
 
 #endif  // SANITIZER_LINUX
diff --git a/compiler-rt/lib/lsan/lsan_malloc_mac.cc b/compiler-rt/lib/lsan/lsan_malloc_mac.cc
new file mode 100644
index 0000000..9c1dacc
--- /dev/null
+++ b/compiler-rt/lib/lsan/lsan_malloc_mac.cc
@@ -0,0 +1,55 @@
+//===-- lsan_malloc_mac.cc ------------------------------------------------===//
+//
+//                     The LLVM Compiler Infrastructure
+//
+// This file is distributed under the University of Illinois Open Source
+// License. See LICENSE.TXT for details.
+//
+//===----------------------------------------------------------------------===//
+//
+// This file is a part of LeakSanitizer (LSan), a memory leak detector.
+//
+// Mac-specific malloc interception.
+//===----------------------------------------------------------------------===//
+
+#include "sanitizer_common/sanitizer_platform.h"
+#if SANITIZER_MAC
+
+#include "lsan.h"
+#include "lsan_allocator.h"
+#include "lsan_thread.h"
+
+using namespace __lsan;
+#define COMMON_MALLOC_ZONE_NAME "lsan"
+#define COMMON_MALLOC_ENTER() ENSURE_LSAN_INITED
+#define COMMON_MALLOC_SANITIZER_INITIALIZED lsan_inited
+#define COMMON_MALLOC_FORCE_LOCK()
+#define COMMON_MALLOC_FORCE_UNLOCK()
+#define COMMON_MALLOC_MEMALIGN(alignment, size) \
+  GET_STACK_TRACE_MALLOC; \
+  void *p = lsan_memalign(alignment, size, stack)
+#define COMMON_MALLOC_MALLOC(size) \
+  GET_STACK_TRACE_MALLOC; \
+  void *p = lsan_malloc(size, stack)
+#define COMMON_MALLOC_REALLOC(ptr, size) \
+  GET_STACK_TRACE_MALLOC; \
+  void *p = lsan_realloc(ptr, size, stack)
+#define COMMON_MALLOC_CALLOC(count, size) \
+  GET_STACK_TRACE_MALLOC; \
+  void *p = lsan_calloc(count, size, stack)
+#define COMMON_MALLOC_VALLOC(size) \
+  GET_STACK_TRACE_MALLOC; \
+  void *p = lsan_valloc(size, stack)
+#define COMMON_MALLOC_FREE(ptr) \
+  lsan_free(ptr)
+#define COMMON_MALLOC_SIZE(ptr) \
+  uptr size = lsan_mz_size(ptr)
+#define COMMON_MALLOC_FILL_STATS(zone, stats)
+#define COMMON_MALLOC_REPORT_UNKNOWN_REALLOC(ptr, zone_ptr, zone_name) \
+  (void)zone_name; \
+  Report("mz_realloc(%p) -- attempting to realloc unallocated memory.\n", ptr);
+#define COMMON_MALLOC_NAMESPACE __lsan
+
+#include "sanitizer_common/sanitizer_malloc_mac.inc"
+
+#endif // SANITIZER_MAC
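
Note (not part of the patch): a minimal sketch of how the new Darwin interception could be exercised, assuming a clang/compiler-rt build that contains this change and whose driver accepts -fsanitize=leak for the target; the file name and flags below are illustrative.

// leak_test.cc -- deliberately leaks one heap block so LSan has something to report.
#include <cstdlib>

int main() {
  // With this patch, Darwin allocations are routed through the "lsan" malloc
  // zone set up by ReplaceSystemMalloc() (provided here by including
  // sanitizer_malloc_mac.inc), so the block below is tracked by the LSan allocator.
  void *p = malloc(128);
  (void)p;  // intentionally never freed
  return 0;
}

// Possible build/run invocation (illustrative):
//   clang++ -fsanitize=leak -g leak_test.cc -o leak_test && ./leak_test
// Expected outcome: a LeakSanitizer report attributing a 128-byte leak to main().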