From: Mikhail Kashkarov
Date: Wed, 21 Aug 2019 21:20:39 +0000 (+0300)
Subject: Fix up LSan allocator for x86_64/aarch64
X-Git-Tag: submit/tizen_base/20190826.073610^0
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=refs%2Fchanges%2F48%2F212548%2F1;p=platform%2Fupstream%2Flinaro-gcc.git

Fix up LSan allocator for x86_64/aarch64

Keep LSan's assumptions about allocator cache constraints.

Change-Id: I84ec778233a7285cd6da2e322de5db3ac0a9cb44
---

diff --git a/libsanitizer/lsan/lsan_allocator.cc b/libsanitizer/lsan/lsan_allocator.cc
index a20d1eb..5cd3f4a 100644
--- a/libsanitizer/lsan/lsan_allocator.cc
+++ b/libsanitizer/lsan/lsan_allocator.cc
@@ -78,6 +78,12 @@ typedef CombinedAllocator Allocator;
 
 static Allocator allocator;
 
+#if SANITIZER_WORDSIZE == 64
+static THREADLOCAL AllocatorCache allocator_cache;
+AllocatorCache *GetAllocatorCache() { return &allocator_cache; }
+#else  // SANITIZER_WORDSIZE == 32
+// Try to fit into the dynamic TLS region to be able to dlopen liblsan.
+// Currently works only for 32-bit architectures.
 static THREADLOCAL AllocatorCache *allocator_cache;
 AllocatorCache *GetAllocatorCache() {
   if (UNLIKELY(allocator_cache == nullptr)) {
@@ -87,6 +93,7 @@ AllocatorCache *GetAllocatorCache() {
   }
   return allocator_cache;
 }
+#endif
 
 void InitializeAllocator() {
   SetAllocatorMayReturnNull(common_flags()->allocator_may_return_null);
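
The body of GetAllocatorCache() that falls between the two hunks is where the
32-bit path allocates the cache on first use. Below is a minimal
self-contained sketch of that lazy-initialization pattern, not the code from
this file: the dummy AllocatorCache struct and the raw mmap call are
assumptions standing in for LSan's real per-thread cache type and the
sanitizer-internal mapping primitive.

// Sketch of the 32-bit lazy thread-local cache pattern (hypothetical types).
#include <sys/mman.h>
#include <cstdlib>
#include <new>

struct AllocatorCache {      // dummy stand-in for LSan's per-thread cache
  char storage[4096];
};

static thread_local AllocatorCache *allocator_cache;  // one word of static TLS

AllocatorCache *GetAllocatorCache() {
  if (__builtin_expect(allocator_cache == nullptr, 0)) {  // UNLIKELY()
    // Back the cache with an anonymous mapping instead of static TLS so the
    // library's per-thread TLS image stays small enough for the dynamic TLS
    // region when liblsan is loaded via dlopen.
    void *mem = mmap(nullptr, sizeof(AllocatorCache), PROT_READ | PROT_WRITE,
                     MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    if (mem == MAP_FAILED)
      abort();
    allocator_cache = new (mem) AllocatorCache();
  }
  return allocator_cache;
}

The tradeoff the patch encodes: the pointer-plus-mmap scheme charges static
TLS only one pointer per thread, which is what lets the library fit into the
dynamic TLS region, while the 64-bit path goes back to a plain THREADLOCAL
object to keep LSan's assumptions about the allocator cache, per the commit
message.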