Fix up LSan allocator for x86_64/aarch64
author     Mikhail Kashkarov <m.kashkarov@partner.samsung.com>
           Wed, 21 Aug 2019 21:20:39 +0000 (00:20 +0300)
committer  Mikhail Kashkarov <m.kashkarov@partner.samsung.com>
           Thu, 22 Aug 2019 19:43:50 +0000 (22:43 +0300)
Keep the LSan assumption about allocator cache constraints: on 64-bit targets the allocator cache object stays directly in thread-local storage, as upstream LSan expects, while 32-bit targets keep the dynamically allocated cache so that liblsan can still be dlopen'ed.

Change-Id: I84ec778233a7285cd6da2e322de5db3ac0a9cb44
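
To illustrate the pattern this change keeps, here is a minimal, self-contained C++ sketch of the two cache-placement strategies: on 64-bit targets the per-thread cache object lives directly in TLS, while on 32-bit targets only a pointer does and the cache is allocated lazily. Names such as FakeCache and get_cache are illustrative, not from the sanitizer sources.

    #include <cstdint>
    #include <cstdio>

    struct FakeCache {             // stand-in for LSan's AllocatorCache
      unsigned long allocations = 0;
    };

    #if UINTPTR_MAX == 0xffffffffffffffffULL
    // 64-bit: the cache object itself sits in thread-local storage.
    static thread_local FakeCache cache;
    static FakeCache *get_cache() { return &cache; }
    #else
    // 32-bit: keep only a pointer in TLS and allocate the cache lazily,
    // keeping the TLS footprint small enough for the dynamic TLS region.
    static thread_local FakeCache *cache;
    static FakeCache *get_cache() {
      if (cache == nullptr)
        cache = new FakeCache();   // the real code uses the sanitizer's internal allocator
      return cache;
    }
    #endif

    int main() {
      get_cache()->allocations += 1;
      std::printf("allocations = %lu\n", get_cache()->allocations);
      return 0;
    }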

libsanitizer/lsan/lsan_allocator.cc

index a20d1eb..5cd3f4a 100644
@@ -78,6 +78,12 @@ typedef CombinedAllocator<PrimaryAllocator, AllocatorCache,
           SecondaryAllocator> Allocator;
 
 static Allocator allocator;
+#if SANITIZER_WORDSIZE == 64
+static THREADLOCAL AllocatorCache allocator_cache;
+AllocatorCache *GetAllocatorCache() { return &allocator_cache; }
+#else // SANITIZER_WORDSIZE == 32
+// Try to fit into the dynamic TLS region so that liblsan can be dlopen'ed.
+// Currently this works only on 32-bit architectures.
 static THREADLOCAL AllocatorCache *allocator_cache;
 AllocatorCache *GetAllocatorCache() {
   if (UNLIKELY(allocator_cache == nullptr)) {
@@ -87,6 +93,7 @@ AllocatorCache *GetAllocatorCache() {
   }
   return allocator_cache;
 }
+#endif
 
 void InitializeAllocator() {
   SetAllocatorMayReturnNull(common_flags()->allocator_may_return_null);
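
Read as a whole, the patched region of lsan_allocator.cc comes out roughly as follows (a sketch: the lazy-allocation body of the 32-bit GetAllocatorCache lies outside the visible hunks and is elided):

    static Allocator allocator;

    #if SANITIZER_WORDSIZE == 64
    static THREADLOCAL AllocatorCache allocator_cache;
    AllocatorCache *GetAllocatorCache() { return &allocator_cache; }
    #else  // SANITIZER_WORDSIZE == 32
    // Try to fit into the dynamic TLS region so that liblsan can be dlopen'ed.
    // Currently this works only on 32-bit architectures.
    static THREADLOCAL AllocatorCache *allocator_cache;
    AllocatorCache *GetAllocatorCache() {
      if (UNLIKELY(allocator_cache == nullptr)) {
        // ... lazy allocation of the cache, outside the visible hunks ...
      }
      return allocator_cache;
    }
    #endif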