From 3e5360f19465479605de3e9cd7212bf4008f3949 Mon Sep 17 00:00:00 2001
From: Kostya Kortchinsky
Date: Tue, 20 Aug 2019 16:17:08 +0000
Subject: [PATCH] [scudo][standalone] Fix malloc_iterate

Summary:
cferris's Bionic tests found an issue in Scudo's `malloc_iterate`. We
were inclusive of both boundaries, which could result in a `Block`
located on said boundary being accounted for twice, or being accounted
for while iterating over regions that are not ours (usually the
unmapped ones in between Primary regions).

The fix is to exclude the upper boundary in `iterateOverChunks`, and to
add a regression test.

This additionally corrects a typo in a comment, and changes the 64-bit
Primary iteration function to not assume that `BatchClassId` is 0.

Reviewers: cferris, morehouse, hctim, vitalybuka, eugenis

Reviewed By: hctim

Subscribers: delcypher, #sanitizers, llvm-commits

Tags: #llvm, #sanitizers

Differential Revision: https://reviews.llvm.org/D66231

llvm-svn: 369400
---
 compiler-rt/lib/scudo/standalone/combined.h        |  2 +-
 compiler-rt/lib/scudo/standalone/primary64.h       |  6 ++--
 .../lib/scudo/standalone/tests/wrappers_c_test.cpp | 41 ++++++++++++++++++++++
 3 files changed, 46 insertions(+), 3 deletions(-)

diff --git a/compiler-rt/lib/scudo/standalone/combined.h b/compiler-rt/lib/scudo/standalone/combined.h
index 3c3e4786..05a4ebcd 100644
--- a/compiler-rt/lib/scudo/standalone/combined.h
+++ b/compiler-rt/lib/scudo/standalone/combined.h
@@ -375,7 +375,7 @@ public:
     const uptr From = Base;
     const uptr To = Base + Size;
    auto Lambda = [this, From, To, Callback, Arg](uptr Block) {
-      if (Block < From || Block > To)
+      if (Block < From || Block >= To)
         return;
       uptr ChunkSize;
       const uptr ChunkBase = getChunkFromBlock(Block, &ChunkSize);
diff --git a/compiler-rt/lib/scudo/standalone/primary64.h b/compiler-rt/lib/scudo/standalone/primary64.h
index fd3709e..96fd1e6 100644
--- a/compiler-rt/lib/scudo/standalone/primary64.h
+++ b/compiler-rt/lib/scudo/standalone/primary64.h
@@ -36,7 +36,7 @@ namespace scudo {
 // freelist to the thread specific freelist, and back.
 //
 // The memory used by this allocator is never unmapped, but can be partially
-// released it the platform allows for it.
+// released if the platform allows for it.
 
 template <class SizeClassMapT, uptr RegionSizeLog> class SizeClassAllocator64 {
 public:
@@ -135,7 +135,9 @@ public:
   }
 
   template <typename F> void iterateOverBlocks(F Callback) const {
-    for (uptr I = 1; I < NumClasses; I++) {
+    for (uptr I = 0; I < NumClasses; I++) {
+      if (I == SizeClassMap::BatchClassId)
+        continue;
       const RegionInfo *Region = getRegionInfo(I);
       const uptr BlockSize = getSizeByClassId(I);
       const uptr From = Region->RegionBeg;
diff --git a/compiler-rt/lib/scudo/standalone/tests/wrappers_c_test.cpp b/compiler-rt/lib/scudo/standalone/tests/wrappers_c_test.cpp
index 5498c16..6d6bbbf 100644
--- a/compiler-rt/lib/scudo/standalone/tests/wrappers_c_test.cpp
+++ b/compiler-rt/lib/scudo/standalone/tests/wrappers_c_test.cpp
@@ -14,6 +14,14 @@
 #include <malloc.h>
 #include <unistd.h>
 
+extern "C" {
+void malloc_enable(void);
+void malloc_disable(void);
+int malloc_iterate(uintptr_t base, size_t size,
+                   void (*callback)(uintptr_t base, size_t size, void *arg),
+                   void *arg);
+}
+
 // Note that every C allocation function in the test binary will be fulfilled
 // by Scudo (this includes the gtest APIs, etc.), which is a test by itself.
 // But this might also lead to unexpected side-effects, since the allocation and
@@ -239,3 +247,36 @@ TEST(ScudoWrappersCTest, MallInfo) {
   MI = mallinfo();
   EXPECT_GE(static_cast<size_t>(MI.fordblks), Free + BypassQuarantineSize);
 }
+
+static uintptr_t BoundaryP;
+static size_t Count;
+
+static void callback(uintptr_t Base, size_t Size, void *Arg) {
+  if (Base == BoundaryP)
+    Count++;
+}
+
+// Verify that a block located on an iteration boundary is not mis-accounted.
+// To achieve this, we allocate a chunk for which the backing block will be
+// aligned on a page, then run the malloc_iterate on both the pages that the
+// block is a boundary for. It must only be seen once by the callback function.
+TEST(ScudoWrappersCTest, MallocIterateBoundary) {
+  const size_t PageSize = sysconf(_SC_PAGESIZE);
+  const size_t BlockDelta = FIRST_32_SECOND_64(8U, 16U);
+  const size_t SpecialSize = PageSize - BlockDelta;
+
+  void *P = malloc(SpecialSize);
+  EXPECT_NE(P, nullptr);
+  BoundaryP = reinterpret_cast<uintptr_t>(P);
+  const uintptr_t Block = BoundaryP - BlockDelta;
+  EXPECT_EQ((Block & (PageSize - 1)), 0U);
+
+  Count = 0U;
+  malloc_disable();
+  malloc_iterate(Block - PageSize, PageSize, callback, nullptr);
+  malloc_iterate(Block, PageSize, callback, nullptr);
+  malloc_enable();
+  EXPECT_EQ(Count, 1U);
+
+  free(P);
+}
-- 
2.7.4
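
For illustration only (not part of the patch): a minimal standalone C++ sketch of the interval logic, showing why the old inclusive upper bound double-counts a block sitting exactly on an iteration boundary, while the half-open check [From, To) adopted above sees it once. The snippet re-implements the check in isolation; all names in it are hypothetical and none of it calls Scudo.

  // boundary_sketch.cpp - illustrative only; mirrors the interval check, not Scudo code.
  #include <cstdint>
  #include <cstdio>

  // Count the blocks that fall inside the iteration range, either with the old
  // inclusive upper bound ([From, To]) or the fixed half-open one ([From, To)).
  static int countBlocks(const uintptr_t *Blocks, int N, uintptr_t From,
                         uintptr_t To, bool InclusiveUpper) {
    int Count = 0;
    for (int I = 0; I < N; I++) {
      const uintptr_t Block = Blocks[I];
      if (Block < From || (InclusiveUpper ? Block > To : Block >= To))
        continue;
      Count++;
    }
    return Count;
  }

  int main() {
    const uintptr_t PageSize = 0x1000;
    // A single block sitting exactly on a page boundary.
    const uintptr_t Blocks[] = {2 * PageSize};

    // Iterate over the two adjacent pages, as the regression test does.
    for (const bool InclusiveUpper : {true, false}) {
      const int Seen =
          countBlocks(Blocks, 1, PageSize, 2 * PageSize, InclusiveUpper) +
          countBlocks(Blocks, 1, 2 * PageSize, 3 * PageSize, InclusiveUpper);
      printf("%s upper bound: block seen %d time(s)\n",
             InclusiveUpper ? "inclusive" : "exclusive", Seen);
    }
    // Expected: 2 with the inclusive bound (the bug), 1 with the exclusive one.
    return 0;
  }

This mirrors what the new MallocIterateBoundary test checks: two malloc_iterate calls over adjacent pages must report the boundary block exactly once.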