From 79ca0fd1a02132ef3d85aacbfc4ab5eab5911c08 Mon Sep 17 00:00:00 2001
From: Evgeniy Stepanov
Date: Wed, 21 Jan 2015 13:21:31 +0000
Subject: [PATCH] [msan] Update origin for the entire destination range on
 memory store.

Previously we always stored 4 bytes of origin at the destination address
even for 8-byte (and longer) stores.

This should fix rare missing, or incorrect, origin stacks in MSan reports.

llvm-svn: 226658
---
 compiler-rt/test/msan/origin-store-long.cc          | 21 +++
 .../Transforms/Instrumentation/MemorySanitizer.cpp  | 58 +++++++++++---
 .../MemorySanitizer/store-long-origin.ll            | 89 ++++++++++++++++++++++
 3 files changed, 159 insertions(+), 9 deletions(-)
 create mode 100644 compiler-rt/test/msan/origin-store-long.cc
 create mode 100644 llvm/test/Instrumentation/MemorySanitizer/store-long-origin.ll

diff --git a/compiler-rt/test/msan/origin-store-long.cc b/compiler-rt/test/msan/origin-store-long.cc
new file mode 100644
index 0000000..a7c2b7a
--- /dev/null
+++ b/compiler-rt/test/msan/origin-store-long.cc
@@ -0,0 +1,21 @@
+// Check that 8-byte store updates origin for the full store range.
+// RUN: %clangxx_msan -fsanitize-memory-track-origins -m64 -O0 %s -o %t && not %run %t >%t.out 2>&1
+// RUN: FileCheck %s < %t.out && FileCheck %s < %t.out
+// RUN: %clangxx_msan -fsanitize-memory-track-origins -m64 -O2 %s -o %t && not %run %t >%t.out 2>&1
+// RUN: FileCheck %s < %t.out && FileCheck %s < %t.out
+
+#include <stdint.h>
+
+int main() {
+  uint64_t *volatile p = new uint64_t;
+  uint64_t *volatile q = new uint64_t;
+  *p = *q;
+  char *z = (char *)p;
+  return z[6];
+// CHECK: WARNING: MemorySanitizer: use-of-uninitialized-value
+// CHECK: in main {{.*}}origin-store-long.cc:[[@LINE-2]]
+
+// CHECK: Uninitialized value was created by a heap allocation
+// CHECK: in main {{.*}}origin-store-long.cc:[[@LINE-8]]
+}
+
diff --git a/llvm/lib/Transforms/Instrumentation/MemorySanitizer.cpp b/llvm/lib/Transforms/Instrumentation/MemorySanitizer.cpp
index d97eb31..4109bfd 100644
--- a/llvm/lib/Transforms/Instrumentation/MemorySanitizer.cpp
+++ b/llvm/lib/Transforms/Instrumentation/MemorySanitizer.cpp
@@ -120,6 +120,7 @@ using namespace llvm;
 
 #define DEBUG_TYPE "msan"
 
+static const unsigned kOriginSize = 4;
 static const unsigned kMinOriginAlignment = 4;
 static const unsigned kShadowTLSAlignment = 8;
 
@@ -602,21 +603,60 @@ struct MemorySanitizerVisitor : public InstVisitor<MemorySanitizerVisitor> {
     return IRB.CreateCall(MS.MsanChainOriginFn, V);
   }
 
+  Value *originToIntptr(IRBuilder<> &IRB, Value *Origin) {
+    unsigned IntptrSize = MS.DL->getTypeStoreSize(MS.IntptrTy);
+    if (IntptrSize == kOriginSize) return Origin;
+    assert(IntptrSize == kOriginSize * 2);
+    Origin = IRB.CreateIntCast(Origin, MS.IntptrTy, /* isSigned */ false);
+    return IRB.CreateOr(Origin, IRB.CreateShl(Origin, kOriginSize * 8));
+  }
+
+  /// \brief Fill memory range with the given origin value.
+  void paintOrigin(IRBuilder<> &IRB, Value *Origin, Value *OriginPtr,
+                   unsigned Size, unsigned Alignment) {
+    unsigned IntptrAlignment = MS.DL->getABITypeAlignment(MS.IntptrTy);
+    unsigned IntptrSize = MS.DL->getTypeStoreSize(MS.IntptrTy);
+    assert(IntptrAlignment >= kMinOriginAlignment);
+    assert(IntptrSize >= kOriginSize);
+
+    unsigned Ofs = 0;
+    unsigned CurrentAlignment = Alignment;
+    if (Alignment >= IntptrAlignment && IntptrSize > kOriginSize) {
+      Value *IntptrOrigin = originToIntptr(IRB, Origin);
+      Value *IntptrOriginPtr =
+          IRB.CreatePointerCast(OriginPtr, PointerType::get(MS.IntptrTy, 0));
+      for (unsigned i = 0; i < Size / IntptrSize; ++i) {
+        Value *Ptr =
+            i ? IRB.CreateConstGEP1_32(IntptrOriginPtr, i) : IntptrOriginPtr;
+        IRB.CreateAlignedStore(IntptrOrigin, Ptr, CurrentAlignment);
+        Ofs += IntptrSize / kOriginSize;
+        CurrentAlignment = IntptrAlignment;
+      }
+    }
+
+    for (unsigned i = Ofs; i < (Size + kOriginSize - 1) / kOriginSize; ++i) {
+      Value *GEP = i ? IRB.CreateConstGEP1_32(OriginPtr, i) : OriginPtr;
+      IRB.CreateAlignedStore(Origin, GEP, CurrentAlignment);
+      CurrentAlignment = kMinOriginAlignment;
+    }
+  }
+
   void storeOrigin(IRBuilder<> &IRB, Value *Addr, Value *Shadow, Value *Origin,
                    unsigned Alignment, bool AsCall) {
     unsigned OriginAlignment = std::max(kMinOriginAlignment, Alignment);
+    unsigned StoreSize = MS.DL->getTypeStoreSize(Shadow->getType());
     if (isa<StructType>(Shadow->getType())) {
-      IRB.CreateAlignedStore(updateOrigin(Origin, IRB),
-                             getOriginPtr(Addr, IRB, Alignment),
-                             OriginAlignment);
+      paintOrigin(IRB, updateOrigin(Origin, IRB),
+                  getOriginPtr(Addr, IRB, Alignment), StoreSize,
+                  OriginAlignment);
     } else {
       Value *ConvertedShadow = convertToShadowTyNoVec(Shadow, IRB);
       Constant *ConstantShadow = dyn_cast_or_null<Constant>(ConvertedShadow);
       if (ConstantShadow) {
         if (ClCheckConstantShadow && !ConstantShadow->isZeroValue())
-          IRB.CreateAlignedStore(updateOrigin(Origin, IRB),
-                                 getOriginPtr(Addr, IRB, Alignment),
-                                 OriginAlignment);
+          paintOrigin(IRB, updateOrigin(Origin, IRB),
+                      getOriginPtr(Addr, IRB, Alignment), StoreSize,
+                      OriginAlignment);
         return;
       }
 
@@ -636,9 +676,9 @@ struct MemorySanitizerVisitor : public InstVisitor<MemorySanitizerVisitor> {
       Instruction *CheckTerm = SplitBlockAndInsertIfThen(
           Cmp, IRB.GetInsertPoint(), false, MS.OriginStoreWeights);
       IRBuilder<> IRBNew(CheckTerm);
-      IRBNew.CreateAlignedStore(updateOrigin(Origin, IRBNew),
-                                getOriginPtr(Addr, IRBNew, Alignment),
-                                OriginAlignment);
+      paintOrigin(IRBNew, updateOrigin(Origin, IRBNew),
+                  getOriginPtr(Addr, IRBNew, Alignment), StoreSize,
+                  OriginAlignment);
       }
     }
   }
diff --git a/llvm/test/Instrumentation/MemorySanitizer/store-long-origin.ll b/llvm/test/Instrumentation/MemorySanitizer/store-long-origin.ll
new file mode 100644
index 0000000..128f810
--- /dev/null
+++ b/llvm/test/Instrumentation/MemorySanitizer/store-long-origin.ll
@@ -0,0 +1,89 @@
+; RUN: opt < %s -msan -msan-check-access-address=0 -msan-track-origins=1 -S | FileCheck %s
+
+target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128-n8:16:32:64-S128"
+target triple = "x86_64-unknown-linux-gnu"
+
+
+; Test origin for longer stores.
+
+define void @Store8(i64* nocapture %p, i64 %x) sanitize_memory {
+entry:
+  store i64 %x, i64* %p, align 8
+  ret void
+}
+
+; Single 8-byte origin store
+; CHECK-LABEL: define void @Store8(
+; CHECK: store i64 {{.*}}, align 8
+; CHECK: store i64 {{.*}}, align 8
+; CHECK: store i64 {{.*}}, align 8
+; CHECK: ret void
+
+define void @Store8_align4(i64* nocapture %p, i64 %x) sanitize_memory {
+entry:
+  store i64 %x, i64* %p, align 4
+  ret void
+}
+
+; Two 4-byte origin stores
+; CHECK-LABEL: define void @Store8_align4(
+; CHECK: store i64 {{.*}}, align 4
+; CHECK: store i32 {{.*}}, align 4
+; CHECK: getelementptr i32* {{.*}}, i32 1
+; CHECK: store i32 {{.*}}, align 4
+; CHECK: store i64 {{.*}}, align 4
+; CHECK: ret void
+
+%struct.S = type { i32, i32, i32 }
+
+define void @StoreAgg(%struct.S* nocapture %p, %struct.S %x) sanitize_memory {
+entry:
+  store %struct.S %x, %struct.S* %p, align 4
+  ret void
+}
+
+; Three 4-byte origin stores
+; CHECK-LABEL: define void @StoreAgg(
+; CHECK: store { i32, i32, i32 } {{.*}}, align 4
+; CHECK: store i32 {{.*}}, align 4
+; CHECK: getelementptr i32* {{.*}}, i32 1
+; CHECK: store i32 {{.*}}, align 4
+; CHECK: getelementptr i32* {{.*}}, i32 2
+; CHECK: store i32 {{.*}}, align 4
+; CHECK: store %struct.S {{.*}}, align 4
+; CHECK: ret void
+
+
+define void @StoreAgg8(%struct.S* nocapture %p, %struct.S %x) sanitize_memory {
+entry:
+  store %struct.S %x, %struct.S* %p, align 8
+  ret void
+}
+
+; 8-byte + 4-byte origin stores
+; CHECK-LABEL: define void @StoreAgg8(
+; CHECK: store { i32, i32, i32 } {{.*}}, align 8
+; CHECK: store i64 {{.*}}, align 8
+; CHECK: getelementptr i32* {{.*}}, i32 2
+; CHECK: store i32 {{.*}}, align 8
+; CHECK: store %struct.S {{.*}}, align 8
+; CHECK: ret void
+
+
+%struct.Q = type { i64, i64, i64 }
+define void @StoreAgg24(%struct.Q* nocapture %p, %struct.Q %x) sanitize_memory {
+entry:
+  store %struct.Q %x, %struct.Q* %p, align 8
+  ret void
+}
+
+; 3 8-byte origin stores
+; CHECK-LABEL: define void @StoreAgg24(
+; CHECK: store { i64, i64, i64 } {{.*}}, align 8
+; CHECK: store i64 {{.*}}, align 8
+; CHECK: getelementptr i64* {{.*}}, i32 1
+; CHECK: store i64 {{.*}}, align 8
+; CHECK: getelementptr i64* {{.*}}, i32 2
+; CHECK: store i64 {{.*}}, align 8
+; CHECK: store %struct.Q {{.*}}, align 8
+; CHECK: ret void
-- 
2.7.4
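
A standalone sketch of the splitting logic that paintOrigin implements, assuming the 64-bit layout used by the tests above (4-byte origin slots, 8-byte pointer size and alignment). The names planOriginStores and OriginStore are invented for this sketch and are not LLVM APIs; the real pass emits IR stores rather than a plan.

// Models paintOrigin's store splitting: pointer-sized origin stores while
// alignment allows, then 4-byte stores for the tail.
#include <cstdio>
#include <vector>

static const unsigned kOriginSize = 4;         // one origin slot is 4 bytes
static const unsigned kIntptrSize = 8;         // pointer size on a 64-bit target
static const unsigned kIntptrAlignment = 8;    // ABI alignment of that type
static const unsigned kMinOriginAlignment = 4;

struct OriginStore { unsigned Offset, Size, Alignment; };

// Plan the origin stores for a shadow store of Size bytes at the given
// destination alignment, mirroring the two loops in paintOrigin.
static std::vector<OriginStore> planOriginStores(unsigned Size,
                                                 unsigned Alignment) {
  std::vector<OriginStore> Out;
  unsigned Ofs = 0;                      // progress, in units of kOriginSize
  unsigned CurrentAlignment = Alignment;
  if (Alignment >= kIntptrAlignment && kIntptrSize > kOriginSize) {
    // Wide path: one pointer-sized store covers two 4-byte origin slots
    // (the 4-byte origin value duplicated into both halves).
    for (unsigned i = 0; i < Size / kIntptrSize; ++i) {
      Out.push_back({i * kIntptrSize, kIntptrSize, CurrentAlignment});
      Ofs += kIntptrSize / kOriginSize;
      CurrentAlignment = kIntptrAlignment;
    }
  }
  // Tail: 4-byte origin stores, rounding Size up so a partial trailing word
  // (e.g. bytes 8..11 of a 12-byte struct) still gets an origin.
  for (unsigned i = Ofs; i < (Size + kOriginSize - 1) / kOriginSize; ++i) {
    Out.push_back({i * kOriginSize, kOriginSize, CurrentAlignment});
    CurrentAlignment = kMinOriginAlignment;
  }
  return Out;
}

int main() {
  // A 12-byte struct stored with 8-byte alignment, as in the StoreAgg8 test:
  // expect one 8-byte origin store followed by one 4-byte store.
  for (const OriginStore &S : planOriginStores(12, 8))
    std::printf("origin store: %u bytes at offset %u, align %u\n",
                S.Size, S.Offset, S.Alignment);
  return 0;
}

With the alignment dropped to 4, as in the StoreAgg test, the wide path is skipped and the plan becomes three 4-byte stores, which is what the corresponding CHECK lines expect.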