diff --git a/llvm/lib/Transforms/Instrumentation/MemorySanitizer.cpp b/llvm/lib/Transforms/Instrumentation/MemorySanitizer.cpp
--- a/llvm/lib/Transforms/Instrumentation/MemorySanitizer.cpp
+++ b/llvm/lib/Transforms/Instrumentation/MemorySanitizer.cpp
@@ -143,6 +143,7 @@
 #include "llvm/ADT/APInt.h"
 #include "llvm/ADT/ArrayRef.h"
 #include "llvm/ADT/DepthFirstIterator.h"
+#include "llvm/ADT/SmallSet.h"
 #include "llvm/ADT/SmallString.h"
 #include "llvm/ADT/SmallVector.h"
 #include "llvm/ADT/StringExtras.h"
@@ -247,6 +248,13 @@
        cl::desc("exact handling of relational integer ICmp"),
        cl::Hidden, cl::init(false));
 
+static cl::opt<bool> ClHandleLifetimeIntrinsics(
+    "msan-handle-lifetime-intrinsics",
+    cl::desc(
+        "when possible, poison scoped variables at the beginning of the scope "
+        "(slower, but more precise)"),
+    cl::Hidden, cl::init(true));
+
 // When compiling the Linux kernel, we sometimes see false positives related to
 // MSan being unable to understand that inline assembly calls may initialize
 // local variables.
@@ -1023,6 +1031,10 @@
         : Shadow(S), Origin(O), OrigIns(I) {}
   };
   SmallVector<ShadowOriginAndInsertPoint, 16> InstrumentationList;
+  SmallSet<AllocaInst *, 16> FallbackAllocaSet;
+  bool InstrumentOnlyAllocas = !ClHandleLifetimeIntrinsics;
+  SmallSet<AllocaInst *, 16> AllocaSet;
+  SmallVector<std::pair<IntrinsicInst *, AllocaInst *>, 16> LifetimeStartList;
   SmallVector<StoreInst *, 16> StoreList;
 
   MemorySanitizerVisitor(Function &F, MemorySanitizer &MS,
@@ -1279,6 +1291,24 @@
 
     VAHelper->finalizeInstrumentation();
 
+    // Poison allocas that weren't instrumented at llvm.lifetime.start.
+    // If we failed to find an alloca for one of the llvm.lifetime.start
+    // intrinsics, fall back to instrumenting all the allocas.
+    auto Allocas = InstrumentOnlyAllocas ? FallbackAllocaSet : AllocaSet;
+    for (AllocaInst *AI : Allocas)
+      instrumentAlloca(*AI);
+    if (!InstrumentOnlyAllocas)
+      for (auto Item : LifetimeStartList) {
+        auto Intr = Item.first;
+        auto Alloca = Item.second;
+        ConstantInt *Len = dyn_cast<ConstantInt>(Intr->getArgOperand(0));
+        IRBuilder<> IRB(Intr->getNextNode());
+        if (MS.CompileKernel)
+          instrumentAllocaKmsan(*Alloca, IRB, Len);
+        else
+          instrumentAllocaUserspace(*Alloca, IRB, Len);
+      }
+
     bool InstrumentWithCalls = ClInstrumentationWithCallThreshold >= 0 &&
                                InstrumentationList.size() + StoreList.size() >
                                    (unsigned)ClInstrumentationWithCallThreshold;
@@ -2536,6 +2566,25 @@
     return false;
   }
 
+  void handleLifetimeStart(IntrinsicInst &I) {
+    if (!PoisonStack || InstrumentOnlyAllocas)
+      return;
+    ConstantInt *Len = dyn_cast<ConstantInt>(I.getArgOperand(0));
+    if (Len->isMinusOne())
+      return;
+
+    DenseMap<Value *, AllocaInst *> AllocaForValue;
+    AllocaInst *AI =
+        llvm::findAllocaForValue(I.getArgOperand(1), AllocaForValue);
+    if (!AI) {
+      InstrumentOnlyAllocas = true;
+      return;
+    }
+    if (AllocaSet.count(AI))
+      AllocaSet.erase(AI);
+    LifetimeStartList.push_back(std::make_pair(&I, AI));
+  }
+
   void handleBswap(IntrinsicInst &I) {
     IRBuilder<> IRB(&I);
     Value *Op = I.getArgOperand(0);
@@ -2951,6 +3000,9 @@
 
   void visitIntrinsicInst(IntrinsicInst &I) {
     switch (I.getIntrinsicID()) {
+    case Intrinsic::lifetime_start:
+      handleLifetimeStart(I);
+      break;
     case Intrinsic::bswap:
       handleBswap(I);
       break;
@@ -3413,9 +3465,7 @@
     }
   }
 
-  void visitAllocaInst(AllocaInst &I) {
-    setShadow(&I, getCleanShadow(&I));
-    setOrigin(&I, getCleanOrigin());
+  void instrumentAlloca(AllocaInst &I) {
     IRBuilder<> IRB(I.getNextNode());
     const DataLayout &DL = F.getParent()->getDataLayout();
     uint64_t TypeSize = DL.getTypeAllocSize(I.getAllocatedType());
@@ -3429,6 +3479,15 @@
       instrumentAllocaUserspace(I, IRB, Len);
   }
 
+  void visitAllocaInst(AllocaInst &I) {
+    setShadow(&I, getCleanShadow(&I));
+    setOrigin(&I, getCleanOrigin());
+    // We'll get to this alloca later unless it's poisoned at the corresponding
+    // llvm.lifetime.start.
+    AllocaSet.insert(&I);
+    FallbackAllocaSet.insert(&I);
+  }
+
   void visitSelectInst(SelectInst& I) {
     IRBuilder<> IRB(&I);
     // a = select b, c, d
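Not part of the patch, but a brief illustration of why per-scope poisoning is more precise: the same stack slot can back a different source-level object each time its scope is re-entered, so poisoning only at the alloca lets shadow cleared in one scope instance leak into the next. The sketch below assumes clang emits llvm.lifetime markers for buf (it normally does once optimizations are enabled, e.g. clang++ -O1 -fsanitize=memory); the helper name consume and the loop are made up for this note.

    // Illustrative sketch only; not part of the patch.
    #include <cstdio>
    #include <cstring>

    static std::size_t consume(const char *S) { return std::strlen(S); }

    int main(int argc, char **) {
      std::size_t Total = 0;
      for (int I = 0; I < argc * 4; ++I) {
        char buf[32]; // one alloca, but conceptually a fresh object per iteration
        if (I % 2 == 0)
          std::snprintf(buf, sizeof(buf), "%d", I); // initialized on even iterations only
        // On odd iterations buf is read uninitialized, but its memory (and its
        // MSan shadow) still holds the previous iteration's string.  Poisoning
        // the shadow again at llvm.lifetime.start lets MSan report this read;
        // poisoning only once at the alloca would trust the stale shadow and
        // stay silent.
        Total += consume(buf);
      }
      return static_cast<int>(Total);
    }
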
diff --git a/llvm/test/Instrumentation/MemorySanitizer/alloca.ll b/llvm/test/Instrumentation/MemorySanitizer/alloca.ll
--- a/llvm/test/Instrumentation/MemorySanitizer/alloca.ll
+++ b/llvm/test/Instrumentation/MemorySanitizer/alloca.ll
@@ -89,3 +89,122 @@
 ; KMSAN: call void @__msan_unpoison_alloca(i8* {{.*}}, i64 20)
 ; CHECK: ret void
 
+; Check that every llvm.lifetime.start() causes poisoning of locals.
+define void @lifetime_start() sanitize_memory {
+entry:
+  %x = alloca i32, align 4
+  %c = bitcast i32* %x to i8*
+  br label %another_bb
+
+another_bb:
+  call void @llvm.lifetime.start.p0i8(i64 4, i8* nonnull %c)
+  store i32 7, i32* %x
+  call void @llvm.lifetime.end.p0i8(i64 4, i8* nonnull %c)
+  call void @llvm.lifetime.start.p0i8(i64 4, i8* nonnull %c)
+  store i32 8, i32* %x
+  call void @llvm.lifetime.end.p0i8(i64 4, i8* nonnull %c)
+  ret void
+}
+
+; CHECK-LABEL: define void @lifetime_start(
+; CHECK-LABEL: entry:
+; CHECK: %x = alloca i32
+; CHECK-LABEL: another_bb:
+
+; CHECK: call void @llvm.lifetime.start
+; INLINE: call void @llvm.memset.p0i8.i64(i8* align 4 {{.*}}, i8 -1, i64 4, i1 false)
+; CALL: call void @__msan_poison_stack(i8* {{.*}}, i64 4)
+; ORIGIN: call void @__msan_set_alloca_origin4(i8* {{.*}}, i64 4,
+; KMSAN: call void @__msan_poison_alloca(i8* {{.*}}, i64 4,
+
+; CHECK: call void @llvm.lifetime.start
+; INLINE: call void @llvm.memset.p0i8.i64(i8* align 4 {{.*}}, i8 -1, i64 4, i1 false)
+; CALL: call void @__msan_poison_stack(i8* {{.*}}, i64 4)
+; ORIGIN: call void @__msan_set_alloca_origin4(i8* {{.*}}, i64 4,
+; KMSAN: call void @__msan_poison_alloca(i8* {{.*}}, i64 4,
+; CHECK: ret void
+
+; If an object is variable sized, ignore llvm.lifetime.start.
+define void @lifetime_start_var(i64 %cnt) sanitize_memory {
+entry:
+  %x = alloca i32, i64 %cnt, align 4
+  %c = bitcast i32* %x to i8*
+  call void @llvm.lifetime.start.p0i8(i64 -1, i8* nonnull %c)
+  call void @llvm.lifetime.end.p0i8(i64 -1, i8* nonnull %c)
+  ret void
+}
+
+; CHECK-LABEL: define void @lifetime_start_var(
+; CHECK-LABEL: entry:
+; CHECK: %x = alloca i32, i64 %cnt
+; CHECK: %[[A:.*]] = mul i64 4, %cnt
+; INLINE: call void @llvm.memset.p0i8.i64(i8* align 4 {{.*}}, i8 -1, i64 %[[A]], i1 false)
+; CALL: call void @__msan_poison_stack(i8* {{.*}}, i64 %[[A]])
+; ORIGIN: call void @__msan_set_alloca_origin4(i8* {{.*}}, i64 %[[A]],
+; KMSAN: call void @__msan_poison_alloca(i8* {{.*}}, i64 %[[A]],
+; CHECK: call void @llvm.lifetime.start
+; CHECK: call void @llvm.lifetime.end
+; CHECK: ret void
+
+
+; If we can't trace one of the lifetime markers to a single alloca, fall back
+; to poisoning allocas at the beginning of the function.
+define void @lifetime_no_alloca(i8 %v) sanitize_memory {
+entry:
+  %x = alloca i32, align 4
+  %y = alloca i32, align 4
+  %z = alloca i32, align 4
+  %cx = bitcast i32* %x to i8*
+  %cy = bitcast i32* %y to i8*
+  %cz = bitcast i32* %z to i8*
+  %tobool = icmp eq i8 %v, 0
+  %xy = select i1 %tobool, i32* %x, i32* %y
+  %cxcy = select i1 %tobool, i8* %cx, i8* %cy
+  br label %another_bb
+
+another_bb:
+  call void @llvm.lifetime.start.p0i8(i64 4, i8* nonnull %cz)
+  store i32 7, i32* %z
+  call void @llvm.lifetime.end.p0i8(i64 4, i8* nonnull %cz)
+  call void @llvm.lifetime.start.p0i8(i64 4, i8* nonnull %cxcy)
+  store i32 8, i32* %xy
+  call void @llvm.lifetime.end.p0i8(i64 4, i8* nonnull %cxcy)
+  ret void
+}
+
+; CHECK-LABEL: define void @lifetime_no_alloca(
+; CHECK-LABEL: entry:
+; CHECK: %x = alloca i32
+; INLINE: call void @llvm.memset.p0i8.i64(i8* align 4 {{.*}}, i8 -1, i64 4, i1 false)
+; CALL: call void @__msan_poison_stack(i8* {{.*}}, i64 4)
+; ORIGIN: call void @__msan_set_alloca_origin4(i8* {{.*}}, i64 4,
+; KMSAN: call void @__msan_poison_alloca(i8* {{.*}}, i64 4,
+; CHECK: %y = alloca i32
+; INLINE: call void @llvm.memset.p0i8.i64(i8* align 4 {{.*}}, i8 -1, i64 4, i1 false)
+; CALL: call void @__msan_poison_stack(i8* {{.*}}, i64 4)
+; ORIGIN: call void @__msan_set_alloca_origin4(i8* {{.*}}, i64 4,
+; KMSAN: call void @__msan_poison_alloca(i8* {{.*}}, i64 4,
+; CHECK: %z = alloca i32
+; INLINE: call void @llvm.memset.p0i8.i64(i8* align 4 {{.*}}, i8 -1, i64 4, i1 false)
+; CALL: call void @__msan_poison_stack(i8* {{.*}}, i64 4)
+; ORIGIN: call void @__msan_set_alloca_origin4(i8* {{.*}}, i64 4,
+; KMSAN: call void @__msan_poison_alloca(i8* {{.*}}, i64 4,
+; CHECK-LABEL: another_bb:
+
+; CHECK: call void @llvm.lifetime.start
+; INLINE-NOT: call void @llvm.memset.p0i8.i64(i8* align 4 {{.*}}, i8 -1, i64 4, i1 false)
+; CALL-NOT: call void @__msan_poison_stack(i8* {{.*}}, i64 4)
+; ORIGIN-NOT: call void @__msan_set_alloca_origin4(i8* {{.*}}, i64 4,
+; KMSAN-NOT: call void @__msan_poison_alloca(i8* {{.*}}, i64 4,
+; CHECK: call void @llvm.lifetime.end
+; CHECK: call void @llvm.lifetime.start
+; INLINE-NOT: call void @llvm.memset.p0i8.i64(i8* align 4 {{.*}}, i8 -1, i64 4, i1 false)
+; CALL-NOT: call void @__msan_poison_stack(i8* {{.*}}, i64 4)
+; ORIGIN-NOT: call void @__msan_set_alloca_origin4(i8* {{.*}}, i64 4,
+; KMSAN-NOT: call void @__msan_poison_alloca(i8* {{.*}}, i64 4,
+; CHECK: call void @llvm.lifetime.end
+
+
+declare void @llvm.lifetime.start.p0i8(i64 immarg, i8* nocapture)
+declare void @llvm.lifetime.end.p0i8(i64 immarg, i8* nocapture)
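
A usage note, not part of the patch: the new behavior is on by default and is gated by the msan-handle-lifetime-intrinsics option introduced above, so when running the pass through clang it should be possible to restore the old poison-at-alloca behavior with -mllvm -msan-handle-lifetime-intrinsics=0. The same fallback is also taken automatically whenever a lifetime marker cannot be traced back to a single alloca, which is what the @lifetime_no_alloca test exercises.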