Index: lib/Transforms/Instrumentation/AddressSanitizer.cpp =================================================================== --- lib/Transforms/Instrumentation/AddressSanitizer.cpp +++ lib/Transforms/Instrumentation/AddressSanitizer.cpp @@ -55,6 +55,7 @@ #include "llvm/Transforms/Utils/PromoteMemToReg.h" #include #include +#include #include #include #include @@ -364,14 +365,14 @@ /// This struct defines the shadow mapping using the rule: /// shadow = (mem >> Scale) ADD-or-OR Offset. -struct ShadowMapping { +struct ShadowMapping { int Scale; uint64_t Offset; bool OrShadowOffset; }; -static ShadowMapping getShadowMapping(Triple &TargetTriple, int LongSize, - bool IsKasan) { +static ShadowMapping getShadowMapping(Triple &TargetTriple, int LongSize, + bool IsKasan) { bool IsAndroid = TargetTriple.isAndroid(); bool IsIOS = TargetTriple.isiOS() || TargetTriple.isWatchOS(); bool IsFreeBSD = TargetTriple.isOSFreeBSD(); @@ -388,7 +389,7 @@ bool IsAArch64 = TargetTriple.getArch() == llvm::Triple::aarch64; bool IsWindows = TargetTriple.isOSWindows(); - ShadowMapping Mapping; + ShadowMapping Mapping; if (LongSize == 32) { // Android is always PIE, which means that the beginning of the address @@ -543,7 +544,7 @@ bool Recover; bool UseAfterScope; Type *IntptrTy; - ShadowMapping Mapping; + ShadowMapping Mapping; DominatorTree *DT; Function *AsanCtorFunction = nullptr; Function *AsanInitFunction = nullptr; @@ -591,7 +592,7 @@ Type *IntptrTy; LLVMContext *C; Triple TargetTriple; - ShadowMapping Mapping; + ShadowMapping Mapping; Function *AsanPoisonGlobals; Function *AsanUnpoisonGlobals; Function *AsanRegisterGlobals; @@ -616,7 +617,7 @@ LLVMContext *C; Type *IntptrTy; Type *IntptrPtrTy; - ShadowMapping Mapping; + ShadowMapping Mapping; SmallVector AllocaVec; SmallSetVector NonInstrumentedStaticAllocaVec; @@ -811,11 +812,21 @@ /// Finds alloca where the value comes from. 
AllocaInst *findAllocaForValue(Value *V); - void poisonStackFrameInline(ArrayRef ShadowBytes, size_t Begin, - size_t End, IRBuilder<> &IRB, Value *ShadowBase, - bool DoPoison); - void poisonStackFrame(ArrayRef ShadowBytes, IRBuilder<> &IRB, - Value *ShadowBase, bool DoPoison); + + // Copies bytes from ShadowBytes into shadow memory for indexes where + // ShadowMask is not zero. If ShadowMask[i] is zero, we assume that + // ShadowBytes[i] is constantly zero and doesn't need to be overwritten. + void copyBytesIntoShadow(ArrayRef ShadowMask, + ArrayRef ShadowBytes, IRBuilder<> &IRB, + Value *ShadowBase); + void copyBytesIntoShadow(ArrayRef ShadowMask, + ArrayRef ShadowBytes, size_t Begin, + size_t End, IRBuilder<> &IRB, Value *ShadowBase); + void copyBytesIntoShadowInline(ArrayRef ShadowMask, + ArrayRef ShadowBytes, size_t Begin, + size_t End, IRBuilder<> &IRB, + Value *ShadowBase); + void poisonAlloca(Value *V, uint64_t Size, IRBuilder<> &IRB, bool DoPoison); Value *createAllocaForLayout(IRBuilder<> &IRB, const ASanStackFrameLayout &L, @@ -1646,7 +1657,7 @@ int LongSize = M.getDataLayout().getPointerSizeInBits(); IntptrTy = Type::getIntNTy(*C, LongSize); TargetTriple = Triple(M.getTargetTriple()); - Mapping = getShadowMapping(TargetTriple, LongSize, CompileKernel); + Mapping = getShadowMapping(TargetTriple, LongSize, CompileKernel); initializeCallbacks(M); bool Changed = false; @@ -1739,7 +1750,7 @@ /*InitArgTypes=*/{}, /*InitArgs=*/{}, kAsanVersionCheckName); appendToGlobalCtors(M, AsanCtorFunction, kAsanCtorAndDtorPriority); } - Mapping = getShadowMapping(TargetTriple, LongSize, CompileKernel); + Mapping = getShadowMapping(TargetTriple, LongSize, CompileKernel); return true; } @@ -1958,15 +1969,12 @@ kAsanAllocasUnpoison, IRB.getVoidTy(), IntptrTy, IntptrTy, nullptr)); } -// If DoPoison is true function copies ShadowBytes into shadow memory. -// If DoPoison is false function sets 0s into shadow memory. 
-// Function assumes that if ShadowBytes[i] is 0, then corresponding shadow -// memory is constant for duration of the function and it contains 0s. So we -// will try to minimize writes into corresponding addresses of the real shadow -// memory. -void FunctionStackPoisoner::poisonStackFrameInline( - ArrayRef ShadowBytes, size_t Begin, size_t End, IRBuilder<> &IRB, - Value *ShadowBase, bool DoPoison) { +// Copies bytes from ShadowBytes into shadow memory for indexes where ShadowMask +// is not zero. If ShadowMask[i] is zero, we assume that ShadowBytes[i] is +// constantly zero and doesn't need to be overwritten. +void FunctionStackPoisoner::copyBytesIntoShadowInline( + ArrayRef ShadowMask, ArrayRef ShadowBytes, size_t Begin, + size_t End, IRBuilder<> &IRB, Value *ShadowBase) { if (Begin >= End) return; @@ -1976,11 +1984,12 @@ const bool IsLittleEndian = F.getParent()->getDataLayout().isLittleEndian(); // Poison given range in shadow using larges store size with out leading and - // trailing zeros. Zeros never change, so they need neither poisoning nor - // up-poisoning, but we don't mind if some of them get into a middle of a - // store. + // trailing zeros in ShadowMask. Zeros never change, so they need neither + // poisoning nor up-poisoning. Still we don't mind if some of them get into a + // middle of a store. for (size_t i = Begin; i < End;) { - if (!ShadowBytes[i]) { + if (!ShadowMask[i]) { + assert(!ShadowBytes[i]); ++i; continue; } @@ -1991,22 +2000,17 @@ StoreSizeInBytes /= 2; // Minimize store size by trimming trailing zeros. 
- for (size_t j = StoreSizeInBytes - 1; j && !ShadowBytes[i + j]; --j) { + for (size_t j = StoreSizeInBytes - 1; j && !ShadowMask[i + j]; --j) { while (j <= StoreSizeInBytes / 2) StoreSizeInBytes /= 2; } uint64_t Val = 0; - if (DoPoison) { - for (size_t j = 0; j < StoreSizeInBytes; j++) { - if (IsLittleEndian) - Val |= (uint64_t)ShadowBytes[i + j] << (8 * j); - else - Val = (Val << 8) | ShadowBytes[i + j]; - } - assert(Val); // Impossible because ShadowBytes[i] != 0 - } else { - Val = 0; + for (size_t j = 0; j < StoreSizeInBytes; j++) { + if (IsLittleEndian) + Val |= (uint64_t)ShadowBytes[i + j] << (8 * j); + else + Val = (Val << 8) | ShadowBytes[i + j]; } Value *Ptr = IRB.CreateAdd(ShadowBase, ConstantInt::get(IntptrTy, i)); @@ -2018,30 +2022,37 @@ } } -void FunctionStackPoisoner::poisonStackFrame(ArrayRef ShadowBytes, - IRBuilder<> &IRB, - Value *ShadowBase, bool DoPoison) { - auto ValueToWrite = [&](size_t i) { - if (DoPoison) - return ShadowBytes[i]; - return static_cast(0); - }; +void FunctionStackPoisoner::copyBytesIntoShadow(ArrayRef ShadowMask, + ArrayRef ShadowBytes, + IRBuilder<> &IRB, + Value *ShadowBase) { + copyBytesIntoShadow(ShadowMask, ShadowBytes, 0, ShadowMask.size(), IRB, + ShadowBase); +} - const size_t End = ShadowBytes.size(); - size_t Done = 0; - for (size_t i = 0, j = 1; i < End; i = j++) { - if (!ShadowBytes[i]) +void FunctionStackPoisoner::copyBytesIntoShadow(ArrayRef ShadowMask, + ArrayRef ShadowBytes, + size_t Begin, size_t End, + IRBuilder<> &IRB, + Value *ShadowBase) { + assert(ShadowMask.size() == ShadowBytes.size()); + size_t Done = Begin; + for (size_t i = Begin, j = Begin + 1; i < End; i = j++) { + if (!ShadowMask[i]) { + assert(!ShadowBytes[i]); continue; - uint8_t Val = ValueToWrite(i); + } + uint8_t Val = ShadowBytes[i]; if (!AsanSetShadowFunc[Val]) continue; // Skip same values. 
- for (; j < End && ShadowBytes[j] && Val == ValueToWrite(j); ++j) { + for (; j < End && ShadowMask[j] && Val == ShadowBytes[j]; ++j) { } if (j - i >= ClMaxInlinePoisoningSize) { - poisonStackFrameInline(ShadowBytes, Done, i, IRB, ShadowBase, DoPoison); + copyBytesIntoShadowInline(ShadowMask, ShadowBytes, Done, i, IRB, + ShadowBase); IRB.CreateCall(AsanSetShadowFunc[Val], {IRB.CreateAdd(ShadowBase, ConstantInt::get(IntptrTy, i)), ConstantInt::get(IntptrTy, j - i)}); @@ -2049,7 +2060,8 @@ } } - poisonStackFrameInline(ShadowBytes, Done, End, IRB, ShadowBase, DoPoison); + copyBytesIntoShadowInline(ShadowMask, ShadowBytes, Done, End, IRB, + ShadowBase); } // Fake stack allocator (asan_fake_stack.h) has 11 size classes @@ -2155,23 +2167,33 @@ // If we have a call to llvm.localescape, keep it in the entry block. if (LocalEscapeCall) LocalEscapeCall->moveBefore(InsBefore); - // Insert poison calls for lifetime intrinsics for static allocas. + // Find static allocas with lifetime analysis. + DenseMap + AllocaToSVDMap; for (const auto &APC : StaticAllocaPoisonCallVec) { assert(APC.InsBefore); assert(APC.AI); assert(ASan.isInterestingAlloca(*APC.AI)); assert(APC.AI->isStaticAlloca()); - IRBuilder<> IRB(APC.InsBefore); - poisonAlloca(APC.AI, APC.Size, IRB, APC.DoPoison); + if (ClExperimentalPoisoning) { + AllocaToSVDMap[APC.AI] = nullptr; + } else { + IRBuilder<> IRB(APC.InsBefore); + poisonAlloca(APC.AI, APC.Size, IRB, APC.DoPoison); + } } SmallVector SVD; SVD.reserve(AllocaVec.size()); for (AllocaInst *AI : AllocaVec) { + size_t UseAfterScopePoisonSize = + AllocaToSVDMap.find(AI) != AllocaToSVDMap.end() + ? 
ASan.getAllocaSizeInBytes(*AI) + : 0; ASanStackVariableDescription D = {AI->getName().data(), ASan.getAllocaSizeInBytes(*AI), - 0, + UseAfterScopePoisonSize, AI->getAlignment(), AI, 0}; @@ -2182,8 +2204,9 @@ size_t MinHeaderSize = ASan.LongSize / 2; const ASanStackFrameLayout &L = ComputeASanStackFrameLayout(SVD, 1ULL << Mapping.Scale, MinHeaderSize); - const SmallVector &ShadowBytes = + const SmallVector &ShadowBytesAfterScope = GetShadowBytesAfterScope(SVD, L); + DEBUG(dbgs() << L.DescriptionString << " --- " << L.FrameSize << "\n"); uint64_t LocalStackSize = L.FrameSize; bool DoStackMalloc = ClUseAfterReturn && !ASan.CompileKernel && @@ -2278,15 +2301,47 @@ IntptrPtrTy); IRB.CreateStore(IRB.CreatePointerCast(&F, IntptrTy), BasePlus2); - // Poison the stack redzones at the entry. + // Poison the stack red zones at the entry. Value *ShadowBase = ASan.memToShadow(LocalStackBase, IRB); - poisonStackFrame(ShadowBytes, IRB, ShadowBase, true); + copyBytesIntoShadow(ShadowBytesAfterScope, ShadowBytesAfterScope, IRB, + ShadowBase); + + if (ClExperimentalPoisoning && !StaticAllocaPoisonCallVec.empty()) { + // Complete AllocaToSVDMap + for (const auto &Desc : SVD) { + auto It = AllocaToSVDMap.find(Desc.AI); + if (It != AllocaToSVDMap.end()) { + It->second = &Desc; + } + } + + const SmallVector &ShadowBytesInScope = GetShadowBytes(SVD, L); + + // Poison static allocas near lifetime intrinsics. + for (const auto &APC : StaticAllocaPoisonCallVec) { + // Must be already set. + assert(AllocaToSVDMap[APC.AI]); + const auto &Desc = *AllocaToSVDMap[APC.AI]; + assert(Desc.Offset % L.Granularity == 0); + size_t Begin = Desc.Offset / L.Granularity; + size_t End = Begin + (APC.Size + L.Granularity - 1) / L.Granularity; + + IRBuilder<> IRB(APC.InsBefore); + copyBytesIntoShadow(ShadowBytesAfterScope, + APC.DoPoison ? 
ShadowBytesAfterScope + : ShadowBytesInScope, + Begin, End, IRB, ShadowBase); + } + } + + SmallVector ShadowBytesClean(ShadowBytesAfterScope.size(), 0); auto UnpoisonStack = [&](IRBuilder<> &IRB) { // Do this always as poisonAlloca can be disabled with // detect_stack_use_after_scope=0. - poisonStackFrame(ShadowBytes, IRB, ShadowBase, false); - if (!StaticAllocaPoisonCallVec.empty()) { + copyBytesIntoShadow(ShadowBytesAfterScope, ShadowBytesClean, IRB, + ShadowBase); + if (!ClExperimentalPoisoning && !StaticAllocaPoisonCallVec.empty()) { // If we poisoned some allocas in llvm.lifetime analysis, // unpoison whole stack frame now. poisonAlloca(LocalStackBase, LocalStackSize, IRB, false); @@ -2321,9 +2376,10 @@ IRBuilder<> IRBPoison(ThenTerm); if (StackMallocIdx <= 4) { int ClassSize = kMinStackMallocSize << StackMallocIdx; - ShadowBytesAfterReturn.resize(ClassSize >> Mapping.Scale, + ShadowBytesAfterReturn.resize(ClassSize / L.Granularity, kAsanStackUseAfterReturnMagic); - poisonStackFrame(ShadowBytesAfterReturn, IRBPoison, ShadowBase, true); + copyBytesIntoShadow(ShadowBytesAfterReturn, ShadowBytesAfterReturn, + IRBPoison, ShadowBase); Value *SavedFlagPtrPtr = IRBPoison.CreateAdd( FakeStack, ConstantInt::get(IntptrTy, ClassSize - ASan.LongSize / 8)); Index: test/Instrumentation/AddressSanitizer/stack-poisoning-experimental-be.ll =================================================================== --- /dev/null +++ test/Instrumentation/AddressSanitizer/stack-poisoning-experimental-be.ll @@ -0,0 +1,324 @@ +; Test check the following function parts: ENTRY, LIFE (lifetime), FAKE (fake stack) and EXIT. +; Test each part can have prefix: no prefix (regular), UAS (use-after-scope), EXP (new poisoning) and UAS-EXP (use-after-scope with new poisoning) + +; Regular stack poisoning. +; RUN: opt < %s -asan -asan-module -asan-experimental-poisoning=0 -asan-use-after-scope=0 -S | FileCheck --check-prefixes=CHECK,ENTRY,LIFE,FAKE,EXIT %s + +; Optimized poisoning. 
Only fake stack part is different from the first test. +; RUN: opt < %s -asan -asan-module -asan-experimental-poisoning=1 -asan-use-after-scope=0 -S | FileCheck --check-prefixes=CHECK,ENTRY,LIFE,FAKE-EXP,EXIT %s + +; Regular stack poisoning with stack-use-after-scope. Only lifetime checks are different from the first test. +; RUN: opt < %s -asan -asan-module -asan-experimental-poisoning=0 -asan-use-after-scope=1 -S | FileCheck --check-prefixes=CHECK,ENTRY,LIFE-UAS,FAKE,EXIT %s + +; Optimized poisoning with stack-use-after-scope. +; RUN: opt < %s -asan -asan-module -asan-experimental-poisoning=1 -asan-use-after-scope=1 -S | FileCheck --check-prefixes=CHECK,ENTRY-UAS-EXP,LIFE-UAS-EXP,FAKE-EXP,EXIT-EXP %s + +target datalayout = "E-m:e-i64:64-n32:64" +target triple = "powerpc64-unknown-linux-gnu" + +declare void @Foo(i8*) + +define void @Bar() uwtable sanitize_address { +entry: + %x = alloca [650 x i8], align 16 + %xx = getelementptr inbounds [650 x i8], [650 x i8]* %x, i64 0, i64 0 + + %y = alloca [13 x i8], align 1 + %yy = getelementptr inbounds [13 x i8], [13 x i8]* %y, i64 0, i64 0 + + %z = alloca [40 x i8], align 1 + %zz = getelementptr inbounds [40 x i8], [40 x i8]* %z, i64 0, i64 0 + + ; CHECK: [[SHADOW_BASE:%[0-9]+]] = add i64 %{{[0-9]+}}, 2199023255552 + + ; F1F1F1F1 + ; ENTRY-NEXT: [[OFFSET:%[0-9]+]] = add i64 [[SHADOW_BASE]], 0 + ; ENTRY-NEXT: [[PTR:%[0-9]+]] = inttoptr i64 [[OFFSET]] to [[TYPE:i32]]* + ; ENTRY-NEXT: store [[TYPE]] -235802127, [[TYPE]]* [[PTR]], align 1 + + ; 02F2F2F2F2F2F2F2 + ; ENTRY-NEXT: [[OFFSET:%[0-9]+]] = add i64 [[SHADOW_BASE]], 85 + ; ENTRY-NEXT: [[PTR:%[0-9]+]] = inttoptr i64 [[OFFSET]] to [[TYPE:i64]]* + ; ENTRY-NEXT: store [[TYPE]] 212499257711850226, [[TYPE]]* [[PTR]], align 1 + + ; F2F2F2F2F2F2F2F2 + ; ENTRY-NEXT: [[OFFSET:%[0-9]+]] = add i64 [[SHADOW_BASE]], 93 + ; ENTRY-NEXT: [[PTR:%[0-9]+]] = inttoptr i64 [[OFFSET]] to [[TYPE:i64]]* + ; ENTRY-NEXT: store [[TYPE]] -940422246894996750, [[TYPE]]* [[PTR]], align 1 + + ; 
F20005F2F2000000 + ; ENTRY-NEXT: [[OFFSET:%[0-9]+]] = add i64 [[SHADOW_BASE]], 101 + ; ENTRY-NEXT: [[PTR:%[0-9]+]] = inttoptr i64 [[OFFSET]] to [[TYPE:i64]]* + ; ENTRY-NEXT: store [[TYPE]] -1008799775530680320, [[TYPE]]* [[PTR]], align 1 + + ; F3F3F3F3 + ; ENTRY-NEXT: [[OFFSET:%[0-9]+]] = add i64 [[SHADOW_BASE]], 111 + ; ENTRY-NEXT: [[PTR:%[0-9]+]] = inttoptr i64 [[OFFSET]] to [[TYPE:i32]]* + ; ENTRY-NEXT: store [[TYPE]] -202116109, [[TYPE]]* [[PTR]], align 1 + + ; F3 + ; ENTRY-NEXT: [[OFFSET:%[0-9]+]] = add i64 [[SHADOW_BASE]], 115 + ; ENTRY-NEXT: [[PTR:%[0-9]+]] = inttoptr i64 [[OFFSET]] to [[TYPE:i8]]* + ; ENTRY-NEXT: store [[TYPE]] -13, [[TYPE]]* [[PTR]], align 1 + + ; F1F1F1F1 + ; ENTRY-UAS-EXP-NEXT: [[OFFSET:%[0-9]+]] = add i64 [[SHADOW_BASE]], 0 + ; ENTRY-UAS-EXP-NEXT: [[PTR:%[0-9]+]] = inttoptr i64 [[OFFSET]] to [[TYPE:i32]]* + ; ENTRY-UAS-EXP-NEXT: store [[TYPE]] -235802127, [[TYPE]]* [[PTR]], align 1 + + ; F8F8F8... + ; ENTRY-UAS-EXP-NEXT: [[OFFSET:%[0-9]+]] = add i64 [[SHADOW_BASE]], 4 + ; ENTRY-UAS-EXP-NEXT: call void @__asan_set_shadow_f8(i64 [[OFFSET]], i64 82) + + ; F2F2F2F2F2F2F2F2 + ; ENTRY-UAS-EXP-NEXT: [[OFFSET:%[0-9]+]] = add i64 [[SHADOW_BASE]], 86 + ; ENTRY-UAS-EXP-NEXT: [[PTR:%[0-9]+]] = inttoptr i64 [[OFFSET]] to [[TYPE:i64]]* + ; ENTRY-UAS-EXP-NEXT: store [[TYPE]] -940422246894996750, [[TYPE]]* [[PTR]], align 1 + + ; F2F2F2F2F2F2F2F2 + ; ENTRY-UAS-EXP-NEXT: [[OFFSET:%[0-9]+]] = add i64 [[SHADOW_BASE]], 94 + ; ENTRY-UAS-EXP-NEXT: [[PTR:%[0-9]+]] = inttoptr i64 [[OFFSET]] to [[TYPE:i64]]* + ; ENTRY-UAS-EXP-NEXT: store [[TYPE]] -940422246894996750, [[TYPE]]* [[PTR]], align 1 + + ; F8F8F2F2F8F8F8F8 + ; ENTRY-UAS-EXP-NEXT: [[OFFSET:%[0-9]+]] = add i64 [[SHADOW_BASE]], 102 + ; ENTRY-UAS-EXP-NEXT: [[PTR:%[0-9]+]] = inttoptr i64 [[OFFSET]] to [[TYPE:i64]]* + ; ENTRY-UAS-EXP-NEXT: store [[TYPE]] -506387832706107144, [[TYPE]]* [[PTR]], align 1 + + ; F8F3F3F3 + ; ENTRY-UAS-EXP-NEXT: [[OFFSET:%[0-9]+]] = add i64 [[SHADOW_BASE]], 110 + ; 
ENTRY-UAS-EXP-NEXT: [[PTR:%[0-9]+]] = inttoptr i64 [[OFFSET]] to [[TYPE:i32]]* + ; ENTRY-UAS-EXP-NEXT: store [[TYPE]] -118230029, [[TYPE]]* [[PTR]], align 1 + + ; F3F3 + ; ENTRY-UAS-EXP-NEXT: [[OFFSET:%[0-9]+]] = add i64 [[SHADOW_BASE]], 114 + ; ENTRY-UAS-EXP-NEXT: [[PTR:%[0-9]+]] = inttoptr i64 [[OFFSET]] to [[TYPE:i16]]* + ; ENTRY-UAS-EXP-NEXT: store [[TYPE]] -3085, [[TYPE]]* [[PTR]], align 1 + + ; CHECK-LABEL: %xx = getelementptr inbounds + ; CHECK-NEXT: %yy = getelementptr inbounds + ; CHECK-NEXT: %zz = getelementptr inbounds + + + call void @llvm.lifetime.start(i64 650, i8* %xx) + ; LIFE-UAS: call void @__asan_unpoison_stack_memory(i64 %{{[0-9]+}}, i64 [[SIZE:650]]) + + ; 0000... + ; ENTRY-UAS-EXP-NEXT: [[OFFSET:%[0-9]+]] = add i64 [[SHADOW_BASE]], 4 + ; ENTRY-UAS-EXP-NEXT: call void @__asan_set_shadow_00(i64 [[OFFSET]], i64 81) + ; 02 + ; ENTRY-UAS-EXP-NEXT: [[OFFSET:%[0-9]+]] = add i64 [[SHADOW_BASE]], 85 + ; ENTRY-UAS-EXP-NEXT: [[PTR:%[0-9]+]] = inttoptr i64 [[OFFSET]] to [[TYPE:i8]]* + ; ENTRY-UAS-EXP-NEXT: store [[TYPE]] 2, [[TYPE]]* [[PTR]], align 1 + + ; CHECK-NEXT: call void @llvm.lifetime.start(i64 650, i8* %xx) + + call void @Foo(i8* %xx) + ; CHECK-NEXT: call void @Foo(i8* %xx) + + call void @llvm.lifetime.end(i64 650, i8* %xx) + ; LIFE-UAS: call void @__asan_poison_stack_memory(i64 %{{[0-9]+}}, i64 [[SIZE]]) + + ; ENTRY-UAS-EXP-NEXT: [[OFFSET:%[0-9]+]] = add i64 [[SHADOW_BASE]], 4 + ; ENTRY-UAS-EXP-NEXT: call void @__asan_set_shadow_f8(i64 [[OFFSET]], i64 82) + + ; CHECK-NEXT: call void @llvm.lifetime.end(i64 650, i8* %xx) + + + call void @llvm.lifetime.start(i64 13, i8* %yy) + ; LIFE-UAS: call void @__asan_unpoison_stack_memory(i64 %{{[0-9]+}}, i64 [[SIZE:13]]) + + ; 0005 + ; ENTRY-UAS-EXP-NEXT: [[OFFSET:%[0-9]+]] = add i64 [[SHADOW_BASE]], 102 + ; ENTRY-UAS-EXP-NEXT: [[PTR:%[0-9]+]] = inttoptr i64 [[OFFSET]] to [[TYPE:i16]]* + ; ENTRY-UAS-EXP-NEXT: store [[TYPE]] 5, [[TYPE]]* [[PTR]], align 1 + + ; CHECK-NEXT: call void @llvm.lifetime.start(i64 
13, i8* %yy) + + call void @Foo(i8* %yy) + ; CHECK-NEXT: call void @Foo(i8* %yy) + + call void @llvm.lifetime.end(i64 13, i8* %yy) + ; LIFE-UAS: call void @__asan_poison_stack_memory(i64 %{{[0-9]+}}, i64 [[SIZE]]) + + ; F8F8 + ; ENTRY-UAS-EXP-NEXT: [[OFFSET:%[0-9]+]] = add i64 [[SHADOW_BASE]], 102 + ; ENTRY-UAS-EXP-NEXT: [[PTR:%[0-9]+]] = inttoptr i64 [[OFFSET]] to [[TYPE:i16]]* + ; ENTRY-UAS-EXP-NEXT: store [[TYPE]] -1800, [[TYPE]]* [[PTR]], align 1 + + ; CHECK-NEXT: call void @llvm.lifetime.end(i64 13, i8* %yy) + + + call void @llvm.lifetime.start(i64 40, i8* %zz) + ; LIFE-UAS: call void @__asan_unpoison_stack_memory(i64 %{{[0-9]+}}, i64 [[SIZE:40]]) + + ; 00000000 + ; ENTRY-UAS-EXP-NEXT: [[OFFSET:%[0-9]+]] = add i64 [[SHADOW_BASE]], 106 + ; ENTRY-UAS-EXP-NEXT: [[PTR:%[0-9]+]] = inttoptr i64 [[OFFSET]] to [[TYPE:i32]]* + ; ENTRY-UAS-EXP-NEXT: store [[TYPE]] 0, [[TYPE]]* [[PTR]], align 1 + ; 00 + ; ENTRY-UAS-EXP-NEXT: [[OFFSET:%[0-9]+]] = add i64 [[SHADOW_BASE]], 110 + ; ENTRY-UAS-EXP-NEXT: [[PTR:%[0-9]+]] = inttoptr i64 [[OFFSET]] to [[TYPE:i8]]* + ; ENTRY-UAS-EXP-NEXT: store [[TYPE]] 0, [[TYPE]]* [[PTR]], align 1 + + ; CHECK-NEXT: call void @llvm.lifetime.start(i64 40, i8* %zz) + + call void @Foo(i8* %zz) + ; CHECK-NEXT: call void @Foo(i8* %zz) + + call void @llvm.lifetime.end(i64 40, i8* %zz) + ; LIFE-UAS: call void @__asan_poison_stack_memory(i64 %{{[0-9]+}}, i64 [[SIZE]]) + + ; F8F8F8F8 + ; ENTRY-UAS-EXP-NEXT: [[OFFSET:%[0-9]+]] = add i64 [[SHADOW_BASE]], 106 + ; ENTRY-UAS-EXP-NEXT: [[PTR:%[0-9]+]] = inttoptr i64 [[OFFSET]] to [[TYPE:i32]]* + ; ENTRY-UAS-EXP-NEXT: store [[TYPE]] -117901064, [[TYPE]]* [[PTR]], align 1 + ; F8 + ; ENTRY-UAS-EXP-NEXT: [[OFFSET:%[0-9]+]] = add i64 [[SHADOW_BASE]], 110 + ; ENTRY-UAS-EXP-NEXT: [[PTR:%[0-9]+]] = inttoptr i64 [[OFFSET]] to [[TYPE:i8]]* + ; ENTRY-UAS-EXP-NEXT: store [[TYPE]] -8, [[TYPE]]* [[PTR]], align 1 + + ; CHECK-NEXT: call void @llvm.lifetime.end(i64 40, i8* %zz) + + + ; CHECK-LABEL: