Index: include/llvm/CodeGen/StackProtector.h =================================================================== --- include/llvm/CodeGen/StackProtector.h +++ include/llvm/CodeGen/StackProtector.h @@ -83,9 +83,15 @@ // A prologue is generated. bool HasPrologue = false; + // A stackguard call exists in the function. + bool HasStackGuard = false; + // IR checking code is generated. bool HasIRCheck = false; + // Place on stack that stores the stack guard. + AllocaInst *StackGuardAlloca = nullptr; + /// InsertStackProtectors - Insert code into the prologue and epilogue of /// the function. /// Index: lib/CodeGen/StackProtector.cpp =================================================================== --- lib/CodeGen/StackProtector.cpp +++ lib/CodeGen/StackProtector.cpp @@ -111,7 +111,9 @@ Trip = TM->getTargetTriple(); TLI = TM->getSubtargetImpl(Fn)->getTargetLowering(); HasPrologue = false; + HasStackGuard = false; HasIRCheck = false; + StackGuardAlloca = nullptr; Attribute Attr = Fn.getFnAttribute("stack-protector-buffer-size"); if (Attr.isStringAttribute() && @@ -241,13 +243,23 @@ bool StackProtector::RequiresStackProtector() { bool Strong = false; bool NeedsProtector = false; - for (const BasicBlock &BB : *F) - for (const Instruction &I : BB) - if (const CallInst *CI = dyn_cast<CallInst>(&I)) + for (const BasicBlock &BB : *F) { + for (const Instruction &I : BB) { + if (const CallInst *CI = dyn_cast<CallInst>(&I)) { if (CI->getCalledFunction() == Intrinsic::getDeclaration(F->getParent(), - Intrinsic::stackprotector)) + Intrinsic::stackprotector)) { + StackGuardAlloca = cast<AllocaInst>(CI->getOperand(1)); HasPrologue = true; + } + else if (CI->getCalledFunction() == + Intrinsic::getDeclaration(F->getParent(), + Intrinsic::stackguard)) { + HasStackGuard = true; + } + } + } + } if (F->hasFnAttribute(Attribute::SafeStack)) return false; @@ -400,9 +412,9 @@ // impossible to emit the check in IR, so the target *must* support stack // protection in SDAG. 
bool SupportsSelectionDAGSP = - TLI->useStackGuardXorFP() || - (EnableSelectionDAGSP && !TM->Options.EnableFastISel); - AllocaInst *AI = nullptr; // Place on stack that stores the stack guard. + (TLI->useStackGuardXorFP() || + (EnableSelectionDAGSP && !TM->Options.EnableFastISel)) && + (!HasPrologue || HasStackGuard); for (Function::iterator I = F->begin(), E = F->end(); I != E;) { BasicBlock *BB = &*I++; @@ -413,7 +425,7 @@ // Generate prologue instrumentation if not already generated. if (!HasPrologue) { HasPrologue = true; - SupportsSelectionDAGSP &= CreatePrologue(F, M, RI, TLI, AI); + SupportsSelectionDAGSP &= CreatePrologue(F, M, RI, TLI, StackGuardAlloca); } // SelectionDAG based code generation. Nothing else needs to be done here. @@ -433,7 +445,7 @@ // Generate the function-based epilogue instrumentation. // The target provides a guard check function, generate a call to it. IRBuilder<> B(RI); - LoadInst *Guard = B.CreateLoad(AI, true, "Guard"); + LoadInst *Guard = B.CreateLoad(StackGuardAlloca, true, "Guard"); CallInst *Call = B.CreateCall(GuardCheck, {Guard}); llvm::Function *Function = cast<llvm::Function>(GuardCheck); Call->setAttributes(Function->getAttributes()); @@ -489,7 +501,7 @@ // Generate the stack protector instructions in the old basic block. IRBuilder<> B(BB); Value *Guard = getStackGuard(TLI, M, B); - LoadInst *LI2 = B.CreateLoad(AI, true); + LoadInst *LI2 = B.CreateLoad(StackGuardAlloca, true); Value *Cmp = B.CreateICmpEQ(Guard, LI2); auto SuccessProb = BranchProbabilityInfo::getBranchProbStackProtector(true); Index: test/CodeGen/AArch64/stack-guard-remat-bitcast.ll =================================================================== --- test/CodeGen/AArch64/stack-guard-remat-bitcast.ll +++ test/CodeGen/AArch64/stack-guard-remat-bitcast.ll @@ -9,12 +9,6 @@ ; Load the stack guard for the second time, just in case the previous value gets spilled. 
; CHECK: adrp [[GUARD_PAGE:x[0-9]+]], ___stack_chk_guard@GOTPAGE ; CHECK: ldr [[R2:x[0-9]+]], {{\[}}[[R1]]{{\]}} -; CHECK: stur [[R2]], {{\[}}x29, [[SLOT0:[0-9#\-]+]]{{\]}} -; CHECK: ldur [[R3:x[0-9]+]], {{\[}}x29, [[SLOT0]]{{\]}} -; CHECK: ldr [[GUARD_ADDR:x[0-9]+]], {{\[}}[[GUARD_PAGE]], ___stack_chk_guard@GOTPAGEOFF{{\]}} -; CHECK: ldr [[GUARD:x[0-9]+]], {{\[}}[[GUARD_ADDR]]{{\]}} -; CHECK: cmp [[GUARD]], [[R3]] -; CHECK: b.ne LBB define i32 @test_stack_guard_remat2() { entry: @@ -22,9 +16,7 @@ %StackGuard = load i8*, i8** bitcast (i64** @__stack_chk_guard to i8**) call void @llvm.stackprotector(i8* %StackGuard, i8** %StackGuardSlot) %container = alloca [32 x i8], align 1 - call void @llvm.stackprotectorcheck(i8** bitcast (i64** @__stack_chk_guard to i8**)) ret i32 -1 } declare void @llvm.stackprotector(i8*, i8**) -declare void @llvm.stackprotectorcheck(i8**) Index: test/CodeGen/X86/stack-protector-twice.ll =================================================================== --- /dev/null +++ test/CodeGen/X86/stack-protector-twice.ll @@ -0,0 +1,25 @@ +; RUN: llc -mtriple=x86_64-pc-linux-gnu < %s -o - + +declare void @__stack_chk_fail() +declare void @llvm.stackprotector(i8*, i8**) + +; Check that we don't crash when running a function already instrumented by +; StackProtector through llc +define void @instrumented_func() sspstrong +{ +top: + %StackGuardSlot = alloca i8* + %StackGuard = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*) + call void @llvm.stackprotector(i8* %StackGuard, i8** %StackGuardSlot) + %StackGuard2 = load volatile i8*, i8* addrspace(257)* inttoptr (i32 40 to i8* addrspace(257)*) + %loaded = load volatile i8*, i8** %StackGuardSlot + %cmp = icmp eq i8* %StackGuard2, %loaded + br i1 %cmp, label %SP_return, label %CallStackCheckFailBlk + +SP_return: + ret void + +CallStackCheckFailBlk: + call void @__stack_chk_fail() + unreachable +}