Index: llvm/lib/CodeGen/LocalStackSlotAllocation.cpp
===================================================================
--- llvm/lib/CodeGen/LocalStackSlotAllocation.cpp
+++ llvm/lib/CodeGen/LocalStackSlotAllocation.cpp
@@ -201,6 +201,14 @@
   SmallSet<int, 16> ProtectedObjs;
   if (MFI.hasStackProtectorIndex()) {
     int StackProtectorFI = MFI.getStackProtectorIndex();
+
+    // We need to make sure we didn't pre-allocate the stack protector when
+    // doing this.
+    // If we already have a stack protector, this will re-assign it to a slot
+    // that is **not** covering the protected objects.
+    assert(!MFI.isObjectPreAllocated(StackProtectorFI) &&
+           "Stack protector pre-allocated in LocalStackSlotAllocation");
+
     StackObjSet LargeArrayObjs;
     StackObjSet SmallArrayObjs;
     StackObjSet AddrOfObjs;
Index: llvm/lib/CodeGen/PrologEpilogInserter.cpp
===================================================================
--- llvm/lib/CodeGen/PrologEpilogInserter.cpp
+++ llvm/lib/CodeGen/PrologEpilogInserter.cpp
@@ -933,8 +933,16 @@
     StackObjSet SmallArrayObjs;
     StackObjSet AddrOfObjs;
 
-    AdjustStackOffset(MFI, StackProtectorFI, StackGrowsDown, Offset, MaxAlign,
-                      Skew);
+    // If we need a stack protector, we need to make sure that
+    // LocalStackSlotPass didn't already allocate a slot for it.
+    // If we are told to use the LocalStackAllocationBlock, the stack protector
+    // is expected to be already pre-allocated.
+    if (!MFI.getUseLocalStackAllocationBlock())
+      AdjustStackOffset(MFI, StackProtectorFI, StackGrowsDown, Offset, MaxAlign,
+                        Skew);
+    else if (!MFI.isObjectPreAllocated(MFI.getStackProtectorIndex()))
+      llvm_unreachable(
+          "Stack protector not pre-allocated by LocalStackSlotPass.");
 
     // Assign large stack objects first.
     for (unsigned i = 0, e = MFI.getObjectIndexEnd(); i != e; ++i) {
@@ -968,6 +976,15 @@
         llvm_unreachable("Unexpected SSPLayoutKind.");
     }
 
+    // We expect **all** the protected stack objects to be pre-allocated by
+    // LocalStackSlotPass. If it turns out that PEI still has to allocate some
+    // of them, we may end up messing up the expected order of the objects.
+    if (MFI.getUseLocalStackAllocationBlock() &&
+        !(LargeArrayObjs.empty() && SmallArrayObjs.empty() &&
+          AddrOfObjs.empty()))
+      llvm_unreachable("Found protected stack objects not pre-allocated by "
+                       "LocalStackSlotPass.");
+
     AssignProtectedObjSet(LargeArrayObjs, ProtectedObjs, MFI, StackGrowsDown,
                           Offset, MaxAlign, Skew);
     AssignProtectedObjSet(SmallArrayObjs, ProtectedObjs, MFI, StackGrowsDown,
Index: test/CodeGen/AArch64/stack-guard-reassign.mir
===================================================================
--- /dev/null
+++ test/CodeGen/AArch64/stack-guard-reassign.mir
@@ -0,0 +1,34 @@
+# RUN: llc -mtriple=arm64-apple-ios -start-before=localstackalloc -stop-after=prologepilog -o - %s | FileCheck %s
+
+--- |
+  @__stack_chk_guard = external global i8*
+  define i32 @main(i32, i8**) {
+    %StackGuardSlot = alloca i8*
+    unreachable
+  }
+...
+---
+name: main
+tracksRegLiveness: true
+frameInfo:
+# CHECK: stackSize: 544
+  stackProtector: '%stack.0.StackGuardSlot'
+stack:
+  - { id: 0, name: StackGuardSlot, size: 8, alignment: 8, stack-id: default }
+# Verify that the offset assigned to the stack protector is at the top of the
+# frame, covering the locals.
+# CHECK: - { id: 0, name: StackGuardSlot, type: default, offset: -24, size: 8,
+# CHECK-NEXT: alignment: 8, stack-id: default, callee-saved-register: '', callee-saved-restored: true,
+# CHECK-NEXT: local-offset: -8, debug-info-variable: '', debug-info-expression: '',
+# CHECK-NEXT: debug-info-location: '' }
+  - { id: 1, size: 512, alignment: 1, stack-id: default }
+  - { id: 2, size: 4, alignment: 4, stack-id: default }
+body: |
+  bb.0:
+    %25:gpr64common = LOAD_STACK_GUARD :: (dereferenceable invariant load 8 from @__stack_chk_guard)
+    STRXui killed %25, %stack.0.StackGuardSlot, 0 :: (volatile store 8 into %stack.0.StackGuardSlot)
+    %28:gpr64 = LDRXui %stack.0.StackGuardSlot, 0 :: (volatile load 8 from %stack.0.StackGuardSlot)
+    %29:gpr64common = LOAD_STACK_GUARD :: (dereferenceable invariant load 8 from @__stack_chk_guard)
+    RET_ReallyLR implicit undef $w0, implicit killed %28, implicit killed %29
+
+...