Index: lib/Target/AArch64/AArch64FrameLowering.cpp
===================================================================
--- lib/Target/AArch64/AArch64FrameLowering.cpp
+++ lib/Target/AArch64/AArch64FrameLowering.cpp
@@ -279,6 +279,31 @@
   return MBB.erase(I);
 }
 
+// Returns true if the return address of this function must be signed in the
+// prologue (PACIASP) and authenticated in the epilogue (AUTIASP/RETAA).
+static bool ShouldSignReturnAddress(MachineFunction &MF) {
+  // The function should be signed in the following situations:
+  // - sign-return-address=all
+  // - sign-return-address=partial and the function spills the LR
+
+  const Function &F = MF.getFunction();
+  if (!F.hasFnAttribute("sign-return-address"))
+    return false;
+
+  StringRef Scope = F.getFnAttribute("sign-return-address").getValueAsString();
+  if (Scope.equals_lower("none"))
+    return false;
+
+  if (Scope.equals_lower("all"))
+    return true;
+
+  assert(Scope.equals_lower("partial") && "Expected all, none or partial");
+
+  for (const auto &Info : MF.getFrameInfo().getCalleeSavedInfo())
+    if (Info.getReg() == AArch64::LR)
+      return true;
+
+  return false;
+}
 void AArch64FrameLowering::emitCalleeSavedFrameMoves(
     MachineBasicBlock &MBB, MachineBasicBlock::iterator MBBI) const {
   MachineFunction &MF = *MBB.getParent();
@@ -568,6 +593,11 @@
   // to determine the end of the prologue.
   DebugLoc DL;
 
+  // Sign the return address at the very top of the prologue, before LR is
+  // spilled, so the signed value is what gets saved and restored.
+  if (ShouldSignReturnAddress(MF)) {
+    BuildMI(MBB, MBBI, DL, TII->get(AArch64::PACIASP))
+        .setMIFlag(MachineInstr::FrameSetup);
+  }
+
   // All calls are tail calls in GHC calling conv, and functions have no
   // prologue/epilogue.
   if (MF.getFunction().getCallingConv() == CallingConv::GHC)
@@ -832,6 +862,31 @@
   }
 }
 
+// Authenticate the return address before returning.  On v8.3a a plain
+// RET_ReallyLR is combined with the authentication into a single RETAA;
+// otherwise AUTIASP (a HINT before v8.3a, so safe on any v8a core) is
+// inserted before the terminator.
+static void InsertReturnAddressAuth(MachineFunction &MF,
+                                    MachineBasicBlock &MBB) {
+  if (!ShouldSignReturnAddress(MF))
+    return;
+  const AArch64Subtarget &Subtarget = MF.getSubtarget<AArch64Subtarget>();
+  const TargetInstrInfo *TII = Subtarget.getInstrInfo();
+
+  MachineBasicBlock::iterator MBBI = MBB.getFirstTerminator();
+  DebugLoc DL;
+  // The block may end without a terminator; still emit the authentication so
+  // a signed LR is never left live past the epilogue.
+  if (MBBI != MBB.end())
+    DL = MBBI->getDebugLoc();
+
+  if (Subtarget.hasV8_3aOps() && MBBI != MBB.end() &&
+      MBBI->getOpcode() == AArch64::RET_ReallyLR) {
+    BuildMI(MBB, MBBI, DL, TII->get(AArch64::RETAA))
+        .copyImplicitOps(*MBBI);
+    MBB.erase(MBBI);
+  } else {
+    BuildMI(MBB, MBBI, DL, TII->get(AArch64::AUTIASP))
+        .setMIFlag(MachineInstr::FrameDestroy);
+  }
+}
+
 void AArch64FrameLowering::emitEpilogue(MachineFunction &MF,
                                         MachineBasicBlock &MBB) const {
   MachineBasicBlock::iterator MBBI = MBB.getLastNonDebugInstr();
@@ -945,6 +1000,7 @@
 
     emitFrameOffset(MBB, MBB.getFirstTerminator(), DL, AArch64::SP, AArch64::SP,
                     NumBytes + AfterCSRPopSize, TII, MachineInstr::FrameDestroy);
+    InsertReturnAddressAuth(MF, MBB);
     return;
   }
 
@@ -955,8 +1011,10 @@
   bool RedZone = canUseRedZone(MF);
   // If this was a redzone leaf function, we don't need to restore the
   // stack pointer (but we may need to pop stack args for fastcc).
-  if (RedZone && AfterCSRPopSize == 0)
+  if (RedZone && AfterCSRPopSize == 0) {
+    InsertReturnAddressAuth(MF, MBB);
     return;
+  }
 
   bool NoCalleeSaveRestore = PrologueSaveSize == 0;
   int StackRestoreBytes = RedZone ? 0 : NumBytes;
@@ -973,8 +1031,10 @@
 
     emitFrameOffset(MBB, LastPopI, DL, AArch64::SP, AArch64::SP,
                     StackRestoreBytes, TII, MachineInstr::FrameDestroy);
-    if (Done)
+    if (Done) {
+      InsertReturnAddressAuth(MF, MBB);
       return;
+    }
 
     NumBytes = 0;
   }
@@ -1012,6 +1072,8 @@
     emitFrameOffset(MBB, FirstSPPopI, DL, AArch64::SP, AArch64::SP,
                     AfterCSRPopSize, TII, MachineInstr::FrameDestroy);
   }
+
+  InsertReturnAddressAuth(MF, MBB);
 }
 
 /// getFrameIndexReference - Provide a base+offset reference to an FI slot for
Index: test/CodeGen/AArch64/sign-return-address.ll
===================================================================
--- /dev/null
+++ test/CodeGen/AArch64/sign-return-address.ll
@@ -0,0 +1,118 @@
+; RUN: llc -mtriple=aarch64-none-eabi < %s | FileCheck %s
+
+; CHECK-LABEL: @leaf
+define dso_local i32 @leaf(i32 %x) {
+entry:
+  %x.addr = alloca i32, align 4
+  store i32 %x, i32* %x.addr, align 4
+  %0 = load i32, i32* %x.addr, align 4
+  ret i32 %0
+}
+; CHECK-NOT: paci{{[a,b]}}sp
+; CHECK-NOT: auti{{[a,b]}}sp
+
+; CHECK-LABEL: @leaf_sign_none
+define dso_local i32 @leaf_sign_none(i32 %x) #2 {
+entry:
+  %x.addr = alloca i32, align 4
+  store i32 %x, i32* %x.addr, align 4
+  %0 = load i32, i32* %x.addr, align 4
+  ret i32 %0
+}
+; CHECK-NOT: paci{{[a,b]}}sp
+; CHECK-NOT: auti{{[a,b]}}sp
+
+; CHECK-LABEL: @leaf_sign_non_leaf
+define dso_local i32 @leaf_sign_non_leaf(i32 %x) #1 {
+entry:
+  %x.addr = alloca i32, align 4
+  store i32 %x, i32* %x.addr, align 4
+  %0 = load i32, i32* %x.addr, align 4
+  ret i32 %0
+}
+; CHECK-NOT: paci{{[a,b]}}sp
+; CHECK-NOT: auti{{[a,b]}}sp
+
+; CHECK-LABEL: @leaf_sign_all
+define dso_local i32 @leaf_sign_all(i32 %x) #0 {
+entry:
+  %x.addr = alloca i32, align 4
+  store i32 %x, i32* %x.addr, align 4
+  %0 = load i32, i32* %x.addr, align 4
+  ret i32 %0
+}
+; CHECK: paciasp
+; CHECK: autiasp
+; CHECK: ret
+
+; CHECK-LABEL: @leaf_clobbers_lr
+define dso_local i64 @leaf_clobbers_lr(i64 %x) #1 {
+entry:
+  %0 = alloca i64, align 8
+  store i64 %x, i64* %0, align 8
+  %1 = load i64, i64* %0, align 8
+  call void asm sideeffect "mov x30, $0", "r,~{lr}"(i64 %1) #1
+  ret i64 %x
+}
+; CHECK: paciasp
+; CHECK: autiasp
+; CHECK: ret
+
+; CHECK-LABEL: @non_leaf_sign_all
+define dso_local i32 @non_leaf_sign_all(i32 %x) #0 {
+entry:
+  %x.addr = alloca i32, align 4
+  %y = alloca i32, align 4
+  store i32 %x, i32* %x.addr, align 4
+  %0 = load i32, i32* %x.addr, align 4
+  %call = call i32 @leaf_sign_non_leaf(i32 %0)
+  %1 = load i32, i32* %x.addr, align 4
+  %call1 = call i32 @leaf_sign_all(i32 %1)
+  %add = add nsw i32 %call, %call1
+  store i32 %add, i32* %y, align 4
+  %2 = load i32, i32* %y, align 4
+  ret i32 %2
+}
+; CHECK: paciasp
+; CHECK: autiasp
+; CHECK: ret
+
+; CHECK-LABEL: @non_leaf_sign_non_leaf
+define dso_local i32 @non_leaf_sign_non_leaf(i32 %x) #1 {
+entry:
+  %x.addr = alloca i32, align 4
+  %y = alloca i32, align 4
+  store i32 %x, i32* %x.addr, align 4
+  %0 = load i32, i32* %x.addr, align 4
+  %call = call i32 @leaf_sign_non_leaf(i32 %0)
+  %1 = load i32, i32* %x.addr, align 4
+  %call1 = call i32 @leaf_sign_all(i32 %1)
+  %add = add nsw i32 %call, %call1
+  store i32 %add, i32* %y, align 4
+  %2 = load i32, i32* %y, align 4
+  ret i32 %2
+}
+; CHECK: paciasp
+; CHECK: autiasp
+; CHECK: ret
+
+; CHECK-LABEL: @leaf_sign_all_v83
+define dso_local i32 @leaf_sign_all_v83(i32 %x) #3 {
+entry:
+  %x.addr = alloca i32, align 4
+  store i32 %x, i32* %x.addr, align 4
+  %0 = load i32, i32* %x.addr, align 4
+  ret i32 %0
+}
+; CHECK: paciasp
+; CHECK-NOT: ret
+; CHECK: retaa
+; CHECK-NOT: ret
+
+attributes #0 = { "sign-return-address"="all" }
+
+attributes #1 = { "sign-return-address"="partial" }
+
+attributes #2 = { "sign-return-address"="none" }
+
+attributes #3 = { "sign-return-address"="all" "target-features"="+v8.3a" }