Index: llvm/lib/Transforms/IPO/LowerTypeTests.cpp
===================================================================
--- llvm/lib/Transforms/IPO/LowerTypeTests.cpp
+++ llvm/lib/Transforms/IPO/LowerTypeTests.cpp
@@ -1197,6 +1197,7 @@
 static const unsigned kX86JumpTableEntrySize = 8;
 static const unsigned kARMJumpTableEntrySize = 4;
+static const unsigned kARMBTIJumpTableEntrySize = 8;
 
 unsigned LowerTypeTestsModule::getJumpTableEntrySize() {
   switch (Arch) {
@@ -1205,7 +1206,10 @@
     return kX86JumpTableEntrySize;
   case Triple::arm:
   case Triple::thumb:
+    return kARMJumpTableEntrySize;
   case Triple::aarch64:
+    if (M.getModuleFlag("branch-target-enforcement"))
+      return kARMBTIJumpTableEntrySize;
     return kARMJumpTableEntrySize;
   default:
     report_fatal_error("Unsupported architecture for jump tables");
@@ -1224,7 +1228,11 @@
   if (JumpTableArch == Triple::x86 || JumpTableArch == Triple::x86_64) {
     AsmOS << "jmp ${" << ArgIndex << ":c}@plt\n";
     AsmOS << "int3\nint3\nint3\n";
-  } else if (JumpTableArch == Triple::arm || JumpTableArch == Triple::aarch64) {
+  } else if (JumpTableArch == Triple::arm) {
+    AsmOS << "b $" << ArgIndex << "\n";
+  } else if (JumpTableArch == Triple::aarch64) {
+    if (Dest->getParent()->getModuleFlag("branch-target-enforcement"))
+      AsmOS << "hint #34\n"; // BTI C
     AsmOS << "b $" << ArgIndex << "\n";
   } else if (JumpTableArch == Triple::thumb) {
     AsmOS << "b.w $" << ArgIndex << "\n";
@@ -1386,6 +1394,10 @@
     // by Clang for -march=armv7.
     F->addFnAttr("target-cpu", "cortex-a8");
   }
+  if (JumpTableArch == Triple::aarch64) {
+    F->addFnAttr("ignore-branch-target-enforcement");
+    F->addFnAttr("ignore-sign-return-address");
+  }
   // Make sure we don't emit .eh_frame for this function.
   F->addFnAttr(Attribute::NoUnwind);
Index: llvm/test/Transforms/LowerTypeTests/aarch64-jumptable.ll
===================================================================
--- /dev/null
+++ llvm/test/Transforms/LowerTypeTests/aarch64-jumptable.ll
@@ -0,0 +1,39 @@
+; RUN: opt -S -lowertypetests -mtriple=aarch64-unknown-linux-gnu < %s | FileCheck --check-prefixes=AARCH64 %s
+
+; Test for the jump table generation with branch protection on AArch64
+
+target datalayout = "e-p:64:64"
+
+@0 = private unnamed_addr constant [2 x void (...)*] [void (...)* bitcast (void ()* @f to void (...)*), void (...)* bitcast (void ()* @g to void (...)*)], align 16
+
+; AARCH64: @f = alias void (), void ()* @[[JT:.*]]
+
+define void @f() !type !0 {
+  ret void
+}
+
+define internal void @g() !type !0 {
+  ret void
+}
+
+!0 = !{i32 0, !"typeid1"}
+
+declare i1 @llvm.type.test(i8* %ptr, metadata %bitset) nounwind readnone
+
+define i1 @foo(i8* %p) {
+  %x = call i1 @llvm.type.test(i8* %p, metadata !"typeid1")
+  ret i1 %x
+}
+
+!llvm.module.flags = !{!1}
+
+!1 = !{i32 4, !"branch-target-enforcement", i32 1}
+
+; AARCH64: define private void @[[JT]]() #[[ATTR:.*]] align 8 {
+
+; AARCH64: hint #34
+; AARCH64-SAME: b $0
+; AARCH64-SAME: hint #34
+; AARCH64-SAME: b $1
+
+; AARCH64: attributes #[[ATTR]] = { naked nounwind "ignore-branch-target-enforcement" "ignore-sign-return-address"
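
For reference, a rough sketch (not part of the patch) of the jump table this change is expected to produce for the two-entry case in the test above, assuming the "branch-target-enforcement" module flag is set; the symbol names below are illustrative, not taken from the pass:

    .cfi.jumptable:                  // name illustrative
            hint    #34              // bti c: marks the entry as a valid indirect-branch target
            b       f.cfi            // entry 0
            hint    #34              // bti c
            b       g.cfi            // entry 1

Each entry is two 4-byte instructions, which is why kARMBTIJumpTableEntrySize is 8 rather than the plain kARMJumpTableEntrySize of 4.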