Index: lib/Transforms/IPO/LowerTypeTests.cpp =================================================================== --- lib/Transforms/IPO/LowerTypeTests.cpp +++ lib/Transforms/IPO/LowerTypeTests.cpp @@ -15,6 +15,7 @@ #include "llvm/Transforms/IPO/LowerTypeTests.h" #include "llvm/ADT/EquivalenceClasses.h" #include "llvm/ADT/Statistic.h" +#include "llvm/ADT/StringExtras.h" #include "llvm/ADT/Triple.h" #include "llvm/IR/Constant.h" #include "llvm/IR/Constants.h" @@ -213,6 +214,7 @@ IntegerType *Int1Ty = Type::getInt1Ty(M.getContext()); IntegerType *Int8Ty = Type::getInt8Ty(M.getContext()); IntegerType *Int32Ty = Type::getInt32Ty(M.getContext()); + PointerType *Int8PtrTy = PointerType::getUnqual(Int8Ty); PointerType *Int32PtrTy = PointerType::getUnqual(Int32Ty); IntegerType *Int64Ty = Type::getInt64Ty(M.getContext()); IntegerType *IntPtrTy = M.getDataLayout().getIntPtrType(M.getContext(), 0); @@ -243,12 +245,15 @@ ArrayRef<GlobalVariable *> Globals); unsigned getJumpTableEntrySize(); Type *getJumpTableEntryType(); - Constant *createJumpTableEntry(GlobalObject *Src, Function *Dest, + std::string createJumpTableEntry(GlobalObject *Src, Function *Dest, unsigned Distance); + std::string createJumpTableAlias(Function *Dest, GlobalVariable *JumpTable, + unsigned Distance); + void addUsed(ArrayRef<Function *> Functions); void verifyTypeMDNode(GlobalObject *GO, MDNode *Type); void buildBitSetsFromFunctions(ArrayRef<Metadata *> TypeIds, ArrayRef<Function *> Functions); - void buildBitSetsFromFunctionsX86(ArrayRef<Metadata *> TypeIds, + void buildBitSetsFromFunctionsNative(ArrayRef<Metadata *> TypeIds, ArrayRef<Function *> Functions); void buildBitSetsFromFunctionsWASM(ArrayRef<Metadata *> TypeIds, ArrayRef<Function *> Functions); @@ -627,62 +632,99 @@ } static const unsigned kX86JumpTableEntrySize = 8; +static const unsigned kARMJumpTableEntrySize = 4; unsigned LowerTypeTestsModule::getJumpTableEntrySize() { - return kX86JumpTableEntrySize; + switch (Arch) { + case Triple::x86: + case Triple::x86_64: + return kX86JumpTableEntrySize; + case Triple::arm: + case Triple::aarch64: + return 
kARMJumpTableEntrySize; + default: + report_fatal_error("Unsupported architecture for jump tables"); + } } // Create a constant representing a jump table entry for the target. This // consists of an instruction sequence containing a relative branch to Dest. The // constant will be laid out at address Src+(Len*Distance) where Len is the // target-specific jump table entry size. -Constant *LowerTypeTestsModule::createJumpTableEntry(GlobalObject *Src, +std::string LowerTypeTestsModule::createJumpTableEntry(GlobalObject *Src, Function *Dest, unsigned Distance) { - const unsigned kJmpPCRel32Code = 0xe9; - const unsigned kInt3Code = 0xcc; - - ConstantInt *Jmp = ConstantInt::get(Int8Ty, kJmpPCRel32Code); - - // Build a constant representing the displacement between the constant's - // address and Dest. This will resolve to a PC32 relocation referring to Dest. - Constant *DestInt = ConstantExpr::getPtrToInt(Dest, IntPtrTy); - Constant *SrcInt = ConstantExpr::getPtrToInt(Src, IntPtrTy); - Constant *Disp = ConstantExpr::getSub(DestInt, SrcInt); - ConstantInt *DispOffset = - ConstantInt::get(IntPtrTy, Distance * kX86JumpTableEntrySize + 5); - Constant *OffsetedDisp = ConstantExpr::getSub(Disp, DispOffset); - OffsetedDisp = ConstantExpr::getTruncOrBitCast(OffsetedDisp, Int32Ty); - - ConstantInt *Int3 = ConstantInt::get(Int8Ty, kInt3Code); - - Constant *Fields[] = { - Jmp, OffsetedDisp, Int3, Int3, Int3, - }; - return ConstantStruct::getAnon(Fields, /*Packed=*/true); + std::string S; + raw_string_ostream OS(S); + if (Arch == Triple::x86 || Arch == Triple::x86_64) { + OS << "jmp " << Dest->getName() << "@plt\n"; + OS << "int3\nint3\nint3\n"; + } else if (Arch == Triple::arm || Arch == Triple::aarch64) { + OS << "b " << Dest->getName() << "\n"; + } else { + report_fatal_error("Unsupported architecture for jump tables"); + } + + return OS.str(); +} + +std::string LowerTypeTestsModule::createJumpTableAlias( + Function *Dest, GlobalVariable *JumpTable, unsigned Distance) { + 
std::string S; + raw_string_ostream OS(S); + OS << ".globl " << Dest->getName() << "\n"; + OS << ".type " << Dest->getName() << ", function\n"; + OS << Dest->getName() << " = " << JumpTable->getName() << " + " + << (getJumpTableEntrySize() * Distance) << "\n"; + OS << ".size " << Dest->getName() << ", " << getJumpTableEntrySize() << "\n"; + return OS.str(); } Type *LowerTypeTestsModule::getJumpTableEntryType() { - return StructType::get(M.getContext(), - {Int8Ty, Int32Ty, Int8Ty, Int8Ty, Int8Ty}, - /*Packed=*/true); + return ArrayType::get(Int8Ty, getJumpTableEntrySize()); } /// Given a disjoint set of type identifiers and functions, build the bit sets /// and lower the llvm.type.test calls, architecture dependently. void LowerTypeTestsModule::buildBitSetsFromFunctions( ArrayRef<Metadata *> TypeIds, ArrayRef<Function *> Functions) { - if (Arch == Triple::x86 || Arch == Triple::x86_64) - buildBitSetsFromFunctionsX86(TypeIds, Functions); + if (Arch == Triple::x86 || Arch == Triple::x86_64 || Arch == Triple::arm || + Arch == Triple::aarch64) + buildBitSetsFromFunctionsNative(TypeIds, Functions); else if (Arch == Triple::wasm32 || Arch == Triple::wasm64) buildBitSetsFromFunctionsWASM(TypeIds, Functions); else report_fatal_error("Unsupported architecture for jump tables"); } +void LowerTypeTestsModule::addUsed(ArrayRef<Function *> Functions) { + GlobalVariable *GV = M.getGlobalVariable("llvm.used"); + SmallVector<Constant *, 16> Init; + if (GV) { + ConstantArray *CA = dyn_cast<ConstantArray>(GV->getInitializer()); + for (auto &Op : CA->operands()) + Init.push_back(cast_or_null<Constant>(Op)); + GV->eraseFromParent(); + } + for (Function *F : Functions) { + if (!F->isDeclaration()) + Init.push_back(ConstantExpr::getBitCast(F, Int8PtrTy)); + } + + if (Init.empty()) + return; + + ArrayType *ATy = ArrayType::get(Int8PtrTy, Init.size()); + GV = new llvm::GlobalVariable( + M, ATy, false, GlobalValue::AppendingLinkage, + ConstantArray::get(ATy, Init), "llvm.used"); + + GV->setSection("llvm.metadata"); +} + /// Given a disjoint set of type identifiers 
and functions, build a jump table /// for the functions, build the bit sets and lower the llvm.type.test calls. -void LowerTypeTestsModule::buildBitSetsFromFunctionsX86( +void LowerTypeTestsModule::buildBitSetsFromFunctionsNative( ArrayRef<Metadata *> TypeIds, ArrayRef<Function *> Functions) { // Unlike the global bitset builder, the function bitset builder cannot // re-arrange functions in a particular order and base its calculations on the @@ -774,45 +816,68 @@ // Create a constant to hold the jump table. ArrayType *JumpTableType = ArrayType::get(getJumpTableEntryType(), Functions.size()); - auto JumpTable = new GlobalVariable(M, JumpTableType, - /*isConstant=*/true, - GlobalValue::PrivateLinkage, nullptr); - JumpTable->setSection(ObjectFormat == Triple::MachO - ? "__TEXT,__text,regular,pure_instructions" - : ".text"); + auto JumpTable = + new GlobalVariable(M, JumpTableType, + /*isConstant=*/true, GlobalValue::ExternalLinkage, // FIXME: private + nullptr, ".cfi.jumptable"); + JumpTable->setVisibility(GlobalValue::HiddenVisibility); lowerTypeTestCalls(TypeIds, JumpTable, GlobalLayout); // Build aliases pointing to offsets into the jump table, and replace // references to the original functions with references to the aliases. 
for (unsigned I = 0; I != Functions.size(); ++I) { - Constant *CombinedGlobalElemPtr = ConstantExpr::getBitCast( - ConstantExpr::getGetElementPtr( - JumpTableType, JumpTable, - ArrayRef<Constant *>{ConstantInt::get(IntPtrTy, 0), - ConstantInt::get(IntPtrTy, I)}), - Functions[I]->getType()); if (LinkerSubsectionsViaSymbols || Functions[I]->isDeclarationForLinker()) { + Constant *CombinedGlobalElemPtr = ConstantExpr::getBitCast( + ConstantExpr::getGetElementPtr( + JumpTableType, JumpTable, + ArrayRef<Constant *>{ConstantInt::get(IntPtrTy, 0), + ConstantInt::get(IntPtrTy, I)}), + Functions[I]->getType()); Functions[I]->replaceAllUsesWith(CombinedGlobalElemPtr); + + if (Functions[I]->isWeakForLinker()) { + std::string S; + raw_string_ostream OS(S); + OS << ".weak " << Functions[I]->getName() << "\n"; + M.appendModuleInlineAsm(OS.str()); + } } else { assert(Functions[I]->getType()->getAddressSpace() == 0); - GlobalAlias *GAlias = GlobalAlias::create(Functions[I]->getValueType(), 0, - Functions[I]->getLinkage(), "", - CombinedGlobalElemPtr, &M); - GAlias->setVisibility(Functions[I]->getVisibility()); - GAlias->takeName(Functions[I]); - Functions[I]->replaceAllUsesWith(GAlias); + + M.appendModuleInlineAsm(createJumpTableAlias(Functions[I], JumpTable, I)); + + Function *DeclAlias = + Function::Create(cast<FunctionType>(Functions[I]->getValueType()), + GlobalValue::ExternalLinkage, "", &M); + DeclAlias->setVisibility(Functions[I]->getVisibility()); + DeclAlias->takeName(Functions[I]); + // Unnamed functions can not be added to llvm.used. + Functions[I]->setName(DeclAlias->getName() + ".cfi"); + Functions[I]->replaceAllUsesWith(DeclAlias); } if (!Functions[I]->isDeclarationForLinker()) - Functions[I]->setLinkage(GlobalValue::PrivateLinkage); + Functions[I]->setLinkage(GlobalValue::InternalLinkage); + } // Build and set the jump table's initializer. std::vector<Constant *> JumpTableEntries; + + // Try to emit the jump table at the end of the text segment. + // Jump table must come after __cfi_check in the cross-dso mode 
+ // FIXME: this magic section name seems to do the trick. + std::string S; + raw_string_ostream OS(S); + OS << ".section " << (ObjectFormat == Triple::MachO + ? "__TEXT,__text,regular,pure_instructions" + : ".text.cfi, \"ax\", @progbits") + << "\n"; + OS << ".p2align 3\n"; + OS << JumpTable->getName() << ":\n"; + M.appendModuleInlineAsm(OS.str()); for (unsigned I = 0; I != Functions.size(); ++I) - JumpTableEntries.push_back( - createJumpTableEntry(JumpTable, Functions[I], I)); - JumpTable->setInitializer( - ConstantArray::get(JumpTableType, JumpTableEntries)); + M.appendModuleInlineAsm(createJumpTableEntry(nullptr, Functions[I], I)); + addUsed(Functions); } /// Assign a dummy layout using an incrementing counter, tag each function Index: test/Transforms/LowerTypeTests/function-disjoint.ll =================================================================== --- test/Transforms/LowerTypeTests/function-disjoint.ll +++ test/Transforms/LowerTypeTests/function-disjoint.ll @@ -5,21 +5,36 @@ target datalayout = "e-p:64:64" -; X64: @[[JT0:.*]] = private constant [1 x <{ i8, i32, i8, i8, i8 }>] [<{ i8, i32, i8, i8, i8 }> <{ i8 -23, i32 trunc (i64 sub (i64 sub (i64 ptrtoint (void ()* @[[FNAME:.*]] to i64), i64 ptrtoint ([1 x <{ i8, i32, i8, i8, i8 }>]* @[[JT0]] to i64)), i64 5) to i32), i8 -52, i8 -52, i8 -52 }>], section ".text" -; X64: @[[JT1:.*]] = private constant [1 x <{ i8, i32, i8, i8, i8 }>] [<{ i8, i32, i8, i8, i8 }> <{ i8 -23, i32 trunc (i64 sub (i64 sub (i64 ptrtoint (void ()* @[[GNAME:.*]] to i64), i64 ptrtoint ([1 x <{ i8, i32, i8, i8, i8 }>]* @[[JT1]] to i64)), i64 5) to i32), i8 -52, i8 -52, i8 -52 }>], section ".text" +; X64: module asm "f = .cfi.jumptable + 0" + +; X64: module asm ".cfi.jumptable:" +; X64-NEXT: module asm "jmp f.cfi@plt" +; X64-NEXT: module asm "int3" +; X64-NEXT: module asm "int3" +; X64-NEXT: module asm "int3" + +; X64: module asm "g = .cfi.jumptable.1 + 0" + +; X64: module asm ".cfi.jumptable.1:" +; X64-NEXT: module asm "jmp g.cfi@plt" +; 
X64-NEXT: module asm "int3" +; X64-NEXT: module asm "int3" +; X64-NEXT: module asm "int3" + + +; X64: @.cfi.jumptable = external hidden constant [1 x [8 x i8]] +; X64: @.cfi.jumptable.1 = external hidden constant [1 x [8 x i8]] + ; WASM32: private constant [0 x i8] zeroinitializer @0 = private unnamed_addr constant [2 x void ()*] [void ()* @f, void ()* @g], align 16 -; X64: @f = alias void (), bitcast ([1 x <{ i8, i32, i8, i8, i8 }>]* @[[JT0]] to void ()*) -; X64: @g = alias void (), bitcast ([1 x <{ i8, i32, i8, i8, i8 }>]* @[[JT1]] to void ()*) - -; X64: define private void @[[FNAME]]() +; X64: define internal void @f.cfi() ; WASM32: define void @f() !type !{{[0-9]+}} !wasm.index ![[I0:[0-9]+]] define void @f() !type !0 { ret void } -; X64: define private void @[[GNAME]]() +; X64: define internal void @g.cfi() ; WASM32: define void @g() !type !{{[0-9]+}} !wasm.index ![[I1:[0-9]+]] define void @g() !type !1 { ret void @@ -31,15 +46,18 @@ declare i1 @llvm.type.test(i8* %ptr, metadata %bitset) nounwind readnone define i1 @foo(i8* %p) { - ; X64: icmp eq i64 {{.*}}, ptrtoint ([1 x <{ i8, i32, i8, i8, i8 }>]* @[[JT0]] to i64) + ; X64: icmp eq i64 {{.*}}, ptrtoint ([1 x [8 x i8]]* @.cfi.jumptable to i64) ; WASM32: icmp eq i64 {{.*}}, 1 %x = call i1 @llvm.type.test(i8* %p, metadata !"typeid1") - ; X64: icmp eq i64 {{.*}}, ptrtoint ([1 x <{ i8, i32, i8, i8, i8 }>]* @[[JT1]] to i64) + ; X64: icmp eq i64 {{.*}}, ptrtoint ([1 x [8 x i8]]* @.cfi.jumptable.1 to i64) ; WASM32: icmp eq i64 {{.*}}, 2 %y = call i1 @llvm.type.test(i8* %p, metadata !"typeid2") %z = add i1 %x, %y ret i1 %z } +; X64: declare void @f() +; X64: declare void @g() + ; WASM32: ![[I0]] = !{i64 1} -; WASM32: ![[I1]] = !{i64 2} +; WASM32: ![[I1]] = !{i64 2} \ No newline at end of file Index: test/Transforms/LowerTypeTests/function-ext.ll =================================================================== --- test/Transforms/LowerTypeTests/function-ext.ll +++ test/Transforms/LowerTypeTests/function-ext.ll @@ 
-4,14 +4,18 @@ ; Tests that we correctly handle external references, including the case where ; all functions in a bitset are external references. -; X64: @[[JT:.*]] = private constant [1 x <{ i8, i32, i8, i8, i8 }>] [<{ i8, i32, i8, i8, i8 }> <{ i8 -23, i32 trunc (i64 sub (i64 sub (i64 ptrtoint (void ()* @foo to i64), i64 ptrtoint ([1 x <{ i8, i32, i8, i8, i8 }>]* @[[JT]] to i64)), i64 5) to i32), i8 -52, i8 -52, i8 -52 }>], section ".text" +; X64: module asm ".cfi.jumptable:" +; X64-NEXT: module asm "jmp foo@plt" +; X64-NOT: module asm "jmp {{.*}}@plt" + +; X64: @.cfi.jumptable = external hidden constant [1 x [8 x i8]] ; WASM32: private constant [0 x i8] zeroinitializer ; WASM32: declare !type !{{[0-9]+}} void @foo() declare !type !0 void @foo() define i1 @bar(i8* %ptr) { - ; X64: icmp eq i64 {{.*}}, ptrtoint ([1 x <{ i8, i32, i8, i8, i8 }>]* @[[JT]] to i64) + ; X64: icmp eq i64 {{.*}}, ptrtoint ([1 x [8 x i8]]* @.cfi.jumptable to i64) ; WASM32: sub i64 {{.*}}, 0 ; WASM32: icmp ult i64 {{.*}}, 1 %p = call i1 @llvm.type.test(i8* %ptr, metadata !"void") Index: test/Transforms/LowerTypeTests/function.ll =================================================================== --- test/Transforms/LowerTypeTests/function.ll +++ test/Transforms/LowerTypeTests/function.ll @@ -1,24 +1,65 @@ -; RUN: opt -S -lowertypetests -mtriple=x86_64-unknown-linux-gnu < %s | FileCheck --check-prefix=X64 %s +; RUN: opt -S -lowertypetests -mtriple=i686-unknown-linux-gnu < %s | FileCheck --check-prefix=X86 %s +; RUN: opt -S -lowertypetests -mtriple=x86_64-unknown-linux-gnu < %s | FileCheck --check-prefix=X86 %s +; RUN: opt -S -lowertypetests -mtriple=arm-unknown-linux-gnu < %s | FileCheck --check-prefix=ARM %s +; RUN: opt -S -lowertypetests -mtriple=aarch64-unknown-linux-gnu < %s | FileCheck --check-prefix=ARM %s ; RUN: opt -S -lowertypetests -mtriple=wasm32-unknown-unknown < %s | FileCheck --check-prefix=WASM32 %s ; Tests that we correctly handle bitsets containing 2 or more functions. 
target datalayout = "e-p:64:64" -; X64: @[[JT:.*]] = private constant [2 x <{ i8, i32, i8, i8, i8 }>] [<{ i8, i32, i8, i8, i8 }> <{ i8 -23, i32 trunc (i64 sub (i64 sub (i64 ptrtoint (void ()* @[[FNAME:.*]] to i64), i64 ptrtoint ([2 x <{ i8, i32, i8, i8, i8 }>]* @[[JT]] to i64)), i64 5) to i32), i8 -52, i8 -52, i8 -52 }>, <{ i8, i32, i8, i8, i8 }> <{ i8 -23, i32 trunc (i64 sub (i64 sub (i64 ptrtoint (void ()* @[[GNAME:.*]] to i64), i64 ptrtoint ([2 x <{ i8, i32, i8, i8, i8 }>]* @[[JT]] to i64)), i64 13) to i32), i8 -52, i8 -52, i8 -52 }>], section ".text" +; X86: module asm ".globl f" +; X86-NEXT: module asm ".type f, function" +; X86-NEXT: module asm "f = .cfi.jumptable + 0" +; X86-NEXT: module asm ".size f, 8" +; X86-NEXT: module asm ".globl g" +; X86-NEXT: module asm ".type g, function" +; X86-NEXT: module asm "g = .cfi.jumptable + 8" +; X86-NEXT: module asm ".size g, 8" +; X86-NEXT: module asm ".section .text.cfi, \22ax\22, @progbits" +; X86-NEXT: module asm ".p2align 3" +; X86-NEXT: module asm ".cfi.jumptable:" +; X86-NEXT: module asm "jmp f.cfi@plt" +; X86-NEXT: module asm "int3" +; X86-NEXT: module asm "int3" +; X86-NEXT: module asm "int3" +; X86-NEXT: module asm "jmp g.cfi@plt" +; X86-NEXT: module asm "int3" +; X86-NEXT: module asm "int3" +; X86-NEXT: module asm "int3" + +; ARM: module asm ".globl f" +; ARM-NEXT: module asm ".type f, function" +; ARM-NEXT: module asm "f = .cfi.jumptable + 0" +; ARM-NEXT: module asm ".size f, 4" +; ARM-NEXT: module asm ".globl g" +; ARM-NEXT: module asm ".type g, function" +; ARM-NEXT: module asm "g = .cfi.jumptable + 4" +; ARM-NEXT: module asm ".size g, 4" +; ARM-NEXT: module asm ".section .text.cfi, \22ax\22, @progbits" +; ARM-NEXT: module asm ".p2align 3" +; ARM-NEXT: module asm ".cfi.jumptable:" +; ARM-NEXT: module asm "b f.cfi" +; ARM-NEXT: module asm "b g.cfi" + +; X86: @.cfi.jumptable = external hidden constant [2 x [8 x i8]] +; ARM: @.cfi.jumptable = external hidden constant [2 x [4 x i8]] + ; WASM32: private constant 
[0 x i8] zeroinitializer @0 = private unnamed_addr constant [2 x void (...)*] [void (...)* bitcast (void ()* @f to void (...)*), void (...)* bitcast (void ()* @g to void (...)*)], align 16 -; X64: @f = alias void (), bitcast ([2 x <{ i8, i32, i8, i8, i8 }>]* @[[JT]] to void ()*) -; X64: @g = alias void (), bitcast (<{ i8, i32, i8, i8, i8 }>* getelementptr inbounds ([2 x <{ i8, i32, i8, i8, i8 }>], [2 x <{ i8, i32, i8, i8, i8 }>]* @[[JT]], i64 0, i64 1) to void ()*) +; X86: @llvm.used = appending global [2 x i8*] [i8* bitcast (void ()* @f.cfi to i8*), i8* bitcast (void ()* @g.cfi to i8*)], section "llvm.metadata" +; ARM: @llvm.used = appending global [2 x i8*] [i8* bitcast (void ()* @f.cfi to i8*), i8* bitcast (void ()* @g.cfi to i8*)], section "llvm.metadata" -; X64: define private void @[[FNAME]]() +; X86: define internal void @f.cfi() +; ARM: define internal void @f.cfi() ; WASM32: define void @f() !type !{{[0-9]+}} !wasm.index ![[I0:[0-9]+]] define void @f() !type !0 { ret void } -; X64: define private void @[[GNAME]]() +; X86: define internal void @g.cfi() +; ARM: define internal void @g.cfi() ; WASM32: define void @g() !type !{{[0-9]+}} !wasm.index ![[I1:[0-9]+]] define void @g() !type !0 { ret void @@ -29,12 +70,18 @@ declare i1 @llvm.type.test(i8* %ptr, metadata %bitset) nounwind readnone define i1 @foo(i8* %p) { - ; X64: sub i64 {{.*}}, ptrtoint ([2 x <{ i8, i32, i8, i8, i8 }>]* @[[JT]] to i64) + ; X86: sub i64 {{.*}}, ptrtoint ([2 x [8 x i8]]* @.cfi.jumptable to i64) + ; ARM: sub i64 {{.*}}, ptrtoint ([2 x [4 x i8]]* @.cfi.jumptable to i64) ; WASM32: sub i64 {{.*}}, 1 ; WASM32: icmp ult i64 {{.*}}, 2 %x = call i1 @llvm.type.test(i8* %p, metadata !"typeid1") ret i1 %x } +; X86: declare void @f() +; ARM: declare void @f() +; X86: declare void @g() +; ARM: declare void @g() + ; WASM32: ![[I0]] = !{i64 1} ; WASM32: ![[I1]] = !{i64 2} Index: test/Transforms/LowerTypeTests/section.ll =================================================================== --- 
test/Transforms/LowerTypeTests/section.ll +++ test/Transforms/LowerTypeTests/section.ll @@ -5,9 +5,11 @@ target triple = "x86_64-unknown-linux-gnu" -; CHECK: @[[A:.*]] = private constant {{.*}} section ".text" -; CHECK: @f = alias void (), bitcast ({{.*}}* @[[A]] to void ()*) -; CHECK: define private void {{.*}} section "xxx" +; CHECK: module asm ".section .text.cfi, +; CHECK: module asm ".cfi.jumptable:" +; CHECK-NEXT: module asm "jmp f.cfi@plt" + +; CHECK: define internal void @f.cfi() section "xxx" define void @f() section "xxx" !type !0 { entry: