Index: llvm/trunk/lib/Transforms/IPO/LowerTypeTests.cpp
===================================================================
--- llvm/trunk/lib/Transforms/IPO/LowerTypeTests.cpp
+++ llvm/trunk/lib/Transforms/IPO/LowerTypeTests.cpp
@@ -683,6 +683,7 @@
   case Triple::x86_64:
     return kX86JumpTableEntrySize;
   case Triple::arm:
+  case Triple::thumb:
   case Triple::aarch64:
     return kARMJumpTableEntrySize;
   default:
@@ -730,6 +731,8 @@
     OS << "int3\nint3\nint3\n";
   } else if (Arch == Triple::arm || Arch == Triple::aarch64) {
     OS << "b " << Name << "\n";
+  } else if (Arch == Triple::thumb) {
+    OS << "b.w " << Name << "\n";
   } else {
     report_fatal_error("Unsupported architecture for jump tables");
   }
@@ -754,8 +757,13 @@
   else if (!Dest->hasLocalLinkage())
     OS << ".globl " << Name << "\n";
   OS << ".type " << Name << ", function\n";
-  OS << Name << " = " << JumpTable->getName() << " + "
-     << (getJumpTableEntrySize() * Distance) << "\n";
+  if (Arch == Triple::thumb) {
+    OS << ".thumb_set " << Name << ", " << JumpTable->getName() << " + "
+       << (getJumpTableEntrySize() * Distance) << "\n";
+  } else {
+    OS << Name << " = " << JumpTable->getName() << " + "
+       << (getJumpTableEntrySize() * Distance) << "\n";
+  }
   OS << ".size " << Name << ", " << getJumpTableEntrySize() << "\n";
 }
 
@@ -768,7 +776,7 @@
 void LowerTypeTestsModule::buildBitSetsFromFunctions(
     ArrayRef<Metadata *> TypeIds, ArrayRef<GlobalTypeMember *> Functions) {
   if (Arch == Triple::x86 || Arch == Triple::x86_64 || Arch == Triple::arm ||
-      Arch == Triple::aarch64)
+      Arch == Triple::thumb || Arch == Triple::aarch64)
     buildBitSetsFromFunctionsNative(TypeIds, Functions);
   else if (Arch == Triple::wasm32 || Arch == Triple::wasm64)
     buildBitSetsFromFunctionsWASM(TypeIds, Functions);
@@ -990,10 +998,12 @@
   // FIXME: this magic section name seems to do the trick.
   AsmOS << ".section " << (ObjectFormat == Triple::MachO
                                ? "__TEXT,__text,regular,pure_instructions"
-                               : ".text.cfi, \"ax\", @progbits")
+                               : ".text.cfi, \"ax\", %progbits")
         << "\n";
   // Align the whole table by entry size.
   AsmOS << ".balign " << EntrySize << "\n";
+  if (Arch == Triple::thumb)
+    AsmOS << ".thumb_func\n";
   AsmOS << JumpTable->getName() << ":\n";
   for (unsigned I = 0; I != Functions.size(); ++I)
     createJumpTableEntry(AsmOS, cast<Function>(Functions[I]->getGlobal()), I);
Index: llvm/trunk/test/Transforms/LowerTypeTests/function.ll
===================================================================
--- llvm/trunk/test/Transforms/LowerTypeTests/function.ll
+++ llvm/trunk/test/Transforms/LowerTypeTests/function.ll
@@ -1,7 +1,8 @@
-; RUN: opt -S -lowertypetests -mtriple=i686-unknown-linux-gnu < %s | FileCheck --check-prefix=X86 %s
-; RUN: opt -S -lowertypetests -mtriple=x86_64-unknown-linux-gnu < %s | FileCheck --check-prefix=X86 %s
-; RUN: opt -S -lowertypetests -mtriple=arm-unknown-linux-gnu < %s | FileCheck --check-prefix=ARM %s
-; RUN: opt -S -lowertypetests -mtriple=aarch64-unknown-linux-gnu < %s | FileCheck --check-prefix=ARM %s
+; RUN: opt -S -lowertypetests -mtriple=i686-unknown-linux-gnu < %s | FileCheck --check-prefixes=X86,NATIVE %s
+; RUN: opt -S -lowertypetests -mtriple=x86_64-unknown-linux-gnu < %s | FileCheck --check-prefixes=X86,NATIVE %s
+; RUN: opt -S -lowertypetests -mtriple=arm-unknown-linux-gnu < %s | FileCheck --check-prefixes=ARM,NATIVE %s
+; RUN: opt -S -lowertypetests -mtriple=thumb-unknown-linux-gnu < %s | FileCheck --check-prefixes=THUMB,NATIVE %s
+; RUN: opt -S -lowertypetests -mtriple=aarch64-unknown-linux-gnu < %s | FileCheck --check-prefixes=ARM,NATIVE %s
 ; RUN: opt -S -lowertypetests -mtriple=wasm32-unknown-unknown < %s | FileCheck --check-prefix=WASM32 %s
 
 ; Tests that we correctly handle bitsets containing 2 or more functions.
@@ -15,7 +16,7 @@
 ; X86-NEXT: module asm ".type g, function"
 ; X86-NEXT: module asm "g = .cfi.jumptable + 8"
 ; X86-NEXT: module asm ".size g, 8"
-; X86-NEXT: module asm ".section .text.cfi, \22ax\22, @progbits"
+; X86-NEXT: module asm ".section .text.cfi, \22ax\22, %progbits"
 ; X86-NEXT: module asm ".balign 8"
 ; X86-NEXT: module asm ".cfi.jumptable:"
 ; X86-NEXT: module asm "jmp f.cfi@plt"
@@ -34,30 +35,43 @@
 ; ARM-NEXT: module asm ".type g, function"
 ; ARM-NEXT: module asm "g = .cfi.jumptable + 4"
 ; ARM-NEXT: module asm ".size g, 4"
-; ARM-NEXT: module asm ".section .text.cfi, \22ax\22, @progbits"
+; ARM-NEXT: module asm ".section .text.cfi, \22ax\22, %progbits"
 ; ARM-NEXT: module asm ".balign 4"
 ; ARM-NEXT: module asm ".cfi.jumptable:"
 ; ARM-NEXT: module asm "b f.cfi"
 ; ARM-NEXT: module asm "b g.cfi"
 
+; THUMB: module asm ".globl f"
+; THUMB-NEXT: module asm ".type f, function"
+; THUMB-NEXT: module asm ".thumb_set f, .cfi.jumptable + 0"
+; THUMB-NEXT: module asm ".size f, 4"
+; THUMB-NEXT: module asm ".type g, function"
+; THUMB-NEXT: module asm ".thumb_set g, .cfi.jumptable + 4"
+; THUMB-NEXT: module asm ".size g, 4"
+; THUMB-NEXT: module asm ".section .text.cfi, \22ax\22, %progbits"
+; THUMB-NEXT: module asm ".balign 4"
+; THUMB-NEXT: module asm ".thumb_func"
+; THUMB-NEXT: module asm ".cfi.jumptable:"
+; THUMB-NEXT: module asm "b.w f.cfi"
+; THUMB-NEXT: module asm "b.w g.cfi"
+
+
 ; X86: @.cfi.jumptable = external hidden constant [2 x [8 x i8]]
 ; ARM: @.cfi.jumptable = external hidden constant [2 x [4 x i8]]
+; THUMB: @.cfi.jumptable = external hidden constant [2 x [4 x i8]]
 ; WASM32: private constant [0 x i8] zeroinitializer
 
 @0 = private unnamed_addr constant [2 x void (...)*] [void (...)* bitcast (void ()* @f to void (...)*), void (...)* bitcast (void ()* @g to void (...)*)], align 16
 
-; X86: @llvm.used = appending global [2 x i8*] [i8* bitcast (void ()* @f.cfi to i8*), i8* bitcast (void ()* @g.cfi to i8*)], section "llvm.metadata"
-; ARM: @llvm.used = appending global [2 x i8*] [i8* bitcast (void ()* @f.cfi to i8*), i8* bitcast (void ()* @g.cfi to i8*)], section "llvm.metadata"
+; NATIVE: @llvm.used = appending global [2 x i8*] [i8* bitcast (void ()* @f.cfi to i8*), i8* bitcast (void ()* @g.cfi to i8*)], section "llvm.metadata"
 
-; X86: define internal void @f.cfi()
-; ARM: define internal void @f.cfi()
+; NATIVE: define internal void @f.cfi()
 ; WASM32: define void @f() !type !{{[0-9]+}} !wasm.index ![[I0:[0-9]+]]
 define void @f() !type !0 {
   ret void
 }
 
-; X86: define internal void @g.cfi()
-; ARM: define internal void @g.cfi()
+; NATIVE: define internal void @g.cfi()
 ; WASM32: define internal void @g() !type !{{[0-9]+}} !wasm.index ![[I1:[0-9]+]]
 define internal void @g() !type !0 {
   ret void
@@ -76,10 +90,8 @@
   ret i1 %x
 }
 
-; X86: declare void @f()
-; ARM: declare void @f()
-; X86: declare hidden void @g()
-; ARM: declare hidden void @g()
+; NATIVE: declare void @f()
+; NATIVE: declare hidden void @g()
 
 ; WASM32: ![[I0]] = !{i64 1}
 ; WASM32: ![[I1]] = !{i64 2}
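
For illustration only, the sketch below is a small standalone C++ program (not the LLVM code itself; the Arch enum and the jumpTableEntryAsm helper are hypothetical names) showing the per-architecture entry selection the patch implements: every jump table slot is a single direct branch padded to the fixed entry size, and Thumb uses the wide b.w form so each entry stays 4 bytes like ARM and AArch64. The change also switches the section flag to %progbits, presumably because @ starts a comment in the ARM/Thumb assembler dialect, and uses .thumb_set so the alias is marked as a Thumb entry point, which a plain symbol assignment would not do.

// Standalone sketch of the jump table entry selection; Arch and
// jumpTableEntryAsm are illustrative, not LLVM APIs.
#include <cstdio>
#include <string>

enum class Arch { X86, X86_64, ARM, Thumb, AArch64 };

// Each entry is one direct branch to the renamed function body
// (e.g. f.cfi), padded so every entry has the same size.
std::string jumpTableEntryAsm(Arch A, const std::string &Target) {
  switch (A) {
  case Arch::X86:
  case Arch::X86_64:
    // 8-byte entry: 5-byte jmp rel32 plus three int3 padding bytes.
    return "jmp " + Target + "@plt\nint3\nint3\nint3\n";
  case Arch::ARM:
  case Arch::AArch64:
    return "b " + Target + "\n"; // 4-byte branch
  case Arch::Thumb:
    // b.w forces the 32-bit Thumb-2 encoding, keeping the entry 4 bytes.
    return "b.w " + Target + "\n";
  }
  return "";
}

int main() {
  // Prints "b.w f.cfi" and "b.w g.cfi", matching the THUMB
  // expectations in the test above.
  std::printf("%s%s", jumpTableEntryAsm(Arch::Thumb, "f.cfi").c_str(),
              jumpTableEntryAsm(Arch::Thumb, "g.cfi").c_str());
}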