Index: llvm/trunk/lib/Target/AArch64/AArch64ExpandPseudoInsts.cpp
===================================================================
--- llvm/trunk/lib/Target/AArch64/AArch64ExpandPseudoInsts.cpp
+++ llvm/trunk/lib/Target/AArch64/AArch64ExpandPseudoInsts.cpp
@@ -892,8 +892,13 @@
   }
   case AArch64::MOVbaseTLS: {
     unsigned DstReg = MI.getOperand(0).getReg();
+    auto SysReg = AArch64SysReg::TPIDR_EL0;
+    MachineFunction *MF = MBB.getParent();
+    if (MF->getTarget().getTargetTriple().isOSFuchsia() &&
+        MF->getTarget().getCodeModel() == CodeModel::Kernel)
+      SysReg = AArch64SysReg::TPIDR_EL1;
     BuildMI(MBB, MBBI, MI.getDebugLoc(), TII->get(AArch64::MRS), DstReg)
-        .addImm(AArch64SysReg::TPIDR_EL0);
+        .addImm(SysReg);
     MI.eraseFromParent();
     return true;
   }
Index: llvm/trunk/lib/Target/AArch64/AArch64FastISel.cpp
===================================================================
--- llvm/trunk/lib/Target/AArch64/AArch64FastISel.cpp
+++ llvm/trunk/lib/Target/AArch64/AArch64FastISel.cpp
@@ -458,7 +458,7 @@

   // MachO still uses GOT for large code-model accesses, but ELF requires
   // movz/movk sequences, which FastISel doesn't handle yet.
-  if (TM.getCodeModel() != CodeModel::Small && !Subtarget->isTargetMachO())
+  if (!Subtarget->useSmallAddressing() && !Subtarget->isTargetMachO())
     return 0;

   unsigned char OpFlags = Subtarget->ClassifyGlobalReference(GV, TM);
@@ -3147,8 +3147,8 @@
     return false;

   CodeModel::Model CM = TM.getCodeModel();
-  // Only support the small and large code model.
-  if (CM != CodeModel::Small && CM != CodeModel::Large)
+  // Only support the small-addressing and large code models.
+  if (CM != CodeModel::Large && !Subtarget->useSmallAddressing())
     return false;

   // FIXME: Add large code model support for ELF.
@@ -3199,7 +3199,7 @@

   // Issue the call.
   MachineInstrBuilder MIB;
-  if (CM == CodeModel::Small) {
+  if (Subtarget->useSmallAddressing()) {
     const MCInstrDesc &II = TII.get(Addr.getReg() ? AArch64::BLR : AArch64::BL);
     MIB = BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, II);
     if (Symbol)
Index: llvm/trunk/lib/Target/AArch64/AArch64ISelLowering.cpp
===================================================================
--- llvm/trunk/lib/Target/AArch64/AArch64ISelLowering.cpp
+++ llvm/trunk/lib/Target/AArch64/AArch64ISelLowering.cpp
@@ -3572,7 +3572,7 @@
 AArch64TargetLowering::LowerELFGlobalTLSAddress(SDValue Op,
                                                 SelectionDAG &DAG) const {
   assert(Subtarget->isTargetELF() && "This function expects an ELF target");
-  assert(getTargetMachine().getCodeModel() == CodeModel::Small &&
+  assert(Subtarget->useSmallAddressing() &&
          "ELF TLS only supported in small memory model");
   // Different choices can be made for the maximum size of the TLS area for a
   // module. For the small address model, the default TLS size is 16MiB and the
Index: llvm/trunk/lib/Target/AArch64/AArch64Subtarget.h
===================================================================
--- llvm/trunk/lib/Target/AArch64/AArch64Subtarget.h
+++ llvm/trunk/lib/Target/AArch64/AArch64Subtarget.h
@@ -250,6 +250,18 @@

   bool useAA() const override { return UseAA; }

+  bool useSmallAddressing() const {
+    switch (TLInfo.getTargetMachine().getCodeModel()) {
+    case CodeModel::Kernel:
+      // Kernel is currently allowed only for Fuchsia targets,
+      // where it is the same as Small for almost all purposes.
+    case CodeModel::Small:
+      return true;
+    default:
+      return false;
+    }
+  }
+
   /// ParseSubtargetFeatures - Parses features string setting specified
   /// subtarget options.  Definition of function is auto generated by tblgen.
   void ParseSubtargetFeatures(StringRef CPU, StringRef FS);
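The useSmallAddressing() predicate added above is the pivot of this patch: call sites that previously compared the code model against CodeModel::Small (see the FastISel and ISelLowering hunks) now ask the subtarget, so the Fuchsia-only Kernel model is treated like Small for addressing purposes. Below is a minimal standalone sketch of that fall-through semantics, using a local stand-in enum rather than LLVM's headers; the names and values here are illustrative only, not LLVM API.

#include <cassert>

// Stand-in for llvm::CodeModel::Model; values are illustrative, not LLVM's.
enum class CodeModel { Small, Kernel, Medium, Large };

// Mirrors the switch added to AArch64Subtarget::useSmallAddressing():
// Kernel (Fuchsia-only) deliberately falls through to the Small case,
// so both report "small addressing"; everything else does not.
static bool useSmallAddressing(CodeModel CM) {
  switch (CM) {
  case CodeModel::Kernel: // same as Small for addressing purposes
  case CodeModel::Small:
    return true;
  default:
    return false;
  }
}

int main() {
  assert(useSmallAddressing(CodeModel::Small));
  assert(useSmallAddressing(CodeModel::Kernel));
  assert(!useSmallAddressing(CodeModel::Large));
  return 0;
}
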
Index: llvm/trunk/lib/Target/AArch64/AArch64Subtarget.cpp
===================================================================
--- llvm/trunk/lib/Target/AArch64/AArch64Subtarget.cpp
+++ llvm/trunk/lib/Target/AArch64/AArch64Subtarget.cpp
@@ -147,9 +147,9 @@
   if (!TM.shouldAssumeDSOLocal(*GV->getParent(), GV))
     return AArch64II::MO_GOT;

-  // The small code mode's direct accesses use ADRP, which cannot necessarily
-  // produce the value 0 (if the code is above 4GB).
-  if (TM.getCodeModel() == CodeModel::Small && GV->hasExternalWeakLinkage())
+  // The small code model's direct accesses use ADRP, which cannot
+  // necessarily produce the value 0 (if the code is above 4GB).
+  if (useSmallAddressing() && GV->hasExternalWeakLinkage())
     return AArch64II::MO_GOT;

   return AArch64II::MO_NO_FLAG;
Index: llvm/trunk/lib/Target/AArch64/MCTargetDesc/AArch64MCTargetDesc.cpp
===================================================================
--- llvm/trunk/lib/Target/AArch64/MCTargetDesc/AArch64MCTargetDesc.cpp
+++ llvm/trunk/lib/Target/AArch64/MCTargetDesc/AArch64MCTargetDesc.cpp
@@ -84,9 +84,14 @@
   // no matter how far away they are.
   else if (CM == CodeModel::JITDefault)
     CM = CodeModel::Large;
-  else if (CM != CodeModel::Small && CM != CodeModel::Large)
-    report_fatal_error(
-        "Only small and large code models are allowed on AArch64");
+  else if (CM != CodeModel::Small && CM != CodeModel::Large) {
+    if (!TT.isOSFuchsia())
+      report_fatal_error(
+          "Only small and large code models are allowed on AArch64");
+    else if (CM != CodeModel::Kernel)
+      report_fatal_error(
+          "Only small, kernel, and large code models are allowed on AArch64");
+  }
 }

 static MCInstPrinter *createAArch64MCInstPrinter(const Triple &T,
Index: llvm/trunk/test/CodeGen/AArch64/arm64-builtins-linux.ll
===================================================================
--- llvm/trunk/test/CodeGen/AArch64/arm64-builtins-linux.ll
+++ llvm/trunk/test/CodeGen/AArch64/arm64-builtins-linux.ll
@@ -1,4 +1,6 @@
 ; RUN: llc < %s -mtriple=aarch64-linux-gnu | FileCheck %s
+; RUN: llc < %s -mtriple=aarch64-fuchsia | FileCheck %s
+; RUN: llc < %s -mtriple=aarch64-fuchsia -code-model=kernel | FileCheck --check-prefix=FUCHSIA-KERNEL %s

 ; Function Attrs: nounwind readnone
 declare i8* @llvm.thread.pointer() #1
@@ -6,6 +8,8 @@
 define i8* @thread_pointer() {
 ; CHECK: thread_pointer:
 ; CHECK: mrs {{x[0-9]+}}, TPIDR_EL0
+; FUCHSIA-KERNEL: thread_pointer:
+; FUCHSIA-KERNEL: mrs {{x[0-9]+}}, TPIDR_EL1
   %1 = tail call i8* @llvm.thread.pointer()
   ret i8* %1
 }
Index: llvm/trunk/test/CodeGen/AArch64/arm64-elf-globals.ll
===================================================================
--- llvm/trunk/test/CodeGen/AArch64/arm64-elf-globals.ll
+++ llvm/trunk/test/CodeGen/AArch64/arm64-elf-globals.ll
@@ -2,6 +2,10 @@
 ; RUN: llc -mtriple=arm64-linux-gnu -o - %s -O0 -mcpu=cyclone | FileCheck %s --check-prefix=CHECK-FAST
 ; RUN: llc -mtriple=arm64-linux-gnu -relocation-model=pic -o - %s -mcpu=cyclone | FileCheck %s --check-prefix=CHECK-PIC
 ; RUN: llc -mtriple=arm64-linux-gnu -O0 -relocation-model=pic -o - %s -mcpu=cyclone | FileCheck %s --check-prefix=CHECK-FAST-PIC
+; RUN: llc -mtriple=aarch64-fuchsia -code-model=kernel -o - %s -mcpu=cyclone | FileCheck %s
+; RUN: llc -mtriple=aarch64-fuchsia -code-model=kernel -o - %s -O0 -mcpu=cyclone | FileCheck %s --check-prefix=CHECK-FAST
+; RUN: llc -mtriple=aarch64-fuchsia -code-model=kernel -relocation-model=pic -o - %s -mcpu=cyclone | FileCheck %s --check-prefix=CHECK-PIC
+; RUN: llc -mtriple=aarch64-fuchsia -code-model=kernel -O0 -relocation-model=pic -o - %s -mcpu=cyclone | FileCheck %s --check-prefix=CHECK-FAST-PIC

 @var8 = external global i8, align 1
 @var16 = external global i16, align 2
Index: llvm/trunk/test/CodeGen/AArch64/stack-protector-target.ll
===================================================================
--- llvm/trunk/test/CodeGen/AArch64/stack-protector-target.ll
+++ llvm/trunk/test/CodeGen/AArch64/stack-protector-target.ll
@@ -1,6 +1,7 @@
 ; Test target-specific stack cookie location.
 ; RUN: llc -mtriple=aarch64-linux-android < %s -o - | FileCheck --check-prefix=ANDROID-AARCH64 %s
-; RUN: llc -mtriple=aarch64-fuchsia < %s -o - | FileCheck --check-prefix=FUCHSIA-AARCH64 %s
+; RUN: llc -mtriple=aarch64-fuchsia < %s -o - | FileCheck --check-prefixes=FUCHSIA-AARCH64-COMMON,FUCHSIA-AARCH64-USER %s
+; RUN: llc -mtriple=aarch64-fuchsia -code-model=kernel < %s -o - | FileCheck --check-prefixes=FUCHSIA-AARCH64-COMMON,FUCHSIA-AARCH64-KERNEL %s

 define void @_Z1fv() sspreq {
 entry:
@@ -19,9 +20,10 @@
 ; ANDROID-AARCH64: ldr [[D:.*]], [sp,
 ; ANDROID-AARCH64: cmp [[C]], [[D]]

-; FUCHSIA-AARCH64: mrs [[A:.*]], TPIDR_EL0
-; FUCHSIA-AARCH64: ldur [[B:.*]], {{\[}}[[A]], #-16]
-; FUCHSIA-AARCH64: str [[B]], [sp,
-; FUCHSIA-AARCH64: ldur [[C:.*]], {{\[}}[[A]], #-16]
-; FUCHSIA-AARCH64: ldr [[D:.*]], [sp,
-; FUCHSIA-AARCH64: cmp [[C]], [[D]]
+; FUCHSIA-AARCH64-USER: mrs [[A:.*]], TPIDR_EL0
+; FUCHSIA-AARCH64-KERNEL: mrs [[A:.*]], TPIDR_EL1
+; FUCHSIA-AARCH64-COMMON: ldur [[B:.*]], {{\[}}[[A]], #-16]
+; FUCHSIA-AARCH64-COMMON: str [[B]], [sp,
+; FUCHSIA-AARCH64-COMMON: ldur [[C:.*]], {{\[}}[[A]], #-16]
+; FUCHSIA-AARCH64-COMMON: ldr [[D:.*]], [sp,
+; FUCHSIA-AARCH64-COMMON: cmp [[C]], [[D]]
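Taken together, the patch only changes the thread-pointer register when the aarch64-fuchsia triple is combined with -code-model=kernel: the MOVbaseTLS expansion then reads TPIDR_EL1 instead of TPIDR_EL0, which is exactly what the updated arm64-builtins-linux.ll and stack-protector-target.ll RUN lines verify. The following standalone sketch restates that selection with local stand-in types, as an illustration rather than LLVM API.

#include <cassert>
#include <string>

// Local stand-ins for the relevant TargetMachine state;
// names and values are illustrative, not LLVM's.
enum class CodeModel { Small, Kernel, Large };

struct TargetConfig {
  bool IsOSFuchsia;  // corresponds to Triple::isOSFuchsia()
  CodeModel CM;      // corresponds to TargetMachine::getCodeModel()
};

// Mirrors the MOVbaseTLS expansion: default to the EL0 thread register,
// and use the EL1 register only for Fuchsia kernel-model code.
static std::string threadPointerSysReg(const TargetConfig &TC) {
  if (TC.IsOSFuchsia && TC.CM == CodeModel::Kernel)
    return "TPIDR_EL1";
  return "TPIDR_EL0";
}

int main() {
  assert(threadPointerSysReg({/*IsOSFuchsia=*/false, CodeModel::Small}) == "TPIDR_EL0");
  assert(threadPointerSysReg({/*IsOSFuchsia=*/true, CodeModel::Small}) == "TPIDR_EL0");
  assert(threadPointerSysReg({/*IsOSFuchsia=*/true, CodeModel::Kernel}) == "TPIDR_EL1");
  return 0;
}

The same decision can be observed end to end with the RUN lines already in this patch, e.g. llc -mtriple=aarch64-fuchsia -code-model=kernel piped to FileCheck for the mrs of TPIDR_EL1.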