diff --git a/llvm/lib/Target/ARM/ARMISelLowering.h b/llvm/lib/Target/ARM/ARMISelLowering.h
--- a/llvm/lib/Target/ARM/ARMISelLowering.h
+++ b/llvm/lib/Target/ARM/ARMISelLowering.h
@@ -633,5 +633,8 @@
   Value *emitStoreConditional(IRBuilderBase &Builder, Value *Val, Value *Addr,
                               AtomicOrdering Ord) const override;
 
+  Value *castToInt8PtrInDefaultAddressSpace(IRBuilderBase &Builder, Module &M,
+                                            Value *Addr) const;
+
   void emitAtomicCmpXchgNoStoreLLBalance(IRBuilderBase &Builder) const override;
 
diff --git a/llvm/lib/Target/ARM/ARMISelLowering.cpp b/llvm/lib/Target/ARM/ARMISelLowering.cpp
--- a/llvm/lib/Target/ARM/ARMISelLowering.cpp
+++ b/llvm/lib/Target/ARM/ARMISelLowering.cpp
@@ -19615,6 +19615,32 @@
   return !Subtarget->hasMinSize() || Subtarget->isTargetWindows();
 }
 
+// Cast \p Addr to an i8* in the default address space, inserting an
+// addrspacecast when \p Addr lives in a non-default address space.  Used so
+// the ldrexd/strexd call sites below (which previously bitcast to a
+// default-address-space i8*) also accept pointers from other address spaces.
+Value *
+ARMTargetLowering::castToInt8PtrInDefaultAddressSpace(IRBuilderBase &Builder,
+                                                      Module &M,
+                                                      Value *Addr) const {
+  unsigned AddressSpace = cast<PointerType>(Addr->getType())->getAddressSpace();
+  // First strip the pointee type: bitcast to i8* while staying in the
+  // pointer's original address space.
+  Addr = Builder.CreateBitCast(
+      Addr, Type::getInt8PtrTy(M.getContext(), AddressSpace));
+
+  PointerType *Int8PtrTy = Type::getInt8PtrTy(M.getContext());
+  if (AddressSpace != Int8PtrTy->getAddressSpace()) {
+    // The pointer does not point to the default address space.
+    // Perform an address space cast to the default address space.
+    //
+    // We assume that atomic operations have the same semantics on ARM
+    // irrespective of the address space.
+    Addr = Builder.CreateAddrSpaceCast(Addr, Int8PtrTy);
+  }
+  return Addr;
+}
+
 Value *ARMTargetLowering::emitLoadLinked(IRBuilderBase &Builder, Value *Addr,
                                          AtomicOrdering Ord) const {
   Module *M = Builder.GetInsertBlock()->getParent()->getParent();
@@ -19629,7 +19655,7 @@
       IsAcquire ? Intrinsic::arm_ldaexd : Intrinsic::arm_ldrexd;
   Function *Ldrex = Intrinsic::getDeclaration(M, Int);
 
-  Addr = Builder.CreateBitCast(Addr, Type::getInt8PtrTy(M->getContext()));
+  Addr = castToInt8PtrInDefaultAddressSpace(Builder, *M, Addr);
 
   Value *LoHi = Builder.CreateCall(Ldrex, Addr, "lohi");
   Value *Lo = Builder.CreateExtractValue(LoHi, 0, "lo");
@@ -19678,7 +19704,8 @@
   Value *Hi = Builder.CreateTrunc(Builder.CreateLShr(Val, 32), Int32Ty, "hi");
   if (!Subtarget->isLittle())
     std::swap(Lo, Hi);
-  Addr = Builder.CreateBitCast(Addr, Type::getInt8PtrTy(M->getContext()));
+
+  Addr = castToInt8PtrInDefaultAddressSpace(Builder, *M, Addr);
   return Builder.CreateCall(Strex, {Lo, Hi, Addr});
 }
 
diff --git a/llvm/test/CodeGen/ARM/atomic-i64-other-address-space.ll b/llvm/test/CodeGen/ARM/atomic-i64-other-address-space.ll
new file mode 100644
--- /dev/null
+++ b/llvm/test/CodeGen/ARM/atomic-i64-other-address-space.ll
@@ -0,0 +1,69 @@
+; RUN: llc < %s -mtriple=armv6k-linux-gnueabihf | FileCheck %s --check-prefixes=ARMV6K
+; RUN: llc < %s -mtriple=armv7-linux-gnueabihf | FileCheck %s --check-prefixes=ARMV7
+; RUN: llc < %s -mtriple=thumbv7-linux-gnueabihf | FileCheck %s --check-prefixes=THUMBV7
+
+;THUMBV7: atomic_load_i64_other_address_space:
+;THUMBV7: @ %bb.0: @ %load_entry
+;THUMBV7-NEXT: ldrexd r0, r1, [r0]
+;THUMBV7-NEXT: clrex
+;THUMBV7-NEXT: dmb ish
+;THUMBV7-NEXT: bx lr
+;THUMBV7: atomic_store_i64_other_address_space:
+;THUMBV7: @ %bb.0: @ %store_entry
+;THUMBV7-NEXT: dmb ish
+;THUMBV7-NEXT:.LBB1_1: @ %atomicrmw.start
+;THUMBV7: ldrexd r1, r12, [r0]
+;THUMBV7-NEXT: strexd r1, r2, r3, [r0]
+;THUMBV7-NEXT: cmp r1, #0
+;THUMBV7-NEXT: bne .LBB1_1
+;THUMBV7-NEXT:@ %bb.2: @ %atomicrmw.end
+;THUMBV7-NEXT: bx lr
+
+;ARMV6K: atomic_load_i64_other_address_space:
+;ARMV6K: @ %bb.0: @ %load_entry
+;ARMV6K-NEXT: ldrexd r0, r1, [r0]
+;ARMV6K-NEXT: mov r2, #0
+;ARMV6K-NEXT: mcr p15, #0, r2, c7, c10, #5
+;ARMV6K-NEXT: bx lr
+;ARMV6K: atomic_store_i64_other_address_space:
+;ARMV6K: @ %bb.0: @ %store_entry
+;ARMV6K: push {r4, r5, r11, lr}
+;ARMV6K: mov r1, #0
+;ARMV6K: mcr p15, #0, r1, c7, c10, #5
+;ARMV6K-NEXT: .LBB1_1: @ %atomicrmw.start
+;ARMV6K: ldrexd r4, r5, [r0]
+;ARMV6K-NEXT: strexd r1, r2, r3, [r0]
+;ARMV6K-NEXT: cmp r1, #0
+;ARMV6K-NEXT: bne .LBB1_1
+;ARMV6K-NEXT: @ %bb.2: @ %atomicrmw.end
+;ARMV6K-NEXT: pop {r4, r5, r11, pc}
+
+; ARMV7: atomic_load_i64_other_address_space:
+; ARMV7: @ %bb.0: @ %load_entry
+; ARMV7-NEXT: ldrexd r0, r1, [r0]
+; ARMV7-NEXT: clrex
+; ARMV7-NEXT: dmb ish
+; ARMV7-NEXT: bx lr
+; ARMV7: atomic_store_i64_other_address_space:
+; ARMV7: @ %bb.0: @ %store_entry
+; ARMV7: push {r4, r5, r11, lr}
+; ARMV7: dmb ish
+; ARMV7: .LBB1_1: @ %atomicrmw.start
+; ARMV7: ldrexd r4, r5, [r0]
+; ARMV7-NEXT: strexd r1, r2, r3, [r0]
+; ARMV7-NEXT: cmp r1, #0
+; ARMV7-NEXT: bne .LBB1_1
+; ARMV7-NEXT: @ %bb.2: @ %atomicrmw.end
+; ARMV7-NEXT: pop {r4, r5, r11, pc}
+
+define i64 @atomic_load_i64_other_address_space(i64 addrspace(1)* align 8 %ptr) {
+load_entry:
+  %val = load atomic i64, i64 addrspace(1)* %ptr acquire, align 8
+  ret i64 %val
+}
+
+define void @atomic_store_i64_other_address_space(i64 addrspace(1)* align 8 %ptr, i64 %value) {
+store_entry:
+  store atomic i64 %value, i64 addrspace(1)* %ptr release, align 8
+  ret void
+}