Index: lib/Target/PowerPC/PPCISelLowering.cpp =================================================================== --- lib/Target/PowerPC/PPCISelLowering.cpp +++ lib/Target/PowerPC/PPCISelLowering.cpp @@ -613,10 +613,10 @@ setOperationAction(ISD::READCYCLECOUNTER, MVT::i64, Legal); } - setOperationAction(ISD::ATOMIC_LOAD, MVT::i32, Expand); - setOperationAction(ISD::ATOMIC_STORE, MVT::i32, Expand); - setOperationAction(ISD::ATOMIC_LOAD, MVT::i64, Expand); - setOperationAction(ISD::ATOMIC_STORE, MVT::i64, Expand); + if (!isPPC64) { + setOperationAction(ISD::ATOMIC_LOAD, MVT::i64, Expand); + setOperationAction(ISD::ATOMIC_STORE, MVT::i64, Expand); + } setBooleanContents(ZeroOrOneBooleanContent); // Altivec instructions set fields to all zeros or all ones. Index: lib/Target/PowerPC/PPCInstr64Bit.td =================================================================== --- lib/Target/PowerPC/PPCInstr64Bit.td +++ lib/Target/PowerPC/PPCInstr64Bit.td @@ -1135,3 +1135,9 @@ def : Pat<(unaligned4store i64:$rS, xoaddr:$dst), (STDX $rS, xoaddr:$dst)>; +// 64-bit atomic loads and stores +def : Pat<(atomic_load_64 ixaddr:$src), (LD memrix:$src)>; +def : Pat<(atomic_load_64 xaddr:$src), (LDX memrr:$src)>; + +def : Pat<(atomic_store_64 ixaddr:$ptr, i64:$val), (STD g8rc:$val, memrix:$ptr)>; +def : Pat<(atomic_store_64 xaddr:$ptr, i64:$val), (STDX g8rc:$val, memrr:$ptr)>; Index: lib/Target/PowerPC/PPCInstrInfo.td =================================================================== --- lib/Target/PowerPC/PPCInstrInfo.td +++ lib/Target/PowerPC/PPCInstrInfo.td @@ -3695,3 +3695,19 @@ defm : TrapExtendedMnemonic<"lnl", 5>; defm : TrapExtendedMnemonic<"lng", 6>; defm : TrapExtendedMnemonic<"u", 31>; + +// Atomic loads +def : Pat<(atomic_load_8 iaddr:$src), (LBZ memri:$src)>; +def : Pat<(atomic_load_16 iaddr:$src), (LHZ memri:$src)>; +def : Pat<(atomic_load_32 iaddr:$src), (LWZ memri:$src)>; +def : Pat<(atomic_load_8 xaddr:$src), (LBZX memrr:$src)>; +def : Pat<(atomic_load_16 xaddr:$src), (LHZX memrr:$src)>; +def : Pat<(atomic_load_32 xaddr:$src), (LWZX memrr:$src)>; + +// Atomic stores +def : Pat<(atomic_store_8 iaddr:$ptr, i32:$val), (STB gprc:$val, memri:$ptr)>; +def : Pat<(atomic_store_16 iaddr:$ptr, i32:$val), (STH gprc:$val, memri:$ptr)>; +def : Pat<(atomic_store_32 iaddr:$ptr, i32:$val), (STW gprc:$val, memri:$ptr)>; +def : Pat<(atomic_store_8 xaddr:$ptr, i32:$val), (STBX gprc:$val, memrr:$ptr)>; +def : Pat<(atomic_store_16 xaddr:$ptr, i32:$val), (STHX gprc:$val, memrr:$ptr)>; +def : Pat<(atomic_store_32 xaddr:$ptr, i32:$val), (STWX gprc:$val, memrr:$ptr)>; Index: test/CodeGen/PowerPC/atomic-2.ll =================================================================== --- test/CodeGen/PowerPC/atomic-2.ll +++ test/CodeGen/PowerPC/atomic-2.ll @@ -30,8 +30,9 @@ entry: ; CHECK: @atomic_store store atomic i64 %val, i64* %mem release, align 64 -; CHECK: ldarx -; CHECK: stdcx. +; CHECK: sync 1 +; CHECK-NOT: stdcx +; CHECK: std ret void } @@ -39,9 +40,9 @@ entry: ; CHECK: @atomic_load %tmp = load atomic i64* %mem acquire, align 64 -; CHECK: ldarx -; CHECK: stdcx. -; CHECK: stdcx. +; CHECK-NOT: ldarx +; CHECK: ld +; CHECK: sync 1 ret i64 %tmp } Index: test/CodeGen/PowerPC/atomics.ll =================================================================== --- test/CodeGen/PowerPC/atomics.ll +++ test/CodeGen/PowerPC/atomics.ll @@ -11,18 +11,21 @@ ; We also vary orderings to check for barriers. 
define i8 @load_i8_unordered(i8* %mem) { ; CHECK-LABEL: load_i8_unordered +; CHECK: lbz ; CHECK-NOT: sync %val = load atomic i8* %mem unordered, align 1 ret i8 %val } define i16 @load_i16_monotonic(i16* %mem) { ; CHECK-LABEL: load_i16_monotonic +; CHECK: lhz ; CHECK-NOT: sync %val = load atomic i16* %mem monotonic, align 2 ret i16 %val } define i32 @load_i32_acquire(i32* %mem) { ; CHECK-LABEL: load_i32_acquire +; CHECK: lwz %val = load atomic i32* %mem acquire, align 4 ; CHECK: sync 1 ret i32 %val @@ -30,6 +33,9 @@ define i64 @load_i64_seq_cst(i64* %mem) { ; CHECK-LABEL: load_i64_seq_cst ; CHECK: sync 0 +; PPC32: __sync_ +; PPC64-NOT: __sync_ +; PPC64: ld %val = load atomic i64* %mem seq_cst, align 8 ; CHECK: sync 1 ret i64 %val @@ -39,24 +45,30 @@ define void @store_i8_unordered(i8* %mem) { ; CHECK-LABEL: store_i8_unordered ; CHECK-NOT: sync +; CHECK: stb store atomic i8 42, i8* %mem unordered, align 1 ret void } define void @store_i16_monotonic(i16* %mem) { ; CHECK-LABEL: store_i16_monotonic ; CHECK-NOT: sync +; CHECK: sth store atomic i16 42, i16* %mem monotonic, align 2 ret void } define void @store_i32_release(i32* %mem) { ; CHECK-LABEL: store_i32_release ; CHECK: sync 1 +; CHECK: stw store atomic i32 42, i32* %mem release, align 4 ret void } define void @store_i64_seq_cst(i64* %mem) { ; CHECK-LABEL: store_i64_seq_cst ; CHECK: sync 0 +; PPC32: __sync_ +; PPC64-NOT: __sync_ +; PPC64: std store atomic i64 42, i64* %mem seq_cst, align 8 ret void } Index: test/CodeGen/PowerPC/pr15630.ll =================================================================== --- test/CodeGen/PowerPC/pr15630.ll +++ test/CodeGen/PowerPC/pr15630.ll @@ -13,4 +13,5 @@ ret void } -; CHECK: stwcx. +; CHECK: sync +; CHECK: stb