diff --git a/clang/include/clang/Basic/BuiltinsPPC.def b/clang/include/clang/Basic/BuiltinsPPC.def
--- a/clang/include/clang/Basic/BuiltinsPPC.def
+++ b/clang/include/clang/Basic/BuiltinsPPC.def
@@ -114,7 +114,7 @@
 BUILTIN(__builtin_ppc_rlwimi, "UiUiUiIUiIUi", "")
 BUILTIN(__builtin_ppc_rldimi, "ULLiULLiULLiIUiIULLi", "")
 // load
-BUILTIN(__builtin_ppc_load2r, "UiUs*", "")
+BUILTIN(__builtin_ppc_load2r, "UsUs*", "")
 BUILTIN(__builtin_ppc_load4r, "UiUi*", "")
 BUILTIN(__builtin_ppc_load8r, "ULLiULLi*", "")
 // store
diff --git a/clang/lib/CodeGen/CGBuiltin.cpp b/clang/lib/CodeGen/CGBuiltin.cpp
--- a/clang/lib/CodeGen/CGBuiltin.cpp
+++ b/clang/lib/CodeGen/CGBuiltin.cpp
@@ -15624,6 +15624,12 @@
     Value *Rotate = Builder.CreateCall(F, {Ops[0], Ops[0], ShiftAmt});
     return Builder.CreateAnd(Rotate, Ops[2]);
   }
+  case PPC::BI__builtin_ppc_load2r: {
+    Function *F = CGM.getIntrinsic(Intrinsic::ppc_load2r);
+    Ops[0] = Builder.CreateBitCast(Ops[0], Int8PtrTy);
+    Value *LoadIntrinsic = Builder.CreateCall(F, Ops);
+    return Builder.CreateTrunc(LoadIntrinsic, Int16Ty);
+  }
   // FMA variations
   case PPC::BI__builtin_vsx_xvmaddadp:
   case PPC::BI__builtin_vsx_xvmaddasp:
diff --git a/clang/test/CodeGen/builtins-ppc-xlcompat-load-store-reversed.c b/clang/test/CodeGen/builtins-ppc-xlcompat-load-store-reversed.c
--- a/clang/test/CodeGen/builtins-ppc-xlcompat-load-store-reversed.c
+++ b/clang/test/CodeGen/builtins-ppc-xlcompat-load-store-reversed.c
@@ -1,4 +1,4 @@
-// REQUIRES: powerpc-registered-target.
+// REQUIRES: powerpc-registered-target
 // RUN: %clang_cc1 -triple powerpc64-unknown-unknown \
 // RUN:   -emit-llvm %s -o - -target-cpu pwr7 | FileCheck %s
 // RUN: %clang_cc1 -triple powerpc64le-unknown-unknown \
diff --git a/llvm/include/llvm/IR/IntrinsicsPowerPC.td b/llvm/include/llvm/IR/IntrinsicsPowerPC.td
--- a/llvm/include/llvm/IR/IntrinsicsPowerPC.td
+++ b/llvm/include/llvm/IR/IntrinsicsPowerPC.td
@@ -1626,8 +1626,7 @@
   Intrinsic<[llvm_i64_ty], [llvm_i64_ty, llvm_i64_ty, llvm_i64_ty], [IntrNoMem]>;
 // load
 def int_ppc_load2r
-    : GCCBuiltin<"__builtin_ppc_load2r">,
-      Intrinsic<[llvm_i32_ty], [llvm_ptr_ty], [IntrReadMem, IntrArgMemOnly]>;
+    : Intrinsic<[llvm_i32_ty], [llvm_ptr_ty], [IntrReadMem, IntrArgMemOnly]>;
 def int_ppc_load4r
     : GCCBuiltin<"__builtin_ppc_load4r">,
       Intrinsic<[llvm_i32_ty], [llvm_ptr_ty], [IntrReadMem, IntrArgMemOnly]>;
diff --git a/llvm/lib/Target/PowerPC/PPCISelLowering.cpp b/llvm/lib/Target/PowerPC/PPCISelLowering.cpp
--- a/llvm/lib/Target/PowerPC/PPCISelLowering.cpp
+++ b/llvm/lib/Target/PowerPC/PPCISelLowering.cpp
@@ -15637,6 +15637,18 @@
       Known.Zero = ~1U;  // All bits but the low one are known to be zero.
       break;
     }
+    break;
+  }
+  case ISD::INTRINSIC_W_CHAIN: {
+    switch (cast<ConstantSDNode>(Op.getOperand(1))->getZExtValue()) {
+    default:
+      break;
+    case Intrinsic::ppc_load2r:
+      // Top bits are cleared for load2r (which is the same as lhbrx).
+      Known.Zero = 0xFFFF0000;
+      break;
+    }
+    break;
+  }
   }
 }
diff --git a/llvm/test/CodeGen/PowerPC/builtins-ppc-xlcompat-load-store-reversed.ll b/llvm/test/CodeGen/PowerPC/builtins-ppc-xlcompat-load-store-reversed.ll
--- a/llvm/test/CodeGen/PowerPC/builtins-ppc-xlcompat-load-store-reversed.ll
+++ b/llvm/test/CodeGen/PowerPC/builtins-ppc-xlcompat-load-store-reversed.ll
@@ -52,12 +52,10 @@
 define dso_local zeroext i16 @test_builtin_ppc_load2r() {
 ; CHECK-64B-LABEL: test_builtin_ppc_load2r:
 ; CHECK-64B:       lhbrx 3, 0, 3
-; CHECK-64B-NEXT:  clrldi 3, 3, 48
 ; CHECK-64B-NEXT:  blr
 ; CHECK-32B-LABEL: test_builtin_ppc_load2r:
 ; CHECK-32B:       lhbrx 3, 0, 3
-; CHECK-32B-NEXT:  clrlwi 3, 3, 16
 ; CHECK-32B-NEXT:  blr
 entry:
   %0 = load i16*, i16** @us_addr, align 8