Index: include/llvm/IR/IntrinsicsPowerPC.td
===================================================================
--- include/llvm/IR/IntrinsicsPowerPC.td
+++ include/llvm/IR/IntrinsicsPowerPC.td
@@ -896,6 +896,18 @@
                       PowerPC_VSX_Intrinsic<"xxinsertw",[llvm_v4i32_ty],
                                             [llvm_v4i32_ty,llvm_v2i64_ty,llvm_i32_ty],
                                             [IntrNoMem]>;
+
+// Vector Permute Doubleword Immediate
+def int_ppc_vsx_xxpermdi :
+      PowerPC_VSX_Intrinsic<"xxpermdi",[llvm_v2i64_ty],
+                            [llvm_v2i64_ty,llvm_v2i64_ty,llvm_i32_ty],
+                            [IntrNoMem]>;
+
+// Vector Shift Left Double by Word Immediate
+def int_ppc_vsx_xxsldwi:
+      PowerPC_VSX_Intrinsic<"xxsldwi",[llvm_v2i64_ty],
+                            [llvm_v2i64_ty,llvm_v2i64_ty,llvm_i32_ty],
+                            [IntrNoMem]>;
 }
 
 //===----------------------------------------------------------------------===//
Index: lib/Target/PowerPC/PPCInstrVSX.td
===================================================================
--- lib/Target/PowerPC/PPCInstrVSX.td
+++ lib/Target/PowerPC/PPCInstrVSX.td
@@ -2282,6 +2282,16 @@
              (v2i64 (COPY_TO_REGCLASS (XXEXTRACTUW $A, imm:$IMM), VSRC))>;
 } // AddedComplexity = 400, HasP9Vector
 
+  let Predicates = [HasVSX] in {
+  // Extra patterns expanding to vector Permute Doubleword Immediate
+  def : Pat<(v2i64 (int_ppc_vsx_xxpermdi v2i64:$A, v2i64:$B, imm:$IMM)),
+            (v2i64 (XXPERMDI $A, $B, imm:$IMM))>;
+
+  // Extra patterns expanding to vector Shift Left Double by Word Immediate
+  def : Pat<(v2i64 (int_ppc_vsx_xxsldwi v2i64:$A, v2i64:$B, imm:$IMM)),
+            (v2i64 (XXSLDWI $A, $B, imm:$IMM))>;
+  } // HasVSX
+
 //===--------------------------------------------------------------------===//
 // Test Data Class SP/DP/QP
Index: test/CodeGen/PowerPC/vsx.ll
===================================================================
--- test/CodeGen/PowerPC/vsx.ll
+++ test/CodeGen/PowerPC/vsx.ll
@@ -1234,3 +1234,79 @@
 }
 ; Function Attrs: nounwind readnone
 declare void @llvm.ppc.vsx.stxvd2x.be(<2 x double>, i8*)
+
+define <2 x i64> @test87(<2 x i64> %a, <2 x i64> %b) {
+entry:
+; CHECK-LABEL: test87
+; CHECK: xxmrghd 34, 34, 35
+; CHECK-NEXT: blr
+  %ans = tail call <2 x i64> @llvm.ppc.vsx.xxpermdi(<2 x i64> %a, <2 x i64> %b, i32 0)
+  ret <2 x i64> %ans
+}
+
+define <2 x i64> @test88(<2 x i64> %a, <2 x i64> %b) {
+entry:
+; CHECK-LABEL: test88
+; CHECK: xxpermdi 34, 34, 35, 1
+; CHECK-NEXT: blr
+  %ans = tail call <2 x i64> @llvm.ppc.vsx.xxpermdi(<2 x i64> %a, <2 x i64> %b, i32 1)
+  ret <2 x i64> %ans
+}
+
+define <2 x i64> @test89(<2 x i64> %a, <2 x i64> %b) {
+entry:
+; CHECK-LABEL: test89
+; CHECK: xxpermdi 34, 34, 35, 2
+; CHECK-NEXT: blr
+  %ans = tail call <2 x i64> @llvm.ppc.vsx.xxpermdi(<2 x i64> %a, <2 x i64> %b, i32 2)
+  ret <2 x i64> %ans
+}
+
+define <2 x i64> @test90(<2 x i64> %a, <2 x i64> %b) {
+entry:
+; CHECK-LABEL: test90
+; CHECK: xxmrgld 34, 34, 35
+; CHECK-NEXT: blr
+  %ans = tail call <2 x i64> @llvm.ppc.vsx.xxpermdi(<2 x i64> %a, <2 x i64> %b, i32 3)
+  ret <2 x i64> %ans
+}
+
+declare <2 x i64> @llvm.ppc.vsx.xxpermdi(<2 x i64>, <2 x i64>, i32)
+
+define <2 x i64> @test91(<2 x i64> %a, <2 x i64> %b) {
+entry:
+; CHECK-LABEL: test91
+; CHECK: xxsldwi 34, 34, 35, 0
+; CHECK-NEXT: blr
+  %ans = tail call <2 x i64> @llvm.ppc.vsx.xxsldwi(<2 x i64> %a, <2 x i64> %b, i32 0)
+  ret <2 x i64> %ans
+}
+
+define <2 x i64> @test92(<2 x i64> %a, <2 x i64> %b) {
+entry:
+; CHECK-LABEL: test92
+; CHECK: xxsldwi 34, 34, 35, 1
+; CHECK-NEXT: blr
+  %ans = tail call <2 x i64> @llvm.ppc.vsx.xxsldwi(<2 x i64> %a, <2 x i64> %b, i32 1)
+  ret <2 x i64> %ans
+}
+
+define <2 x i64> @test93(<2 x i64> %a, <2 x i64> %b) {
+entry:
+; CHECK-LABEL: test93
+; CHECK: xxsldwi 34, 34, 35, 2
+; CHECK-NEXT: blr
+  %ans = tail call <2 x i64> @llvm.ppc.vsx.xxsldwi(<2 x i64> %a, <2 x i64> %b, i32 2)
+  ret <2 x i64> %ans
+}
+
+define <2 x i64> @test94(<2 x i64> %a, <2 x i64> %b) {
+entry:
+; CHECK-LABEL: test94
+; CHECK: xxsldwi 34, 34, 35, 3
+; CHECK-NEXT: blr
+  %ans = tail call <2 x i64> @llvm.ppc.vsx.xxsldwi(<2 x i64> %a, <2 x i64> %b, i32 3)
+  ret <2 x i64> %ans
+}
+
+declare <2 x i64> @llvm.ppc.vsx.xxsldwi(<2 x i64>, <2 x i64>, i32)