Index: include/llvm/IR/IntrinsicsX86.td =================================================================== --- include/llvm/IR/IntrinsicsX86.td +++ include/llvm/IR/IntrinsicsX86.td @@ -1417,6 +1417,60 @@ [llvm_v8i64_ty, llvm_v8i64_ty, llvm_v8i64_ty, llvm_i8_ty], [IntrNoMem]>; + def int_x86_avx512_mask_vpermi2var_qi_128 : + GCCBuiltin<"__builtin_ia32_vpermi2varqi128_mask">, + Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, + llvm_v16i8_ty, llvm_v16i8_ty, llvm_i16_ty], + [IntrNoMem]>; + + def int_x86_avx512_mask_vpermt2var_qi_128 : + GCCBuiltin<"__builtin_ia32_vpermt2varqi128_mask">, + Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, + llvm_v16i8_ty, llvm_v16i8_ty, llvm_i16_ty], + [IntrNoMem]>; + + def int_x86_avx512_maskz_vpermt2var_qi_128 : + GCCBuiltin<"__builtin_ia32_vpermt2varqi128_maskz">, + Intrinsic<[llvm_v16i8_ty], [llvm_v16i8_ty, + llvm_v16i8_ty, llvm_v16i8_ty, llvm_i16_ty], + [IntrNoMem]>; + + def int_x86_avx512_mask_vpermi2var_qi_256 : + GCCBuiltin<"__builtin_ia32_vpermi2varqi256_mask">, + Intrinsic<[llvm_v32i8_ty], [llvm_v32i8_ty, + llvm_v32i8_ty, llvm_v32i8_ty, llvm_i32_ty], + [IntrNoMem]>; + + def int_x86_avx512_mask_vpermt2var_qi_256 : + GCCBuiltin<"__builtin_ia32_vpermt2varqi256_mask">, + Intrinsic<[llvm_v32i8_ty], [llvm_v32i8_ty, + llvm_v32i8_ty, llvm_v32i8_ty, llvm_i32_ty], + [IntrNoMem]>; + + def int_x86_avx512_maskz_vpermt2var_qi_256 : + GCCBuiltin<"__builtin_ia32_vpermt2varqi256_maskz">, + Intrinsic<[llvm_v32i8_ty], [llvm_v32i8_ty, + llvm_v32i8_ty, llvm_v32i8_ty, llvm_i32_ty], + [IntrNoMem]>; + + def int_x86_avx512_mask_vpermi2var_qi_512 : + GCCBuiltin<"__builtin_ia32_vpermi2varqi512_mask">, + Intrinsic<[llvm_v64i8_ty], [llvm_v64i8_ty, + llvm_v64i8_ty, llvm_v64i8_ty, llvm_i64_ty], + [IntrNoMem]>; + + def int_x86_avx512_mask_vpermt2var_qi_512 : + GCCBuiltin<"__builtin_ia32_vpermt2varqi512_mask">, + Intrinsic<[llvm_v64i8_ty], [llvm_v64i8_ty, + llvm_v64i8_ty, llvm_v64i8_ty, llvm_i64_ty], + [IntrNoMem]>; + + def int_x86_avx512_maskz_vpermt2var_qi_512 : 
GCCBuiltin<"__builtin_ia32_vpermt2varqi512_maskz">, + Intrinsic<[llvm_v64i8_ty], [llvm_v64i8_ty, + llvm_v64i8_ty, llvm_v64i8_ty, llvm_i64_ty], + [IntrNoMem]>; + def int_x86_avx512_mask_vpermil_pd_128 : GCCBuiltin<"__builtin_ia32_vpermilpd_mask">, Intrinsic<[llvm_v2f64_ty], Index: lib/Target/X86/X86IntrinsicsInfo.h =================================================================== --- lib/Target/X86/X86IntrinsicsInfo.h +++ lib/Target/X86/X86IntrinsicsInfo.h @@ -1795,6 +1795,12 @@ X86ISD::VPERMIV3, 0), X86_INTRINSIC_DATA(avx512_mask_vpermi2var_q_512, VPERM_3OP_MASK, X86ISD::VPERMIV3, 0), + X86_INTRINSIC_DATA(avx512_mask_vpermi2var_qi_128, VPERM_3OP_MASK, + X86ISD::VPERMIV3, 0), + X86_INTRINSIC_DATA(avx512_mask_vpermi2var_qi_256, VPERM_3OP_MASK, + X86ISD::VPERMIV3, 0), + X86_INTRINSIC_DATA(avx512_mask_vpermi2var_qi_512, VPERM_3OP_MASK, + X86ISD::VPERMIV3, 0), X86_INTRINSIC_DATA(avx512_mask_vpermil_pd_128, INTR_TYPE_2OP_IMM8_MASK, X86ISD::VPERMILPI, 0), X86_INTRINSIC_DATA(avx512_mask_vpermil_pd_256, INTR_TYPE_2OP_IMM8_MASK, @@ -1849,6 +1855,12 @@ X86ISD::VPERMV3, 0), X86_INTRINSIC_DATA(avx512_mask_vpermt2var_q_512, VPERM_3OP_MASK, X86ISD::VPERMV3, 0), + X86_INTRINSIC_DATA(avx512_mask_vpermt2var_qi_128, VPERM_3OP_MASK, + X86ISD::VPERMV3, 0), + X86_INTRINSIC_DATA(avx512_mask_vpermt2var_qi_256, VPERM_3OP_MASK, + X86ISD::VPERMV3, 0), + X86_INTRINSIC_DATA(avx512_mask_vpermt2var_qi_512, VPERM_3OP_MASK, + X86ISD::VPERMV3, 0), X86_INTRINSIC_DATA(avx512_mask_xor_pd_128, INTR_TYPE_2OP_MASK, X86ISD::FXOR, 0), X86_INTRINSIC_DATA(avx512_mask_xor_pd_256, INTR_TYPE_2OP_MASK, X86ISD::FXOR, 0), X86_INTRINSIC_DATA(avx512_mask_xor_pd_512, INTR_TYPE_2OP_MASK, X86ISD::FXOR, 0), @@ -1931,6 +1943,12 @@ X86ISD::VPERMV3, 0), X86_INTRINSIC_DATA(avx512_maskz_vpermt2var_q_512, VPERM_3OP_MASKZ, X86ISD::VPERMV3, 0), + X86_INTRINSIC_DATA(avx512_maskz_vpermt2var_qi_128, VPERM_3OP_MASKZ, + X86ISD::VPERMV3, 0), + X86_INTRINSIC_DATA(avx512_maskz_vpermt2var_qi_256, VPERM_3OP_MASKZ, + X86ISD::VPERMV3, 0), + 
X86_INTRINSIC_DATA(avx512_maskz_vpermt2var_qi_512, VPERM_3OP_MASKZ, + X86ISD::VPERMV3, 0), X86_INTRINSIC_DATA(avx512_pbroadcastb_128, INTR_TYPE_1OP_MASK, X86ISD::VBROADCAST, 0), X86_INTRINSIC_DATA(avx512_pbroadcastb_256, INTR_TYPE_1OP_MASK, Index: test/CodeGen/X86/avx512vbmi-intrinsics.ll =================================================================== --- test/CodeGen/X86/avx512vbmi-intrinsics.ll +++ test/CodeGen/X86/avx512vbmi-intrinsics.ll @@ -19,3 +19,60 @@ %res4 = add <64 x i8> %res3, %res2 ret <64 x i8> %res4 } + +declare <64 x i8> @llvm.x86.avx512.mask.vpermi2var.qi.512(<64 x i8>, <64 x i8>, <64 x i8>, i64) + +define <64 x i8>@test_int_x86_avx512_mask_vpermi2var_qi_512(<64 x i8> %x0, <64 x i8> %x1, <64 x i8> %x2, i64 %x3) { +; CHECK-LABEL: test_int_x86_avx512_mask_vpermi2var_qi_512: +; CHECK: ## BB#0: +; CHECK-NEXT: kmovq %rdi, %k1 +; CHECK-NEXT: vmovaps %zmm1, %zmm3 +; CHECK-NEXT: vpermt2b %zmm2, %zmm0, %zmm3 {%k1} +; CHECK-NEXT: vpermt2b %zmm2, %zmm0, %zmm1 +; CHECK-NEXT: vpxord %zmm4, %zmm4, %zmm4 +; CHECK-NEXT: vpermt2b %zmm2, %zmm0, %zmm4 {%k1} {z} +; CHECK-NEXT: vpaddb %zmm4, %zmm3, %zmm0 +; CHECK-NEXT: vpaddb %zmm1, %zmm0, %zmm0 +; CHECK-NEXT: retq + %res = call <64 x i8> @llvm.x86.avx512.mask.vpermi2var.qi.512(<64 x i8> %x0, <64 x i8> %x1, <64 x i8> %x2, i64 %x3) + %res1 = call <64 x i8> @llvm.x86.avx512.mask.vpermi2var.qi.512(<64 x i8> %x0, <64 x i8> zeroinitializer, <64 x i8> %x2, i64 %x3) + %res2 = call <64 x i8> @llvm.x86.avx512.mask.vpermi2var.qi.512(<64 x i8> %x0, <64 x i8> %x1, <64 x i8> %x2, i64 -1) + %res3 = add <64 x i8> %res, %res1 + %res4 = add <64 x i8> %res3, %res2 + ret <64 x i8> %res4 +} + +declare <64 x i8> @llvm.x86.avx512.mask.vpermt2var.qi.512(<64 x i8>, <64 x i8>, <64 x i8>, i64) + +define <64 x i8>@test_int_x86_avx512_mask_vpermt2var_qi_512(<64 x i8> %x0, <64 x i8> %x1, <64 x i8> %x2, i64 %x3) { +; CHECK-LABEL: test_int_x86_avx512_mask_vpermt2var_qi_512: +; CHECK: ## BB#0: +; CHECK-NEXT: kmovq %rdi, %k1 +; CHECK-NEXT: vmovaps 
%zmm1, %zmm3 +; CHECK-NEXT: vpermt2b %zmm2, %zmm0, %zmm3 {%k1} +; CHECK-NEXT: vpermt2b %zmm2, %zmm0, %zmm1 +; CHECK-NEXT: vpxord %zmm4, %zmm4, %zmm4 +; CHECK-NEXT: vpermt2b %zmm2, %zmm0, %zmm4 {%k1} {z} +; CHECK-NEXT: vpaddb %zmm4, %zmm3, %zmm0 +; CHECK-NEXT: vpaddb %zmm1, %zmm0, %zmm0 +; CHECK-NEXT: retq + %res = call <64 x i8> @llvm.x86.avx512.mask.vpermt2var.qi.512(<64 x i8> %x0, <64 x i8> %x1, <64 x i8> %x2, i64 %x3) + %res1 = call <64 x i8> @llvm.x86.avx512.mask.vpermt2var.qi.512(<64 x i8> %x0, <64 x i8> zeroinitializer, <64 x i8> %x2, i64 %x3) + %res2 = call <64 x i8> @llvm.x86.avx512.mask.vpermt2var.qi.512(<64 x i8> %x0, <64 x i8> %x1, <64 x i8> %x2, i64 -1) + %res3 = add <64 x i8> %res, %res1 + %res4 = add <64 x i8> %res3, %res2 + ret <64 x i8> %res4 +} + +declare <64 x i8> @llvm.x86.avx512.maskz.vpermt2var.qi.512(<64 x i8>, <64 x i8>, <64 x i8>, i64) + +define <64 x i8>@test_int_x86_avx512_maskz_vpermt2var_qi_512(<64 x i8> %x0, <64 x i8> %x1, <64 x i8> %x2, i64 %x3) { +; CHECK-LABEL: test_int_x86_avx512_maskz_vpermt2var_qi_512: +; CHECK: ## BB#0: +; CHECK-NEXT: kmovq %rdi, %k1 +; CHECK-NEXT: vpermt2b %zmm2, %zmm0, %zmm1 {%k1} {z} +; CHECK-NEXT: vmovaps %zmm1, %zmm0 +; CHECK-NEXT: retq + %res = call <64 x i8> @llvm.x86.avx512.maskz.vpermt2var.qi.512(<64 x i8> %x0, <64 x i8> %x1, <64 x i8> %x2, i64 %x3) + ret <64 x i8> %res +} Index: test/CodeGen/X86/avx512vbmivl-intrinsics.ll =================================================================== --- test/CodeGen/X86/avx512vbmivl-intrinsics.ll +++ test/CodeGen/X86/avx512vbmivl-intrinsics.ll @@ -40,3 +40,116 @@ ret <32 x i8> %res4 } +declare <16 x i8> @llvm.x86.avx512.mask.vpermi2var.qi.128(<16 x i8>, <16 x i8>, <16 x i8>, i16) + +define <16 x i8>@test_int_x86_avx512_mask_vpermi2var_qi_128(<16 x i8> %x0, <16 x i8> %x1, <16 x i8> %x2, i16 %x3) { +; CHECK-LABEL: test_int_x86_avx512_mask_vpermi2var_qi_128: +; CHECK: ## BB#0: +; CHECK-NEXT: kmovw %edi, %k1 +; CHECK-NEXT: vmovaps %zmm1, %zmm3 +; CHECK-NEXT: vpermt2b 
%xmm2, %xmm0, %xmm3 {%k1} +; CHECK-NEXT: vpermt2b %xmm2, %xmm0, %xmm1 +; CHECK-NEXT: vpxor %xmm4, %xmm4, %xmm4 +; CHECK-NEXT: vpermt2b %xmm2, %xmm0, %xmm4 {%k1} {z} +; CHECK-NEXT: vpaddb %xmm4, %xmm3, %xmm0 +; CHECK-NEXT: vpaddb %xmm1, %xmm0, %xmm0 +; CHECK-NEXT: retq + %res = call <16 x i8> @llvm.x86.avx512.mask.vpermi2var.qi.128(<16 x i8> %x0, <16 x i8> %x1, <16 x i8> %x2, i16 %x3) + %res1 = call <16 x i8> @llvm.x86.avx512.mask.vpermi2var.qi.128(<16 x i8> %x0, <16 x i8> zeroinitializer, <16 x i8> %x2, i16 %x3) + %res2 = call <16 x i8> @llvm.x86.avx512.mask.vpermi2var.qi.128(<16 x i8> %x0, <16 x i8> %x1, <16 x i8> %x2, i16 -1) + %res3 = add <16 x i8> %res, %res1 + %res4 = add <16 x i8> %res3, %res2 + ret <16 x i8> %res4 +} + +declare <32 x i8> @llvm.x86.avx512.mask.vpermi2var.qi.256(<32 x i8>, <32 x i8>, <32 x i8>, i32) + +define <32 x i8>@test_int_x86_avx512_mask_vpermi2var_qi_256(<32 x i8> %x0, <32 x i8> %x1, <32 x i8> %x2, i32 %x3) { +; CHECK-LABEL: test_int_x86_avx512_mask_vpermi2var_qi_256: +; CHECK: ## BB#0: +; CHECK-NEXT: kmovd %edi, %k1 +; CHECK-NEXT: vmovaps %zmm1, %zmm3 +; CHECK-NEXT: vpermt2b %ymm2, %ymm0, %ymm3 {%k1} +; CHECK-NEXT: vpermt2b %ymm2, %ymm0, %ymm1 +; CHECK-NEXT: vpxor %ymm4, %ymm4, %ymm4 +; CHECK-NEXT: vpermt2b %ymm2, %ymm0, %ymm4 {%k1} {z} +; CHECK-NEXT: vpaddb %ymm4, %ymm3, %ymm0 +; CHECK-NEXT: vpaddb %ymm1, %ymm0, %ymm0 +; CHECK-NEXT: retq + %res = call <32 x i8> @llvm.x86.avx512.mask.vpermi2var.qi.256(<32 x i8> %x0, <32 x i8> %x1, <32 x i8> %x2, i32 %x3) + %res1 = call <32 x i8> @llvm.x86.avx512.mask.vpermi2var.qi.256(<32 x i8> %x0, <32 x i8> zeroinitializer, <32 x i8> %x2, i32 %x3) + %res2 = call <32 x i8> @llvm.x86.avx512.mask.vpermi2var.qi.256(<32 x i8> %x0, <32 x i8> %x1, <32 x i8> %x2, i32 -1) + %res3 = add <32 x i8> %res, %res1 + %res4 = add <32 x i8> %res3, %res2 + ret <32 x i8> %res4 +} + +declare <16 x i8> @llvm.x86.avx512.mask.vpermt2var.qi.128(<16 x i8>, <16 x i8>, <16 x i8>, i16) + +define <16 x 
i8>@test_int_x86_avx512_mask_vpermt2var_qi_128(<16 x i8> %x0, <16 x i8> %x1, <16 x i8> %x2, i16 %x3) { +; CHECK-LABEL: test_int_x86_avx512_mask_vpermt2var_qi_128: +; CHECK: ## BB#0: +; CHECK-NEXT: kmovw %edi, %k1 +; CHECK-NEXT: vmovaps %zmm1, %zmm3 +; CHECK-NEXT: vpermt2b %xmm2, %xmm0, %xmm3 {%k1} +; CHECK-NEXT: vpermt2b %xmm2, %xmm0, %xmm1 +; CHECK-NEXT: vpxor %xmm4, %xmm4, %xmm4 +; CHECK-NEXT: vpermt2b %xmm2, %xmm0, %xmm4 {%k1} {z} +; CHECK-NEXT: vpaddb %xmm4, %xmm3, %xmm0 +; CHECK-NEXT: vpaddb %xmm1, %xmm0, %xmm0 +; CHECK-NEXT: retq + %res = call <16 x i8> @llvm.x86.avx512.mask.vpermt2var.qi.128(<16 x i8> %x0, <16 x i8> %x1, <16 x i8> %x2, i16 %x3) + %res1 = call <16 x i8> @llvm.x86.avx512.mask.vpermt2var.qi.128(<16 x i8> %x0, <16 x i8> zeroinitializer, <16 x i8> %x2, i16 %x3) + %res2 = call <16 x i8> @llvm.x86.avx512.mask.vpermt2var.qi.128(<16 x i8> %x0, <16 x i8> %x1, <16 x i8> %x2, i16 -1) + %res3 = add <16 x i8> %res, %res1 + %res4 = add <16 x i8> %res3, %res2 + ret <16 x i8> %res4 +} + +declare <32 x i8> @llvm.x86.avx512.mask.vpermt2var.qi.256(<32 x i8>, <32 x i8>, <32 x i8>, i32) + +define <32 x i8>@test_int_x86_avx512_mask_vpermt2var_qi_256(<32 x i8> %x0, <32 x i8> %x1, <32 x i8> %x2, i32 %x3) { +; CHECK-LABEL: test_int_x86_avx512_mask_vpermt2var_qi_256: +; CHECK: ## BB#0: +; CHECK-NEXT: kmovd %edi, %k1 +; CHECK-NEXT: vmovaps %zmm1, %zmm3 +; CHECK-NEXT: vpermt2b %ymm2, %ymm0, %ymm3 {%k1} +; CHECK-NEXT: vpermt2b %ymm2, %ymm0, %ymm1 +; CHECK-NEXT: vpxor %ymm4, %ymm4, %ymm4 +; CHECK-NEXT: vpermt2b %ymm2, %ymm0, %ymm4 {%k1} {z} +; CHECK-NEXT: vpaddb %ymm4, %ymm3, %ymm0 +; CHECK-NEXT: vpaddb %ymm1, %ymm0, %ymm0 +; CHECK-NEXT: retq + %res = call <32 x i8> @llvm.x86.avx512.mask.vpermt2var.qi.256(<32 x i8> %x0, <32 x i8> %x1, <32 x i8> %x2, i32 %x3) + %res1 = call <32 x i8> @llvm.x86.avx512.mask.vpermt2var.qi.256(<32 x i8> %x0, <32 x i8> zeroinitializer, <32 x i8> %x2, i32 %x3) + %res2 = call <32 x i8> @llvm.x86.avx512.mask.vpermt2var.qi.256(<32 x i8> %x0, <32 x i8> 
%x1, <32 x i8> %x2, i32 -1) + %res3 = add <32 x i8> %res, %res1 + %res4 = add <32 x i8> %res3, %res2 + ret <32 x i8> %res4 +} + +declare <16 x i8> @llvm.x86.avx512.maskz.vpermt2var.qi.128(<16 x i8>, <16 x i8>, <16 x i8>, i16) + +define <16 x i8>@test_int_x86_avx512_maskz_vpermt2var_qi_128(<16 x i8> %x0, <16 x i8> %x1, <16 x i8> %x2, i16 %x3) { +; CHECK-LABEL: test_int_x86_avx512_maskz_vpermt2var_qi_128: +; CHECK: ## BB#0: +; CHECK-NEXT: kmovw %edi, %k1 +; CHECK-NEXT: vpermt2b %xmm2, %xmm0, %xmm1 {%k1} {z} +; CHECK-NEXT: vmovaps %zmm1, %zmm0 +; CHECK-NEXT: retq + %res = call <16 x i8> @llvm.x86.avx512.maskz.vpermt2var.qi.128(<16 x i8> %x0, <16 x i8> %x1, <16 x i8> %x2, i16 %x3) + ret <16 x i8> %res +} + +declare <32 x i8> @llvm.x86.avx512.maskz.vpermt2var.qi.256(<32 x i8>, <32 x i8>, <32 x i8>, i32) + +define <32 x i8>@test_int_x86_avx512_maskz_vpermt2var_qi_256(<32 x i8> %x0, <32 x i8> %x1, <32 x i8> %x2, i32 %x3) { +; CHECK-LABEL: test_int_x86_avx512_maskz_vpermt2var_qi_256: +; CHECK: ## BB#0: +; CHECK-NEXT: kmovd %edi, %k1 +; CHECK-NEXT: vpermt2b %ymm2, %ymm0, %ymm1 {%k1} {z} +; CHECK-NEXT: vmovaps %zmm1, %zmm0 +; CHECK-NEXT: retq + %res = call <32 x i8> @llvm.x86.avx512.maskz.vpermt2var.qi.256(<32 x i8> %x0, <32 x i8> %x1, <32 x i8> %x2, i32 %x3) + ret <32 x i8> %res +}