Index: lib/Target/X86/X86InstrAVX512.td
===================================================================
--- lib/Target/X86/X86InstrAVX512.td
+++ lib/Target/X86/X86InstrAVX512.td
@@ -9399,26 +9399,26 @@
                    (OpNode (X86VBroadcast (_.ScalarLdFrag addr:$src3)),
                     _.RC:$src2, _.RC:$src1, (i8 imm:$src4)),
                    _.RC:$src1)),
-            (!cast<Instruction>(NAME#_.ZSuffix#rmik) _.RC:$src1, _.KRCWM:$mask,
+            (!cast<Instruction>(NAME#_.ZSuffix#rmbik) _.RC:$src1, _.KRCWM:$mask,
              _.RC:$src2, addr:$src3, (VPTERNLOG321_imm8 imm:$src4))>;
   def : Pat<(_.VT (vselect _.KRCWM:$mask,
                    (OpNode _.RC:$src2, _.RC:$src1,
                     (X86VBroadcast (_.ScalarLdFrag addr:$src3)),
                     (i8 imm:$src4)), _.RC:$src1)),
-            (!cast<Instruction>(NAME#_.ZSuffix#rmik) _.RC:$src1, _.KRCWM:$mask,
+            (!cast<Instruction>(NAME#_.ZSuffix#rmbik) _.RC:$src1, _.KRCWM:$mask,
              _.RC:$src2, addr:$src3, (VPTERNLOG213_imm8 imm:$src4))>;
   def : Pat<(_.VT (vselect _.KRCWM:$mask,
                    (OpNode _.RC:$src2,
                     (X86VBroadcast (_.ScalarLdFrag addr:$src3)),
                     _.RC:$src1, (i8 imm:$src4)),
                    _.RC:$src1)),
-            (!cast<Instruction>(NAME#_.ZSuffix#rmik) _.RC:$src1, _.KRCWM:$mask,
+            (!cast<Instruction>(NAME#_.ZSuffix#rmbik) _.RC:$src1, _.KRCWM:$mask,
              _.RC:$src2, addr:$src3, (VPTERNLOG231_imm8 imm:$src4))>;
   def : Pat<(_.VT (vselect _.KRCWM:$mask,
                    (OpNode (X86VBroadcast (_.ScalarLdFrag addr:$src3)),
                     _.RC:$src1, _.RC:$src2, (i8 imm:$src4)),
                    _.RC:$src1)),
-            (!cast<Instruction>(NAME#_.ZSuffix#rmik) _.RC:$src1, _.KRCWM:$mask,
+            (!cast<Instruction>(NAME#_.ZSuffix#rmbik) _.RC:$src1, _.KRCWM:$mask,
              _.RC:$src2, addr:$src3, (VPTERNLOG312_imm8 imm:$src4))>;
 }
Index: test/CodeGen/X86/avx512-vpternlog-commute.ll
===================================================================
--- test/CodeGen/X86/avx512-vpternlog-commute.ll
+++ test/CodeGen/X86/avx512-vpternlog-commute.ll
@@ -1008,7 +1008,7 @@
 ; CHECK-LABEL: vpternlog_v16i32_012_broadcast0_mask1:
 ; CHECK:       ## BB#0:
 ; CHECK-NEXT:    kmovd %esi, %k1
-; CHECK-NEXT:    vpternlogd $92, (%rdi), %zmm1, %zmm0 {%k1}
+; CHECK-NEXT:    vpternlogd $92, (%rdi){1to16}, %zmm1, %zmm0 {%k1}
 ; CHECK-NEXT:    retq
   %x0scalar = load i32, i32* %x0ptr
   %vecinit.i = insertelement <16 x i32> undef, i32 %x0scalar, i32 0
@@ -1023,7 +1023,7 @@
 ; CHECK-LABEL: vpternlog_v16i32_012_broadcast0_mask2:
 ; CHECK:       ## BB#0:
 ; CHECK-NEXT:    kmovd %esi, %k1
-; CHECK-NEXT:    vpternlogd $58, (%rdi), %zmm0, %zmm1 {%k1}
+; CHECK-NEXT:    vpternlogd $58, (%rdi){1to16}, %zmm0, %zmm1 {%k1}
 ; CHECK-NEXT:    vmovdqa64 %zmm1, %zmm0
 ; CHECK-NEXT:    retq
   %x0scalar = load i32, i32* %x0ptr
@@ -1039,7 +1039,7 @@
 ; CHECK-LABEL: vpternlog_v16i32_012_broadcast1_mask2:
 ; CHECK:       ## BB#0:
 ; CHECK-NEXT:    kmovd %esi, %k1
-; CHECK-NEXT:    vpternlogd $46, (%rdi), %zmm0, %zmm1 {%k1}
+; CHECK-NEXT:    vpternlogd $46, (%rdi){1to16}, %zmm0, %zmm1 {%k1}
 ; CHECK-NEXT:    vmovdqa64 %zmm1, %zmm0
 ; CHECK-NEXT:    retq
   %x1scalar = load i32, i32* %x1ptr
@@ -1055,7 +1055,7 @@
 ; CHECK-LABEL: vpternlog_v16i32_012_broadcast2_mask1:
 ; CHECK:       ## BB#0:
 ; CHECK-NEXT:    kmovd %esi, %k1
-; CHECK-NEXT:    vpternlogd $78, (%rdi), %zmm0, %zmm1 {%k1}
+; CHECK-NEXT:    vpternlogd $78, (%rdi){1to16}, %zmm0, %zmm1 {%k1}
 ; CHECK-NEXT:    vmovdqa64 %zmm1, %zmm0
 ; CHECK-NEXT:    retq
   %x2scalar = load i32, i32* %x2ptr