Index: ../llvmTmp/lib/Target/X86/X86InstrAVX512.td
===================================================================
--- ../llvmTmp/lib/Target/X86/X86InstrAVX512.td
+++ ../llvmTmp/lib/Target/X86/X86InstrAVX512.td
@@ -2884,10 +2884,22 @@
               !strconcat(asm,
                 "\t{$src3, $src2, $dst {${mask}}|$dst {${mask}}, $src2, $src3}"),
               [], IIC_SSE_MOV_S_RR>, EVEX_4V, VEX_LIG, EVEX_K;
+  def rrkz : SI<0x10, MRMSrcReg, (outs VR128X:$dst),
+              (ins VK1WM:$mask, RC:$src2, RC:$src3),
+              !strconcat(asm,
+                "\t{$src3, $src2, $dst {${mask}} {z}|$dst {${mask}} {z}, $src2, $src3}"),
+              [], IIC_SSE_MOV_S_RR>, EVEX_4V, VEX_LIG, EVEX_KZ;
   def rm : SI<0x10, MRMSrcMem, (outs RC:$dst), (ins x86memop:$src),
              !strconcat(asm, "\t{$src, $dst|$dst, $src}"),
              [(set RC:$dst, (mem_pat addr:$src))], IIC_SSE_MOV_S_RM>,
              EVEX, VEX_LIG;
+  let Constraints = "$src0 = $dst" in
+  def rmk : SI<0x10, MRMSrcMem, (outs RC:$dst), (ins RC:$src0, VK1WM:$mask, x86memop:$src),
+              !strconcat(asm, "\t{$src, $dst {${mask}}|$dst {${mask}}, $src}"),
+              [], IIC_SSE_MOV_S_RM>, EVEX, VEX_LIG, EVEX_K;
+  def rmkz : SI<0x10, MRMSrcMem, (outs RC:$dst), (ins VK1WM:$mask, x86memop:$src),
+              !strconcat(asm, "\t{$src, $dst {${mask}} {z}|$dst {${mask}} {z}, $src}"),
+              [], IIC_SSE_MOV_S_RM>, EVEX, VEX_LIG, EVEX_KZ;
   let mayStore = 1 in {
   def mr: SI<0x11, MRMDestMem, (outs), (ins x86memop:$dst, RC:$src),
              !strconcat(asm, "\t{$src, $dst|$dst, $src}"),
Index: ../llvmTmp/test/MC/X86/avx512-encodings.s
===================================================================
--- ../llvmTmp/test/MC/X86/avx512-encodings.s
+++ ../llvmTmp/test/MC/X86/avx512-encodings.s
@@ -18489,3 +18489,58 @@
 // CHECK: encoding: [0x62,0x61,0xfd,0x08,0x17,0x8a,0xf8,0xfb,0xff,0xff]
           vmovhpd %xmm25, -1032(%rdx)
 
+// CHECK: vmovsd (%rcx), %xmm25 {%k3}
+// CHECK: encoding: [0x62,0x61,0xff,0x0b,0x10,0x09]
+          vmovsd (%rcx), %xmm25 {%k3}
+
+// CHECK: vmovsd (%rcx), %xmm25 {%k3} {z}
+// CHECK: encoding: [0x62,0x61,0xff,0x8b,0x10,0x09]
+          vmovsd (%rcx), %xmm25 {%k3} {z}
+
+// CHECK: vmovsd %xmm19, %xmm3, %xmm27 {%k3} {z}
+// CHECK: encoding: [0x62,0x21,0xe7,0x8b,0x10,0xdb]
+          vmovsd %xmm19, %xmm3, %xmm27 {%k3} {z}
+
+// CHECK: vmovss (%rcx), %xmm2 {%k4}
+// CHECK: encoding: [0x62,0xf1,0x7e,0x0c,0x10,0x11]
+          vmovss (%rcx), %xmm2 {%k4}
+
+// CHECK: vmovss (%rcx), %xmm2 {%k4} {z}
+// CHECK: encoding: [0x62,0xf1,0x7e,0x8c,0x10,0x11]
+          vmovss (%rcx), %xmm2 {%k4} {z}
+
+// CHECK: vmovss %xmm26, %xmm9, %xmm28 {%k4} {z}
+// CHECK: encoding: [0x62,0x01,0x36,0x8c,0x10,0xe2]
+          vmovss %xmm26, %xmm9, %xmm28 {%k4} {z}
+
+// CHECK: vmovsd %xmm15, %xmm22, %xmm21 {%k7} {z}
+// CHECK: encoding: [0x62,0xc1,0xcf,0x87,0x10,0xef]
+          vmovsd %xmm15, %xmm22, %xmm21 {%k7} {z}
+
+// CHECK: vmovsd %xmm8, %xmm13, %xmm3 {%k5} {z}
+// CHECK: encoding: [0x62,0xd1,0x97,0x8d,0x10,0xd8]
+          vmovsd %xmm8, %xmm13, %xmm3 {%k5} {z}
+
+// CHECK: vmovss %xmm2, %xmm27, %xmm17 {%k2} {z}
+// CHECK: encoding: [0x62,0xe1,0x26,0x82,0x10,0xca]
+          vmovss %xmm2, %xmm27, %xmm17 {%k2} {z}
+
+// CHECK: vmovss %xmm23, %xmm19, %xmm10 {%k3} {z}
+// CHECK: encoding: [0x62,0x31,0x66,0x83,0x10,0xd7]
+          vmovss %xmm23, %xmm19, %xmm10 {%k3} {z}
+
+// CHECK: vmovsd %xmm4, %xmm15, %xmm4 {%k6} {z}
+// CHECK: encoding: [0x62,0xf1,0x87,0x8e,0x10,0xe4]
+          vmovsd %xmm4, %xmm15, %xmm4 {%k6} {z}
+
+// CHECK: vmovsd %xmm14, %xmm2, %xmm20 {%k7} {z}
+// CHECK: encoding: [0x62,0xc1,0xef,0x8f,0x10,0xe6]
+          vmovsd %xmm14, %xmm2, %xmm20 {%k7} {z}
+
+// CHECK: vmovss %xmm19, %xmm11, %xmm21 {%k3} {z}
+// CHECK: encoding: [0x62,0xa1,0x26,0x8b,0x10,0xeb]
+          vmovss %xmm19, %xmm11, %xmm21 {%k3} {z}
+
+// CHECK: vmovss %xmm24, %xmm27, %xmm15 {%k2} {z}
+// CHECK: encoding: [0x62,0x11,0x26,0x82,0x10,0xf8]
+          vmovss %xmm24, %xmm27, %xmm15 {%k2} {z}
\ No newline at end of file