Index: llvm/include/llvm/IR/IntrinsicsAArch64.td
===================================================================
--- llvm/include/llvm/IR/IntrinsicsAArch64.td
+++ llvm/include/llvm/IR/IntrinsicsAArch64.td
@@ -2007,4 +2007,37 @@
 def int_aarch64_sve_pmullb_pair : AdvSIMD_2VectorArg_Intrinsic;
 def int_aarch64_sve_pmullt_pair : AdvSIMD_2VectorArg_Intrinsic;
 
+//
+// SVE2 - Optional AES, SHA-3 and SM4
+//
+
+def int_aarch64_sve_aesd    : GCCBuiltin<"__builtin_sve_svaesd_u8">,
+                              Intrinsic<[llvm_nxv16i8_ty],
+                                        [llvm_nxv16i8_ty, llvm_nxv16i8_ty],
+                                        [IntrNoMem]>;
+def int_aarch64_sve_aesimc  : GCCBuiltin<"__builtin_sve_svaesimc_u8">,
+                              Intrinsic<[llvm_nxv16i8_ty],
+                                        [llvm_nxv16i8_ty],
+                                        [IntrNoMem]>;
+def int_aarch64_sve_aese    : GCCBuiltin<"__builtin_sve_svaese_u8">,
+                              Intrinsic<[llvm_nxv16i8_ty],
+                                        [llvm_nxv16i8_ty, llvm_nxv16i8_ty],
+                                        [IntrNoMem]>;
+def int_aarch64_sve_aesmc   : GCCBuiltin<"__builtin_sve_svaesmc_u8">,
+                              Intrinsic<[llvm_nxv16i8_ty],
+                                        [llvm_nxv16i8_ty],
+                                        [IntrNoMem]>;
+def int_aarch64_sve_rax1    : GCCBuiltin<"__builtin_sve_svrax1_u64">,
+                              Intrinsic<[llvm_nxv2i64_ty],
+                                        [llvm_nxv2i64_ty, llvm_nxv2i64_ty],
+                                        [IntrNoMem]>;
+def int_aarch64_sve_sm4e    : GCCBuiltin<"__builtin_sve_svsm4e_u32">,
+                              Intrinsic<[llvm_nxv4i32_ty],
+                                        [llvm_nxv4i32_ty, llvm_nxv4i32_ty],
+                                        [IntrNoMem]>;
+def int_aarch64_sve_sm4ekey : GCCBuiltin<"__builtin_sve_svsm4ekey_u32">,
+                              Intrinsic<[llvm_nxv4i32_ty],
+                                        [llvm_nxv4i32_ty, llvm_nxv4i32_ty],
+                                        [IntrNoMem]>;
+
 }
Index: llvm/lib/Target/AArch64/AArch64SVEInstrInfo.td
===================================================================
--- llvm/lib/Target/AArch64/AArch64SVEInstrInfo.td
+++ llvm/lib/Target/AArch64/AArch64SVEInstrInfo.td
@@ -1801,12 +1801,12 @@
 
 let Predicates = [HasSVE2AES] in {
   // SVE2 crypto destructive binary operations
-  def AESE_ZZZ_B : sve2_crypto_des_bin_op<0b00, "aese", ZPR8>;
-  def AESD_ZZZ_B : sve2_crypto_des_bin_op<0b01, "aesd", ZPR8>;
+  defm AESE_ZZZ_B : sve2_crypto_des_bin_op<0b00, "aese", ZPR8, int_aarch64_sve_aese, nxv16i8>;
+  defm AESD_ZZZ_B : sve2_crypto_des_bin_op<0b01, "aesd", ZPR8, int_aarch64_sve_aesd, nxv16i8>;
 
   // SVE2 crypto unary operations
-  def AESMC_ZZ_B : sve2_crypto_unary_op<0b0, "aesmc">;
-  def AESIMC_ZZ_B : sve2_crypto_unary_op<0b1, "aesimc">;
+  defm AESMC_ZZ_B : sve2_crypto_unary_op<0b0, "aesmc", int_aarch64_sve_aesmc>;
+  defm AESIMC_ZZ_B : sve2_crypto_unary_op<0b1, "aesimc", int_aarch64_sve_aesimc>;
 
   // PMULLB and PMULLT instructions which operate with 64-bit source and
   // 128-bit destination elements are enabled with crypto extensions, similar
@@ -1817,14 +1817,14 @@
 
 let Predicates = [HasSVE2SM4] in {
   // SVE2 crypto constructive binary operations
-  def SM4EKEY_ZZZ_S : sve2_crypto_cons_bin_op<0b0, "sm4ekey", ZPR32>;
+  defm SM4EKEY_ZZZ_S : sve2_crypto_cons_bin_op<0b0, "sm4ekey", ZPR32, int_aarch64_sve_sm4ekey, nxv4i32>;
   // SVE2 crypto destructive binary operations
-  def SM4E_ZZZ_S : sve2_crypto_des_bin_op<0b10, "sm4e", ZPR32>;
+  defm SM4E_ZZZ_S : sve2_crypto_des_bin_op<0b10, "sm4e", ZPR32, int_aarch64_sve_sm4e, nxv4i32>;
 }
 
 let Predicates = [HasSVE2SHA3] in {
   // SVE2 crypto constructive binary operations
-  def RAX1_ZZZ_D : sve2_crypto_cons_bin_op<0b1, "rax1", ZPR64>;
+  defm RAX1_ZZZ_D : sve2_crypto_cons_bin_op<0b1, "rax1", ZPR64, int_aarch64_sve_rax1, nxv2i64>;
 }
 
 let Predicates = [HasSVE2BitPerm] in {
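Note: as a usage sketch (not part of this patch), the intrinsics above are the lowering targets for the ACLE SVE2-AES builtins whose names appear in the GCCBuiltin mappings. The C spellings below (arm_sve.h, svuint8_t, svaese_u8, svaesmc_u8) are assumptions inferred from those mappings and from the Clang side being wired up separately; compiling would also require a target with the sve2-aes feature enabled.

// Hypothetical C usage; assumes <arm_sve.h> exposes ACLE intrinsics that
// map onto the __builtin_sve_svaese_u8 / __builtin_sve_svaesmc_u8 builtins
// declared above.
#include <arm_sve.h>

// One AES encryption round on a scalable vector of packed 16-byte states:
// AESE performs AddRoundKey/SubBytes/ShiftRows, AESMC performs MixColumns.
svuint8_t aes_round(svuint8_t state, svuint8_t round_key) {
  svuint8_t t = svaese_u8(state, round_key);  // lowers to @llvm.aarch64.sve.aese
  return svaesmc_u8(t);                       // lowers to @llvm.aarch64.sve.aesmc
}

With the patterns added in AArch64SVEInstrInfo.td above, each such call is expected to select to a single AESE or AESMC instruction.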
Index: llvm/lib/Target/AArch64/SVEInstrFormats.td
===================================================================
--- llvm/lib/Target/AArch64/SVEInstrFormats.td
+++ llvm/lib/Target/AArch64/SVEInstrFormats.td
@@ -7024,6 +7024,12 @@
   let Inst{4-0} = Zd;
 }
 
+multiclass sve2_crypto_cons_bin_op<bit opc, string asm, ZPRRegOp zprty,
+                                   SDPatternOperator op, ValueType vt> {
+  def NAME : sve2_crypto_cons_bin_op<opc, asm, zprty>;
+  def : SVE_2_Op_Pat<vt, op, vt, vt, !cast<Instruction>(NAME)>;
+}
+
 class sve2_crypto_des_bin_op<bits<2> opc, string asm, ZPRRegOp zprty>
 : I<(outs zprty:$Zdn), (ins zprty:$_Zdn, zprty:$Zm),
   asm, "\t$Zdn, $_Zdn, $Zm",
@@ -7041,8 +7047,14 @@
   let Constraints = "$Zdn = $_Zdn";
 }
 
-class sve2_crypto_unary_op<bit opc, string asm>
-: I<(outs ZPR8:$Zdn), (ins ZPR8:$_Zdn),
+multiclass sve2_crypto_des_bin_op<bits<2> opc, string asm, ZPRRegOp zprty,
+                                  SDPatternOperator op, ValueType vt> {
+  def NAME : sve2_crypto_des_bin_op<opc, asm, zprty>;
+  def : SVE_2_Op_Pat<vt, op, vt, vt, !cast<Instruction>(NAME)>;
+}
+
+class sve2_crypto_unary_op<bit opc, string asm, ZPRRegOp zprty>
+: I<(outs zprty:$Zdn), (ins zprty:$_Zdn),
   asm, "\t$Zdn, $_Zdn",
   "",
   []>, Sched<[]> {
@@ -7054,3 +7066,8 @@
 
   let Constraints = "$Zdn = $_Zdn";
 }
+
+multiclass sve2_crypto_unary_op<bit opc, string asm, SDPatternOperator op> {
+  def NAME : sve2_crypto_unary_op<opc, asm, ZPR8>;
+  def : SVE_1_Op_Pat<nxv16i8, op, nxv16i8, !cast<Instruction>(NAME)>;
+}
Index: llvm/test/CodeGen/AArch64/sve2-intrinsics-crypto.ll
===================================================================
--- /dev/null
+++ llvm/test/CodeGen/AArch64/sve2-intrinsics-crypto.ll
@@ -0,0 +1,99 @@
+; RUN: llc -mtriple=aarch64-linux-gnu -mattr=+sve2-aes,+sve2-sha3,+sve2-sm4 -asm-verbose=0 < %s | FileCheck %s
+
+;
+; AESD
+;
+
+define <vscale x 16 x i8> @aesd_i8(<vscale x 16 x i8> %a, <vscale x 16 x i8> %b) {
+; CHECK-LABEL: aesd_i8:
+; CHECK: aesd z0.b, z0.b, z1.b
+; CHECK-NEXT: ret
+  %out = call <vscale x 16 x i8> @llvm.aarch64.sve.aesd(<vscale x 16 x i8> %a,
+                                                        <vscale x 16 x i8> %b)
+  ret <vscale x 16 x i8> %out
+}
+
+;
+; AESIMC
+;
+
+define <vscale x 16 x i8> @aesimc_i8(<vscale x 16 x i8> %a) {
+; CHECK-LABEL: aesimc_i8:
+; CHECK: aesimc z0.b, z0.b
+; CHECK-NEXT: ret
+  %out = call <vscale x 16 x i8> @llvm.aarch64.sve.aesimc(<vscale x 16 x i8> %a)
+  ret <vscale x 16 x i8> %out
+}
+
+;
+; AESE
+;
+
+define <vscale x 16 x i8> @aese_i8(<vscale x 16 x i8> %a, <vscale x 16 x i8> %b) {
+; CHECK-LABEL: aese_i8:
+; CHECK: aese z0.b, z0.b, z1.b
+; CHECK-NEXT: ret
+  %out = call <vscale x 16 x i8> @llvm.aarch64.sve.aese(<vscale x 16 x i8> %a,
+                                                        <vscale x 16 x i8> %b)
+  ret <vscale x 16 x i8> %out
+}
+
+;
+; AESMC
+;
+
+define <vscale x 16 x i8> @aesmc_i8(<vscale x 16 x i8> %a) {
+; CHECK-LABEL: aesmc_i8:
+; CHECK: aesmc z0.b, z0.b
+; CHECK-NEXT: ret
+  %out = call <vscale x 16 x i8> @llvm.aarch64.sve.aesmc(<vscale x 16 x i8> %a)
+  ret <vscale x 16 x i8> %out
+}
+
+;
+; RAX1
+;
+
+define <vscale x 2 x i64> @rax1_i64(<vscale x 2 x i64> %a, <vscale x 2 x i64> %b) {
+; CHECK-LABEL: rax1_i64:
+; CHECK: rax1 z0.d, z0.d, z1.d
+; CHECK-NEXT: ret
+  %out = call <vscale x 2 x i64> @llvm.aarch64.sve.rax1(<vscale x 2 x i64> %a,
+                                                        <vscale x 2 x i64> %b)
+  ret <vscale x 2 x i64> %out
+}
+
+;
+; SM4E
+;
+
+define <vscale x 4 x i32> @sm4e_i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b) {
+; CHECK-LABEL: sm4e_i32:
+; CHECK: sm4e z0.s, z0.s, z1.s
+; CHECK-NEXT: ret
+  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.sm4e(<vscale x 4 x i32> %a,
+                                                        <vscale x 4 x i32> %b)
+  ret <vscale x 4 x i32> %out
+}
+
+;
+; SM4EKEY
+;
+
+define <vscale x 4 x i32> @sm4ekey_i32(<vscale x 4 x i32> %a, <vscale x 4 x i32> %b) {
+; CHECK-LABEL: sm4ekey_i32:
+; CHECK: sm4ekey z0.s, z0.s, z1.s
+; CHECK-NEXT: ret
+  %out = call <vscale x 4 x i32> @llvm.aarch64.sve.sm4ekey(<vscale x 4 x i32> %a,
+                                                           <vscale x 4 x i32> %b)
+  ret <vscale x 4 x i32> %out
+}
+
+
+declare <vscale x 16 x i8> @llvm.aarch64.sve.aesd(<vscale x 16 x i8>, <vscale x 16 x i8>)
+declare <vscale x 16 x i8> @llvm.aarch64.sve.aesimc(<vscale x 16 x i8>)
+declare <vscale x 16 x i8> @llvm.aarch64.sve.aese(<vscale x 16 x i8>, <vscale x 16 x i8>)
+declare <vscale x 16 x i8> @llvm.aarch64.sve.aesmc(<vscale x 16 x i8>)
+declare <vscale x 2 x i64> @llvm.aarch64.sve.rax1(<vscale x 2 x i64>, <vscale x 2 x i64>)
+declare <vscale x 4 x i32> @llvm.aarch64.sve.sm4e(<vscale x 4 x i32>, <vscale x 4 x i32>)
+declare <vscale x 4 x i32> @llvm.aarch64.sve.sm4ekey(<vscale x 4 x i32>, <vscale x 4 x i32>)
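For completeness, a similarly hedged C-level sketch of the SM4 and SHA-3 entry points covered by the test above. Again, these intrinsic names (svsm4e_u32, svsm4ekey_u32, svrax1_u64) and the arm_sve.h header are assumptions inferred from the GCCBuiltin mappings in this patch, not something the patch itself adds; building them would require the sve2-sm4 and sve2-sha3 features.

// Hypothetical C usage; assumes ACLE intrinsics matching the
// __builtin_sve_svsm4e_u32, __builtin_sve_svsm4ekey_u32 and
// __builtin_sve_svrax1_u64 builtins referenced in this patch.
#include <arm_sve.h>

// SM4E: applies SM4 encryption rounds to each 128-bit segment of the state,
// using the round keys held in the corresponding segment of the second operand.
svuint32_t sm4_rounds(svuint32_t state, svuint32_t round_keys) {
  return svsm4e_u32(state, round_keys);     // lowers to @llvm.aarch64.sve.sm4e
}

// SM4EKEY: derives the next group of round keys from the previous keys and
// the key-schedule constants.
svuint32_t sm4_next_keys(svuint32_t keys, svuint32_t constants) {
  return svsm4ekey_u32(keys, constants);    // lowers to @llvm.aarch64.sve.sm4ekey
}

// RAX1: per 64-bit element, XOR of the first operand with the second operand
// rotated left by one, as used in SHA-3 style rotate-and-xor steps.
svuint64_t rotate_xor(svuint64_t a, svuint64_t b) {
  return svrax1_u64(a, b);                  // lowers to @llvm.aarch64.sve.rax1
}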