diff --git a/clang/include/clang/Basic/BuiltinsRISCV.def b/clang/include/clang/Basic/BuiltinsRISCV.def
--- a/clang/include/clang/Basic/BuiltinsRISCV.def
+++ b/clang/include/clang/Basic/BuiltinsRISCV.def
@@ -24,60 +24,60 @@
 TARGET_BUILTIN(__builtin_riscv_ctz_64, "iUWi", "nc", "zbb,64bit")
 // Zbc or Zbkc extension
-TARGET_BUILTIN(__builtin_riscv_clmul, "LiLiLi", "nc", "zbc|zbkc")
-TARGET_BUILTIN(__builtin_riscv_clmulh, "LiLiLi", "nc", "zbc|zbkc")
-TARGET_BUILTIN(__builtin_riscv_clmulr, "LiLiLi", "nc", "zbc")
+TARGET_BUILTIN(__builtin_riscv_clmul, "ULiULiULi", "nc", "zbc|zbkc")
+TARGET_BUILTIN(__builtin_riscv_clmulh, "ULiULiULi", "nc", "zbc|zbkc")
+TARGET_BUILTIN(__builtin_riscv_clmulr, "ULiULiULi", "nc", "zbc")
 // Zbkx
-TARGET_BUILTIN(__builtin_riscv_xperm4, "LiLiLi", "nc", "zbkx")
-TARGET_BUILTIN(__builtin_riscv_xperm8, "LiLiLi", "nc", "zbkx")
+TARGET_BUILTIN(__builtin_riscv_xperm4, "ULiULiULi", "nc", "zbkx")
+TARGET_BUILTIN(__builtin_riscv_xperm8, "ULiULiULi", "nc", "zbkx")
 // Zbkb extension
-TARGET_BUILTIN(__builtin_riscv_brev8, "LiLi", "nc", "zbkb")
-TARGET_BUILTIN(__builtin_riscv_zip_32, "ZiZi", "nc", "zbkb,32bit")
-TARGET_BUILTIN(__builtin_riscv_unzip_32, "ZiZi", "nc", "zbkb,32bit")
+TARGET_BUILTIN(__builtin_riscv_brev8, "ULiULi", "nc", "zbkb")
+TARGET_BUILTIN(__builtin_riscv_zip_32, "UZiUZi", "nc", "zbkb,32bit")
+TARGET_BUILTIN(__builtin_riscv_unzip_32, "UZiUZi", "nc", "zbkb,32bit")
 // Zknd extension
-TARGET_BUILTIN(__builtin_riscv_aes32dsi_32, "ZiZiZiIUi", "nc", "zknd,32bit")
-TARGET_BUILTIN(__builtin_riscv_aes32dsmi_32, "ZiZiZiIUi", "nc", "zknd,32bit")
-TARGET_BUILTIN(__builtin_riscv_aes64ds_64, "WiWiWi", "nc", "zknd,64bit")
-TARGET_BUILTIN(__builtin_riscv_aes64dsm_64, "WiWiWi", "nc", "zknd,64bit")
-TARGET_BUILTIN(__builtin_riscv_aes64im_64, "WiWi", "nc", "zknd,64bit")
+TARGET_BUILTIN(__builtin_riscv_aes32dsi_32, "UZiUZiUZiIUi", "nc", "zknd,32bit")
+TARGET_BUILTIN(__builtin_riscv_aes32dsmi_32, "UZiUZiUZiIUi", "nc", "zknd,32bit")
+TARGET_BUILTIN(__builtin_riscv_aes64ds_64, "UWiUWiUWi", "nc", "zknd,64bit")
+TARGET_BUILTIN(__builtin_riscv_aes64dsm_64, "UWiUWiUWi", "nc", "zknd,64bit")
+TARGET_BUILTIN(__builtin_riscv_aes64im_64, "UWiUWi", "nc", "zknd,64bit")
 // Zknd & zkne
-TARGET_BUILTIN(__builtin_riscv_aes64ks1i_64, "WiWiIUi", "nc", "zknd|zkne,64bit")
-TARGET_BUILTIN(__builtin_riscv_aes64ks2_64, "WiWiWi", "nc", "zknd|zkne,64bit")
+TARGET_BUILTIN(__builtin_riscv_aes64ks1i_64, "UWiUWiIUi", "nc", "zknd|zkne,64bit")
+TARGET_BUILTIN(__builtin_riscv_aes64ks2_64, "UWiUWiUWi", "nc", "zknd|zkne,64bit")
 // Zkne extension
-TARGET_BUILTIN(__builtin_riscv_aes32esi_32, "ZiZiZiIUi", "nc", "zkne,32bit")
-TARGET_BUILTIN(__builtin_riscv_aes32esmi_32, "ZiZiZiIUi", "nc", "zkne,32bit")
-TARGET_BUILTIN(__builtin_riscv_aes64es_64, "WiWiWi", "nc", "zkne,64bit")
-TARGET_BUILTIN(__builtin_riscv_aes64esm_64, "WiWiWi", "nc", "zkne,64bit")
+TARGET_BUILTIN(__builtin_riscv_aes32esi_32, "UZiUZiUZiIUi", "nc", "zkne,32bit")
+TARGET_BUILTIN(__builtin_riscv_aes32esmi_32, "UZiUZiUZiIUi", "nc", "zkne,32bit")
+TARGET_BUILTIN(__builtin_riscv_aes64es_64, "UWiUWiUWi", "nc", "zkne,64bit")
+TARGET_BUILTIN(__builtin_riscv_aes64esm_64, "UWiUWiUWi", "nc", "zkne,64bit")
 // Zknh extension
-TARGET_BUILTIN(__builtin_riscv_sha256sig0, "LiLi", "nc", "zknh")
-TARGET_BUILTIN(__builtin_riscv_sha256sig1, "LiLi", "nc", "zknh")
-TARGET_BUILTIN(__builtin_riscv_sha256sum0, "LiLi", "nc", "zknh")
-TARGET_BUILTIN(__builtin_riscv_sha256sum1, "LiLi", "nc", "zknh")
+TARGET_BUILTIN(__builtin_riscv_sha256sig0, "ULiULi", "nc", "zknh")
+TARGET_BUILTIN(__builtin_riscv_sha256sig1, "ULiULi", "nc", "zknh")
+TARGET_BUILTIN(__builtin_riscv_sha256sum0, "ULiULi", "nc", "zknh")
+TARGET_BUILTIN(__builtin_riscv_sha256sum1, "ULiULi", "nc", "zknh")
-TARGET_BUILTIN(__builtin_riscv_sha512sig0h_32, "ZiZiZi", "nc", "zknh,32bit")
-TARGET_BUILTIN(__builtin_riscv_sha512sig0l_32, "ZiZiZi", "nc", "zknh,32bit")
-TARGET_BUILTIN(__builtin_riscv_sha512sig1h_32, "ZiZiZi", "nc", "zknh,32bit")
-TARGET_BUILTIN(__builtin_riscv_sha512sig1l_32, "ZiZiZi", "nc", "zknh,32bit")
-TARGET_BUILTIN(__builtin_riscv_sha512sum0r_32, "ZiZiZi", "nc", "zknh,32bit")
-TARGET_BUILTIN(__builtin_riscv_sha512sum1r_32, "ZiZiZi", "nc", "zknh,32bit")
-TARGET_BUILTIN(__builtin_riscv_sha512sig0_64, "WiWi", "nc", "zknh,64bit")
-TARGET_BUILTIN(__builtin_riscv_sha512sig1_64, "WiWi", "nc", "zknh,64bit")
-TARGET_BUILTIN(__builtin_riscv_sha512sum0_64, "WiWi", "nc", "zknh,64bit")
-TARGET_BUILTIN(__builtin_riscv_sha512sum1_64, "WiWi", "nc", "zknh,64bit")
+TARGET_BUILTIN(__builtin_riscv_sha512sig0h_32, "UZiUZiUZi", "nc", "zknh,32bit")
+TARGET_BUILTIN(__builtin_riscv_sha512sig0l_32, "UZiUZiUZi", "nc", "zknh,32bit")
+TARGET_BUILTIN(__builtin_riscv_sha512sig1h_32, "UZiUZiUZi", "nc", "zknh,32bit")
+TARGET_BUILTIN(__builtin_riscv_sha512sig1l_32, "UZiUZiUZi", "nc", "zknh,32bit")
+TARGET_BUILTIN(__builtin_riscv_sha512sum0r_32, "UZiUZiUZi", "nc", "zknh,32bit")
+TARGET_BUILTIN(__builtin_riscv_sha512sum1r_32, "UZiUZiUZi", "nc", "zknh,32bit")
+TARGET_BUILTIN(__builtin_riscv_sha512sig0_64, "UWiUWi", "nc", "zknh,64bit")
+TARGET_BUILTIN(__builtin_riscv_sha512sig1_64, "UWiUWi", "nc", "zknh,64bit")
+TARGET_BUILTIN(__builtin_riscv_sha512sum0_64, "UWiUWi", "nc", "zknh,64bit")
+TARGET_BUILTIN(__builtin_riscv_sha512sum1_64, "UWiUWi", "nc", "zknh,64bit")
 // Zksed extension
-TARGET_BUILTIN(__builtin_riscv_sm4ed, "LiLiLiIUi", "nc", "zksed")
-TARGET_BUILTIN(__builtin_riscv_sm4ks, "LiLiLiIUi", "nc", "zksed")
+TARGET_BUILTIN(__builtin_riscv_sm4ed, "ULiULiULiIUi", "nc", "zksed")
+TARGET_BUILTIN(__builtin_riscv_sm4ks, "ULiULiULiIUi", "nc", "zksed")
 // Zksh extension
-TARGET_BUILTIN(__builtin_riscv_sm3p0, "LiLi", "nc", "zksh")
-TARGET_BUILTIN(__builtin_riscv_sm3p1, "LiLi", "nc", "zksh")
+TARGET_BUILTIN(__builtin_riscv_sm3p0, "ULiULi", "nc", "zksh")
+TARGET_BUILTIN(__builtin_riscv_sm3p1, "ULiULi", "nc", "zksh")
 // Zihintntl extension
 TARGET_BUILTIN(__builtin_riscv_ntl_load, "v.", "t", "experimental-zihintntl")
diff --git a/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv32-xtheadbb.c b/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv32-xtheadbb.c
--- a/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv32-xtheadbb.c
+++ b/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv32-xtheadbb.c
@@ -10,7 +10,7 @@
 // RV32XTHEADBB-NEXT: [[TMP1:%.*]] = call i32 @llvm.ctlz.i32(i32 [[TMP0]], i1 false)
 // RV32XTHEADBB-NEXT: ret i32 [[TMP1]]
 //
-int clz_32(int a) {
+int clz_32(unsigned int a) {
 return __builtin_riscv_clz_32(a);
 }
@@ -23,6 +23,6 @@
 // RV32XTHEADBB-NEXT: [[TMP1:%.*]] = call i32 @llvm.ctlz.i32(i32 [[NOT]], i1 false)
 // RV32XTHEADBB-NEXT: ret i32 [[TMP1]]
 //
-int clo_32(int a) {
+int clo_32(unsigned int a) {
 return __builtin_riscv_clz_32(~a);
 }
diff --git a/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv32-zbc.c b/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv32-zbc.c
--- a/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv32-zbc.c
+++ b/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv32-zbc.c
@@ -13,7 +13,7 @@
 // RV32ZBC-NEXT: [[TMP2:%.*]] = call i32 @llvm.riscv.clmul.i32(i32 [[TMP0]], i32 [[TMP1]])
 // RV32ZBC-NEXT: ret i32 [[TMP2]]
 //
-long clmul(long a, long b) {
+unsigned long clmul(unsigned long a, unsigned long b) {
 return __builtin_riscv_clmul(a, b);
 }
@@ -28,7 +28,7 @@
 // RV32ZBC-NEXT: [[TMP2:%.*]] = call i32 @llvm.riscv.clmulh.i32(i32 [[TMP0]], i32 [[TMP1]])
 // RV32ZBC-NEXT: ret i32 [[TMP2]]
 //
-long clmulh(long a, long b) {
+unsigned long clmulh(unsigned long a, unsigned long b) {
 return __builtin_riscv_clmulh(a, b);
 }
@@ -43,6 +43,6 @@
 // RV32ZBC-NEXT: [[TMP2:%.*]] = call i32 @llvm.riscv.clmulr.i32(i32 [[TMP0]], i32 [[TMP1]])
 // RV32ZBC-NEXT: ret i32 [[TMP2]]
 //
-long clmulr(long a, long b) {
+unsigned long clmulr(unsigned long a, unsigned long b) {
 return __builtin_riscv_clmulr(a, b);
 }
diff --git a/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv32-zbkb.c b/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv32-zbkb.c
--- a/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv32-zbkb.c
+++ b/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv32-zbkb.c
@@ -10,7 +10,7 @@
 // RV32ZBKB-NEXT: [[TMP1:%.*]] = call i32 @llvm.riscv.brev8.i32(i32 [[TMP0]])
 // RV32ZBKB-NEXT: ret i32 [[TMP1]]
 //
-long brev8(long rs1)
+unsigned long brev8(unsigned long rs1)
 {
 return __builtin_riscv_brev8(rs1);
 }
@@ -23,7 +23,7 @@
 // RV32ZBKB-NEXT: [[TMP1:%.*]] = call i32 @llvm.riscv.zip.i32(i32 [[TMP0]])
 // RV32ZBKB-NEXT: ret i32 [[TMP1]]
 //
-int zip(int rs1)
+unsigned int zip(unsigned int rs1)
 {
 return __builtin_riscv_zip_32(rs1);
 }
@@ -36,7 +36,7 @@
 // RV32ZBKB-NEXT: [[TMP1:%.*]] = call i32 @llvm.riscv.unzip.i32(i32 [[TMP0]])
 // RV32ZBKB-NEXT: ret i32 [[TMP1]]
 //
-int unzip(int rs1)
+unsigned int unzip(unsigned int rs1)
 {
 return __builtin_riscv_unzip_32(rs1);
 }
diff --git a/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv32-zbkc.c b/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv32-zbkc.c
--- a/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv32-zbkc.c
+++ b/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv32-zbkc.c
@@ -13,7 +13,7 @@
 // RV32ZBKC-NEXT: [[TMP2:%.*]] = call i32 @llvm.riscv.clmul.i32(i32 [[TMP0]], i32 [[TMP1]])
 // RV32ZBKC-NEXT: ret i32 [[TMP2]]
 //
-long clmul(long a, long b) {
+unsigned long clmul(unsigned long a, unsigned long b) {
 return __builtin_riscv_clmul(a, b);
 }
@@ -28,6 +28,6 @@
 // RV32ZBKC-NEXT: [[TMP2:%.*]] = call i32 @llvm.riscv.clmulh.i32(i32 [[TMP0]], i32 [[TMP1]])
 // RV32ZBKC-NEXT: ret i32 [[TMP2]]
 //
-long clmulh(long a, long b) {
+unsigned long clmulh(unsigned long a, unsigned long b) {
 return __builtin_riscv_clmulh(a, b);
 }
diff --git a/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv32-zbkx.c b/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv32-zbkx.c
--- a/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv32-zbkx.c
+++ b/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv32-zbkx.c
@@ -13,7 +13,7 @@
 // RV32ZBKX-NEXT: [[TMP2:%.*]] = call i32 @llvm.riscv.xperm8.i32(i32 [[TMP0]], i32 [[TMP1]])
 // RV32ZBKX-NEXT: ret i32 [[TMP2]]
 //
-long xperm8(long rs1, long rs2)
+unsigned long xperm8(unsigned long rs1, unsigned long rs2)
 {
 return __builtin_riscv_xperm8(rs1, rs2);
 }
@@ -29,7 +29,7 @@
 // RV32ZBKX-NEXT: [[TMP2:%.*]] = call i32 @llvm.riscv.xperm4.i32(i32 [[TMP0]], i32 [[TMP1]])
 // RV32ZBKX-NEXT: ret i32 [[TMP2]]
 //
-long xperm4(long rs1, long rs2)
+unsigned long xperm4(unsigned long rs1, unsigned long rs2)
 {
 return __builtin_riscv_xperm4(rs1, rs2);
 }
diff --git a/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv64-xtheadbb.c b/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv64-xtheadbb.c
--- a/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv64-xtheadbb.c
+++ b/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv64-xtheadbb.c
@@ -10,7 +10,7 @@
 // RV64XTHEADBB-NEXT: [[TMP1:%.*]] = call i32 @llvm.ctlz.i32(i32 [[TMP0]], i1 false)
 // RV64XTHEADBB-NEXT: ret i32 [[TMP1]]
 //
-int clz_32(int a) {
+int clz_32(unsigned int a) {
 return __builtin_riscv_clz_32(a);
 }
@@ -23,7 +23,7 @@
 // RV64XTHEADBB-NEXT: [[TMP1:%.*]] = call i32 @llvm.ctlz.i32(i32 [[NOT]], i1 false)
 // RV64XTHEADBB-NEXT: ret i32 [[TMP1]]
 //
-int clo_32(int a) {
+int clo_32(unsigned int a) {
 return __builtin_riscv_clz_32(~a);
 }
@@ -36,7 +36,7 @@
 // RV64XTHEADBB-NEXT: [[CAST:%.*]] = trunc i64 [[TMP1]] to i32
 // RV64XTHEADBB-NEXT: ret i32 [[CAST]]
 //
-int clz_64(long a) {
+int clz_64(unsigned long a) {
 return __builtin_riscv_clz_64(a);
 }
@@ -50,6 +50,6 @@
 // RV64XTHEADBB-NEXT: [[CAST:%.*]] = trunc i64 [[TMP1]] to i32
 // RV64XTHEADBB-NEXT: ret i32 [[CAST]]
 //
-int clo_64(long a) {
+int clo_64(unsigned long a) {
 return __builtin_riscv_clz_64(~a);
 }
diff --git a/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv64-zbc.c b/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv64-zbc.c
--- a/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv64-zbc.c
+++ b/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv64-zbc.c
@@ -13,7 +13,7 @@
 // RV64ZBC-NEXT: [[TMP2:%.*]] = call i64 @llvm.riscv.clmul.i64(i64 [[TMP0]], i64 [[TMP1]])
 // RV64ZBC-NEXT: ret i64 [[TMP2]]
 //
-long clmul(long a, long b) {
+unsigned long clmul(unsigned long a, unsigned long b) {
 return __builtin_riscv_clmul(a, b);
 }
@@ -28,7 +28,7 @@
 // RV64ZBC-NEXT: [[TMP2:%.*]] = call i64 @llvm.riscv.clmulh.i64(i64 [[TMP0]], i64 [[TMP1]])
 // RV64ZBC-NEXT: ret i64 [[TMP2]]
 //
-long clmulh(long a, long b) {
+unsigned long clmulh(unsigned long a, unsigned long b) {
 return __builtin_riscv_clmulh(a, b);
 }
@@ -43,6 +43,6 @@
 // RV64ZBC-NEXT: [[TMP2:%.*]] = call i64 @llvm.riscv.clmulr.i64(i64 [[TMP0]], i64 [[TMP1]])
 // RV64ZBC-NEXT: ret i64 [[TMP2]]
 //
-long clmulr(long a, long b) {
+unsigned long clmulr(unsigned long a, unsigned long b) {
 return __builtin_riscv_clmulr(a, b);
 }
diff --git a/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv64-zbkb-error.c b/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv64-zbkb-error.c
--- a/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv64-zbkb-error.c
+++ b/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv64-zbkb-error.c
@@ -1,12 +1,12 @@
 // NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
 // RUN: %clang_cc1 -triple riscv64 -target-feature +zbkb -verify %s -o -
-int zip(int rs1)
+unsigned int zip(unsigned int rs1)
 {
 return __builtin_riscv_zip_32(rs1); // expected-error {{builtin requires: 'RV32'}}
 }
-int unzip(int rs1)
+unsigned int unzip(unsigned int rs1)
 {
 return __builtin_riscv_unzip_32(rs1); // expected-error {{builtin requires: 'RV32'}}
 }
diff --git a/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv64-zbkb.c b/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv64-zbkb.c
--- a/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv64-zbkb.c
+++ b/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv64-zbkb.c
@@ -10,7 +10,7 @@
 // RV64ZBKB-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.brev8.i64(i64 [[TMP0]])
 // RV64ZBKB-NEXT: ret i64 [[TMP1]]
 //
-long brev8(long rs1)
+unsigned long brev8(unsigned long rs1)
 {
 return __builtin_riscv_brev8(rs1);
 }
diff --git a/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv64-zbkc.c b/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv64-zbkc.c
--- a/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv64-zbkc.c
+++ b/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv64-zbkc.c
@@ -13,7 +13,7 @@
 // RV64ZBKC-NEXT: [[TMP2:%.*]] = call i64 @llvm.riscv.clmul.i64(i64 [[TMP0]], i64 [[TMP1]])
 // RV64ZBKC-NEXT: ret i64 [[TMP2]]
 //
-long clmul(long a, long b) {
+unsigned long clmul(unsigned long a, unsigned long b) {
 return __builtin_riscv_clmul(a, b);
 }
@@ -28,6 +28,6 @@
 // RV64ZBKC-NEXT: [[TMP2:%.*]] = call i64 @llvm.riscv.clmulh.i64(i64 [[TMP0]], i64 [[TMP1]])
 // RV64ZBKC-NEXT: ret i64 [[TMP2]]
 //
-long clmulh(long a, long b) {
+unsigned long clmulh(unsigned long a, unsigned long b) {
 return __builtin_riscv_clmulh(a, b);
 }
diff --git a/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv64-zbkx.c b/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv64-zbkx.c
--- a/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv64-zbkx.c
+++ b/clang/test/CodeGen/RISCV/rvb-intrinsics/riscv64-zbkx.c
@@ -13,7 +13,7 @@
 // RV64ZBKX-NEXT: [[TMP2:%.*]] = call i64 @llvm.riscv.xperm8.i64(i64 [[TMP0]], i64 [[TMP1]])
 // RV64ZBKX-NEXT: ret i64 [[TMP2]]
 //
-long xperm8(long rs1, long rs2)
+unsigned long xperm8(unsigned long rs1, unsigned long rs2)
 {
 return __builtin_riscv_xperm8(rs1, rs2);
 }
@@ -29,7 +29,7 @@
 // RV64ZBKX-NEXT: [[TMP2:%.*]] = call i64 @llvm.riscv.xperm4.i64(i64 [[TMP0]], i64 [[TMP1]])
 // RV64ZBKX-NEXT: ret i64 [[TMP2]]
 //
-long xperm4(long rs1, long rs2)
+unsigned long xperm4(unsigned long rs1, unsigned long rs2)
 {
 return __builtin_riscv_xperm4(rs1, rs2);
 }
diff --git a/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv32-zknd.c b/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv32-zknd.c
--- a/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv32-zknd.c
+++ b/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv32-zknd.c
@@ -13,7 +13,7 @@
 // RV32ZKND-NEXT: [[TMP2:%.*]] = call i32 @llvm.riscv.aes32dsi(i32 [[TMP0]], i32 [[TMP1]], i32 3)
 // RV32ZKND-NEXT: ret i32 [[TMP2]]
 //
-int aes32dsi(int rs1, int rs2) {
+unsigned int aes32dsi(unsigned int rs1, unsigned int rs2) {
 return __builtin_riscv_aes32dsi_32(rs1, rs2, 3);
 }
@@ -28,6 +28,6 @@
 // RV32ZKND-NEXT: [[TMP2:%.*]] = call i32 @llvm.riscv.aes32dsmi(i32 [[TMP0]], i32 [[TMP1]], i32 3)
 // RV32ZKND-NEXT: ret i32 [[TMP2]]
 //
-int aes32dsmi(int rs1, int rs2) {
+unsigned int aes32dsmi(unsigned int rs1, unsigned int rs2) {
 return __builtin_riscv_aes32dsmi_32(rs1, rs2, 3);
 }
diff --git a/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv32-zkne.c b/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv32-zkne.c
--- a/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv32-zkne.c
+++ b/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv32-zkne.c
@@ -13,7 +13,7 @@
 // RV32ZKNE-NEXT: [[TMP2:%.*]] = call i32 @llvm.riscv.aes32esi(i32 [[TMP0]], i32 [[TMP1]], i32 3)
 // RV32ZKNE-NEXT: ret i32 [[TMP2]]
 //
-int aes32esi(int rs1, int rs2) {
+unsigned int aes32esi(unsigned int rs1, unsigned int rs2) {
 return __builtin_riscv_aes32esi_32(rs1, rs2, 3);
 }
@@ -28,6 +28,6 @@
 // RV32ZKNE-NEXT: [[TMP2:%.*]] = call i32 @llvm.riscv.aes32esmi(i32 [[TMP0]], i32 [[TMP1]], i32 3)
 // RV32ZKNE-NEXT: ret i32 [[TMP2]]
 //
-int aes32esmi(int rs1, int rs2) {
+unsigned int aes32esmi(unsigned int rs1, unsigned int rs2) {
 return __builtin_riscv_aes32esmi_32(rs1, rs2, 3);
 }
diff --git a/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv32-zknh.c b/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv32-zknh.c
--- a/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv32-zknh.c
+++ b/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv32-zknh.c
@@ -10,7 +10,7 @@
 // RV32ZKNH-NEXT: [[TMP1:%.*]] = call i32 @llvm.riscv.sha256sig0.i32(i32 [[TMP0]])
 // RV32ZKNH-NEXT: ret i32 [[TMP1]]
 //
-long sha256sig0(long rs1) {
+unsigned long sha256sig0(unsigned long rs1) {
 return __builtin_riscv_sha256sig0(rs1);
 }
@@ -22,7 +22,7 @@
 // RV32ZKNH-NEXT: [[TMP1:%.*]] = call i32 @llvm.riscv.sha256sig1.i32(i32 [[TMP0]])
 // RV32ZKNH-NEXT: ret i32 [[TMP1]]
 //
-long sha256sig1(long rs1) {
+unsigned long sha256sig1(unsigned long rs1) {
 return __builtin_riscv_sha256sig1(rs1);
 }
@@ -34,7 +34,7 @@
 // RV32ZKNH-NEXT: [[TMP1:%.*]] = call i32 @llvm.riscv.sha256sum0.i32(i32 [[TMP0]])
 // RV32ZKNH-NEXT: ret i32 [[TMP1]]
 //
-long sha256sum0(long rs1) {
+unsigned long sha256sum0(unsigned long rs1) {
 return __builtin_riscv_sha256sum0(rs1);
 }
@@ -46,7 +46,7 @@
 // RV32ZKNH-NEXT: [[TMP1:%.*]] = call i32 @llvm.riscv.sha256sum1.i32(i32 [[TMP0]])
 // RV32ZKNH-NEXT: ret i32 [[TMP1]]
 //
-long sha256sum1(long rs1) {
+unsigned long sha256sum1(unsigned long rs1) {
 return __builtin_riscv_sha256sum1(rs1);
 }
@@ -61,7 +61,7 @@
 // RV32ZKNH-NEXT: [[TMP2:%.*]] = call i32 @llvm.riscv.sha512sig0h(i32 [[TMP0]], i32 [[TMP1]])
 // RV32ZKNH-NEXT: ret i32 [[TMP2]]
 //
-int sha512sig0h(int rs1, int rs2) {
+unsigned int sha512sig0h(unsigned int rs1, unsigned int rs2) {
 return __builtin_riscv_sha512sig0h_32(rs1, rs2);
 }
@@ -76,7 +76,7 @@
 // RV32ZKNH-NEXT: [[TMP2:%.*]] = call i32 @llvm.riscv.sha512sig0l(i32 [[TMP0]], i32 [[TMP1]])
 // RV32ZKNH-NEXT: ret i32 [[TMP2]]
 //
-int sha512sig0l(int rs1, int rs2) {
+unsigned int sha512sig0l(unsigned int rs1, unsigned int rs2) {
 return __builtin_riscv_sha512sig0l_32(rs1, rs2);
 }
@@ -91,7 +91,7 @@
 // RV32ZKNH-NEXT: [[TMP2:%.*]] = call i32 @llvm.riscv.sha512sig1h(i32 [[TMP0]], i32 [[TMP1]])
 // RV32ZKNH-NEXT: ret i32 [[TMP2]]
 //
-int sha512sig1h(int rs1, int rs2) {
+unsigned int sha512sig1h(unsigned int rs1, unsigned int rs2) {
 return __builtin_riscv_sha512sig1h_32(rs1, rs2);
 }
@@ -106,7 +106,7 @@
 // RV32ZKNH-NEXT: [[TMP2:%.*]] = call i32 @llvm.riscv.sha512sig1l(i32 [[TMP0]], i32 [[TMP1]])
 // RV32ZKNH-NEXT: ret i32 [[TMP2]]
 //
-int sha512sig1l(int rs1, int rs2) {
+unsigned int sha512sig1l(unsigned int rs1, unsigned int rs2) {
 return __builtin_riscv_sha512sig1l_32(rs1, rs2);
 }
@@ -121,7 +121,7 @@
 // RV32ZKNH-NEXT: [[TMP2:%.*]] = call i32 @llvm.riscv.sha512sum0r(i32 [[TMP0]], i32 [[TMP1]])
 // RV32ZKNH-NEXT: ret i32 [[TMP2]]
 //
-int sha512sum0r(int rs1, int rs2) {
+unsigned int sha512sum0r(unsigned int rs1, unsigned int rs2) {
 return __builtin_riscv_sha512sum0r_32(rs1, rs2);
 }
@@ -136,6 +136,6 @@
 // RV32ZKNH-NEXT: [[TMP2:%.*]] = call i32 @llvm.riscv.sha512sum1r(i32 [[TMP0]], i32 [[TMP1]])
 // RV32ZKNH-NEXT: ret i32 [[TMP2]]
 //
-int sha512sum1r(int rs1, int rs2) {
+unsigned int sha512sum1r(unsigned int rs1, unsigned int rs2) {
 return __builtin_riscv_sha512sum1r_32(rs1, rs2);
 }
diff --git a/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv32-zksed.c b/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv32-zksed.c
--- a/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv32-zksed.c
+++ b/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv32-zksed.c
@@ -13,7 +13,7 @@
 // RV32ZKSED-NEXT: [[TMP2:%.*]] = call i32 @llvm.riscv.sm4ks.i32(i32 [[TMP0]], i32 [[TMP1]], i32 0)
 // RV32ZKSED-NEXT: ret i32 [[TMP2]]
 //
-long sm4ks(long rs1, long rs2) {
+unsigned long sm4ks(unsigned long rs1, unsigned long rs2) {
 return __builtin_riscv_sm4ks(rs1, rs2, 0);
 }
@@ -29,6 +29,6 @@
 // RV32ZKSED-NEXT: [[TMP2:%.*]] = call i32 @llvm.riscv.sm4ed.i32(i32 [[TMP0]], i32 [[TMP1]], i32 0)
 // RV32ZKSED-NEXT: ret i32 [[TMP2]]
 //
-long sm4ed(long rs1, long rs2) {
+unsigned long sm4ed(unsigned long rs1, unsigned long rs2) {
 return __builtin_riscv_sm4ed(rs1, rs2, 0);
 }
diff --git a/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv32-zksh.c b/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv32-zksh.c
--- a/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv32-zksh.c
+++ b/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv32-zksh.c
@@ -10,7 +10,7 @@
 // RV32ZKSH-NEXT: [[TMP1:%.*]] = call i32 @llvm.riscv.sm3p0.i32(i32 [[TMP0]])
 // RV32ZKSH-NEXT: ret i32 [[TMP1]]
 //
-long sm3p0(long rs1)
+unsigned long sm3p0(unsigned long rs1)
 {
 return __builtin_riscv_sm3p0(rs1);
 }
@@ -23,6 +23,6 @@
 // RV32ZKSH-NEXT: [[TMP1:%.*]] = call i32 @llvm.riscv.sm3p1.i32(i32 [[TMP0]])
 // RV32ZKSH-NEXT: ret i32 [[TMP1]]
 //
-long sm3p1(long rs1) {
+unsigned long sm3p1(unsigned long rs1) {
 return __builtin_riscv_sm3p1(rs1);
 }
diff --git a/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv64-zknd-zkne.c b/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv64-zknd-zkne.c
--- a/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv64-zknd-zkne.c
+++ b/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv64-zknd-zkne.c
@@ -12,7 +12,7 @@
 // RV64ZKND-ZKNE-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.aes64ks1i(i64 [[TMP0]], i32 0)
 // RV64ZKND-ZKNE-NEXT: ret i64 [[TMP1]]
 //
-long aes64ks1i(long rs1) {
+unsigned long aes64ks1i(unsigned long rs1) {
 return __builtin_riscv_aes64ks1i_64(rs1, 0);
 }
@@ -27,6 +27,6 @@
 // RV64ZKND-ZKNE-NEXT: [[TMP2:%.*]] = call i64 @llvm.riscv.aes64ks2(i64 [[TMP0]], i64 [[TMP1]])
 // RV64ZKND-ZKNE-NEXT: ret i64 [[TMP2]]
 //
-long aes64ks2(long rs1, long rs2) {
+unsigned long aes64ks2(unsigned long rs1, unsigned long rs2) {
 return __builtin_riscv_aes64ks2_64(rs1, rs2);
 }
diff --git a/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv64-zknd.c b/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv64-zknd.c
--- a/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv64-zknd.c
+++ b/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv64-zknd.c
@@ -14,7 +14,7 @@
 // RV64ZKND-NEXT: [[TMP2:%.*]] = call i64 @llvm.riscv.aes64dsm(i64 [[TMP0]], i64 [[TMP1]])
 // RV64ZKND-NEXT: ret i64 [[TMP2]]
 //
-long aes64dsm(long rs1, long rs2) {
+unsigned long aes64dsm(unsigned long rs1, unsigned long rs2) {
 return __builtin_riscv_aes64dsm_64(rs1, rs2);
 }
@@ -30,7 +30,7 @@
 // RV64ZKND-NEXT: [[TMP2:%.*]] = call i64 @llvm.riscv.aes64ds(i64 [[TMP0]], i64 [[TMP1]])
 // RV64ZKND-NEXT: ret i64 [[TMP2]]
 //
-long aes64ds(long rs1, long rs2) {
+unsigned long aes64ds(unsigned long rs1, unsigned long rs2) {
 return __builtin_riscv_aes64ds_64(rs1, rs2);
 }
@@ -43,6 +43,6 @@
 // RV64ZKND-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.aes64im(i64 [[TMP0]])
 // RV64ZKND-NEXT: ret i64 [[TMP1]]
 //
-long aes64im(long rs1) {
+unsigned long aes64im(unsigned long rs1) {
 return __builtin_riscv_aes64im_64(rs1);
 }
diff --git a/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv64-zkne.c b/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv64-zkne.c
--- a/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv64-zkne.c
+++ b/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv64-zkne.c
@@ -14,7 +14,7 @@
 // RV64ZKNE-NEXT: [[TMP2:%.*]] = call i64 @llvm.riscv.aes64es(i64 [[TMP0]], i64 [[TMP1]])
 // RV64ZKNE-NEXT: ret i64 [[TMP2]]
 //
-long aes64es(long rs1, long rs2) {
+unsigned long aes64es(unsigned long rs1, unsigned long rs2) {
 return __builtin_riscv_aes64es_64(rs1, rs2);
 }
@@ -30,6 +30,6 @@
 // RV64ZKNE-NEXT: [[TMP2:%.*]] = call i64 @llvm.riscv.aes64esm(i64 [[TMP0]], i64 [[TMP1]])
 // RV64ZKNE-NEXT: ret i64 [[TMP2]]
 //
-long aes64esm(long rs1, long rs2) {
+unsigned long aes64esm(unsigned long rs1, unsigned long rs2) {
 return __builtin_riscv_aes64esm_64(rs1, rs2);
 }
diff --git a/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv64-zknh.c b/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv64-zknh.c
--- a/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv64-zknh.c
+++ b/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv64-zknh.c
@@ -11,7 +11,7 @@
 // RV64ZKNH-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.sha512sig0(i64 [[TMP0]])
 // RV64ZKNH-NEXT: ret i64 [[TMP1]]
 //
-long sha512sig0(long rs1) {
+unsigned long sha512sig0(unsigned long rs1) {
 return __builtin_riscv_sha512sig0_64(rs1);
 }
@@ -24,7 +24,7 @@
 // RV64ZKNH-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.sha512sig1(i64 [[TMP0]])
 // RV64ZKNH-NEXT: ret i64 [[TMP1]]
 //
-long sha512sig1(long rs1) {
+unsigned long sha512sig1(unsigned long rs1) {
 return __builtin_riscv_sha512sig1_64(rs1);
 }
@@ -37,7 +37,7 @@
 // RV64ZKNH-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.sha512sum0(i64 [[TMP0]])
 // RV64ZKNH-NEXT: ret i64 [[TMP1]]
 //
-long sha512sum0(long rs1) {
+unsigned long sha512sum0(unsigned long rs1) {
 return __builtin_riscv_sha512sum0_64(rs1);
 }
@@ -50,7 +50,7 @@
 // RV64ZKNH-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.sha512sum1(i64 [[TMP0]])
 // RV64ZKNH-NEXT: ret i64 [[TMP1]]
 //
-long sha512sum1(long rs1) {
+unsigned long sha512sum1(unsigned long rs1) {
 return __builtin_riscv_sha512sum1_64(rs1);
 }
@@ -63,7 +63,7 @@
 // RV64ZKNH-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.sha256sig0.i64(i64 [[TMP0]])
 // RV64ZKNH-NEXT: ret i64 [[TMP1]]
 //
-long sha256sig0(long rs1) {
+unsigned long sha256sig0(unsigned long rs1) {
 return __builtin_riscv_sha256sig0(rs1);
 }
@@ -75,7 +75,7 @@
 // RV64ZKNH-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.sha256sig1.i64(i64 [[TMP0]])
 // RV64ZKNH-NEXT: ret i64 [[TMP1]]
 //
-long sha256sig1(long rs1) {
+unsigned long sha256sig1(unsigned long rs1) {
 return __builtin_riscv_sha256sig1(rs1);
 }
@@ -88,7 +88,7 @@
 // RV64ZKNH-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.sha256sum0.i64(i64 [[TMP0]])
 // RV64ZKNH-NEXT: ret i64 [[TMP1]]
 //
-long sha256sum0(long rs1) {
+unsigned long sha256sum0(unsigned long rs1) {
 return __builtin_riscv_sha256sum0(rs1);
 }
@@ -100,6 +100,6 @@
 // RV64ZKNH-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.sha256sum1.i64(i64 [[TMP0]])
 // RV64ZKNH-NEXT: ret i64 [[TMP1]]
 //
-long sha256sum1(long rs1) {
+unsigned long sha256sum1(unsigned long rs1) {
 return __builtin_riscv_sha256sum1(rs1);
 }
diff --git a/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv64-zksed.c b/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv64-zksed.c
--- a/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv64-zksed.c
+++ b/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv64-zksed.c
@@ -13,7 +13,7 @@
 // RV64ZKSED-NEXT: [[TMP2:%.*]] = call i64 @llvm.riscv.sm4ks.i64(i64 [[TMP0]], i64 [[TMP1]], i32 0)
 // RV64ZKSED-NEXT: ret i64 [[TMP2]]
 //
-long sm4ks(long rs1, long rs2) {
+unsigned long sm4ks(unsigned long rs1, unsigned long rs2) {
 return __builtin_riscv_sm4ks(rs1, rs2, 0);
 }
@@ -28,6 +28,6 @@
 // RV64ZKSED-NEXT: [[TMP2:%.*]] = call i64 @llvm.riscv.sm4ed.i64(i64 [[TMP0]], i64 [[TMP1]], i32 0)
 // RV64ZKSED-NEXT: ret i64 [[TMP2]]
 //
-long sm4ed(long rs1, long rs2) {
+unsigned long sm4ed(unsigned long rs1, unsigned long rs2) {
 return __builtin_riscv_sm4ed(rs1, rs2, 0);
 }
diff --git a/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv64-zksh.c b/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv64-zksh.c
--- a/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv64-zksh.c
+++ b/clang/test/CodeGen/RISCV/rvk-intrinsics/riscv64-zksh.c
@@ -10,7 +10,7 @@
 // RV64ZKSH-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.sm3p0.i64(i64 [[TMP0]])
 // RV64ZKSH-NEXT: ret i64 [[TMP1]]
 //
-long sm3p0(long rs1) {
+unsigned long sm3p0(unsigned long rs1) {
 return __builtin_riscv_sm3p0(rs1);
 }
@@ -23,6 +23,6 @@
 // RV64ZKSH-NEXT: [[TMP1:%.*]] = call i64 @llvm.riscv.sm3p1.i64(i64 [[TMP0]])
 // RV64ZKSH-NEXT: ret i64 [[TMP1]]
 //
-long sm3p1(long rs1) {
+unsigned long sm3p1(unsigned long rs1) {
 return __builtin_riscv_sm3p1(rs1);
 }
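Illustrative usage sketch, not part of the patch: with the updated prototypes the XLEN-wide builtins now take and return unsigned operands (`ULi` is unsigned long), so callers pass unsigned values without implicit sign conversions. The `-march` string mentioned below is an assumption; any configuration that enables Zbc behaves the same way.

```c
// Usage sketch only (assumes a compiler with this patch and a target that
// enables Zbc, e.g. -march=rv64gc_zbc or -march=rv32gc_zbc).
// Operands and results are unsigned long, i.e. XLEN bits wide.
unsigned long clmul_low(unsigned long a, unsigned long b) {
  // Low XLEN bits of the carry-less product of a and b.
  return __builtin_riscv_clmul(a, b);
}

unsigned long clmul_high(unsigned long a, unsigned long b) {
  // High XLEN bits of the carry-less product of a and b.
  return __builtin_riscv_clmulh(a, b);
}
```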