Index: lib/CodeGen/CGBuiltin.cpp
===================================================================
--- lib/CodeGen/CGBuiltin.cpp
+++ lib/CodeGen/CGBuiltin.cpp
@@ -5221,14 +5221,14 @@
            "rbit of unusual size!");
     llvm::Value *Arg = EmitScalarExpr(E->getArg(0));
     return Builder.CreateCall(
-        CGM.getIntrinsic(Intrinsic::aarch64_rbit, Arg->getType()), Arg, "rbit");
+        CGM.getIntrinsic(Intrinsic::bitreverse, Arg->getType()), Arg, "rbit");
   }
   if (BuiltinID == AArch64::BI__builtin_arm_rbit64) {
     assert((getContext().getTypeSize(E->getType()) == 64) &&
            "rbit of unusual size!");
     llvm::Value *Arg = EmitScalarExpr(E->getArg(0));
     return Builder.CreateCall(
-        CGM.getIntrinsic(Intrinsic::aarch64_rbit, Arg->getType()), Arg, "rbit");
+        CGM.getIntrinsic(Intrinsic::bitreverse, Arg->getType()), Arg, "rbit");
   }
   if (BuiltinID == AArch64::BI__clear_cache) {
Index: test/CodeGen/arm_acle.c
===================================================================
--- test/CodeGen/arm_acle.c
+++ test/CodeGen/arm_acle.c
@@ -245,14 +245,14 @@
 // ARM-LABEL: test_rbit
 // AArch32: call i32 @llvm.arm.rbit
-// AArch64: call i32 @llvm.aarch64.rbit.i32
+// AArch64: call i32 @llvm.bitreverse.i32
 uint32_t test_rbit(uint32_t t) {
   return __rbit(t);
 }

 // ARM-LABEL: test_rbitl
 // AArch32: call i32 @llvm.arm.rbit
-// AArch64: call i64 @llvm.aarch64.rbit.i64
+// AArch64: call i64 @llvm.bitreverse.i64
 long test_rbitl(long t) {
   return __rbitl(t);
 }
@@ -260,7 +260,7 @@
 // ARM-LABEL: test_rbitll
 // AArch32: call i32 @llvm.arm.rbit
 // AArch32: call i32 @llvm.arm.rbit
-// AArch64: call i64 @llvm.aarch64.rbit.i64
+// AArch64: call i64 @llvm.bitreverse.i64
 uint64_t test_rbitll(uint64_t t) {
   return __rbitll(t);
 }
Index: test/CodeGen/builtins-arm64.c
===================================================================
--- test/CodeGen/builtins-arm64.c
+++ test/CodeGen/builtins-arm64.c
@@ -10,12 +10,12 @@
 // CHECK: call {{.*}} @llvm.thread.pointer()
 }

-// CHECK: call {{.*}} @llvm.aarch64.rbit.i32(i32 %a)
+// CHECK: call {{.*}} @llvm.bitreverse.i32(i32 %a)
 unsigned rbit(unsigned a) {
   return __builtin_arm_rbit(a);
 }

-// CHECK: call {{.*}} @llvm.aarch64.rbit.i64(i64 %a)
+// CHECK: call {{.*}} @llvm.bitreverse.i64(i64 %a)
 unsigned long long rbit64(unsigned long long a) {
   return __builtin_arm_rbit64(a);
 }
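
Note (illustrative, not part of the patch): the change above makes the AArch64 rbit builtins emit the target-independent llvm.bitreverse intrinsic instead of the AArch64-specific llvm.aarch64.rbit intrinsic; the backend is still expected to select it to the RBIT instruction. A minimal sketch of what a user-facing translation unit should lower to after this change, using hypothetical function names and compiled with something like clang --target=aarch64-linux-gnu -S -emit-llvm:

#include <stdint.h>

/* Expected to lower to a call to @llvm.bitreverse.i32 (per the updated tests). */
uint32_t reverse_bits32(uint32_t x) {
  return __builtin_arm_rbit(x);
}

/* Expected to lower to a call to @llvm.bitreverse.i64 (per the updated tests). */
uint64_t reverse_bits64(uint64_t x) {
  return __builtin_arm_rbit64(x);
}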