diff --git a/llvm/lib/CodeGen/GlobalISel/LegalizerHelper.cpp b/llvm/lib/CodeGen/GlobalISel/LegalizerHelper.cpp
--- a/llvm/lib/CodeGen/GlobalISel/LegalizerHelper.cpp
+++ b/llvm/lib/CodeGen/GlobalISel/LegalizerHelper.cpp
@@ -580,15 +580,18 @@
     break;
   case TargetOpcode::G_MEMCPY:
     RTLibcall = RTLIB::MEMCPY;
+    Args[0].Flags[0].setReturned();
     break;
   case TargetOpcode::G_MEMMOVE:
     RTLibcall = RTLIB::MEMMOVE;
+    Args[0].Flags[0].setReturned();
     break;
   case TargetOpcode::G_MEMSET:
     RTLibcall = RTLIB::MEMSET;
+    Args[0].Flags[0].setReturned();
     break;
   default:
-    return LegalizerHelper::UnableToLegalize;
+    llvm_unreachable("unsupported opcode");
   }
 
   const char *Name = TLI.getLibcallName(RTLibcall);
@@ -610,7 +613,6 @@
   if (!CLI.lowerCall(MIRBuilder, Info))
     return LegalizerHelper::UnableToLegalize;
-
   if (Info.LoweredTailCall) {
     assert(Info.IsTailCall && "Lowered tail call when it wasn't a tail call?");
diff --git a/llvm/test/CodeGen/AArch64/GlobalISel/legalize-memcpy-et-al.mir b/llvm/test/CodeGen/AArch64/GlobalISel/legalize-memcpy-et-al.mir
--- a/llvm/test/CodeGen/AArch64/GlobalISel/legalize-memcpy-et-al.mir
+++ b/llvm/test/CodeGen/AArch64/GlobalISel/legalize-memcpy-et-al.mir
@@ -18,7 +18,7 @@
     ; CHECK: $x0 = COPY [[COPY]](p0)
     ; CHECK: $x1 = COPY [[COPY1]](p0)
     ; CHECK: $x2 = COPY [[ZEXT]](s64)
-    ; CHECK: BL &memcpy, csr_aarch64_aapcs, implicit-def $lr, implicit $sp, implicit $x0, implicit $x1, implicit $x2
+    ; CHECK: BL &memcpy, csr_aarch64_aapcs_thisreturn, implicit-def $lr, implicit $sp, implicit $x0, implicit $x1, implicit $x2
     ; CHECK: ADJCALLSTACKUP 0, 0, implicit-def $sp, implicit $sp
     ; CHECK: RET_ReallyLR
     %0:_(p0) = COPY $x0
@@ -71,7 +71,7 @@
     ; CHECK: $x0 = COPY [[COPY]](p0)
     ; CHECK: $x1 = COPY [[COPY1]](p0)
     ; CHECK: $x2 = COPY [[ZEXT]](s64)
-    ; CHECK: BL &memmove, csr_aarch64_aapcs, implicit-def $lr, implicit $sp, implicit $x0, implicit $x1, implicit $x2
+    ; CHECK: BL &memmove, csr_aarch64_aapcs_thisreturn, implicit-def $lr, implicit $sp, implicit $x0, implicit $x1, implicit $x2
     ; CHECK: ADJCALLSTACKUP 0, 0, implicit-def $sp, implicit $sp
     ; CHECK: RET_ReallyLR
     %0:_(p0) = COPY $x0
@@ -100,7 +100,7 @@
     ; CHECK: [[COPY3:%[0-9]+]]:_(s32) = COPY [[COPY1]](s32)
     ; CHECK: $w1 = COPY [[COPY3]](s32)
     ; CHECK: $x2 = COPY [[ZEXT]](s64)
-    ; CHECK: BL &memset, csr_aarch64_aapcs, implicit-def $lr, implicit $sp, implicit $x0, implicit $w1, implicit $x2
+    ; CHECK: BL &memset, csr_aarch64_aapcs_thisreturn, implicit-def $lr, implicit $sp, implicit $x0, implicit $w1, implicit $x2
     ; CHECK: ADJCALLSTACKUP 0, 0, implicit-def $sp, implicit $sp
     ; CHECK: RET_ReallyLR
     %0:_(p0) = COPY $x0
@@ -129,7 +129,7 @@
     ; CHECK: $x0 = COPY [[COPY]](p0)
     ; CHECK: $x1 = COPY [[COPY1]](p0)
     ; CHECK: $x2 = COPY [[ZEXT]](s64)
-    ; CHECK: BL &memcpy, csr_aarch64_aapcs, implicit-def $lr, implicit $sp, implicit $x0, implicit $x1, implicit $x2
+    ; CHECK: BL &memcpy, csr_aarch64_aapcs_thisreturn, implicit-def $lr, implicit $sp, implicit $x0, implicit $x1, implicit $x2
     ; CHECK: ADJCALLSTACKUP 0, 0, implicit-def $sp, implicit $sp
     ; CHECK: $x0 = COPY [[ZEXT]](s64)
     ; CHECK: RET_ReallyLR implicit $x0
@@ -158,7 +158,7 @@
     ; CHECK: $x0 = COPY [[COPY]](p0)
     ; CHECK: $x1 = COPY [[COPY1]](p0)
     ; CHECK: $x2 = COPY [[ZEXT]](s64)
-    ; CHECK: BL &memcpy, csr_aarch64_aapcs, implicit-def $lr, implicit $sp, implicit $x0, implicit $x1, implicit $x2
+    ; CHECK: BL &memcpy, csr_aarch64_aapcs_thisreturn, implicit-def $lr, implicit $sp, implicit $x0, implicit $x1, implicit $x2
     ; CHECK: ADJCALLSTACKUP 0, 0, implicit-def $sp, implicit $sp
     ; CHECK: TCRETURNdi &memset, 0, csr_aarch64_aapcs, implicit $sp
     %0:_(p0) = COPY $x0
diff --git a/llvm/test/CodeGen/AArch64/GlobalISel/legalize-memlib-debug-loc.mir b/llvm/test/CodeGen/AArch64/GlobalISel/legalize-memlib-debug-loc.mir
--- a/llvm/test/CodeGen/AArch64/GlobalISel/legalize-memlib-debug-loc.mir
+++ b/llvm/test/CodeGen/AArch64/GlobalISel/legalize-memlib-debug-loc.mir
@@ -47,7 +47,7 @@
     ; We're checking that the BL call has the debug loc of the original intrinsic call.
     ; CHECK-LABEL: name: test_memset_debug
-    ; CHECK: BL &memset, csr_aarch64_aapcs, implicit-def $lr, implicit $sp, implicit $x0, implicit $w1, implicit $x2, debug-location !11
+    ; CHECK: BL &memset, csr_aarch64_aapcs_thisreturn, implicit-def $lr, implicit $sp, implicit $x0, implicit $w1, implicit $x2, debug-location !11
     ; CHECK: RET_ReallyLR debug-location !12
     %0:_(p0) = COPY $x0
     %1:_(s32) = COPY $w1