Index: llvm/trunk/lib/CodeGen/Analysis.cpp
===================================================================
--- llvm/trunk/lib/CodeGen/Analysis.cpp
+++ llvm/trunk/lib/CodeGen/Analysis.cpp
@@ -565,6 +565,17 @@
     return false;
 
   const Value *RetVal = Ret->getOperand(0), *CallVal = I;
+  // Intrinsics like llvm.memcpy have no return value, but the libcall they
+  // may be expanded to (e.g. memcpy) returns its first argument.
+  const CallInst *Call = cast<CallInst>(I);
+  if (Function *F = Call->getCalledFunction()) {
+    Intrinsic::ID IID = F->getIntrinsicID();
+    if ((IID == Intrinsic::memcpy || IID == Intrinsic::memmove ||
+         IID == Intrinsic::memset) &&
+        RetVal == Call->getArgOperand(0))
+      return true;
+  }
+
   SmallVector<unsigned, 4> RetPath, CallPath;
   SmallVector<CompositeType *, 4> RetSubTypes, CallSubTypes;
 
Index: llvm/trunk/test/CodeGen/X86/tailcall-mem-intrinsics.ll
===================================================================
--- llvm/trunk/test/CodeGen/X86/tailcall-mem-intrinsics.ll
+++ llvm/trunk/test/CodeGen/X86/tailcall-mem-intrinsics.ll
@@ -24,6 +24,30 @@
   ret void
 }
 
+; CHECK-LABEL: tail_memcpy_ret
+; CHECK: jmp memcpy
+define i8* @tail_memcpy_ret(i8* nocapture %p, i8* nocapture readonly %q, i32 %n) #0 {
+entry:
+  tail call void @llvm.memcpy.p0i8.p0i8.i32(i8* %p, i8* %q, i32 %n, i32 1, i1 false)
+  ret i8* %p
+}
+
+; CHECK-LABEL: tail_memmove_ret
+; CHECK: jmp memmove
+define i8* @tail_memmove_ret(i8* nocapture %p, i8* nocapture readonly %q, i32 %n) #0 {
+entry:
+  tail call void @llvm.memmove.p0i8.p0i8.i32(i8* %p, i8* %q, i32 %n, i32 1, i1 false)
+  ret i8* %p
+}
+
+; CHECK-LABEL: tail_memset_ret
+; CHECK: jmp memset
+define i8* @tail_memset_ret(i8* nocapture %p, i8 %c, i32 %n) #0 {
+entry:
+  tail call void @llvm.memset.p0i8.i32(i8* %p, i8 %c, i32 %n, i32 1, i1 false)
+  ret i8* %p
+}
+
 declare void @llvm.memcpy.p0i8.p0i8.i32(i8* nocapture, i8* nocapture readonly, i32, i32, i1) #0
 declare void @llvm.memmove.p0i8.p0i8.i32(i8* nocapture, i8* nocapture readonly, i32, i32, i1) #0
 declare void @llvm.memset.p0i8.i32(i8* nocapture, i8, i32, i32, i1) #0
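
Note (not part of the patch): the IR pattern exercised by the new tests corresponds to C++ source of roughly the following shape; the function name is illustrative. With this change, a target whose memcpy libcall returns the destination pointer can lower such a call as a tail jump (jmp memcpy), which is what the CHECK lines above expect.

// Illustrative sketch only: a function that returns the intrinsic's first argument.
#include <cstring>

char *copy_and_return(char *dst, const char *src, std::size_t n) {
  std::memcpy(dst, src, n); // the frontend lowers this to an @llvm.memcpy.* intrinsic call
  return dst;               // returning the first argument satisfies the new check in Analysis.cpp
}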