Index: llvm/lib/Analysis/ConstantFolding.cpp =================================================================== --- llvm/lib/Analysis/ConstantFolding.cpp +++ llvm/lib/Analysis/ConstantFolding.cpp @@ -1392,6 +1392,7 @@ case Intrinsic::fma: case Intrinsic::fmuladd: case Intrinsic::copysign: + case Intrinsic::launder_invariant_group: case Intrinsic::round: case Intrinsic::masked_load: case Intrinsic::sadd_with_overflow: @@ -1594,9 +1595,19 @@ if (IntrinsicID == Intrinsic::cos) return Constant::getNullValue(Ty); if (IntrinsicID == Intrinsic::bswap || - IntrinsicID == Intrinsic::bitreverse) + IntrinsicID == Intrinsic::bitreverse || + IntrinsicID == Intrinsic::launder_invariant_group) return Operands[0]; } + + if (isa<ConstantPointerNull>(Operands[0]) && + Operands[0]->getType()->getPointerAddressSpace() == 0) { + // launder(null) == null iff in addrspace 0 + if (IntrinsicID == Intrinsic::launder_invariant_group) + return Operands[0]; + return nullptr; + } + if (auto *Op = dyn_cast<ConstantFP>(Operands[0])) { if (IntrinsicID == Intrinsic::convert_to_fp16) { APFloat Val(Op->getValueAPF()); Index: llvm/lib/Transforms/Utils/Local.cpp =================================================================== --- llvm/lib/Transforms/Utils/Local.cpp +++ llvm/lib/Transforms/Utils/Local.cpp @@ -379,8 +379,9 @@ // Special case intrinsics that "may have side effects" but can be deleted // when dead. if (IntrinsicInst *II = dyn_cast<IntrinsicInst>(I)) { - // Safe to delete llvm.stacksave if dead. - if (II->getIntrinsicID() == Intrinsic::stacksave) + // Safe to delete llvm.stacksave and launder.invariant.group if dead. + if (II->getIntrinsicID() == Intrinsic::stacksave || + II->getIntrinsicID() == Intrinsic::launder_invariant_group) return true; // Lifetime intrinsics are dead when their right-hand is undef. 
Index: llvm/test/Transforms/InstCombine/invariant.group.ll =================================================================== --- /dev/null +++ llvm/test/Transforms/InstCombine/invariant.group.ll @@ -0,0 +1,34 @@ +; RUN: opt -instcombine -S < %s | FileCheck %s + +; CHECK-LABEL: define i8* @simplifyNullLaunder() +define i8* @simplifyNullLaunder() { +; CHECK-NEXT: ret i8* null + %b2 = call i8* @llvm.launder.invariant.group.p0i8(i8* null) + ret i8* %b2 +} + +; CHECK-LABEL: define i8 addrspace(42)* @dontsimplifyNullLaunderForDifferentAddrspace() +define i8 addrspace(42)* @dontsimplifyNullLaunderForDifferentAddrspace() { +; CHECK: %b2 = call i8 addrspace(42)* @llvm.launder.invariant.group.p42i8(i8 addrspace(42)* null) +; CHECK: ret i8 addrspace(42)* %b2 + %b2 = call i8 addrspace(42)* @llvm.launder.invariant.group.p42i8(i8 addrspace(42)* null) + ret i8 addrspace(42)* %b2 +} + +; CHECK-LABEL: define i8* @simplifyUndefLaunder() +define i8* @simplifyUndefLaunder() { +; CHECK-NEXT: ret i8* undef + %b2 = call i8* @llvm.launder.invariant.group.p0i8(i8* undef) + ret i8* %b2 +} + +; CHECK-LABEL: define i8 addrspace(42)* @simplifyUndefLaunder2() +define i8 addrspace(42)* @simplifyUndefLaunder2() { +; CHECK-NEXT: ret i8 addrspace(42)* undef + %b2 = call i8 addrspace(42)* @llvm.launder.invariant.group.p42i8(i8 addrspace(42)* undef) + ret i8 addrspace(42)* %b2 +} + + +declare i8* @llvm.launder.invariant.group.p0i8(i8*) +declare i8 addrspace(42)* @llvm.launder.invariant.group.p42i8(i8 addrspace(42)*)