Index: llvm/trunk/lib/Analysis/Loads.cpp
===================================================================
--- llvm/trunk/lib/Analysis/Loads.cpp
+++ llvm/trunk/lib/Analysis/Loads.cpp
@@ -107,11 +107,14 @@
     return isDereferenceableAndAlignedPointer(ASC->getOperand(0), Align, Size,
                                               DL, CtxI, DT, Visited);
 
-  if (auto CS = ImmutableCallSite(V))
+  if (auto CS = ImmutableCallSite(V)) {
     if (const Value *RV = CS.getReturnedArgOperand())
       return isDereferenceableAndAlignedPointer(RV, Align, Size, DL, CtxI, DT,
                                                 Visited);
-
+    if (CS.getIntrinsicID() == Intrinsic::launder_invariant_group)
+      return isDereferenceableAndAlignedPointer(CS->getOperand(0), Align, Size,
+                                                DL, CtxI, DT, Visited);
+  }
   // If we don't know, assume the worst.
   return false;
 }
Index: llvm/trunk/lib/Analysis/ValueTracking.cpp
===================================================================
--- llvm/trunk/lib/Analysis/ValueTracking.cpp
+++ llvm/trunk/lib/Analysis/ValueTracking.cpp
@@ -1953,9 +1953,12 @@
       if (LI->getMetadata(LLVMContext::MD_nonnull))
         return true;
 
-    if (auto CS = ImmutableCallSite(V))
+    if (auto CS = ImmutableCallSite(V)) {
       if (CS.isReturnNonNull())
         return true;
+      if (CS.getIntrinsicID() == Intrinsic::ID::launder_invariant_group)
+        return isKnownNonZero(CS->getOperand(0), Depth + 1, Q);
+    }
   }
 
   // The remaining tests are all recursive, so bail out if we hit the limit.
Index: llvm/trunk/test/Analysis/ValueTracking/deref-bitcast-of-gep.ll
===================================================================
--- llvm/trunk/test/Analysis/ValueTracking/deref-bitcast-of-gep.ll
+++ llvm/trunk/test/Analysis/ValueTracking/deref-bitcast-of-gep.ll
@@ -80,3 +80,28 @@
 leave:
   ret void
 }
+
+define void @checkLaunder(i8* align 4 dereferenceable(1024) %p) {
+; CHECK-LABEL: @checkLaunder(
+; CHECK: entry:
+; CHECK:   %l = call i8* @llvm.launder.invariant.group.p0i8(i8* %p)
+; CHECK:   %val = load i8, i8* %l
+; CHECK:   br label %loop
+; CHECK: loop:
+; CHECK:   call void @use(i32 0)
+; CHECK-NEXT:   call void @use8(i8 %val)
+
+entry:
+  %l = call i8* @llvm.launder.invariant.group.p0i8(i8* %p)
+  br label %loop
+
+loop:
+  call void @use(i32 0)
+  %val = load i8, i8* %l, !invariant.load !{}
+  call void @use8(i8 %val)
+  br label %loop
+}
+
+declare i8* @llvm.launder.invariant.group.p0i8(i8*)
+
+declare void @use8(i8)
Index: llvm/trunk/test/Analysis/ValueTracking/invariant.group.ll
===================================================================
--- llvm/trunk/test/Analysis/ValueTracking/invariant.group.ll
+++ llvm/trunk/test/Analysis/ValueTracking/invariant.group.ll
@@ -0,0 +1,19 @@
+; RUN: opt -S -instsimplify -instcombine < %s | FileCheck %s
+
+; CHECK-LABEL: define void @checkNonnull()
+define void @checkNonnull() {
+; CHECK: %p = call i8* @llvm.launder.invariant.group.p0i8(i8* nonnull %0)
+; CHECK: %p2 = call i8* @llvm.launder.invariant.group.p0i8(i8* nonnull %p)
+; CHECK: call void @use(i8* nonnull %p2)
+entry:
+  %0 = alloca i8, align 8
+
+  %p = call i8* @llvm.launder.invariant.group.p0i8(i8* %0)
+  %p2 = call i8* @llvm.launder.invariant.group.p0i8(i8* %p)
+  call void @use(i8* %p2)
+
+  ret void
+}
+
+declare i8* @llvm.launder.invariant.group.p0i8(i8*)
+declare void @use(i8*)