Index: llvm/lib/Analysis/BasicAliasAnalysis.cpp
===================================================================
--- llvm/lib/Analysis/BasicAliasAnalysis.cpp
+++ llvm/lib/Analysis/BasicAliasAnalysis.cpp
@@ -132,7 +132,17 @@
 /// Returns true if the pointer is one which would have been considered an
 /// escape by isNonEscapingLocalObject.
 static bool isEscapeSource(const Value *V) {
-  if (isa<CallInst>(V) || isa<InvokeInst>(V) || isa<Argument>(V))
+  if (auto CS = ImmutableCallSite(V)) {
+    // launder_invariant_group captures its argument only by returning it,
+    // so it might not be considered an escape by isNonEscapingLocalObject
+    if (CS.getIntrinsicID() == Intrinsic::launder_invariant_group) {
+      return false;
+    }
+
+    return true;
+  }
+
+  if (isa<Argument>(V))
     return true;
 
   // The load case works because isNonEscapingLocalObject considers all
Index: llvm/test/Analysis/BasicAA/invariant_group.ll
===================================================================
--- /dev/null
+++ llvm/test/Analysis/BasicAA/invariant_group.ll
@@ -0,0 +1,30 @@
+; RUN: opt < %s -basicaa -gvn -S | FileCheck %s
+
+; The input *.ll had been adapted from bug 37458:
+;
+; struct A { virtual void f(); int n; };
+;
+; int h() {
+;   A a;
+;   a.n = 42;
+;   return __builtin_launder(&a)->n;
+; }
+
+%struct.A = type <{ i8*, i8 }>
+
+; CHECK: testLaunderInvariantGroupIsNotEscapeSource
+define i8 @testLaunderInvariantGroupIsNotEscapeSource() {
+entry:
+  %a = alloca %struct.A, align 8
+  %a.bitcast = bitcast %struct.A* %a to i8*
+  %n = getelementptr inbounds %struct.A, %struct.A* %a, i64 0, i32 1
+  store i8 42, i8* %n
+  %a.laundered = call i8* @llvm.launder.invariant.group.p0i8(i8* nonnull %a.bitcast)
+  %n.laundered = getelementptr inbounds i8, i8* %a.laundered, i64 8
+  %v = load i8, i8* %n.laundered
+; make sure that the load from %n.laundered to %v aliases the store of 42 to %n
+; CHECK: ret i8 42
+  ret i8 %v
+}
+
+declare i8* @llvm.launder.invariant.group.p0i8(i8*)