Index: docs/LangRef.rst =================================================================== --- docs/LangRef.rst +++ docs/LangRef.rst @@ -4904,7 +4904,8 @@ the optimizer that every ``load`` and ``store`` to the same pointer operand within the same invariant group can be assumed to load or store the same value (but see the ``llvm.invariant.group.barrier`` intrinsic which affects -when two pointers are considered the same). +when two pointers are considered the same). Pointers returned by bitcast or +getelementptr with only zero indices are considered the same. Examples: Index: lib/Analysis/MemoryDependenceAnalysis.cpp =================================================================== --- lib/Analysis/MemoryDependenceAnalysis.cpp +++ lib/Analysis/MemoryDependenceAnalysis.cpp @@ -339,43 +339,63 @@ MemDepResult MemoryDependenceResults::getInvariantGroupPointerDependency(LoadInst *LI, BasicBlock *BB) { + + auto *InvariantGroupMD = LI->getMetadata(LLVMContext::MD_invariant_group); + if (!InvariantGroupMD) + return MemDepResult::getUnknown(); + Value *LoadOperand = LI->getPointerOperand(); // It's is not safe to walk the use list of global value, because function // passes aren't allowed to look outside their functions. if (isa<GlobalValue>(LoadOperand)) return MemDepResult::getUnknown(); - auto *InvariantGroupMD = LI->getMetadata(LLVMContext::MD_invariant_group); - if (!InvariantGroupMD) - return MemDepResult::getUnknown(); - - SmallSet<Value *, 14> Seen; // Queue to process all pointers that are equivalent to load operand. 
- SmallVector<Value *, 8> LoadOperandsQueue; - LoadOperandsQueue.push_back(LoadOperand); - Seen.insert(LoadOperand); + SmallVector<const Value *, 8> LoadOperandsQueue; + SmallSet<const Value *, 14> SeenValues; + auto TryInsertToQueue = [&](Value *V) { + if (SeenValues.insert(V).second) + LoadOperandsQueue.push_back(V); + }; + + TryInsertToQueue(LoadOperand); while (!LoadOperandsQueue.empty()) { - Value *Ptr = LoadOperandsQueue.pop_back_val(); + const Value *Ptr = LoadOperandsQueue.pop_back_val(); + assert(Ptr); if (isa<GlobalValue>(Ptr)) continue; - if (auto *BCI = dyn_cast<BitCastInst>(Ptr)) { - if (Seen.insert(BCI->getOperand(0)).second) { - LoadOperandsQueue.push_back(BCI->getOperand(0)); - } - } - - for (Use &Us : Ptr->uses()) { + // Value comes from bitcast: Ptr = bitcast x. Insert x. + if (auto *BCI = dyn_cast<BitCastInst>(Ptr)) + TryInsertToQueue(BCI->getOperand(0)); + // Gep with zeros is equivalent to bitcast. + // FIXME: we are not sure if some bitcast should be canonicalized to gep 0 + // or gep 0 to bitcast because of SROA, so there are 2 forms. When typeless + // pointers will be upstream then both cases will be gone (and this BFS + // also won't be needed). + if (auto *GEP = dyn_cast<GetElementPtrInst>(Ptr)) + if (GEP->hasAllZeroIndices()) + TryInsertToQueue(GEP->getOperand(0)); + + for (const Use &Us : Ptr->uses()) { auto *U = dyn_cast<Instruction>(Us.getUser()); if (!U || U == LI || !DT.dominates(U, LI)) continue; + // Bitcast or gep with zeros are using Ptr. Add to queue to check it's + // users. U = bitcast Ptr if (auto *BCI = dyn_cast<BitCastInst>(U)) { - if (Seen.insert(BCI).second) { - LoadOperandsQueue.push_back(BCI); - } + TryInsertToQueue(U); continue; } + // U = getelementptr Ptr, 0, 0... + if (auto *GEP = dyn_cast<GetElementPtrInst>(U)) { + if (GEP->hasAllZeroIndices()) { + TryInsertToQueue(U); + continue; + } + } + // If we hit load/store with the same invariant.group metadata (and the + // same pointer operand) we can assume that value pointed by pointer + // operand didn't change. 
Index: test/Transforms/GVN/invariant.group.ll =================================================================== --- test/Transforms/GVN/invariant.group.ll +++ test/Transforms/GVN/invariant.group.ll @@ -319,6 +319,28 @@ ret i8 %d } +; CHECK-LABEL: define void @_Z5testGv() { +define void @_Z5testGv() { + %a = alloca %struct.A, align 8 + %1 = bitcast %struct.A* %a to i8* + %2 = getelementptr inbounds %struct.A, %struct.A* %a, i64 0, i32 0 + store i32 (...)** bitcast (i8** getelementptr inbounds ([3 x i8*], [3 x i8*]* @_ZTV1A, i64 0, i64 2) to i32 (...)**), i32 (...)*** %2, align 8, !invariant.group !0 + call void @_ZN1A3fooEv(%struct.A* nonnull dereferenceable(8) %a) + %3 = load i8, i8* @unknownPtr, align 4 + %4 = icmp eq i8 %3, 0 + br i1 %4, label %_Z1gR1A.exit, label %5 + + %6 = bitcast %struct.A* %a to void (%struct.A*)*** + %7 = load void (%struct.A*)**, void (%struct.A*)*** %6, align 8, !invariant.group !0 + %8 = load void (%struct.A*)*, void (%struct.A*)** %7, align 8 +; CHECK: call void @_ZN1A3fooEv(%struct.A* nonnull %a) + call void %8(%struct.A* nonnull %a) + br label %_Z1gR1A.exit + +_Z1gR1A.exit: ; preds = %0, %5 + ret void +} + declare void @foo(i8*) declare void @bar(i8) declare i8* @getPointer(i8*) Index: test/Transforms/NewGVN/invariant.group.ll =================================================================== --- test/Transforms/NewGVN/invariant.group.ll +++ test/Transforms/NewGVN/invariant.group.ll @@ -320,6 +320,28 @@ ret i8 %d } +; CHECK-LABEL: define void @_Z5testGv() { +define void @_Z5testGv() { + %a = alloca %struct.A, align 8 + %1 = bitcast %struct.A* %a to i8* + %2 = getelementptr inbounds %struct.A, %struct.A* %a, i64 0, i32 0 + store i32 (...)** bitcast (i8** getelementptr inbounds ([3 x i8*], [3 x i8*]* @_ZTV1A, i64 0, i64 2) to i32 (...)**), i32 (...)*** %2, align 8, !invariant.group !0 + call void @_ZN1A3fooEv(%struct.A* nonnull dereferenceable(8) %a) + %3 = load i8, i8* @unknownPtr, align 4 + %4 = icmp eq i8 %3, 0 + br i1 %4, label 
%_Z1gR1A.exit, label %5 + + %6 = bitcast %struct.A* %a to void (%struct.A*)*** + %7 = load void (%struct.A*)**, void (%struct.A*)*** %6, align 8, !invariant.group !0 + %8 = load void (%struct.A*)*, void (%struct.A*)** %7, align 8 +; CHECK: call void @_ZN1A3fooEv(%struct.A* nonnull %a) + call void %8(%struct.A* nonnull %a) + br label %_Z1gR1A.exit + +_Z1gR1A.exit: ; preds = %0, %5 + ret void +} + declare void @foo(i8*) declare void @bar(i8) declare i8* @getPointer(i8*)