diff --git a/llvm/lib/CodeGen/SelectionDAG/DAGCombiner.cpp b/llvm/lib/CodeGen/SelectionDAG/DAGCombiner.cpp
--- a/llvm/lib/CodeGen/SelectionDAG/DAGCombiner.cpp
+++ b/llvm/lib/CodeGen/SelectionDAG/DAGCombiner.cpp
@@ -615,7 +615,7 @@
                           SmallVectorImpl<SDValue> &Aliases);
 
     /// Return true if there is any possibility that the two addresses overlap.
-    bool isAlias(SDNode *Op0, SDNode *Op1) const;
+    bool mayAlias(SDNode *Op0, SDNode *Op1) const;
 
     /// Walk up chain skipping non-aliasing memory nodes, looking for a better
     /// chain (aliasing node.)
@@ -18279,7 +18279,7 @@
     case ISD::LIFETIME_END:
       // We can forward past any lifetime start/end that can be proven not to
       // alias the node.
-      if (!isAlias(Chain.getNode(), N))
+      if (!mayAlias(Chain.getNode(), N))
         Chains.push_back(Chain.getOperand(0));
       break;
     case ISD::STORE: {
@@ -23194,7 +23194,7 @@
 }
 
 /// Return true if there is any possibility that the two addresses overlap.
-bool DAGCombiner::isAlias(SDNode *Op0, SDNode *Op1) const {
+bool DAGCombiner::mayAlias(SDNode *Op0, SDNode *Op1) const {
 
   struct MemUseCharacteristics {
     bool IsVolatile;
@@ -23354,7 +23354,7 @@
       // TODO: Relax aliasing for unordered atomics (see D66309)
       bool IsOpLoad = isa<LoadSDNode>(C.getNode()) &&
                       cast<LSBaseSDNode>(C.getNode())->isSimple();
-      if ((IsLoad && IsOpLoad) || !isAlias(N, C.getNode())) {
+      if ((IsLoad && IsOpLoad) || !mayAlias(N, C.getNode())) {
         // Look further up the chain.
         C = C.getOperand(0);
         return true;
@@ -23372,7 +23372,7 @@
     case ISD::LIFETIME_END: {
       // We can forward past any lifetime start/end that can be proven not to
      // alias the memory access.
-      if (!isAlias(N, C.getNode())) {
+      if (!mayAlias(N, C.getNode())) {
         // Look further up the chain.
         C = C.getOperand(0);
         return true;
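
Note: the doc comment preserved by this rename ("Return true if there is any possibility that the two addresses overlap") states the contract that motivates calling the predicate mayAlias: it is conservative, so callers may only act on a false result, exactly as the if (!mayAlias(...)) sites above do before forwarding past a node. The stand-alone C++ sketch below illustrates that may-alias discipline on a toy access descriptor. All names in it (MemAccess, mayAliasToy, canForwardPast) are hypothetical and not part of the LLVM API; the real DAGCombiner::mayAlias operates on SDNodes and has much more information available (volatility, atomicity, base/offset decomposition, as hinted by MemUseCharacteristics in the diff).

// Hypothetical sketch of the "may alias" contract; not LLVM code.
#include <cstdint>
#include <iostream>
#include <optional>

struct MemAccess {
  int BaseId;                    // Identifies the base object, if known.
  std::optional<int64_t> Offset; // Byte offset from the base, if known.
  std::optional<uint64_t> Size;  // Access size in bytes, if known.
};

// Return true if there is ANY possibility the two accesses overlap.
// The key property is conservatism: whenever something is unknown,
// answer true. Callers may only rely on a definitive false.
static bool mayAliasToy(const MemAccess &A, const MemAccess &B) {
  if (A.BaseId != B.BaseId)
    return false; // Distinct bases (toy assumption: bases never overlap).
  if (!A.Offset || !B.Offset || !A.Size || !B.Size)
    return true;  // Missing information: conservatively assume overlap.
  // Same base: overlap iff the byte ranges intersect.
  int64_t AEnd = *A.Offset + static_cast<int64_t>(*A.Size);
  int64_t BEnd = *B.Offset + static_cast<int64_t>(*B.Size);
  return *A.Offset < BEnd && *B.Offset < AEnd;
}

// Mirrors how the chain walks in the diff use the predicate: a node is
// skipped only when it provably does NOT alias the access being optimized.
static bool canForwardPast(const MemAccess &N, const MemAccess &C) {
  return !mayAliasToy(N, C);
}

int main() {
  MemAccess Store{/*BaseId=*/0, /*Offset=*/0, /*Size=*/4};
  MemAccess Disjoint{0, 8, 4};
  MemAccess Unknown{0, std::nullopt, std::nullopt};

  std::cout << canForwardPast(Store, Disjoint) << '\n'; // 1: provably no alias
  std::cout << canForwardPast(Store, Unknown) << '\n';  // 0: must stop here
}

Returning true whenever the answer is unknown is what keeps the chain-walking transformations safe: a missed skip only costs optimization opportunity, while a wrong skip would reorder dependent memory operations.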