diff --git a/mlir/lib/Transforms/LoopFusion.cpp b/mlir/lib/Transforms/LoopFusion.cpp
--- a/mlir/lib/Transforms/LoopFusion.cpp
+++ b/mlir/lib/Transforms/LoopFusion.cpp
@@ -768,11 +768,17 @@
       // could be used by loop nest nodes.
       Node node(nextNodeId++, &op);
       nodes.insert({node.id, node});
-    } else if (op.getNumResults() == 0) {
-      // Create graph node for top-level non-result op, which could be memory
-      // side-effect and modify memrefs used by loop nest nodes.
-      Node node(nextNodeId++, &op);
-      nodes.insert({node.id, node});
+    } else if (auto effectInterface = dyn_cast<MemoryEffectOpInterface>(op)) {
+      // Create graph node for top-level op, which could have a memory write
+      // side effect.
+      SmallVector<MemoryEffects::EffectInstance, 4> effects;
+      effectInterface.getEffects(effects);
+      if (llvm::any_of(effects, [](const MemoryEffects::EffectInstance &it) {
+            return isa<MemoryEffects::Write>(it.getEffect());
+          })) {
+        Node node(nextNodeId++, &op);
+        nodes.insert({node.id, node});
+      }
     }
   }
 