diff --git a/mlir/lib/Dialect/Shape/IR/Shape.cpp b/mlir/lib/Dialect/Shape/IR/Shape.cpp
--- a/mlir/lib/Dialect/Shape/IR/Shape.cpp
+++ b/mlir/lib/Dialect/Shape/IR/Shape.cpp
@@ -399,46 +399,6 @@
     return failure();
   }
 }
-
-// For shapes that were created by some operations, we can obtain partial
-// information on the shapes and sometimes determine if they will be
-// broadcastable with that.
-struct CstrBroadcastablePartialInfo
-    : public OpRewritePattern<CstrBroadcastableOp> {
-  using OpRewritePattern<CstrBroadcastableOp>::OpRewritePattern;
-
-  LogicalResult matchAndRewrite(CstrBroadcastableOp op,
-                                PatternRewriter &rewriter) const override {
-    SmallVector<int64_t, 6> lhsShape, rhsShape;
-    if (failed(getShapeVec(op.lhs(), lhsShape)))
-      return failure();
-    if (failed(getShapeVec(op.rhs(), rhsShape)))
-      return failure();
-    if (!OpTrait::util::staticallyKnownBroadcastable(lhsShape, rhsShape))
-      return failure();
-
-    rewriter.replaceOpWithNewOp<ConstWitnessOp>(op.getOperation(), true);
-    return success();
-  }
-};
-
-// Scalars are always broadcastable.
-struct CstrBroadcastableScalar : public OpRewritePattern<CstrBroadcastableOp> {
-  using OpRewritePattern<CstrBroadcastableOp>::OpRewritePattern;
-
-  LogicalResult matchAndRewrite(CstrBroadcastableOp op,
-                                PatternRewriter &rewriter) const override {
-    SmallVector<int64_t, 6> shape;
-    if (failed(getShapeVec(op.lhs(), shape)) || shape.size() > 0)
-      return failure();
-    if (failed(getShapeVec(op.rhs(), shape)) || shape.size() > 0)
-      return failure();
-
-    rewriter.replaceOpWithNewOp<ConstWitnessOp>(op.getOperation(), true);
-    return success();
-  }
-};
-
 } // namespace
 
 void CstrBroadcastableOp::getCanonicalizationPatterns(
@@ -446,8 +406,7 @@
   // Canonicalization patterns have overlap with the considerations during
   // folding in case additional shape information is inferred at some point that
   // does not result in folding.
-  patterns.insert<CstrBroadcastableEqOps, CstrBroadcastablePartialInfo,
-                  CstrBroadcastableScalar>(context);
+  patterns.insert<CstrBroadcastableEqOps>(context);
 }
 
 OpFoldResult CstrBroadcastableOp::fold(ArrayRef<Attribute> operands) {