diff --git a/mlir/lib/Dialect/SparseTensor/Transforms/CodegenEnv.h b/mlir/lib/Dialect/SparseTensor/Transforms/CodegenEnv.h
--- a/mlir/lib/Dialect/SparseTensor/Transforms/CodegenEnv.h
+++ b/mlir/lib/Dialect/SparseTensor/Transforms/CodegenEnv.h
@@ -89,7 +89,7 @@
   // LoopEmitter delegates.
   //

-  constexpr TensorLevel makeTensorLevel(TensorId t, Level l) const {
+  TensorLevel makeTensorLevel(TensorId t, Level l) const {
     // Make sure LoopEmitter, GenericOp, and Merger agree on the number of
     // tensors. Merger has one more synthetic tensor for loop invariants.
     assert(loopEmitter.getNumTensors() == linalgOp->getNumOperands() &&
diff --git a/mlir/lib/Dialect/SparseTensor/Transforms/LoopEmitter.h b/mlir/lib/Dialect/SparseTensor/Transforms/LoopEmitter.h
--- a/mlir/lib/Dialect/SparseTensor/Transforms/LoopEmitter.h
+++ b/mlir/lib/Dialect/SparseTensor/Transforms/LoopEmitter.h
@@ -195,7 +195,7 @@
   unsigned getNumTensors() const { return tensors.size(); }

   /// Compresses a TensorId and Level into a TensorLevel.
-  constexpr TensorLevel makeTensorLevel(TensorId t, Level l) const {
+  TensorLevel makeTensorLevel(TensorId t, Level l) const {
     return l * getNumTensors() + t;
   }
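
Note (not part of the patch): the second hunk shows the encoding that makeTensorLevel implements, packing a (TensorId, Level) pair into a single integer as l * numTensors + t. Dropping constexpr appears consistent with the CodegenEnv body asserting against runtime state (loopEmitter.getNumTensors(), linalgOp->getNumOperands()), which cannot be a constant expression in practice. The standalone C++ sketch below illustrates the packing scheme and its inverse; the helper names packTensorLevel/unpackTensorLevel and the type aliases are hypothetical stand-ins, not the in-tree API.

#include <cassert>
#include <cstdint>
#include <iostream>
#include <utility>

// Hypothetical aliases mirroring the identifiers used in the patch.
using TensorId = unsigned;
using Level = unsigned;
using TensorLevel = unsigned;

// Packs (t, l) into one id, as the patched makeTensorLevel does:
// the level selects a "row" of width numTensors, the tensor id the column.
TensorLevel packTensorLevel(TensorId t, Level l, unsigned numTensors) {
  assert(t < numTensors && "tensor id out of range");
  return l * numTensors + t;
}

// Inverse mapping, shown for illustration only (the in-tree helper may differ).
std::pair<TensorId, Level> unpackTensorLevel(TensorLevel tl, unsigned numTensors) {
  return {tl % numTensors, tl / numTensors};
}

int main() {
  const unsigned numTensors = 3;
  TensorLevel tl = packTensorLevel(/*t=*/2, /*l=*/4, numTensors); // 4 * 3 + 2 = 14
  auto [t, l] = unpackTensorLevel(tl, numTensors);
  std::cout << "tl=" << tl << " t=" << t << " l=" << l << "\n";   // tl=14 t=2 l=4
  return 0;
}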