From 175b9af484f483c3423ab2f78db5de7e25b64c31 Mon Sep 17 00:00:00 2001
From: Aart Bik
Date: Tue, 5 Apr 2022 16:56:07 -0700
Subject: [PATCH] [mlir][sparse] avoid reserving dense storage for ptr/idx

This avoids a rather big bug where we were reserving dense space for
the ptr/idx in the first sparse dimension. For example, using CSR for
a 140874 x 140874 matrix with 3977139 nonzeros would reserve the full
19845483876 entries. This revision fixes this for now, but we need to
revisit the reservation heuristic to make this better.

Reviewed By: bixia

Differential Revision: https://reviews.llvm.org/D123166
---
 mlir/lib/ExecutionEngine/SparseTensorUtils.cpp | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/mlir/lib/ExecutionEngine/SparseTensorUtils.cpp b/mlir/lib/ExecutionEngine/SparseTensorUtils.cpp
index 03c5465..fbc8584 100644
--- a/mlir/lib/ExecutionEngine/SparseTensorUtils.cpp
+++ b/mlir/lib/ExecutionEngine/SparseTensorUtils.cpp
@@ -262,12 +262,14 @@ public:
     for (uint64_t r = 0; r < rank; r++)
       rev[perm[r]] = r;
     // Provide hints on capacity of pointers and indices.
-    // TODO: needs fine-tuning based on sparsity
+    // TODO: needs much fine-tuning based on actual sparsity; currently
+    //       we reserve pointer/index space based on all previous dense
+    //       dimensions, which works well up to first sparse dim; but
+    //       we should really use nnz and dense/sparse distribution.
     bool allDense = true;
     uint64_t sz = 1;
     for (uint64_t r = 0; r < rank; r++) {
       assert(sizes[r] > 0 && "Dimension size zero has trivial storage");
-      sz = checkedMul(sz, sizes[r]);
       if (sparsity[r] == DimLevelType::kCompressed) {
         pointers[r].reserve(sz + 1);
         indices[r].reserve(sz);
@@ -280,6 +282,7 @@ public:
       } else {
         assert(sparsity[r] == DimLevelType::kDense &&
                "singleton not yet supported");
+        sz = checkedMul(sz, sizes[r]);
       }
     }
     // Then assign contents from coordinate scheme tensor if provided.
-- 
2.7.4
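
The following is a minimal standalone sketch of the capacity-hint loop before
and after this change, applied to the CSR (dense, compressed) 140874 x 140874
example from the commit message. It is not the actual SparseTensorStorage
constructor; the helper name reserveHint, the multiplyBeforeCheck flag, and the
trimmed-down DimLevelType enum are illustrative only.

// Sketch of the reservation heuristic, assuming simplified names/types.
#include <cstdint>
#include <cstdio>
#include <vector>

enum class DimLevelType { kDense, kCompressed };

// Returns how many index entries the compressed dimension would reserve.
static uint64_t reserveHint(const std::vector<uint64_t> &sizes,
                            const std::vector<DimLevelType> &sparsity,
                            bool multiplyBeforeCheck) {
  uint64_t sz = 1, hint = 0;
  for (uint64_t r = 0; r < sizes.size(); r++) {
    if (multiplyBeforeCheck)           // old behavior: sz includes sizes[r]
      sz *= sizes[r];
    if (sparsity[r] == DimLevelType::kCompressed) {
      hint = sz;                       // indices[r].reserve(sz), pointers sz + 1
      sz = 1;
    } else if (!multiplyBeforeCheck) { // new behavior: only dense dims scale sz
      sz *= sizes[r];
    }
  }
  return hint;
}

int main() {
  std::vector<uint64_t> sizes = {140874, 140874};
  std::vector<DimLevelType> csr = {DimLevelType::kDense,
                                   DimLevelType::kCompressed};
  std::printf("old hint: %llu entries\n",
              (unsigned long long)reserveHint(sizes, csr, /*old=*/true));
  std::printf("new hint: %llu entries\n",
              (unsigned long long)reserveHint(sizes, csr, /*old=*/false));
  // old hint: 19845483876 entries  (the full dense 140874 * 140874 space)
  // new hint: 140874 entries       (one entry per row)
}

As the updated TODO in the patch notes, the new hint is still only a heuristic:
the actual nonzero count in the example is 3977139, so the index vector may
still grow during insertion, but nothing close to the 19845483876 entries the
old code reserved up front.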