Make path range loading explicit
author cdalton <cdalton@nvidia.com>
Mon, 5 Oct 2015 22:56:34 +0000 (15:56 -0700)
committer Commit bot <commit-bot@chromium.org>
Mon, 5 Oct 2015 22:56:34 +0000 (15:56 -0700)
Requires the caller to explicitly preload paths within a range before
calling drawPaths. This allows us to remove the implicit lazy load,
thereby eliminating a redundant check on every redraw of a text blob.

BUG=skia:

Review URL: https://codereview.chromium.org/1382013002

src/gpu/GrPathRange.cpp
src/gpu/GrPathRange.h
src/gpu/GrPathRendering.h
src/gpu/GrStencilAndCoverTextContext.cpp
src/gpu/batches/GrDrawPathBatch.h

index 5e71014..117051d 100644 (file)
@@ -8,9 +8,6 @@
 #include "GrPathRange.h"
 #include "SkPath.h"
 
-enum {
-    kPathsPerGroup = 16 // Paths get tracked in groups of 16 for lazy loading.
-};
 
 GrPathRange::GrPathRange(GrGpu* gpu,
                          PathGenerator* pathGenerator)
@@ -28,51 +25,24 @@ GrPathRange::GrPathRange(GrGpu* gpu,
       fNumPaths(numPaths) {
 }
 
-void GrPathRange::willDrawPaths(const void* indices, PathIndexType indexType, int count) const {
-    if (!fPathGenerator) {
-        return;
-    }
-
+void GrPathRange::loadPathsIfNeeded(const void* indices, PathIndexType indexType, int count) const {
     switch (indexType) {
-        case kU8_PathIndexType: return this->willDrawPaths<uint8_t>(indices, count);
-        case kU16_PathIndexType: return this->willDrawPaths<uint16_t>(indices, count);
-        case kU32_PathIndexType: return this->willDrawPaths<uint32_t>(indices, count);
+        case kU8_PathIndexType: return this->loadPathsIfNeeded<uint8_t>(indices, count);
+        case kU16_PathIndexType: return this->loadPathsIfNeeded<uint16_t>(indices, count);
+        case kU32_PathIndexType: return this->loadPathsIfNeeded<uint32_t>(indices, count);
         default: SkFAIL("Unknown path index type");
     }
 }
 
-template<typename IndexType> void GrPathRange::willDrawPaths(const void* indices, int count) const {
-    SkASSERT(fPathGenerator);
-
-    const IndexType* indexArray = reinterpret_cast<const IndexType*>(indices);
-    bool didLoadPaths = false;
-
-    for (int i = 0; i < count; ++i) {
-        SkASSERT(indexArray[i] < static_cast<uint32_t>(fNumPaths));
-
-        const int groupIndex = indexArray[i] / kPathsPerGroup;
-        const int groupByte = groupIndex / 8;
-        const uint8_t groupBit = 1 << (groupIndex % 8);
-
-        const bool hasPath = SkToBool(fGeneratedPaths[groupByte] & groupBit);
-        if (!hasPath) {
-            // We track which paths are loaded in groups of kPathsPerGroup. To
-            // mark a path as loaded we need to load the entire group.
-            const int groupFirstPath = groupIndex * kPathsPerGroup;
-            const int groupLastPath = SkTMin(groupFirstPath + kPathsPerGroup, fNumPaths) - 1;
+#ifdef SK_DEBUG
 
-            SkPath path;
-            for (int pathIdx = groupFirstPath; pathIdx <= groupLastPath; ++pathIdx) {
-                fPathGenerator->generatePath(pathIdx, &path);
-                this->onInitPath(pathIdx, path);
-            }
-
-            fGeneratedPaths[groupByte] |= groupBit;
-            didLoadPaths = true;
-        }
-    }
-
-    if (didLoadPaths) {
-        this->didChangeGpuMemorySize();
+void GrPathRange::assertPathsLoaded(const void* indices, PathIndexType indexType, int count) const {
+    switch (indexType) {
+        case kU8_PathIndexType: return this->assertPathsLoaded<uint8_t>(indices, count);
+        case kU16_PathIndexType: return this->assertPathsLoaded<uint16_t>(indices, count);
+        case kU32_PathIndexType: return this->assertPathsLoaded<uint32_t>(indices, count);
+        default: SkFAIL("Unknown path index type");
     }
 }
+
+#endif
index a2483c1..23b8beb 100644 (file)
@@ -9,10 +9,10 @@
 #define GrPathRange_DEFINED
 
 #include "GrGpuResource.h"
+#include "SkPath.h"
 #include "SkRefCnt.h"
 #include "SkTArray.h"
 
-class SkPath;
 class SkDescriptor;
 
 /**
@@ -70,7 +70,67 @@ public:
     int getNumPaths() const { return fNumPaths; }
     const PathGenerator* getPathGenerator() const { return fPathGenerator.get(); }
 
+    void loadPathsIfNeeded(const void* indices, PathIndexType, int count) const;
+
+    template<typename IndexType> void loadPathsIfNeeded(const void* indices, int count) const {
+        if (!fPathGenerator) {
+            return;
+        }
+
+        const IndexType* indexArray = reinterpret_cast<const IndexType*>(indices);
+        bool didLoadPaths = false;
+
+        for (int i = 0; i < count; ++i) {
+            SkASSERT(indexArray[i] < static_cast<uint32_t>(fNumPaths));
+
+            const int groupIndex = indexArray[i] / kPathsPerGroup;
+            const int groupByte = groupIndex / 8;
+            const uint8_t groupBit = 1 << (groupIndex % 8);
+
+            const bool hasPath = SkToBool(fGeneratedPaths[groupByte] & groupBit);
+            if (!hasPath) {
+                // We track which paths are loaded in groups of kPathsPerGroup. To
+                // mark a path as loaded we need to load the entire group.
+                const int groupFirstPath = groupIndex * kPathsPerGroup;
+                const int groupLastPath = SkTMin(groupFirstPath + kPathsPerGroup, fNumPaths) - 1;
+
+                SkPath path;
+                for (int pathIdx = groupFirstPath; pathIdx <= groupLastPath; ++pathIdx) {
+                    fPathGenerator->generatePath(pathIdx, &path);
+                    this->onInitPath(pathIdx, path);
+                }
+
+                fGeneratedPaths[groupByte] |= groupBit;
+                didLoadPaths = true;
+            }
+        }
+
+        if (didLoadPaths) {
+            this->didChangeGpuMemorySize();
+        }
+    }
+
 #ifdef SK_DEBUG
+    void assertPathsLoaded(const void* indices, PathIndexType, int count) const;
+
+    template<typename IndexType> void assertPathsLoaded(const void* indices, int count) const {
+        if (!fPathGenerator) {
+            return;
+        }
+
+        const IndexType* indexArray = reinterpret_cast<const IndexType*>(indices);
+
+        for (int i = 0; i < count; ++i) {
+            SkASSERT(indexArray[i] < static_cast<uint32_t>(fNumPaths));
+
+            const int groupIndex = indexArray[i] / kPathsPerGroup;
+            const int groupByte = groupIndex / 8;
+            const uint8_t groupBit = 1 << (groupIndex % 8);
+
+            SkASSERT(fGeneratedPaths[groupByte] & groupBit);
+        }
+    }
+
     virtual bool isEqualTo(const SkDescriptor& desc) const {
         return nullptr != fPathGenerator.get() && fPathGenerator->isEqualTo(desc);
     }
@@ -82,10 +142,9 @@ protected:
     virtual void onInitPath(int index, const SkPath&) const = 0;
 
 private:
-    // Notify when paths will be drawn in case this is a lazy-loaded path range.
-    friend class GrPathRendering;
-    void willDrawPaths(const void* indices, PathIndexType, int count) const;
-    template<typename IndexType> void willDrawPaths(const void* indices, int count) const;
+    enum {
+        kPathsPerGroup = 16 // Paths get tracked in groups of 16 for lazy loading.
+    };
 
     mutable SkAutoTUnref<PathGenerator> fPathGenerator;
     mutable SkTArray<uint8_t, true /*MEM_COPY*/> fGeneratedPaths;
index 47c9a46..a2e9c02 100644 (file)
@@ -179,7 +179,9 @@ public:
         if (GrXferBarrierType barrierType = args.fPipeline->xferBarrierType(*fGpu->caps())) {
             fGpu->xferBarrier(args.fPipeline->getRenderTarget(), barrierType);
         }
-        pathRange->willDrawPaths(indices, indexType, count);
+#ifdef SK_DEBUG
+        pathRange->assertPathsLoaded(indices, indexType, count);
+#endif
         this->onDrawPaths(args, pathRange, indices, indexType, transformValues, transformType,
                           count);
     }
index c3c49f9..0dc9029 100644 (file)
@@ -378,6 +378,8 @@ void GrStencilAndCoverTextContext::TextRun::setText(const char text[], size_t by
         fy += SkFixedMul(glyph.fAdvanceY, fixedSizeRatio);
     }
 
+    fDraw->loadGlyphPathsIfNeeded();
+
     fFallbackTextBlob.reset(fallback.buildIfInitialized());
 }
 
@@ -416,6 +418,8 @@ void GrStencilAndCoverTextContext::TextRun::setPosText(const char text[], size_t
         pos += scalarsPerPosition;
     }
 
+    fDraw->loadGlyphPathsIfNeeded();
+
     fFallbackTextBlob.reset(fallback.buildIfInitialized());
 }
 
index bb76abb..91e0e41 100644 (file)
@@ -115,6 +115,10 @@ public:
 
     const GrPathRange* range() const { return fPathRange.get(); }
 
+    void loadGlyphPathsIfNeeded() {
+        fPathRange.get()->loadPathsIfNeeded<uint16_t>(fIndices.begin(), fIndices.count());
+    }
+
     static bool CanMerge(const GrPathRangeDraw& a, const GrPathRangeDraw& b) {
         return a.transformType() == b.transformType() && a.range() == b.range();
     }