/*
 * Copyright 2014 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
10 #include "GrLayerCache.h"
11 #include "GrSurfacePriv.h"
13 DECLARE_SKMESSAGEBUS_MESSAGE(GrPictureDeletedMessage);
16 void GrCachedLayer::validate(const GrTexture* backingTexture) const {
17 SkASSERT(SK_InvalidGenID != fKey.pictureID());
18 SkASSERT(fKey.start() >= 0);
21 // If the layer is in some texture then it must occupy some rectangle
22 SkASSERT(!fRect.isEmpty());
23 if (!this->isAtlased()) {
24 // If it isn't atlased then the rectangle should start at the origin
25 SkASSERT(0.0f == fRect.fLeft && 0.0f == fRect.fTop);
28 SkASSERT(fRect.isEmpty());
29 SkASSERT(NULL == fPlot);
30 SkASSERT(!fLocked); // layers without a texture cannot be locked
34 // If a layer has a plot (i.e., is atlased) then it must point to
35 // the backing texture. Additionally, its rect should be non-empty.
36 SkASSERT(fTexture && backingTexture == fTexture);
37 SkASSERT(!fRect.isEmpty());
41 // If a layer is locked it must have a texture (though it need not be
42 // the atlas-backing texture) and occupy some space.
44 SkASSERT(!fRect.isEmpty());
47 // Unfortunately there is a brief time where a layer can be locked
48 // but not used, so we can only check the "used implies locked"
57 class GrAutoValidateLayer : ::SkNoncopyable {
59 GrAutoValidateLayer(GrTexture* backingTexture, const GrCachedLayer* layer)
60 : fBackingTexture(backingTexture)
63 fLayer->validate(backingTexture);
66 ~GrAutoValidateLayer() {
68 fLayer->validate(fBackingTexture);
71 void setBackingTexture(GrTexture* backingTexture) {
72 SkASSERT(NULL == fBackingTexture || fBackingTexture == backingTexture);
73 fBackingTexture = backingTexture;
77 const GrTexture* fBackingTexture;
78 const GrCachedLayer* fLayer;
82 GrLayerCache::GrLayerCache(GrContext* context)
84 memset(fPlotLocks, 0, sizeof(fPlotLocks));
87 GrLayerCache::~GrLayerCache() {
89 SkTDynamicHash<GrCachedLayer, GrCachedLayer::Key>::Iter iter(&fLayerHash);
90 for (; !iter.done(); ++iter) {
91 GrCachedLayer* layer = &(*iter);
92 SkASSERT(0 == layer->uses());
97 SkASSERT(0 == fPictureHash.count());
99 // The atlas only lets go of its texture when the atlas is deleted.
103 void GrLayerCache::initAtlas() {
104 SkASSERT(NULL == fAtlas.get());
105 GR_STATIC_ASSERT(kNumPlotsX*kNumPlotsX == GrPictureInfo::kNumPlots);
107 SkISize textureSize = SkISize::Make(kAtlasTextureWidth, kAtlasTextureHeight);
108 fAtlas.reset(SkNEW_ARGS(GrAtlas, (fContext->getGpu(), kSkia8888_GrPixelConfig,
109 kRenderTarget_GrSurfaceFlag,
110 textureSize, kNumPlotsX, kNumPlotsY, false)));
113 void GrLayerCache::freeAll() {
115 SkTDynamicHash<GrCachedLayer, GrCachedLayer::Key>::Iter iter(&fLayerHash);
116 for (; !iter.done(); ++iter) {
117 GrCachedLayer* layer = &(*iter);
123 // The atlas only lets go of its texture when the atlas is deleted.
127 GrCachedLayer* GrLayerCache::createLayer(uint32_t pictureID,
129 const SkIRect& bounds,
131 const SkPaint* paint) {
132 SkASSERT(pictureID != SK_InvalidGenID && start >= 0 && stop > 0);
134 GrCachedLayer* layer = SkNEW_ARGS(GrCachedLayer, (pictureID, start, stop, bounds, ctm, paint));
135 fLayerHash.add(layer);
139 GrCachedLayer* GrLayerCache::findLayer(uint32_t pictureID,
141 const SkIRect& bounds,
142 const SkMatrix& ctm) {
143 SkASSERT(pictureID != SK_InvalidGenID && start > 0);
144 return fLayerHash.find(GrCachedLayer::Key(pictureID, start, bounds, ctm));
147 GrCachedLayer* GrLayerCache::findLayerOrCreate(uint32_t pictureID,
149 const SkIRect& bounds,
151 const SkPaint* paint) {
152 SkASSERT(pictureID != SK_InvalidGenID && start >= 0 && stop > 0);
153 GrCachedLayer* layer = fLayerHash.find(GrCachedLayer::Key(pictureID, start, bounds, ctm));
155 layer = this->createLayer(pictureID, start, stop, bounds, ctm, paint);
161 bool GrLayerCache::tryToAtlas(GrCachedLayer* layer,
162 const GrSurfaceDesc& desc,
163 bool* needsRendering) {
164 SkDEBUGCODE(GrAutoValidateLayer avl(fAtlas ? fAtlas->getTexture() : NULL, layer);)
166 SkASSERT(PlausiblyAtlasable(desc.fWidth, desc.fHeight));
168 if (layer->locked()) {
169 // This layer is already locked
171 SkASSERT(layer->isAtlased());
172 SkASSERT(layer->rect().width() == desc.fWidth);
173 SkASSERT(layer->rect().height() == desc.fHeight);
174 *needsRendering = false;
178 if (layer->isAtlased()) {
180 // Hooray it is still in the atlas - make sure it stays there
181 layer->setLocked(true);
182 this->incPlotLock(layer->plot()->id());
183 *needsRendering = false;
192 // Not in the atlas - will it fit?
193 GrPictureInfo* pictInfo = fPictureHash.find(layer->pictureID());
194 if (NULL == pictInfo) {
195 pictInfo = SkNEW_ARGS(GrPictureInfo, (layer->pictureID()));
196 fPictureHash.add(pictInfo);
200 for (int i = 0; i < 2; ++i) { // extra pass in case we fail to add but are able to purge
201 GrPlot* plot = fAtlas->addToAtlas(&pictInfo->fPlotUsage,
202 desc.fWidth, desc.fHeight,
204 // addToAtlas can allocate the backing texture
205 SkDEBUGCODE(avl.setBackingTexture(fAtlas->getTexture()));
207 #if !GR_CACHE_HOISTED_LAYERS
208 pictInfo->incPlotUsage(plot->id());
210 // The layer was successfully added to the atlas
211 GrIRect16 bounds = GrIRect16::MakeXYWH(loc.fX, loc.fY,
212 SkToS16(desc.fWidth),
213 SkToS16(desc.fHeight));
214 layer->setTexture(fAtlas->getTexture(), bounds);
215 layer->setPlot(plot);
216 layer->setLocked(true);
217 this->incPlotLock(layer->plot()->id());
218 *needsRendering = true;
222 // The layer was rejected by the atlas (even though we know it is
223 // plausibly atlas-able). See if a plot can be purged and try again.
224 if (!this->purgePlot()) {
225 break; // We weren't able to purge any plots
233 bool GrLayerCache::lock(GrCachedLayer* layer, const GrSurfaceDesc& desc, bool* needsRendering) {
234 if (layer->locked()) {
235 // This layer is already locked
236 *needsRendering = false;
240 SkAutoTUnref<GrTexture> tex(
241 fContext->refScratchTexture(desc, GrContext::kApprox_ScratchTexMatch));
247 layer->setTexture(tex, GrIRect16::MakeWH(SkToS16(desc.fWidth), SkToS16(desc.fHeight)));
248 layer->setLocked(true);
249 *needsRendering = true;
253 void GrLayerCache::unlock(GrCachedLayer* layer) {
254 SkDEBUGCODE(GrAutoValidateLayer avl(fAtlas ? fAtlas->getTexture() : NULL, layer);)
256 if (NULL == layer || !layer->locked()) {
257 // invalid or not locked
261 if (layer->isAtlased()) {
262 const int plotID = layer->plot()->id();
264 this->decPlotLock(plotID);
265 // At this point we could aggressively clear out un-locked plots but
266 // by delaying we may be able to reuse some of the atlased layers later.
267 #if !GR_CACHE_HOISTED_LAYERS
268 // This testing code aggressively removes the atlased layers. This
269 // can be used to separate the performance contribution of less
270 // render target pingponging from that due to the re-use of cached layers
271 GrPictureInfo* pictInfo = fPictureHash.find(layer->pictureID());
274 pictInfo->decPlotUsage(plotID);
276 if (0 == pictInfo->plotUsage(plotID)) {
277 GrAtlas::RemovePlot(&pictInfo->fPlotUsage, layer->plot());
279 if (pictInfo->fPlotUsage.isEmpty()) {
280 fPictureHash.remove(pictInfo->fPictureID);
285 layer->setPlot(NULL);
286 layer->setTexture(NULL, GrIRect16::MakeEmpty());
290 layer->setTexture(NULL, GrIRect16::MakeEmpty());
293 layer->setLocked(false);
297 void GrLayerCache::validate() const {
298 int plotLocks[kNumPlotsX * kNumPlotsY];
299 memset(plotLocks, 0, sizeof(plotLocks));
301 SkTDynamicHash<GrCachedLayer, GrCachedLayer::Key>::ConstIter iter(&fLayerHash);
302 for (; !iter.done(); ++iter) {
303 const GrCachedLayer* layer = &(*iter);
305 layer->validate(fAtlas.get() ? fAtlas->getTexture() : NULL);
307 const GrPictureInfo* pictInfo = fPictureHash.find(layer->pictureID());
309 // If there is no picture info for this picture then all of its
310 // layers should be non-atlased.
311 SkASSERT(!layer->isAtlased());
316 SkASSERT(pictInfo->fPictureID == layer->pictureID());
318 SkASSERT(pictInfo->fPlotUsage.contains(layer->plot()));
319 #if !GR_CACHE_HOISTED_LAYERS
320 SkASSERT(pictInfo->plotUsage(layer->plot()->id()) > 0);
323 if (layer->locked()) {
324 plotLocks[layer->plot()->id()]++;
329 for (int i = 0; i < kNumPlotsX*kNumPlotsY; ++i) {
330 SkASSERT(plotLocks[i] == fPlotLocks[i]);
334 class GrAutoValidateCache : ::SkNoncopyable {
336 explicit GrAutoValidateCache(GrLayerCache* cache)
340 ~GrAutoValidateCache() {
344 GrLayerCache* fCache;
348 void GrLayerCache::purge(uint32_t pictureID) {
350 SkDEBUGCODE(GrAutoValidateCache avc(this);)
352 // We need to find all the layers associated with 'picture' and remove them.
353 SkTDArray<GrCachedLayer*> toBeRemoved;
355 SkTDynamicHash<GrCachedLayer, GrCachedLayer::Key>::Iter iter(&fLayerHash);
356 for (; !iter.done(); ++iter) {
357 if (pictureID == (*iter).pictureID()) {
358 *toBeRemoved.append() = &(*iter);
362 for (int i = 0; i < toBeRemoved.count(); ++i) {
363 SkASSERT(0 == toBeRemoved[i]->uses());
364 this->unlock(toBeRemoved[i]);
365 fLayerHash.remove(GrCachedLayer::GetKey(*toBeRemoved[i]));
366 SkDELETE(toBeRemoved[i]);
369 GrPictureInfo* pictInfo = fPictureHash.find(pictureID);
371 fPictureHash.remove(pictureID);
376 bool GrLayerCache::purgePlot() {
377 SkDEBUGCODE(GrAutoValidateCache avc(this);)
380 GrAtlas::PlotIter iter;
382 for (plot = fAtlas->iterInit(&iter, GrAtlas::kLRUFirst_IterOrder);
384 plot = iter.prev()) {
385 if (fPlotLocks[plot->id()] > 0) {
389 this->purgePlot(plot);
396 void GrLayerCache::purgePlot(GrPlot* plot) {
397 SkASSERT(0 == fPlotLocks[plot->id()]);
399 // We need to find all the layers in 'plot' and remove them.
400 SkTDArray<GrCachedLayer*> toBeRemoved;
402 SkTDynamicHash<GrCachedLayer, GrCachedLayer::Key>::Iter iter(&fLayerHash);
403 for (; !iter.done(); ++iter) {
404 if (plot == (*iter).plot()) {
405 *toBeRemoved.append() = &(*iter);
409 for (int i = 0; i < toBeRemoved.count(); ++i) {
410 SkASSERT(0 == toBeRemoved[i]->uses());
411 SkASSERT(!toBeRemoved[i]->locked());
413 uint32_t pictureIDToRemove = toBeRemoved[i]->pictureID();
415 // Aggressively remove layers and, if it becomes totally uncached, delete the picture info
416 fLayerHash.remove(GrCachedLayer::GetKey(*toBeRemoved[i]));
417 SkDELETE(toBeRemoved[i]);
419 GrPictureInfo* pictInfo = fPictureHash.find(pictureIDToRemove);
421 #if !GR_CACHE_HOISTED_LAYERS
422 SkASSERT(0 == pictInfo->plotUsage(plot->id()));
424 GrAtlas::RemovePlot(&pictInfo->fPlotUsage, plot);
426 if (pictInfo->fPlotUsage.isEmpty()) {
427 fPictureHash.remove(pictInfo->fPictureID);
436 #if !GR_CACHE_HOISTED_LAYERS
437 void GrLayerCache::purgeAll() {
442 GrAtlas::PlotIter iter;
444 for (plot = fAtlas->iterInit(&iter, GrAtlas::kLRUFirst_IterOrder);
446 plot = iter.prev()) {
447 SkASSERT(0 == fPlotLocks[plot->id()]);
449 this->purgePlot(plot);
452 SkASSERT(0 == fPictureHash.count());
454 fContext->discardRenderTarget(fAtlas->getTexture()->asRenderTarget());
458 class GrPictureDeletionListener : public SkPicture::DeletionListener {
459 virtual void onDeletion(uint32_t pictureID) SK_OVERRIDE{
460 const GrPictureDeletedMessage message = { pictureID };
461 SkMessageBus<GrPictureDeletedMessage>::Post(message);
465 void GrLayerCache::trackPicture(const SkPicture* picture) {
466 if (NULL == fDeletionListener) {
467 fDeletionListener.reset(SkNEW(GrPictureDeletionListener));
470 picture->addDeletionListener(fDeletionListener);
473 void GrLayerCache::processDeletedPictures() {
474 SkTDArray<GrPictureDeletedMessage> deletedPictures;
475 fPictDeletionInbox.poll(&deletedPictures);
477 for (int i = 0; i < deletedPictures.count(); i++) {
478 this->purge(deletedPictures[i].pictureID);
483 void GrLayerCache::writeLayersToDisk(const SkString& dirName) {
486 GrTexture* atlasTexture = fAtlas->getTexture();
487 if (NULL != atlasTexture) {
488 SkString fileName(dirName);
489 fileName.append("\\atlas.png");
491 atlasTexture->surfacePriv().savePixels(fileName.c_str());
495 SkTDynamicHash<GrCachedLayer, GrCachedLayer::Key>::Iter iter(&fLayerHash);
496 for (; !iter.done(); ++iter) {
497 GrCachedLayer* layer = &(*iter);
499 if (layer->isAtlased() || !layer->texture()) {
503 SkString fileName(dirName);
504 fileName.appendf("\\%d-%d.png", layer->fKey.pictureID(), layer->fKey.start());
506 layer->texture()->surfacePriv().savePixels(fileName.c_str());