/*
 * Copyright 2013 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
8 #include "SkChecksum.h"
9 #include "SkScaledImageCache.h"
11 #include "SkPixelRef.h"
14 // This can be defined by the caller's build system
15 //#define SK_USE_DISCARDABLE_SCALEDIMAGECACHE
// Cap on the number of entries when the cache is backed by discardable
// memory (count-based; byte budgeting is left to the discardable allocator).
17 #ifndef SK_DISCARDABLEMEMORY_SCALEDIMAGECACHE_COUNT_LIMIT
18 # define SK_DISCARDABLEMEMORY_SCALEDIMAGECACHE_COUNT_LIMIT 1024
// Default total-byte budget (2MB) for the malloc-backed cache; overridable
// at build time.
21 #ifndef SK_DEFAULT_IMAGE_CACHE_LIMIT
22 #define SK_DEFAULT_IMAGE_CACHE_LIMIT (2 * 1024 * 1024)
25 static inline SkScaledImageCache::ID* rec_to_id(SkScaledImageCache::Rec* rec) {
26 return reinterpret_cast<SkScaledImageCache::ID*>(rec);
29 static inline SkScaledImageCache::Rec* id_to_rec(SkScaledImageCache::ID* id) {
30 return reinterpret_cast<SkScaledImageCache::Rec*>(id);
// Cache key: generation id + scale factors + subset bounds, hashed once at
// construction. NOTE(review): Murmur3(&fGenID, 28) assumes the members from
// fGenID through fBounds are laid out as 28 contiguous, padding-free bytes --
// confirm the (elided) member declarations preserve that layout.
33 struct SkScaledImageCache::Key {
42 fHash = SkChecksum::Murmur3(&fGenID, 28);
// Total ordering: lexicographic compare of the 7 uint32 words starting at
// fGenID (the hash is deliberately excluded from the ordering).
45 bool operator<(const Key& other) const {
46 const uint32_t* a = &fGenID;
47 const uint32_t* b = &other.fGenID;
48 for (int i = 0; i < 7; ++i) {
// Equality compares 8 words starting at fHash, so the precomputed hash acts
// as a cheap fast-reject before the payload words.
59 bool operator==(const Key& other) const {
60 const uint32_t* a = &fHash;
61 const uint32_t* b = &other.fHash;
62 for (int i = 0; i < 8; ++i) {
// One cache entry: a node in the LRU list holding either a bitmap or a
// mipmap (never both) under a single Key.
77 struct SkScaledImageCache::Rec {
78 Rec(const Key& key, const SkBitmap& bm) : fKey(key), fBitmap(bm) {
83 Rec(const Key& key, const SkMipMap* mip) : fKey(key) {
// Static adapters required by the SkTDynamicHash template.
93 static const Key& GetKey(const Rec& rec) { return rec.fKey; }
94 static uint32_t Hash(const Key& key) { return key.fHash; }
// Budget accounting: the size of whichever payload this Rec owns.
96 size_t bytesUsed() const {
97 return fMip ? fMip->getSize() : fBitmap.getSize();
103 // this guy wants to be 64bit aligned
108 // we use either fBitmap or fMip, but not both
110 const SkMipMap* fMip;
113 #include "SkTDynamicHash.h"
// Thin concrete subclass so the cache can refer to the hash by a short name
// (and by pointer) without spelling out the template everywhere.
115 class SkScaledImageCache::Hash :
116 public SkTDynamicHash<SkScaledImageCache::Rec, SkScaledImageCache::Key> {};
119 ///////////////////////////////////////////////////////////////////////////////
121 // experimental hash to speed things up
124 #if !defined(USE_HASH)
// Linear-search fallback used when USE_HASH is not defined: walk the LRU
// list from 'head' until a Rec with a matching key (or the end) is found.
125 static inline SkScaledImageCache::Rec* find_rec_in_list(
126 SkScaledImageCache::Rec* head, const Key & key) {
127 SkScaledImageCache::Rec* rec = head;
128 while ((rec != NULL) && (rec->fKey != key)) {
// Shared by both constructors: zero all bookkeeping fields (the elided lines
// presumably clear the list head/tail, totals, and limits as well -- confirm).
135 void SkScaledImageCache::init() {
145 fSingleAllocationByteLimit = 0;
148 // One of these should be explicitly set by the caller after we return.
150 fDiscardableFactory = NULL;
153 #include "SkDiscardableMemory.h"
// Pixel ref backed by a single SkDiscardableMemory. The memory arrives
// already locked ("one shot"); after the first unlock the backing store may
// be purged, in which case later locks can fail.
155 class SkOneShotDiscardablePixelRef : public SkPixelRef {
157 SK_DECLARE_INST_COUNT(SkOneShotDiscardablePixelRef)
158 // Ownership of the discardable memory is transferred to the pixelref.
159 SkOneShotDiscardablePixelRef(const SkImageInfo&, SkDiscardableMemory*, size_t rowBytes);
160 ~SkOneShotDiscardablePixelRef();
163 virtual bool onNewLockPixels(LockRec*) SK_OVERRIDE;
164 virtual void onUnlockPixels() SK_OVERRIDE;
165 virtual size_t getAllocatedSizeInBytes() const SK_OVERRIDE;
168 SkDiscardableMemory* fDM;
172 typedef SkPixelRef INHERITED;
// Takes ownership of 'dm', which must already be locked -- asserted below by
// checking that its data pointer is available.
175 SkOneShotDiscardablePixelRef::SkOneShotDiscardablePixelRef(const SkImageInfo& info,
176 SkDiscardableMemory* dm,
182 SkASSERT(dm->data());
// Destructor -- presumably deletes the owned fDM (body elided here; confirm).
186 SkOneShotDiscardablePixelRef::~SkOneShotDiscardablePixelRef() {
// Re-lock the discardable memory for pixel access. Handles the very first
// lock (memory is born locked) and the case where the backing store was
// purged -- or even deleted by a prior unlock -- in the meantime.
190 bool SkOneShotDiscardablePixelRef::onNewLockPixels(LockRec* rec) {
192 // we're already locked
193 SkASSERT(fDM->data());
198 // A previous call to onUnlock may have deleted our DM, so check for that
204 // since it failed, we delete it now, to free-up the resource
// Success: publish the pixel address, (absent) color table, and row bytes.
211 rec->fPixels = fDM->data();
212 rec->fColorTable = NULL;
213 rec->fRowBytes = fRB;
// By the time we are unlocked, the initial born-locked state must already
// have been consumed by onNewLockPixels (fFirstTime cleared).
217 void SkOneShotDiscardablePixelRef::onUnlockPixels() {
218 SkASSERT(!fFirstTime);
222 size_t SkOneShotDiscardablePixelRef::getAllocatedSizeInBytes() const {
223 return this->info().getSafeSize(fRB);
// SkBitmap::Allocator that satisfies pixel allocations from a caller-supplied
// discardable-memory factory (used by the discardable flavor of the cache).
226 class SkScaledImageCacheDiscardableAllocator : public SkBitmap::Allocator {
228 SkScaledImageCacheDiscardableAllocator(
229 SkScaledImageCache::DiscardableFactory factory) {
234 virtual bool allocPixelRef(SkBitmap*, SkColorTable*) SK_OVERRIDE;
237 SkScaledImageCache::DiscardableFactory fFactory;
// Allocate the bitmap's pixels from the discardable factory. Fails for
// zero-sized or size_t-overflowing allocations, factory failure, and for
// color types other than N32.
240 bool SkScaledImageCacheDiscardableAllocator::allocPixelRef(SkBitmap* bitmap,
241 SkColorTable* ctable) {
242 size_t size = bitmap->getSize();
// Guard against 64-bit -> size_t truncation: the true size must fit.
243 uint64_t size64 = bitmap->computeSize64();
244 if (0 == size || size64 > (uint64_t)size) {
248 SkDiscardableMemory* dm = fFactory(size);
253 // can we relax this?
254 if (kN32_SkColorType != bitmap->colorType()) {
// Hand the (locked) discardable memory to a one-shot pixel ref; after the
// unref() the bitmap holds the only reference.
258 SkImageInfo info = bitmap->info();
259 bitmap->setPixelRef(SkNEW_ARGS(SkOneShotDiscardablePixelRef,
260 (info, dm, bitmap->rowBytes())))->unref();
261 bitmap->lockPixels();
262 return bitmap->readyToDraw();
// Discardable-memory flavor: entries are count-limited (see purgeAsNeeded),
// and the same factory backs the bitmap allocator handed out via allocator().
265 SkScaledImageCache::SkScaledImageCache(DiscardableFactory factory) {
267 fDiscardableFactory = factory;
269 fAllocator = SkNEW_ARGS(SkScaledImageCacheDiscardableAllocator, (factory));
// Malloc-backed flavor: entries are limited by total bytes only.
272 SkScaledImageCache::SkScaledImageCache(size_t byteLimit) {
274 fTotalByteLimit = byteLimit;
// Drop the allocator and (in elided lines) walk the Rec list deleting each
// entry; 'next' is captured before the current Rec is destroyed.
277 SkScaledImageCache::~SkScaledImageCache() {
278 SkSafeUnref(fAllocator);
282 Rec* next = rec->fNext;
289 ////////////////////////////////////////////////////////////////////////////////
292 SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(uint32_t genID,
295 const SkIRect& bounds) {
296 const Key key(genID, scaleX, scaleY, bounds);
297 return this->findAndLock(key);
301 This private method is the fully general record finder. All other
302 record finders should call this function or the one above. */
// Empty bounds are never stored, so reject them up front. Lookup uses the
// dynamic hash when USE_HASH is defined, else the linear list walk. On a hit
// the Rec is moved to the front of the LRU list and its lock count bumped.
303 SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(const SkScaledImageCache::Key& key) {
304 if (key.fBounds.isEmpty()) {
308 Rec* rec = fHash->find(key);
310 Rec* rec = find_rec_in_list(fHead, key);
313 this->moveToHead(rec); // for our LRU
314 rec->fLockCount += 1;
320 This function finds the bounds of the bitmap *within its pixelRef*.
321 If the bitmap lacks a pixelRef, it will return an empty rect, since
322 that doesn't make sense. This may be a useful enough function that
323 it should be somewhere else (in SkBitmap?). */
324 static SkIRect get_bounds_from_bitmap(const SkBitmap& bm) {
325 if (!(bm.pixelRef())) {
326 return SkIRect::MakeEmpty();
328 SkIPoint origin = bm.pixelRefOrigin();
329 return SkIRect::MakeXYWH(origin.fX, origin.fY, bm.width(), bm.height());
333 SkScaledImageCache::ID* SkScaledImageCache::findAndLock(uint32_t genID,
337 Rec* rec = this->findAndLock(genID, SK_Scalar1, SK_Scalar1,
338 SkIRect::MakeWH(width, height));
340 SkASSERT(NULL == rec->fMip);
341 SkASSERT(rec->fBitmap.pixelRef());
342 *bitmap = rec->fBitmap;
344 return rec_to_id(rec);
347 SkScaledImageCache::ID* SkScaledImageCache::findAndLock(const SkBitmap& orig,
351 if (0 == scaleX || 0 == scaleY) {
352 // degenerate, and the key we use for mipmaps
355 Rec* rec = this->findAndLock(orig.getGenerationID(), scaleX,
356 scaleY, get_bounds_from_bitmap(orig));
358 SkASSERT(NULL == rec->fMip);
359 SkASSERT(rec->fBitmap.pixelRef());
360 *scaled = rec->fBitmap;
362 return rec_to_id(rec);
365 SkScaledImageCache::ID* SkScaledImageCache::findAndLockMip(const SkBitmap& orig,
366 SkMipMap const ** mip) {
367 Rec* rec = this->findAndLock(orig.getGenerationID(), 0, 0,
368 get_bounds_from_bitmap(orig));
371 SkASSERT(NULL == rec->fBitmap.pixelRef());
374 return rec_to_id(rec);
378 ////////////////////////////////////////////////////////////////////////////////
380 This private method is the fully general record adder. All other
381 record adders should call this function. */
382 SkScaledImageCache::ID* SkScaledImageCache::addAndLock(SkScaledImageCache::Rec* rec) {
384 // See if we already have this key (racy inserts, etc.)
385 Rec* existing = this->findAndLock(rec->fKey);
386 if (NULL != existing) {
387 // Since we already have a matching entry, just delete the new one and return.
388 // Call sites cannot assume the passed in object will live past this call.
389 existing->fBitmap = rec->fBitmap;
// NOTE(review): per the comment above, 'rec' must be deleted on this path;
// the delete is not visible here -- confirm it was not lost.
391 return rec_to_id(existing);
// New entry: link at the head of the LRU list, born holding one lock.
394 this->addToHead(rec);
395 SkASSERT(1 == rec->fLockCount);
400 // We may (now) be overbudget, so see if we need to purge something.
401 this->purgeAsNeeded();
402 return rec_to_id(rec);
405 SkScaledImageCache::ID* SkScaledImageCache::addAndLock(uint32_t genID,
408 const SkBitmap& bitmap) {
409 Key key(genID, SK_Scalar1, SK_Scalar1, SkIRect::MakeWH(width, height));
410 Rec* rec = SkNEW_ARGS(Rec, (key, bitmap));
411 return this->addAndLock(rec);
414 SkScaledImageCache::ID* SkScaledImageCache::addAndLock(const SkBitmap& orig,
417 const SkBitmap& scaled) {
418 if (0 == scaleX || 0 == scaleY) {
419 // degenerate, and the key we use for mipmaps
422 SkIRect bounds = get_bounds_from_bitmap(orig);
423 if (bounds.isEmpty()) {
426 Key key(orig.getGenerationID(), scaleX, scaleY, bounds);
427 Rec* rec = SkNEW_ARGS(Rec, (key, scaled));
428 return this->addAndLock(rec);
431 SkScaledImageCache::ID* SkScaledImageCache::addAndLockMip(const SkBitmap& orig,
432 const SkMipMap* mip) {
433 SkIRect bounds = get_bounds_from_bitmap(orig);
434 if (bounds.isEmpty()) {
437 Key key(orig.getGenerationID(), 0, 0, bounds);
438 Rec* rec = SkNEW_ARGS(Rec, (key, mip));
439 return this->addAndLock(rec);
// Release one lock on the entry behind 'id'. A fully unlocked entry becomes
// eligible for eviction, so the budget is re-checked.
442 void SkScaledImageCache::unlock(SkScaledImageCache::ID* id) {
// (Presumably a debug-only scan verifying 'id' is actually in our list --
// confirm against the elided surrounding lines.)
449 while (rec != NULL) {
450 if (rec == id_to_rec(id)) {
459 Rec* rec = id_to_rec(id);
460 SkASSERT(rec->fLockCount > 0);
461 rec->fLockCount -= 1;
463 // we may have been over-budget, but now have released something, so check
464 // if we should purge.
465 if (0 == rec->fLockCount) {
466 this->purgeAsNeeded();
// Evict unlocked entries from the LRU tail until the cache fits its budget.
// Discardable caches cap the entry *count* (bytes are managed by the
// discardable allocator itself); malloc caches cap total *bytes*.
470 void SkScaledImageCache::purgeAsNeeded() {
474 if (fDiscardableFactory) {
475 countLimit = SK_DISCARDABLEMEMORY_SCALEDIMAGECACHE_COUNT_LIMIT;
476 byteLimit = SK_MaxU32; // no limit based on bytes
478 countLimit = SK_MaxS32; // no limit based on count
479 byteLimit = fTotalByteLimit;
482 size_t bytesUsed = fTotalBytesUsed;
483 int countUsed = fCount;
// Stop as soon as both budgets are satisfied.
487 if (bytesUsed < byteLimit && countUsed < countLimit) {
// Capture the neighbor first: the current Rec may be detached/deleted below.
491 Rec* prev = rec->fPrev;
// Locked entries are pinned by callers and must never be evicted.
492 if (0 == rec->fLockCount) {
493 size_t used = rec->bytesUsed();
494 SkASSERT(used <= bytesUsed);
497 fHash->remove(rec->fKey);
// Publish the recomputed totals back into the cache's bookkeeping.
508 fTotalBytesUsed = bytesUsed;
512 size_t SkScaledImageCache::setTotalByteLimit(size_t newLimit) {
513 size_t prevLimit = fTotalByteLimit;
514 fTotalByteLimit = newLimit;
515 if (newLimit < prevLimit) {
516 this->purgeAsNeeded();
521 ///////////////////////////////////////////////////////////////////////////////
// Unlink 'rec' from the doubly linked LRU list, fixing up fHead/fTail as
// needed. The Rec itself is not freed; its link pointers are cleared.
523 void SkScaledImageCache::detach(Rec* rec) {
524 Rec* prev = rec->fPrev;
525 Rec* next = rec->fNext;
// No predecessor means 'rec' must currently be the head.
528 SkASSERT(fHead == rec);
540 rec->fNext = rec->fPrev = NULL;
// Re-link 'rec' at the head of the LRU list (most-recently-used position).
543 void SkScaledImageCache::moveToHead(Rec* rec) {
// Insert a brand-new Rec at the head of the LRU list and charge its size
// against the running byte total (count/link updates are in elided lines).
562 void SkScaledImageCache::addToHead(Rec* rec) {
574 fTotalBytesUsed += rec->bytesUsed();
580 ///////////////////////////////////////////////////////////////////////////////
// Debug consistency check: list structure, byte/count totals, and the
// invariant that the per-Rec sizes sum exactly to fTotalBytesUsed.
583 void SkScaledImageCache::validate() const {
// Empty cache: no tail and nothing accounted.
585 SkASSERT(NULL == fTail);
586 SkASSERT(0 == fTotalBytesUsed);
590 if (fHead == fTail) {
// Single element: no links, and that one Rec accounts for every byte.
591 SkASSERT(NULL == fHead->fPrev);
592 SkASSERT(NULL == fHead->fNext);
593 SkASSERT(fHead->bytesUsed() == fTotalBytesUsed);
// Two or more elements: both ends terminated and linked inward.
597 SkASSERT(NULL == fHead->fPrev);
598 SkASSERT(NULL != fHead->fNext);
599 SkASSERT(NULL == fTail->fNext);
600 SkASSERT(NULL != fTail->fPrev);
// Forward pass: accumulate sizes and count the nodes.
604 const Rec* rec = fHead;
607 used += rec->bytesUsed();
608 SkASSERT(used <= fTotalBytesUsed);
611 SkASSERT(fCount == count);
// Backward pass: the same total must unwind exactly to zero.
617 SkASSERT(used >= rec->bytesUsed());
618 used -= rec->bytesUsed();
622 SkASSERT(0 == count);
// Print a one-line summary: entry count, byte total, how many entries are
// currently locked, and which backing store (discardable vs malloc) is used.
627 void SkScaledImageCache::dump() const {
630 const Rec* rec = fHead;
// Counts entries with at least one outstanding lock.
633 locked += rec->fLockCount > 0;
637 SkDebugf("SkScaledImageCache: count=%d bytes=%d locked=%d %s\n",
638 fCount, fTotalBytesUsed, locked,
639 fDiscardableFactory ? "discardable" : "malloc");
642 size_t SkScaledImageCache::setSingleAllocationByteLimit(size_t newLimit) {
643 size_t oldLimit = fSingleAllocationByteLimit;
644 fSingleAllocationByteLimit = newLimit;
648 size_t SkScaledImageCache::getSingleAllocationByteLimit() const {
649 return fSingleAllocationByteLimit;
652 ///////////////////////////////////////////////////////////////////////////////
654 #include "SkThread.h"
// Process-wide singleton cache plus the mutex guarding every static entry
// point below.
656 SK_DECLARE_STATIC_MUTEX(gMutex);
657 static SkScaledImageCache* gScaledImageCache = NULL;
// atexit() hook for the singleton.
658 static void cleanup_gScaledImageCache() {
659 // We'll clean this up in our own tests, but disable for clients.
660 // Chrome seems to have funky multi-process things going on in unit tests that
661 // makes this unsafe to delete when the main process atexit()s.
662 // SkLazyPtr does the same sort of thing.
// (Presumably compiled only in developer/test builds per the note above --
// confirm the elided guard.)
664 SkDELETE(gScaledImageCache);
668 /** Must hold gMutex when calling. */
669 static SkScaledImageCache* get_cache() {
670 // gMutex is always held when this is called, so we don't need to be fancy in here.
672 if (NULL == gScaledImageCache) {
673 #ifdef SK_USE_DISCARDABLE_SCALEDIMAGECACHE
674 gScaledImageCache = SkNEW_ARGS(SkScaledImageCache, (SkDiscardableMemory::Create));
676 gScaledImageCache = SkNEW_ARGS(SkScaledImageCache, (SK_DEFAULT_IMAGE_CACHE_LIMIT));
678 atexit(cleanup_gScaledImageCache);
680 return gScaledImageCache;
684 SkScaledImageCache::ID* SkScaledImageCache::FindAndLock(
685 uint32_t pixelGenerationID,
689 SkAutoMutexAcquire am(gMutex);
690 return get_cache()->findAndLock(pixelGenerationID, width, height, scaled);
693 SkScaledImageCache::ID* SkScaledImageCache::AddAndLock(
694 uint32_t pixelGenerationID,
697 const SkBitmap& scaled) {
698 SkAutoMutexAcquire am(gMutex);
699 return get_cache()->addAndLock(pixelGenerationID, width, height, scaled);
703 SkScaledImageCache::ID* SkScaledImageCache::FindAndLock(const SkBitmap& orig,
707 SkAutoMutexAcquire am(gMutex);
708 return get_cache()->findAndLock(orig, scaleX, scaleY, scaled);
711 SkScaledImageCache::ID* SkScaledImageCache::FindAndLockMip(const SkBitmap& orig,
712 SkMipMap const ** mip) {
713 SkAutoMutexAcquire am(gMutex);
714 return get_cache()->findAndLockMip(orig, mip);
717 SkScaledImageCache::ID* SkScaledImageCache::AddAndLock(const SkBitmap& orig,
720 const SkBitmap& scaled) {
721 SkAutoMutexAcquire am(gMutex);
722 return get_cache()->addAndLock(orig, scaleX, scaleY, scaled);
725 SkScaledImageCache::ID* SkScaledImageCache::AddAndLockMip(const SkBitmap& orig,
726 const SkMipMap* mip) {
727 SkAutoMutexAcquire am(gMutex);
728 return get_cache()->addAndLockMip(orig, mip);
731 void SkScaledImageCache::Unlock(SkScaledImageCache::ID* id) {
732 SkAutoMutexAcquire am(gMutex);
733 get_cache()->unlock(id);
735 // get_cache()->dump();
738 size_t SkScaledImageCache::GetTotalBytesUsed() {
739 SkAutoMutexAcquire am(gMutex);
740 return get_cache()->getTotalBytesUsed();
743 size_t SkScaledImageCache::GetTotalByteLimit() {
744 SkAutoMutexAcquire am(gMutex);
745 return get_cache()->getTotalByteLimit();
748 size_t SkScaledImageCache::SetTotalByteLimit(size_t newLimit) {
749 SkAutoMutexAcquire am(gMutex);
750 return get_cache()->setTotalByteLimit(newLimit);
753 SkBitmap::Allocator* SkScaledImageCache::GetAllocator() {
754 SkAutoMutexAcquire am(gMutex);
755 return get_cache()->allocator();
758 void SkScaledImageCache::Dump() {
759 SkAutoMutexAcquire am(gMutex);
763 size_t SkScaledImageCache::SetSingleAllocationByteLimit(size_t size) {
764 SkAutoMutexAcquire am(gMutex);
765 return get_cache()->setSingleAllocationByteLimit(size);
768 size_t SkScaledImageCache::GetSingleAllocationByteLimit() {
769 SkAutoMutexAcquire am(gMutex);
770 return get_cache()->getSingleAllocationByteLimit();
773 ///////////////////////////////////////////////////////////////////////////////
775 #include "SkGraphics.h"
777 size_t SkGraphics::GetImageCacheTotalBytesUsed() {
778 return SkScaledImageCache::GetTotalBytesUsed();
781 size_t SkGraphics::GetImageCacheTotalByteLimit() {
782 return SkScaledImageCache::GetTotalByteLimit();
785 size_t SkGraphics::SetImageCacheTotalByteLimit(size_t newLimit) {
786 return SkScaledImageCache::SetTotalByteLimit(newLimit);
789 size_t SkGraphics::GetImageCacheSingleAllocationByteLimit() {
790 return SkScaledImageCache::GetSingleAllocationByteLimit();
793 size_t SkGraphics::SetImageCacheSingleAllocationByteLimit(size_t newLimit) {
794 return SkScaledImageCache::SetSingleAllocationByteLimit(newLimit);