2 * Copyright 2013 Google Inc.
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
8 #include "SkScaledImageCache.h"
10 #include "SkPixelRef.h"
13 // This can be defined by the caller's build system
14 //#define SK_USE_DISCARDABLE_SCALEDIMAGECACHE
16 #ifndef SK_DISCARDABLEMEMORY_SCALEDIMAGECACHE_COUNT_LIMIT
17 # define SK_DISCARDABLEMEMORY_SCALEDIMAGECACHE_COUNT_LIMIT 1024
20 #ifndef SK_DEFAULT_IMAGE_CACHE_LIMIT
21 #define SK_DEFAULT_IMAGE_CACHE_LIMIT (2 * 1024 * 1024)
// Converts an internal cache Rec* into the opaque ID* handed to callers.
// Pure pointer cast: the ID is the Rec, just hidden from the public API.
24 static inline SkScaledImageCache::ID* rec_to_id(SkScaledImageCache::Rec* rec) {
25 return reinterpret_cast<SkScaledImageCache::ID*>(rec);
// Inverse of rec_to_id(): recovers the internal Rec* from an opaque ID*.
28 static inline SkScaledImageCache::Rec* id_to_rec(SkScaledImageCache::ID* id) {
29 return reinterpret_cast<SkScaledImageCache::Rec*>(id);
32 // Implemented from en.wikipedia.org/wiki/MurmurHash.
// Hashes `count` 32-bit words of `data` down to a single 32-bit value.
// NOTE(review): this excerpt elides interior lines (original line numbers
// skip), so the seed, multiplies, and final avalanche are not visible here;
// verify against the full file.
33 static uint32_t compute_hash(const uint32_t data[], int count) {
36 for (int i = 0; i < count; ++i) {
// 32-bit rotate-left of k by 15 bits (MurmurHash-style mixing).
39 k = (k << 15) | (k >> 17);
// 32-bit rotate-left of the running hash by 13 bits.
43 hash = (hash << 13) | (hash >> 19);
// Cache lookup key. The hash and both comparison operators treat the key as
// a flat run of uint32_t words starting at fGenID (presumably genID, the two
// scale factors, and the bounds rect -- field layout not fully visible here).
58 struct SkScaledImageCache::Key {
// Hash covers the 7 words starting at fGenID; fHash itself is excluded.
67 fHash = compute_hash(&fGenID, 7);
// Lexicographic word-by-word ordering over the same 7 key words.
70 bool operator<(const Key& other) const {
71 const uint32_t* a = &fGenID;
72 const uint32_t* b = &other.fGenID;
73 for (int i = 0; i < 7; ++i) {
// Equality compares 8 words starting at fHash: the cached hash plus the
// 7 key words (assumes fHash immediately precedes fGenID in memory --
// TODO(review): confirm the member order in the full declaration).
84 bool operator==(const Key& other) const {
85 const uint32_t* a = &fHash;
86 const uint32_t* b = &other.fHash;
87 for (int i = 0; i < 8; ++i) {
// A single cache entry: a Key plus exactly one payload -- either a bitmap
// (scaled image) or a mipmap. Entries live on the cache's LRU list and,
// when USE_HASH is enabled, in the hash table as well.
102 struct SkScaledImageCache::Rec {
// Bitmap-payload constructor (fMip presumably left NULL -- body elided here).
103 Rec(const Key& key, const SkBitmap& bm) : fKey(key), fBitmap(bm) {
// Mipmap-payload constructor.
108 Rec(const Key& key, const SkMipMap* mip) : fKey(key) {
// Traits required by SkTDynamicHash (see the Hash class below).
118 static const Key& GetKey(const Rec& rec) { return rec.fKey; }
119 static uint32_t Hash(const Key& key) { return key.fHash; }
// Budget charge for this entry: size of whichever payload it carries.
121 size_t bytesUsed() const {
122 return fMip ? fMip->getSize() : fBitmap.getSize();
128 // this field wants to be 64-bit aligned
133 // we use either fBitmap or fMip, but not both
135 const SkMipMap* fMip;
138 #include "SkTDynamicHash.h"
// Concrete hash-table type for O(1) key lookup, instantiating SkTDynamicHash
// with Rec entries and Key keys (uses Rec::GetKey / Rec::Hash traits above).
140 class SkScaledImageCache::Hash :
141 public SkTDynamicHash<SkScaledImageCache::Rec, SkScaledImageCache::Key> {};
144 ///////////////////////////////////////////////////////////////////////////////
146 // experimental hash to speed things up
149 #if !defined(USE_HASH)
// Fallback lookup when the experimental hash is disabled: walk the linked
// list from `head` until the key matches. Presumably returns NULL when the
// key is absent (the return statement is elided in this excerpt).
150 static inline SkScaledImageCache::Rec* find_rec_in_list(
151 SkScaledImageCache::Rec* head, const Key & key) {
152 SkScaledImageCache::Rec* rec = head;
153 while ((rec != NULL) && (rec->fKey != key)) {
// Shared constructor helper: resets all cache state to empty/NULL
// (most assignments are elided from this excerpt).
160 void SkScaledImageCache::init() {
172 // One of these should be explicitly set by the caller after we return.
174 fDiscardableFactory = NULL;
177 #include "SkDiscardableMemory.h"
// A pixelref whose pixel storage is a single SkDiscardableMemory block, so
// the system may purge the pixels while they are unlocked. "One shot": once
// a relock fails, the backing memory is deleted and never replaced (see
// onNewLockPixels below).
179 class SkOneShotDiscardablePixelRef : public SkPixelRef {
181 SK_DECLARE_INST_COUNT(SkOneShotDiscardablePixelRef)
182 // Ownership of the discardable memory is transferred to the pixelref.
183 SkOneShotDiscardablePixelRef(const SkImageInfo&, SkDiscardableMemory*, size_t rowBytes);
184 ~SkOneShotDiscardablePixelRef();
186 SK_DECLARE_UNFLATTENABLE_OBJECT()
189 virtual bool onNewLockPixels(LockRec*) SK_OVERRIDE;
190 virtual void onUnlockPixels() SK_OVERRIDE;
191 virtual size_t getAllocatedSizeInBytes() const SK_OVERRIDE;
// Owned backing store; may become NULL after a failed relock (see above).
194 SkDiscardableMemory* fDM;
198 typedef SkPixelRef INHERITED;
// Takes ownership of `dm`, which must arrive already locked (data() != NULL).
// Remaining parameters/initializers are elided in this excerpt.
201 SkOneShotDiscardablePixelRef::SkOneShotDiscardablePixelRef(const SkImageInfo& info,
202 SkDiscardableMemory* dm,
208 SkASSERT(dm->data());
// Destructor -- presumably frees fDM; body not visible in this excerpt.
212 SkOneShotDiscardablePixelRef::~SkOneShotDiscardablePixelRef() {
// Relock callback: republish the pixel address/rowbytes into `rec`.
// Handles the first lock (memory still held from the constructor), a
// purged-and-recovered lock, and the unrecoverable case where the backing
// store is deleted for good. Control flow between the visible lines is
// elided in this excerpt.
216 bool SkOneShotDiscardablePixelRef::onNewLockPixels(LockRec* rec) {
218 // we're already locked
219 SkASSERT(fDM->data());
224 // A previous call to onUnlock may have deleted our DM, so check for that
230 // since it failed, we delete it now, to free up the resource
// Success path: hand the locked memory back to the caller.
237 rec->fPixels = fDM->data();
238 rec->fColorTable = NULL;
239 rec->fRowBytes = fRB;
// Unlock callback. Must not be the very first event (the pixels start out
// locked from the constructor), hence the assert.
243 void SkOneShotDiscardablePixelRef::onUnlockPixels() {
244 SkASSERT(!fFirstTime);
// Reports the allocation size implied by the stored rowBytes (fRB) and the
// pixelref's image info.
248 size_t SkOneShotDiscardablePixelRef::getAllocatedSizeInBytes() const {
249 return this->info().getSafeSize(fRB);
// SkBitmap::Allocator that backs bitmaps with discardable memory obtained
// from the cache's DiscardableFactory (see allocPixelRef below).
252 class SkScaledImageCacheDiscardableAllocator : public SkBitmap::Allocator {
254 SkScaledImageCacheDiscardableAllocator(
255 SkScaledImageCache::DiscardableFactory factory) {
260 virtual bool allocPixelRef(SkBitmap*, SkColorTable*) SK_OVERRIDE;
// Factory function used to create each discardable-memory block.
263 SkScaledImageCache::DiscardableFactory fFactory;
// Allocates `bitmap`'s pixels from the discardable-memory factory and
// installs a SkOneShotDiscardablePixelRef. Returns true only if the bitmap
// ends up ready to draw. NOTE(review): the ctable parameter appears unused
// in the visible lines, and the failure branches are elided in this excerpt.
266 bool SkScaledImageCacheDiscardableAllocator::allocPixelRef(SkBitmap* bitmap,
267 SkColorTable* ctable) {
268 size_t size = bitmap->getSize();
273 SkDiscardableMemory* dm = fFactory(size);
278 // can we relax this?
// Only N32 bitmaps are supported by this allocator.
279 if (kN32_SkColorType != bitmap->colorType()) {
283 SkImageInfo info = bitmap->info();
// The pixelref takes ownership of `dm`; unref() drops our local reference
// after setPixelRef() has retained it.
284 bitmap->setPixelRef(SkNEW_ARGS(SkOneShotDiscardablePixelRef,
285 (info, dm, bitmap->rowBytes())))->unref();
286 bitmap->lockPixels();
287 return bitmap->readyToDraw();
// Discardable-memory mode: entries are count-limited rather than byte-limited
// (see purgeAsNeeded), and the cache exposes a discardable allocator.
290 SkScaledImageCache::SkScaledImageCache(DiscardableFactory factory) {
292 fDiscardableFactory = factory;
294 fAllocator = SkNEW_ARGS(SkScaledImageCacheDiscardableAllocator, (factory));
// Malloc mode: entries are evicted once total bytes exceed `byteLimit`.
297 SkScaledImageCache::SkScaledImageCache(size_t byteLimit) {
299 fByteLimit = byteLimit;
// Destructor: releases the allocator and then (in elided lines) walks the
// LRU list deleting every Rec -- fNext is saved before each deletion.
302 SkScaledImageCache::~SkScaledImageCache() {
303 SkSafeUnref(fAllocator);
307 Rec* next = rec->fNext;
314 ////////////////////////////////////////////////////////////////////////////////
// Convenience overload: builds a Key from the pieces and forwards to the
// fully general Key-based finder below.
317 SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(uint32_t genID,
320 const SkIRect& bounds) {
321 const Key key(genID, scaleX, scaleY, bounds);
322 return this->findAndLock(key);
326 This private method is the fully general record finder. All other
327 record finders should call this function or the one above. */
328 SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(const SkScaledImageCache::Key& key) {
// Empty bounds can never match a real entry (elided early-out, presumably).
329 if (key.fBounds.isEmpty()) {
// USE_HASH path: O(1) table lookup.
333 Rec* rec = fHash->find(key);
// List path: linear scan of the LRU list.
335 Rec* rec = find_rec_in_list(fHead, key);
// On a hit: promote to most-recently-used and take a lock reference.
338 this->moveToHead(rec); // for our LRU
339 rec->fLockCount += 1;
345 This function finds the bounds of the bitmap *within its pixelRef*.
346 If the bitmap lacks a pixelRef, it will return an empty rect, since
347 that doesn't make sense. This may be a useful enough function that
348 it should be somewhere else (in SkBitmap?). */
349 static SkIRect get_bounds_from_bitmap(const SkBitmap& bm) {
// No pixelRef -> empty rect (callers treat empty bounds as "not cacheable").
350 if (!(bm.pixelRef())) {
351 return SkIRect::MakeEmpty();
// Offset the bitmap's dimensions by its origin inside the shared pixelRef.
353 SkIPoint origin = bm.pixelRefOrigin();
354 return SkIRect::MakeXYWH(origin.fX, origin.fY, bm.width(), bm.height());
// Public finder for an unscaled bitmap keyed by genID + dimensions.
// Uses scale == SK_Scalar1 to distinguish from the mipmap key (0, 0).
// On a hit, copies the cached bitmap out and returns a locked ID the caller
// must later pass to unlock(); the miss path is elided in this excerpt.
358 SkScaledImageCache::ID* SkScaledImageCache::findAndLock(uint32_t genID,
362 Rec* rec = this->findAndLock(genID, SK_Scalar1, SK_Scalar1,
363 SkIRect::MakeWH(width, height));
365 SkASSERT(NULL == rec->fMip);
366 SkASSERT(rec->fBitmap.pixelRef());
367 *bitmap = rec->fBitmap;
369 return rec_to_id(rec);
// Public finder for a scaled copy of `orig`. Zero scales are rejected up
// front because (0, 0) is reserved as the mipmap key (see findAndLockMip).
// On a hit, copies the cached bitmap into *scaled and returns a locked ID.
372 SkScaledImageCache::ID* SkScaledImageCache::findAndLock(const SkBitmap& orig,
376 if (0 == scaleX || 0 == scaleY) {
377 // degenerate, and the key we use for mipmaps
380 Rec* rec = this->findAndLock(orig.getGenerationID(), scaleX,
381 scaleY, get_bounds_from_bitmap(orig));
383 SkASSERT(NULL == rec->fMip);
384 SkASSERT(rec->fBitmap.pixelRef());
385 *scaled = rec->fBitmap;
387 return rec_to_id(rec);
// Public finder for the mipmap of `orig`, keyed with the reserved (0, 0)
// scale pair. On a hit the entry must carry a mip, not a bitmap (assert),
// and a locked ID is returned; the *mip out-assignment is elided here.
390 SkScaledImageCache::ID* SkScaledImageCache::findAndLockMip(const SkBitmap& orig,
391 SkMipMap const ** mip) {
392 Rec* rec = this->findAndLock(orig.getGenerationID(), 0, 0,
393 get_bounds_from_bitmap(orig));
396 SkASSERT(NULL == rec->fBitmap.pixelRef());
399 return rec_to_id(rec);
403 ////////////////////////////////////////////////////////////////////////////////
405 This private method is the fully general record adder. All other
406 record adders should call this function. */
407 SkScaledImageCache::ID* SkScaledImageCache::addAndLock(SkScaledImageCache::Rec* rec) {
409 // See if we already have this key (racy inserts, etc.)
410 Rec* existing = this->findAndLock(rec->fKey);
411 if (NULL != existing) {
412 // Since we already have a matching entry, just delete the new one and return.
413 // Call sites cannot assume the passed in object will live past this call.
// Refresh the existing entry's payload with the newer bitmap, then return
// the existing (already locked by findAndLock above) entry's ID.
414 existing->fBitmap = rec->fBitmap;
416 return rec_to_id(existing);
// New key: take ownership of `rec`, insert at the MRU end, and return it
// locked (a fresh Rec is expected to start with fLockCount == 1).
419 this->addToHead(rec);
420 SkASSERT(1 == rec->fLockCount);
425 // We may (now) be over budget, so see if we need to purge something.
426 this->purgeAsNeeded();
427 return rec_to_id(rec);
// Adds an unscaled bitmap keyed by genID + dimensions (scale = SK_Scalar1,
// mirroring the corresponding findAndLock overload).
430 SkScaledImageCache::ID* SkScaledImageCache::addAndLock(uint32_t genID,
433 const SkBitmap& bitmap) {
434 Key key(genID, SK_Scalar1, SK_Scalar1, SkIRect::MakeWH(width, height));
435 Rec* rec = SkNEW_ARGS(Rec, (key, bitmap));
436 return this->addAndLock(rec);
// Adds a scaled copy of `orig`. Rejects zero scales (reserved for the
// mipmap key) and bitmaps without a pixelRef (empty bounds); the rejection
// returns are elided in this excerpt.
439 SkScaledImageCache::ID* SkScaledImageCache::addAndLock(const SkBitmap& orig,
442 const SkBitmap& scaled) {
443 if (0 == scaleX || 0 == scaleY) {
444 // degenerate, and the key we use for mipmaps
447 SkIRect bounds = get_bounds_from_bitmap(orig);
448 if (bounds.isEmpty()) {
451 Key key(orig.getGenerationID(), scaleX, scaleY, bounds);
452 Rec* rec = SkNEW_ARGS(Rec, (key, scaled));
453 return this->addAndLock(rec);
// Adds the mipmap of `orig` under the reserved (0, 0) scale key.
// Bitmaps without a pixelRef (empty bounds) are rejected (return elided).
456 SkScaledImageCache::ID* SkScaledImageCache::addAndLockMip(const SkBitmap& orig,
457 const SkMipMap* mip) {
458 SkIRect bounds = get_bounds_from_bitmap(orig);
459 if (bounds.isEmpty()) {
462 Key key(orig.getGenerationID(), 0, 0, bounds);
463 Rec* rec = SkNEW_ARGS(Rec, (key, mip));
464 return this->addAndLock(rec);
// Releases one lock reference on the entry behind `id`. The visible loop is
// presumably a debug-only scan verifying `id` really is in our list.
// Dropping the last lock makes the entry eligible for eviction, so we run a
// purge pass at that point.
467 void SkScaledImageCache::unlock(SkScaledImageCache::ID* id) {
474 while (rec != NULL) {
475 if (rec == id_to_rec(id)) {
484 Rec* rec = id_to_rec(id);
485 SkASSERT(rec->fLockCount > 0);
486 rec->fLockCount -= 1;
488 // we may have been over-budget, but now have released something, so check
489 // if we should purge.
490 if (0 == rec->fLockCount) {
491 this->purgeAsNeeded();
// Evicts unlocked entries from the LRU (tail) end until the cache is back
// under budget. Discardable mode limits entry *count* (the OS reclaims the
// bytes); malloc mode limits total *bytes*. The eviction loop walks
// tail-to-head via fPrev; locked entries are skipped. Several loop lines are
// elided in this excerpt.
495 void SkScaledImageCache::purgeAsNeeded() {
499 if (fDiscardableFactory) {
500 countLimit = SK_DISCARDABLEMEMORY_SCALEDIMAGECACHE_COUNT_LIMIT;
501 byteLimit = SK_MaxU32; // no limit based on bytes
503 countLimit = SK_MaxS32; // no limit based on count
504 byteLimit = fByteLimit;
507 size_t bytesUsed = fBytesUsed;
508 int countUsed = fCount;
// Under both limits: nothing to purge (break/return elided).
512 if (bytesUsed < byteLimit && countUsed < countLimit) {
// Save fPrev before this entry may be detached and deleted.
516 Rec* prev = rec->fPrev;
517 if (0 == rec->fLockCount) {
518 size_t used = rec->bytesUsed();
519 SkASSERT(used <= bytesUsed);
522 fHash->remove(rec->fKey);
// Publish the recomputed total after the sweep.
533 fBytesUsed = bytesUsed;
// Sets a new byte budget and returns the previous one (return elided).
// Shrinking the budget triggers an immediate purge to get under it.
537 size_t SkScaledImageCache::setByteLimit(size_t newLimit) {
538 size_t prevLimit = fByteLimit;
539 fByteLimit = newLimit;
540 if (newLimit < prevLimit) {
541 this->purgeAsNeeded();
546 ///////////////////////////////////////////////////////////////////////////////
// Unlinks `rec` from the doubly-linked LRU list, patching fHead/fTail as
// needed (most relink cases are elided here), and clears its link pointers.
548 void SkScaledImageCache::detach(Rec* rec) {
549 Rec* prev = rec->fPrev;
550 Rec* next = rec->fNext;
553 SkASSERT(fHead == rec);
565 rec->fNext = rec->fPrev = NULL;
// LRU "touch": moves `rec` to the head (most-recently-used) position --
// body not visible in this excerpt, presumably detach + relink at fHead.
568 void SkScaledImageCache::moveToHead(Rec* rec) {
// Inserts `rec` at the head of the LRU list and charges its size against
// the cache's byte total (list-splice lines elided in this excerpt).
587 void SkScaledImageCache::addToHead(Rec* rec) {
599 fBytesUsed += rec->bytesUsed();
605 ///////////////////////////////////////////////////////////////////////////////
// Debug-only invariant check: an empty cache has no tail and zero bytes; a
// one-entry cache's head/tail agree with fBytesUsed; otherwise the list is
// well-terminated at both ends. The cache is then walked forward (summing
// bytesUsed and counting entries) and backward (subtracting), verifying
// fCount and that the totals reconcile to zero. Loop bodies are partially
// elided in this excerpt.
608 void SkScaledImageCache::validate() const {
610 SkASSERT(NULL == fTail);
611 SkASSERT(0 == fBytesUsed);
615 if (fHead == fTail) {
616 SkASSERT(NULL == fHead->fPrev);
617 SkASSERT(NULL == fHead->fNext);
618 SkASSERT(fHead->bytesUsed() == fBytesUsed);
622 SkASSERT(NULL == fHead->fPrev);
623 SkASSERT(NULL != fHead->fNext);
624 SkASSERT(NULL == fTail->fNext);
625 SkASSERT(NULL != fTail->fPrev);
// Forward walk: accumulate sizes, never exceeding the recorded total.
629 const Rec* rec = fHead;
632 used += rec->bytesUsed();
633 SkASSERT(used <= fBytesUsed);
636 SkASSERT(fCount == count);
// Backward walk: subtract each entry back out of the accumulated total.
642 SkASSERT(used >= rec->bytesUsed());
643 used -= rec->bytesUsed();
647 SkASSERT(0 == count);
// Debug dump: walks the list counting locked entries, then prints a
// one-line summary of count/bytes/locked and the cache's backing mode.
652 void SkScaledImageCache::dump() const {
655 const Rec* rec = fHead;
658 locked += rec->fLockCount > 0;
662 SkDebugf("SkScaledImageCache: count=%d bytes=%d locked=%d %s\n",
663 fCount, fBytesUsed, locked,
664 fDiscardableFactory ? "discardable" : "malloc");
667 ///////////////////////////////////////////////////////////////////////////////
669 #include "SkThread.h"
// Process-wide singleton cache, guarded by gMutex (all public static
// wrappers below acquire it before touching gScaledImageCache).
671 SK_DECLARE_STATIC_MUTEX(gMutex);
672 static SkScaledImageCache* gScaledImageCache = NULL;
// atexit() hook registered by get_cache(); the delete is conditionally
// compiled (guard elided in this excerpt) for the reasons below.
673 static void cleanup_gScaledImageCache() {
674 // We'll clean this up in our own tests, but disable for clients.
675 // Chrome seems to have funky multi-process things going on in unit tests that
676 // makes this unsafe to delete when the main process atexit()s.
677 // SkLazyPtr does the same sort of thing.
679 SkDELETE(gScaledImageCache);
683 /** Must hold gMutex when calling. */
684 static SkScaledImageCache* get_cache() {
685 // gMutex is always held when this is called, so we don't need to be fancy in here.
// Lazily construct the singleton: discardable-backed when the build opts in,
// otherwise a fixed byte-budget (malloc) cache.
687 if (NULL == gScaledImageCache) {
688 #ifdef SK_USE_DISCARDABLE_SCALEDIMAGECACHE
689 gScaledImageCache = SkNEW_ARGS(SkScaledImageCache, (SkDiscardableMemory::Create));
691 gScaledImageCache = SkNEW_ARGS(SkScaledImageCache, (SK_DEFAULT_IMAGE_CACHE_LIMIT));
693 atexit(cleanup_gScaledImageCache);
695 return gScaledImageCache;
// Thread-safe static wrapper: acquire gMutex, delegate to the singleton.
699 SkScaledImageCache::ID* SkScaledImageCache::FindAndLock(
700 uint32_t pixelGenerationID,
704 SkAutoMutexAcquire am(gMutex);
705 return get_cache()->findAndLock(pixelGenerationID, width, height, scaled);
// Thread-safe static wrapper: acquire gMutex, delegate to the singleton.
708 SkScaledImageCache::ID* SkScaledImageCache::AddAndLock(
709 uint32_t pixelGenerationID,
712 const SkBitmap& scaled) {
713 SkAutoMutexAcquire am(gMutex);
714 return get_cache()->addAndLock(pixelGenerationID, width, height, scaled);
// Thread-safe static wrapper: acquire gMutex, delegate to the singleton.
718 SkScaledImageCache::ID* SkScaledImageCache::FindAndLock(const SkBitmap& orig,
722 SkAutoMutexAcquire am(gMutex);
723 return get_cache()->findAndLock(orig, scaleX, scaleY, scaled);
// Thread-safe static wrapper: acquire gMutex, delegate to the singleton.
726 SkScaledImageCache::ID* SkScaledImageCache::FindAndLockMip(const SkBitmap& orig,
727 SkMipMap const ** mip) {
728 SkAutoMutexAcquire am(gMutex);
729 return get_cache()->findAndLockMip(orig, mip);
// Thread-safe static wrapper: acquire gMutex, delegate to the singleton.
732 SkScaledImageCache::ID* SkScaledImageCache::AddAndLock(const SkBitmap& orig,
735 const SkBitmap& scaled) {
736 SkAutoMutexAcquire am(gMutex);
737 return get_cache()->addAndLock(orig, scaleX, scaleY, scaled);
// Thread-safe static wrapper: acquire gMutex, delegate to the singleton.
740 SkScaledImageCache::ID* SkScaledImageCache::AddAndLockMip(const SkBitmap& orig,
741 const SkMipMap* mip) {
742 SkAutoMutexAcquire am(gMutex);
743 return get_cache()->addAndLockMip(orig, mip);
// Thread-safe static wrapper: releases one lock on `id` under gMutex.
746 void SkScaledImageCache::Unlock(SkScaledImageCache::ID* id) {
747 SkAutoMutexAcquire am(gMutex);
748 get_cache()->unlock(id);
750 // get_cache()->dump();
// Thread-safe static accessor for the cache's current byte total.
753 size_t SkScaledImageCache::GetBytesUsed() {
754 SkAutoMutexAcquire am(gMutex);
755 return get_cache()->getBytesUsed();
// Thread-safe static accessor for the cache's byte budget.
758 size_t SkScaledImageCache::GetByteLimit() {
759 SkAutoMutexAcquire am(gMutex);
760 return get_cache()->getByteLimit();
// Thread-safe static setter: installs a new byte budget, returns the old one.
763 size_t SkScaledImageCache::SetByteLimit(size_t newLimit) {
764 SkAutoMutexAcquire am(gMutex);
765 return get_cache()->setByteLimit(newLimit);
// Thread-safe static accessor for the cache's bitmap allocator
// (non-NULL only in discardable mode; see the constructors).
768 SkBitmap::Allocator* SkScaledImageCache::GetAllocator() {
769 SkAutoMutexAcquire am(gMutex);
770 return get_cache()->allocator();
// Thread-safe static debug dump (the delegating call is elided here).
773 void SkScaledImageCache::Dump() {
774 SkAutoMutexAcquire am(gMutex);
778 ///////////////////////////////////////////////////////////////////////////////
780 #include "SkGraphics.h"
// Public SkGraphics facade over the (already thread-safe) static accessor.
782 size_t SkGraphics::GetImageCacheBytesUsed() {
783 return SkScaledImageCache::GetBytesUsed();
// Public SkGraphics facade over the (already thread-safe) static accessor.
786 size_t SkGraphics::GetImageCacheByteLimit() {
787 return SkScaledImageCache::GetByteLimit();
// Public SkGraphics facade over the (already thread-safe) static setter.
790 size_t SkGraphics::SetImageCacheByteLimit(size_t newLimit) {
791 return SkScaledImageCache::SetByteLimit(newLimit);