2 * Copyright 2013 Google Inc.
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
8 #include "SkScaledImageCache.h"
10 #include "SkPixelRef.h"
13 // This can be defined by the caller's build system
14 //#define SK_USE_DISCARDABLE_SCALEDIMAGECACHE
16 #ifndef SK_DISCARDABLEMEMORY_SCALEDIMAGECACHE_COUNT_LIMIT
17 # define SK_DISCARDABLEMEMORY_SCALEDIMAGECACHE_COUNT_LIMIT 1024
20 #ifndef SK_DEFAULT_IMAGE_CACHE_LIMIT
21 #define SK_DEFAULT_IMAGE_CACHE_LIMIT (2 * 1024 * 1024)
// ID is an opaque public handle for a cache entry; internally it is just a
// type-punned Rec*. This cast converts the private Rec to the public ID.
24 static inline SkScaledImageCache::ID* rec_to_id(SkScaledImageCache::Rec* rec) {
25 return reinterpret_cast<SkScaledImageCache::ID*>(rec);
// Inverse of rec_to_id(): recover the private Rec from a public ID handle.
28 static inline SkScaledImageCache::Rec* id_to_rec(SkScaledImageCache::ID* id) {
29 return reinterpret_cast<SkScaledImageCache::Rec*>(id);
32 // Implemented from en.wikipedia.org/wiki/MurmurHash.
// Hashes `count` 32-bit words into a single 32-bit value.
// NOTE(review): interior lines (seed, multiplies, finalization) are elided in
// this view; only the per-word and per-round rotations are visible.
33 static uint32_t compute_hash(const uint32_t data[], int count) {
36 for (int i = 0; i < count; ++i) {
39 k = (k << 15) | (k >> 17); // rotate-left by 15 bits
43 hash = (hash << 13) | (hash >> 19); // rotate-left by 13 bits
// Cache key: identifies a bitmap (by generation ID), a scale factor pair, and
// the source bounds within the pixel ref. The key's payload is treated as a
// flat array of 32-bit words starting at fGenID for hashing and comparison.
// NOTE(review): this relies on fHash immediately preceding fGenID with no
// padding, and on 7 payload words following fGenID — confirm field layout,
// which is elided in this view.
58 struct SkScaledImageCache::Key {
// Hash the 7 payload words (genID, scaleX, scaleY, bounds L/T/R/B).
67 fHash = compute_hash(&fGenID, 7);
// Lexicographic word-by-word ordering over the 7 payload words (hash excluded).
70 bool operator<(const Key& other) const {
71 const uint32_t* a = &fGenID;
72 const uint32_t* b = &other.fGenID;
73 for (int i = 0; i < 7; ++i) {
// Equality compares 8 words starting at fHash, so the hash acts as a
// cheap first-word reject before the payload is compared.
84 bool operator==(const Key& other) const {
85 const uint32_t* a = &fHash;
86 const uint32_t* b = &other.fHash;
87 for (int i = 0; i < 8; ++i) {
// A single cache entry: the key plus either a bitmap or a mipmap payload,
// linked into the cache's doubly-linked LRU list. Lock count, prev/next
// pointers, and other fields are elided in this view.
102 struct SkScaledImageCache::Rec {
103 Rec(const Key& key, const SkBitmap& bm) : fKey(key), fBitmap(bm) {
108 Rec(const Key& key, const SkMipMap* mip) : fKey(key) {
// Accessors required by SkTDynamicHash.
118 static const Key& GetKey(const Rec& rec) { return rec.fKey; }
119 static uint32_t Hash(const Key& key) { return key.fHash; }
// Budget accounting: size of whichever payload this entry holds.
121 size_t bytesUsed() const {
122 return fMip ? fMip->getSize() : fBitmap.getSize();
128 // this guy wants to be 64bit aligned
133 // we use either fBitmap or fMip, but not both
135 const SkMipMap* fMip;
138 #include "SkTDynamicHash.h"
// Concrete hash-table type for Rec lookup by Key; all behavior comes from
// SkTDynamicHash (it uses Rec::GetKey and Rec::Hash above).
140 class SkScaledImageCache::Hash :
141 public SkTDynamicHash<SkScaledImageCache::Rec, SkScaledImageCache::Key> {};
144 ///////////////////////////////////////////////////////////////////////////////
146 // experimental hash to speed things up
149 #if !defined(USE_HASH)
// O(n) fallback lookup used when the hash table is compiled out: walk the
// linked list from `head` until the key matches or we reach the end.
// Returns NULL if no entry matches. (Tail of the loop is elided here.)
150 static inline SkScaledImageCache::Rec* find_rec_in_list(
151 SkScaledImageCache::Rec* head, const Key & key) {
152 SkScaledImageCache::Rec* rec = head;
153 while ((rec != NULL) && (rec->fKey != key)) {
// Shared constructor helper: zero/NULL all members (most initializers are
// elided in this view). Exactly one of fDiscardableFactory / fTotalByteLimit
// is then set by whichever public constructor called us.
160 void SkScaledImageCache::init() {
170 fSingleAllocationByteLimit = 0;
173 // One of these should be explicitly set by the caller after we return.
175 fDiscardableFactory = NULL;
178 #include "SkDiscardableMemory.h"
// A pixel ref backed by a single SkDiscardableMemory block. "One shot":
// once the discardable memory is purged and a re-lock fails, the memory is
// freed and never re-created (see onNewLockPixels below).
180 class SkOneShotDiscardablePixelRef : public SkPixelRef {
182 SK_DECLARE_INST_COUNT(SkOneShotDiscardablePixelRef)
183 // Ownership of the discardable memory is transferred to the pixel ref.
184 SkOneShotDiscardablePixelRef(const SkImageInfo&, SkDiscardableMemory*, size_t rowBytes);
185 ~SkOneShotDiscardablePixelRef();
187 SK_DECLARE_UNFLATTENABLE_OBJECT()
190 virtual bool onNewLockPixels(LockRec*) SK_OVERRIDE;
191 virtual void onUnlockPixels() SK_OVERRIDE;
192 virtual size_t getAllocatedSizeInBytes() const SK_OVERRIDE;
// Owned discardable memory; may become NULL after a failed re-lock.
195 SkDiscardableMemory* fDM;
199 typedef SkPixelRef INHERITED;
// Takes ownership of `dm`, which must already be locked (it is created
// locked by the factory) — hence the data() assertion.
202 SkOneShotDiscardablePixelRef::SkOneShotDiscardablePixelRef(const SkImageInfo& info,
203 SkDiscardableMemory* dm,
209 SkASSERT(dm->data());
// Destructor: frees the owned SkDiscardableMemory (body elided in this view).
213 SkOneShotDiscardablePixelRef::~SkOneShotDiscardablePixelRef() {
// Re-lock the discardable memory and report the pixel address to the caller.
// Returns false (and frees fDM) if the memory was purged and cannot be
// re-locked — after that this pixel ref can never supply pixels again.
// NOTE(review): the first-time fast path and the lock()-retry logic are
// elided in this view.
217 bool SkOneShotDiscardablePixelRef::onNewLockPixels(LockRec* rec) {
219 // we're already locked
220 SkASSERT(fDM->data());
225 // A previous call to onUnlock may have deleted our DM, so check for that
231 // since it failed, we delete it now, to free-up the resource
// Success: publish pixels, no color table, and the row bytes fixed at ctor.
238 rec->fPixels = fDM->data();
239 rec->fColorTable = NULL;
240 rec->fRowBytes = fRB;
// Unlock the discardable memory so the system may purge it. Must not be
// called before the first lock has been consumed (fFirstTime cleared).
244 void SkOneShotDiscardablePixelRef::onUnlockPixels() {
245 SkASSERT(!fFirstTime);
// Allocation size = image height * row bytes, as computed by SkImageInfo.
249 size_t SkOneShotDiscardablePixelRef::getAllocatedSizeInBytes() const {
250 return this->info().getSafeSize(fRB);
// SkBitmap::Allocator that backs bitmaps with discardable memory obtained
// from the cache's DiscardableFactory, via SkOneShotDiscardablePixelRef.
253 class SkScaledImageCacheDiscardableAllocator : public SkBitmap::Allocator {
255 SkScaledImageCacheDiscardableAllocator(
256 SkScaledImageCache::DiscardableFactory factory) {
261 virtual bool allocPixelRef(SkBitmap*, SkColorTable*) SK_OVERRIDE;
// Factory function that produces SkDiscardableMemory of a requested size.
264 SkScaledImageCache::DiscardableFactory fFactory;
// Allocate discardable pixels for `bitmap`. Returns false on zero/overflowing
// size, factory failure, or unsupported color type; on success installs a
// locked SkOneShotDiscardablePixelRef and reports whether the bitmap is ready.
// NOTE(review): early-return bodies and the factory-NULL check are elided.
267 bool SkScaledImageCacheDiscardableAllocator::allocPixelRef(SkBitmap* bitmap,
268 SkColorTable* ctable) {
269 size_t size = bitmap->getSize();
// Guard against 64-bit size overflowing the size_t passed to the factory.
270 uint64_t size64 = bitmap->computeSize64();
271 if (0 == size || size64 > (uint64_t)size) {
275 SkDiscardableMemory* dm = fFactory(size);
280 // can we relax this?
// Only native 32-bit color bitmaps are supported (ctable formats rejected).
281 if (kN32_SkColorType != bitmap->colorType()) {
285 SkImageInfo info = bitmap->info();
// The pixel ref takes ownership of dm; unref() balances setPixelRef's ref.
286 bitmap->setPixelRef(SkNEW_ARGS(SkOneShotDiscardablePixelRef,
287 (info, dm, bitmap->rowBytes())))->unref();
288 bitmap->lockPixels();
289 return bitmap->readyToDraw();
// Discardable-memory mode: entries are bounded by count (not bytes), and
// bitmap allocations go through the discardable allocator.
292 SkScaledImageCache::SkScaledImageCache(DiscardableFactory factory) {
294 fDiscardableFactory = factory;
296 fAllocator = SkNEW_ARGS(SkScaledImageCacheDiscardableAllocator, (factory));
// Malloc mode: entries are bounded by a total byte budget; no custom allocator.
299 SkScaledImageCache::SkScaledImageCache(size_t byteLimit) {
301 fTotalByteLimit = byteLimit;
// Destructor: release the allocator and delete every Rec in the LRU list
// (the list-walking loop is elided in this view).
304 SkScaledImageCache::~SkScaledImageCache() {
305 SkSafeUnref(fAllocator);
// Capture fNext before deleting rec so the walk can continue.
309 Rec* next = rec->fNext;
316 ////////////////////////////////////////////////////////////////////////////////
// Convenience finder: build a Key from the raw parameters and delegate to the
// fully general Key-based finder below.
319 SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(uint32_t genID,
322 const SkIRect& bounds) {
323 const Key key(genID, scaleX, scaleY, bounds);
324 return this->findAndLock(key);
328 This private method is the fully general record finder. All other
329 record finders should call this function or the one above. */
// On a hit: bump the entry to the LRU head and increment its lock count so it
// cannot be purged until unlock() is called. Empty bounds never match
// (early return elided). Lookup uses the hash table when compiled in,
// otherwise the linear list scan.
330 SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(const SkScaledImageCache::Key& key) {
331 if (key.fBounds.isEmpty()) {
335 Rec* rec = fHash->find(key);
337 Rec* rec = find_rec_in_list(fHead, key);
340 this->moveToHead(rec); // for our LRU
341 rec->fLockCount += 1;
347 This function finds the bounds of the bitmap *within its pixelRef*.
348 If the bitmap lacks a pixelRef, it will return an empty rect, since
349 that doesn't make sense. This may be a useful enough function that
350 it should be somewhere else (in SkBitmap?). */
351 static SkIRect get_bounds_from_bitmap(const SkBitmap& bm) {
352 if (!(bm.pixelRef())) {
353 return SkIRect::MakeEmpty();
// Offset the bitmap's dimensions by its origin within the shared pixel ref.
355 SkIPoint origin = bm.pixelRefOrigin();
356 return SkIRect::MakeXYWH(origin.fX, origin.fY, bm.width(), bm.height());
// Find an unscaled (scale == 1) bitmap entry by generation ID and dimensions.
// On a hit, copies the cached bitmap to *bitmap and returns a locked ID the
// caller must later pass to unlock(); NULL-miss handling is elided here.
360 SkScaledImageCache::ID* SkScaledImageCache::findAndLock(uint32_t genID,
364 Rec* rec = this->findAndLock(genID, SK_Scalar1, SK_Scalar1,
365 SkIRect::MakeWH(width, height));
// A bitmap entry must not also hold a mip, and must have real pixels.
367 SkASSERT(NULL == rec->fMip);
368 SkASSERT(rec->fBitmap.pixelRef());
369 *bitmap = rec->fBitmap;
371 return rec_to_id(rec);
// Find a scaled version of `orig`. Zero scales are rejected (that key shape
// is reserved for mipmap entries — early return elided). On a hit, copies the
// cached bitmap to *scaled and returns a locked ID.
374 SkScaledImageCache::ID* SkScaledImageCache::findAndLock(const SkBitmap& orig,
378 if (0 == scaleX || 0 == scaleY) {
379 // degenerate, and the key we use for mipmaps
382 Rec* rec = this->findAndLock(orig.getGenerationID(), scaleX,
383 scaleY, get_bounds_from_bitmap(orig));
385 SkASSERT(NULL == rec->fMip);
386 SkASSERT(rec->fBitmap.pixelRef());
387 *scaled = rec->fBitmap;
389 return rec_to_id(rec);
// Find the mipmap for `orig`. Mip entries are keyed with scale (0, 0) —
// the degenerate scale rejected by the bitmap finders above. On a hit the
// entry must hold a mip (no bitmap pixels); *mip assignment is elided.
392 SkScaledImageCache::ID* SkScaledImageCache::findAndLockMip(const SkBitmap& orig,
393 SkMipMap const ** mip) {
394 Rec* rec = this->findAndLock(orig.getGenerationID(), 0, 0,
395 get_bounds_from_bitmap(orig));
398 SkASSERT(NULL == rec->fBitmap.pixelRef());
401 return rec_to_id(rec);
405 ////////////////////////////////////////////////////////////////////////////////
407 This private method is the fully general record adder. All other
408 record adders should call this function. */
// Takes ownership of `rec`. If an entry with the same key already exists
// (e.g. two threads raced to add), the new rec is discarded, the existing
// entry's bitmap is refreshed, and the existing (now locked) entry is
// returned instead. The SkDELETE of the duplicate rec is elided here.
409 SkScaledImageCache::ID* SkScaledImageCache::addAndLock(SkScaledImageCache::Rec* rec) {
411 // See if we already have this key (racy inserts, etc.)
412 Rec* existing = this->findAndLock(rec->fKey);
413 if (NULL != existing) {
414 // Since we already have a matching entry, just delete the new one and return.
415 // Call sites cannot assume the passed in object will live past this call.
416 existing->fBitmap = rec->fBitmap;
418 return rec_to_id(existing);
// New key: link into the LRU (addToHead also updates byte/count totals).
421 this->addToHead(rec);
422 SkASSERT(1 == rec->fLockCount);
427 // We may (now) be overbudget, so see if we need to purge something.
428 this->purgeAsNeeded();
429 return rec_to_id(rec);
// Add an unscaled bitmap entry (scale == 1, bounds == full width/height)
// and return a locked ID. Ownership of the new Rec passes to addAndLock().
432 SkScaledImageCache::ID* SkScaledImageCache::addAndLock(uint32_t genID,
435 const SkBitmap& bitmap) {
436 Key key(genID, SK_Scalar1, SK_Scalar1, SkIRect::MakeWH(width, height));
437 Rec* rec = SkNEW_ARGS(Rec, (key, bitmap));
438 return this->addAndLock(rec);
// Add a scaled version of `orig`. Zero scales are rejected (reserved for
// mipmap keys) and so are bitmaps with no pixel ref (empty bounds); both
// early returns are elided in this view.
441 SkScaledImageCache::ID* SkScaledImageCache::addAndLock(const SkBitmap& orig,
444 const SkBitmap& scaled) {
445 if (0 == scaleX || 0 == scaleY) {
446 // degenerate, and the key we use for mipmaps
449 SkIRect bounds = get_bounds_from_bitmap(orig);
450 if (bounds.isEmpty()) {
453 Key key(orig.getGenerationID(), scaleX, scaleY, bounds);
454 Rec* rec = SkNEW_ARGS(Rec, (key, scaled));
455 return this->addAndLock(rec);
// Add a mipmap entry for `orig`, keyed with the (0, 0) degenerate scale.
// Bitmaps with no pixel ref are rejected (early return elided).
458 SkScaledImageCache::ID* SkScaledImageCache::addAndLockMip(const SkBitmap& orig,
459 const SkMipMap* mip) {
460 SkIRect bounds = get_bounds_from_bitmap(orig);
461 if (bounds.isEmpty()) {
464 Key key(orig.getGenerationID(), 0, 0, bounds);
465 Rec* rec = SkNEW_ARGS(Rec, (key, mip));
466 return this->addAndLock(rec);
// Release one lock on the entry behind `id`. In debug builds (the elided
// SkDEBUGCODE region) the list is walked to verify the id actually belongs
// to this cache. When the lock count hits zero the entry becomes purgeable,
// so we take the opportunity to enforce the budget.
469 void SkScaledImageCache::unlock(SkScaledImageCache::ID* id) {
476 while (rec != NULL) {
477 if (rec == id_to_rec(id)) {
486 Rec* rec = id_to_rec(id);
487 SkASSERT(rec->fLockCount > 0);
488 rec->fLockCount -= 1;
490 // we may have been over-budget, but now have released something, so check
491 // if we should purge.
492 if (0 == rec->fLockCount) {
493 this->purgeAsNeeded();
// Evict unlocked entries, oldest (tail) first, until within budget.
// Discardable mode limits by entry count; malloc mode limits by total bytes
// (the unused dimension is set to "no limit"). Locked entries are skipped.
// The tail-to-head walk setup and the detach/delete of each victim are
// elided in this view.
497 void SkScaledImageCache::purgeAsNeeded() {
501 if (fDiscardableFactory) {
502 countLimit = SK_DISCARDABLEMEMORY_SCALEDIMAGECACHE_COUNT_LIMIT;
503 byteLimit = SK_MaxU32; // no limit based on bytes
505 countLimit = SK_MaxS32; // no limit based on count
506 byteLimit = fTotalByteLimit;
509 size_t bytesUsed = fTotalBytesUsed;
510 int countUsed = fCount;
// Stop as soon as both dimensions are back under their limits.
514 if (bytesUsed < byteLimit && countUsed < countLimit) {
// Capture fPrev before (possibly) deleting rec so the walk can continue.
518 Rec* prev = rec->fPrev;
519 if (0 == rec->fLockCount) {
520 size_t used = rec->bytesUsed();
521 SkASSERT(used <= bytesUsed);
524 fHash->remove(rec->fKey);
// Write the adjusted running total back to the member.
535 fTotalBytesUsed = bytesUsed;
// Set a new total byte budget and return the previous one. Shrinking the
// budget triggers an immediate purge; growing it never does.
539 size_t SkScaledImageCache::setTotalByteLimit(size_t newLimit) {
540 size_t prevLimit = fTotalByteLimit;
541 fTotalByteLimit = newLimit;
542 if (newLimit < prevLimit) {
543 this->purgeAsNeeded();
548 ///////////////////////////////////////////////////////////////////////////////
// Unlink `rec` from the doubly-linked LRU list, patching fHead/fTail as
// needed (most of the relinking is elided in this view). The rec itself is
// not deleted; its link pointers are cleared.
550 void SkScaledImageCache::detach(Rec* rec) {
551 Rec* prev = rec->fPrev;
552 Rec* next = rec->fNext;
555 SkASSERT(fHead == rec);
567 rec->fNext = rec->fPrev = NULL;
// Move an existing entry to the head (most-recently-used) end of the LRU
// list. Body (detach + relink at head) is elided in this view.
570 void SkScaledImageCache::moveToHead(Rec* rec) {
// Insert a brand-new entry at the head of the LRU list and charge it against
// the running byte/count totals (list relinking is elided in this view).
589 void SkScaledImageCache::addToHead(Rec* rec) {
601 fTotalBytesUsed += rec->bytesUsed();
607 ///////////////////////////////////////////////////////////////////////////////
// Debug-only consistency check: verifies the head/tail invariants of the
// LRU list, that the per-entry byte sizes sum to fTotalBytesUsed, and that
// the entry count matches fCount, walking the list in both directions.
// (Loop scaffolding between the assertions is elided in this view.)
610 void SkScaledImageCache::validate() const {
// Empty cache: no tail and zero bytes accounted.
612 SkASSERT(NULL == fTail);
613 SkASSERT(0 == fTotalBytesUsed);
// Single-entry cache: head == tail with no neighbors.
617 if (fHead == fTail) {
618 SkASSERT(NULL == fHead->fPrev);
619 SkASSERT(NULL == fHead->fNext);
620 SkASSERT(fHead->bytesUsed() == fTotalBytesUsed);
// Two-or-more entries: ends are distinct and properly terminated.
624 SkASSERT(NULL == fHead->fPrev);
625 SkASSERT(NULL != fHead->fNext);
626 SkASSERT(NULL == fTail->fNext);
627 SkASSERT(NULL != fTail->fPrev);
// Forward walk: accumulate bytes and count.
631 const Rec* rec = fHead;
634 used += rec->bytesUsed();
635 SkASSERT(used <= fTotalBytesUsed);
638 SkASSERT(fCount == count);
// Backward walk: subtract bytes back down to zero.
644 SkASSERT(used >= rec->bytesUsed());
645 used -= rec->bytesUsed();
649 SkASSERT(0 == count);
// Print a one-line summary of the cache (entry count, bytes, how many
// entries are currently locked, and which allocation mode is in use).
// The list walk that tallies `locked` is partly elided in this view.
654 void SkScaledImageCache::dump() const {
657 const Rec* rec = fHead;
660 locked += rec->fLockCount > 0;
664 SkDebugf("SkScaledImageCache: count=%d bytes=%d locked=%d %s\n",
665 fCount, fTotalBytesUsed, locked,
666 fDiscardableFactory ? "discardable" : "malloc");
// Set the per-allocation byte cap and return the previous value.
669 size_t SkScaledImageCache::setSingleAllocationByteLimit(size_t newLimit) {
670 size_t oldLimit = fSingleAllocationByteLimit;
671 fSingleAllocationByteLimit = newLimit;
// Accessor for the per-allocation byte cap (0 means unlimited, per init()).
675 size_t SkScaledImageCache::getSingleAllocationByteLimit() const {
676 return fSingleAllocationByteLimit;
679 ///////////////////////////////////////////////////////////////////////////////
681 #include "SkThread.h"
// Process-wide singleton cache, guarded by gMutex for all public statics.
683 SK_DECLARE_STATIC_MUTEX(gMutex);
684 static SkScaledImageCache* gScaledImageCache = NULL;
// atexit() hook; the actual delete is compiled in only for Skia's own tests
// (the surrounding #ifdef is elided in this view).
685 static void cleanup_gScaledImageCache() {
686 // We'll clean this up in our own tests, but disable for clients.
687 // Chrome seems to have funky multi-process things going on in unit tests that
688 // makes this unsafe to delete when the main process atexit()s.
689 // SkLazyPtr does the same sort of thing.
691 SkDELETE(gScaledImageCache);
695 /** Must hold gMutex when calling. */
// Lazily create the singleton on first use: discardable-backed when the
// build opts in, otherwise a fixed byte-budget malloc cache.
696 static SkScaledImageCache* get_cache() {
697 // gMutex is always held when this is called, so we don't need to be fancy in here.
699 if (NULL == gScaledImageCache) {
700 #ifdef SK_USE_DISCARDABLE_SCALEDIMAGECACHE
701 gScaledImageCache = SkNEW_ARGS(SkScaledImageCache, (SkDiscardableMemory::Create));
703 gScaledImageCache = SkNEW_ARGS(SkScaledImageCache, (SK_DEFAULT_IMAGE_CACHE_LIMIT));
705 atexit(cleanup_gScaledImageCache);
707 return gScaledImageCache;
// Thread-safe facade: lock the global mutex, delegate to the singleton.
711 SkScaledImageCache::ID* SkScaledImageCache::FindAndLock(
712 uint32_t pixelGenerationID,
716 SkAutoMutexAcquire am(gMutex);
717 return get_cache()->findAndLock(pixelGenerationID, width, height, scaled);
// Thread-safe facade for addAndLock(genID, width, height, bitmap).
720 SkScaledImageCache::ID* SkScaledImageCache::AddAndLock(
721 uint32_t pixelGenerationID,
724 const SkBitmap& scaled) {
725 SkAutoMutexAcquire am(gMutex);
726 return get_cache()->addAndLock(pixelGenerationID, width, height, scaled);
// Thread-safe facade for the scaled-bitmap finder.
730 SkScaledImageCache::ID* SkScaledImageCache::FindAndLock(const SkBitmap& orig,
734 SkAutoMutexAcquire am(gMutex);
735 return get_cache()->findAndLock(orig, scaleX, scaleY, scaled);
// Thread-safe facade for the mipmap finder.
738 SkScaledImageCache::ID* SkScaledImageCache::FindAndLockMip(const SkBitmap& orig,
739 SkMipMap const ** mip) {
740 SkAutoMutexAcquire am(gMutex);
741 return get_cache()->findAndLockMip(orig, mip);
// Thread-safe facade for the scaled-bitmap adder.
744 SkScaledImageCache::ID* SkScaledImageCache::AddAndLock(const SkBitmap& orig,
747 const SkBitmap& scaled) {
748 SkAutoMutexAcquire am(gMutex);
749 return get_cache()->addAndLock(orig, scaleX, scaleY, scaled);
// Thread-safe facade for the mipmap adder.
752 SkScaledImageCache::ID* SkScaledImageCache::AddAndLockMip(const SkBitmap& orig,
753 const SkMipMap* mip) {
754 SkAutoMutexAcquire am(gMutex);
755 return get_cache()->addAndLockMip(orig, mip);
// Thread-safe facade for unlock(); pairs with every *AndLock above.
758 void SkScaledImageCache::Unlock(SkScaledImageCache::ID* id) {
759 SkAutoMutexAcquire am(gMutex);
760 get_cache()->unlock(id);
762 // get_cache()->dump();
// Thread-safe facade for the current total bytes held by the cache.
765 size_t SkScaledImageCache::GetTotalBytesUsed() {
766 SkAutoMutexAcquire am(gMutex);
767 return get_cache()->getTotalBytesUsed();
// Thread-safe facade for the total byte budget.
770 size_t SkScaledImageCache::GetTotalByteLimit() {
771 SkAutoMutexAcquire am(gMutex);
772 return get_cache()->getTotalByteLimit();
// Thread-safe facade; returns the previous limit (see setTotalByteLimit).
775 size_t SkScaledImageCache::SetTotalByteLimit(size_t newLimit) {
776 SkAutoMutexAcquire am(gMutex);
777 return get_cache()->setTotalByteLimit(newLimit);
// Thread-safe facade; NULL in malloc mode, discardable allocator otherwise.
780 SkBitmap::Allocator* SkScaledImageCache::GetAllocator() {
781 SkAutoMutexAcquire am(gMutex);
782 return get_cache()->allocator();
// Thread-safe facade for dump() (the delegating call is elided in this view).
785 void SkScaledImageCache::Dump() {
786 SkAutoMutexAcquire am(gMutex);
// Thread-safe facade; returns the previous per-allocation limit.
790 size_t SkScaledImageCache::SetSingleAllocationByteLimit(size_t size) {
791 SkAutoMutexAcquire am(gMutex);
792 return get_cache()->setSingleAllocationByteLimit(size);
// Thread-safe facade for the per-allocation limit accessor.
795 size_t SkScaledImageCache::GetSingleAllocationByteLimit() {
796 SkAutoMutexAcquire am(gMutex);
797 return get_cache()->getSingleAllocationByteLimit();
800 ///////////////////////////////////////////////////////////////////////////////
802 #include "SkGraphics.h"
// Public SkGraphics alias for SkScaledImageCache::GetTotalBytesUsed().
804 size_t SkGraphics::GetImageCacheTotalBytesUsed() {
805 return SkScaledImageCache::GetTotalBytesUsed();
// Public SkGraphics alias for SkScaledImageCache::GetTotalByteLimit().
808 size_t SkGraphics::GetImageCacheTotalByteLimit() {
809 return SkScaledImageCache::GetTotalByteLimit();
// Public SkGraphics alias; returns the previous byte limit.
812 size_t SkGraphics::SetImageCacheTotalByteLimit(size_t newLimit) {
813 return SkScaledImageCache::SetTotalByteLimit(newLimit);
// Public SkGraphics alias for the per-allocation limit accessor.
816 size_t SkGraphics::GetImageCacheSingleAllocationByteLimit() {
817 return SkScaledImageCache::GetSingleAllocationByteLimit();
// Public SkGraphics alias; returns the previous per-allocation limit.
820 size_t SkGraphics::SetImageCacheSingleAllocationByteLimit(size_t newLimit) {
821 return SkScaledImageCache::SetSingleAllocationByteLimit(newLimit);