SkBitmapHeapEntry::SkBitmapHeapEntry()
: fSlot(-1)
, fRefCount(0)
- , fBytesAllocated(0) {
+ , fBytesAllocated(0)
+ , fMoreRecentlyUsed(NULL)
+ , fLessRecentlyUsed(NULL) {
}
SkBitmapHeapEntry::~SkBitmapHeapEntry() {
///////////////////////////////////////////////////////////////////////////////
-int SkBitmapHeap::LookupEntry::Compare(const SkBitmapHeap::LookupEntry *a,
- const SkBitmapHeap::LookupEntry *b) {
- if (a->fGenerationId < b->fGenerationId) {
- return -1;
- } else if (a->fGenerationId > b->fGenerationId) {
- return 1;
- } else if (a->fPixelOffset < b->fPixelOffset) {
- return -1;
- } else if (a->fPixelOffset > b->fPixelOffset) {
- return 1;
- } else if (a->fWidth < b->fWidth) {
- return -1;
- } else if (a->fWidth > b->fWidth) {
- return 1;
- } else if (a->fHeight < b->fHeight) {
- return -1;
- } else if (a->fHeight > b->fHeight) {
- return 1;
- }
- return 0;
-}
-
-///////////////////////////////////////////////////////////////////////////////
-
SkBitmapHeap::SkBitmapHeap(int32_t preferredSize, int32_t ownerCount)
: INHERITED()
, fExternalStorage(NULL)
return array;
}
-void SkBitmapHeap::removeFromLRU(SkBitmapHeap::LookupEntry* entry) {
- if (fMostRecentlyUsed == entry) {
- fMostRecentlyUsed = entry->fLessRecentlyUsed;
- if (NULL == fMostRecentlyUsed) {
- SkASSERT(fLeastRecentlyUsed == entry);
- fLeastRecentlyUsed = NULL;
- } else {
- fMostRecentlyUsed->fMoreRecentlyUsed = NULL;
- }
- } else {
- // Remove entry from its prior place, and make sure to cover the hole.
- if (fLeastRecentlyUsed == entry) {
- SkASSERT(entry->fMoreRecentlyUsed != NULL);
- fLeastRecentlyUsed = entry->fMoreRecentlyUsed;
- }
- // Since we have already considered the case where entry is the most recently used, it must
- // have a more recently used at this point.
+// We just "used" the entry. Update our LRU accordingly
+void SkBitmapHeap::setMostRecentlyUsed(SkBitmapHeapEntry* entry) {
+ SkASSERT(entry != NULL);
+ if (entry == fMostRecentlyUsed) {
+ return;
+ }
+ // Remove the entry from its prior place, and make sure to cover the hole.
+ if (fLeastRecentlyUsed == entry) {
SkASSERT(entry->fMoreRecentlyUsed != NULL);
+ fLeastRecentlyUsed = entry->fMoreRecentlyUsed;
+ }
+ if (entry->fMoreRecentlyUsed != NULL) {
+ SkASSERT(fMostRecentlyUsed != entry);
entry->fMoreRecentlyUsed->fLessRecentlyUsed = entry->fLessRecentlyUsed;
-
- if (entry->fLessRecentlyUsed != NULL) {
- SkASSERT(fLeastRecentlyUsed != entry);
- entry->fLessRecentlyUsed->fMoreRecentlyUsed = entry->fMoreRecentlyUsed;
- }
+ }
+ if (entry->fLessRecentlyUsed != NULL) {
+ SkASSERT(fLeastRecentlyUsed != entry);
+ entry->fLessRecentlyUsed->fMoreRecentlyUsed = entry->fMoreRecentlyUsed;
}
entry->fMoreRecentlyUsed = NULL;
-}
-
-void SkBitmapHeap::appendToLRU(SkBitmapHeap::LookupEntry* entry) {
+ // Set up the head and tail pointers properly.
if (fMostRecentlyUsed != NULL) {
SkASSERT(NULL == fMostRecentlyUsed->fMoreRecentlyUsed);
fMostRecentlyUsed->fMoreRecentlyUsed = entry;
}
// iterate through our LRU cache and try to find an entry to evict
-SkBitmapHeap::LookupEntry* SkBitmapHeap::findEntryToReplace(const SkBitmap& replacement) {
+SkBitmapHeapEntry* SkBitmapHeap::findEntryToReplace(const SkBitmap& replacement) {
SkASSERT(fPreferredCount != UNLIMITED_SIZE);
SkASSERT(fStorage.count() >= fPreferredCount);
- SkBitmapHeap::LookupEntry* iter = fLeastRecentlyUsed;
+ SkBitmapHeapEntry* iter = fLeastRecentlyUsed;
while (iter != NULL) {
- SkBitmapHeapEntry* heapEntry = fStorage[iter->fStorageSlot];
- if (heapEntry->fRefCount > 0) {
+ if (iter->fRefCount > 0) {
// If the least recently used bitmap has not been unreferenced
// by its owner, then according to our LRU specifications a more
// recently used one cannot have used all its references yet either.
return NULL;
}
- if (replacement.getGenerationID() == iter->fGenerationId) {
+ if (replacement.pixelRef() && replacement.pixelRef() == iter->fBitmap.pixelRef()) {
// Do not replace a bitmap with a new one using the same
// pixel ref. Instead look for a different one that will
// potentially free up more space.
if (UNLIMITED_SIZE == fPreferredCount) {
return 0;
}
- LookupEntry* iter = fLeastRecentlyUsed;
+ SkBitmapHeapEntry* iter = fLeastRecentlyUsed;
size_t origBytesAllocated = fBytesAllocated;
// Purge starting from LRU until a non-evictable bitmap is found or until
// everything is evicted.
- while (iter != NULL) {
- SkBitmapHeapEntry* heapEntry = fStorage[iter->fStorageSlot];
- if (heapEntry->fRefCount > 0) {
- break;
- }
- LookupEntry* next = iter->fMoreRecentlyUsed;
- this->removeEntryFromLookupTable(iter);
+ while (iter && 0 == iter->fRefCount) {
+ SkBitmapHeapEntry* next = iter->fMoreRecentlyUsed;
+ this->removeEntryFromLookupTable(*iter);
// Free the pixel memory. removeEntryFromLookupTable already reduced
// fBytesAllocated properly.
- heapEntry->fBitmap.reset();
+ iter->fBitmap.reset();
// Add to list of unused slots which can be reused in the future.
- fUnusedSlots.push(heapEntry->fSlot);
+ fUnusedSlots.push(iter->fSlot);
+ // Remove its LRU pointers, so that it does not pretend it is already in
+ // the list the next time it is used.
+ iter->fMoreRecentlyUsed = iter->fLessRecentlyUsed = NULL;
iter = next;
if (origBytesAllocated - fBytesAllocated >= bytesToFree) {
break;
}
int SkBitmapHeap::findInLookupTable(const LookupEntry& indexEntry, SkBitmapHeapEntry** entry) {
- int index = SkTSearch<const LookupEntry>((const LookupEntry**)fLookupTable.begin(),
+ int index = SkTSearch<const LookupEntry>(fLookupTable.begin(),
fLookupTable.count(),
- &indexEntry, sizeof(void*), LookupEntry::Compare);
+ indexEntry, sizeof(indexEntry));
if (index < 0) {
// insert ourselves into the bitmapIndex
index = ~index;
- *fLookupTable.insert(index) = SkNEW_ARGS(LookupEntry, (indexEntry));
+ fLookupTable.insert(index, 1, &indexEntry);
} else if (entry != NULL) {
// populate the entry if needed
- *entry = fStorage[fLookupTable[index]->fStorageSlot];
+ *entry = fStorage[fLookupTable[index].fStorageSlot];
}
return index;
return true;
}
-int SkBitmapHeap::removeEntryFromLookupTable(LookupEntry* entry) {
+int SkBitmapHeap::removeEntryFromLookupTable(const SkBitmapHeapEntry& entry) {
// remove the bitmap index for the deleted entry
SkDEBUGCODE(int count = fLookupTable.count();)
- int index = this->findInLookupTable(*entry, NULL);
+ // FIXME: If copying bitmaps retained the generation ID, we could
+ // just grab the generation ID from entry.fBitmap
+ LookupEntry key(entry.fBitmap, entry.fGenerationID);
+ int index = this->findInLookupTable(key, NULL);
// Verify that findInLookupTable found an existing entry rather than adding
// a new entry to the lookup table.
SkASSERT(count == fLookupTable.count());
- SkDELETE(fLookupTable[index]);
+
fLookupTable.remove(index);
- fBytesAllocated -= fStorage[entry->fStorageSlot]->fBytesAllocated;
+ fBytesAllocated -= entry.fBytesAllocated;
return index;
}
SkBitmapHeapEntry* entry = NULL;
int searchIndex = this->findInLookupTable(LookupEntry(originalBitmap), &entry);
+ // check to see if we already had a copy of the bitmap in the heap
if (entry) {
- // Already had a copy of the bitmap in the heap.
if (fOwnerCount != IGNORE_OWNERS) {
entry->addReferences(fOwnerCount);
}
if (fPreferredCount != UNLIMITED_SIZE) {
- LookupEntry* lookupEntry = fLookupTable[searchIndex];
- if (lookupEntry != fMostRecentlyUsed) {
- this->removeFromLRU(lookupEntry);
- this->appendToLRU(lookupEntry);
- }
+ this->setMostRecentlyUsed(entry);
}
return entry->fSlot;
}
// decide if we need to evict an existing heap entry or create a new one
if (fPreferredCount != UNLIMITED_SIZE && fStorage.count() >= fPreferredCount) {
// iterate through our LRU cache and try to find an entry to evict
- LookupEntry* lookupEntry = this->findEntryToReplace(originalBitmap);
- if (lookupEntry != NULL) {
- // we found an entry to evict
- entry = fStorage[lookupEntry->fStorageSlot];
- // Remove it from the LRU. The new entry will be added to the LRU later.
- this->removeFromLRU(lookupEntry);
- int index = this->removeEntryFromLookupTable(lookupEntry);
+ entry = this->findEntryToReplace(originalBitmap);
+ // we found an entry to evict
+ if (entry) {
+ int index = this->removeEntryFromLookupTable(*entry);
// update the current search index now that we have removed one
if (index < searchIndex) {
// if the copy failed then we must abort
if (!copySucceeded) {
// delete the index
- SkDELETE(fLookupTable[searchIndex]);
fLookupTable.remove(searchIndex);
// If entry is the last slot in storage, it is safe to delete it.
if (fStorage.count() - 1 == entry->fSlot) {
fStorage.remove(entry->fSlot);
fBytesAllocated -= sizeof(SkBitmapHeapEntry);
SkDELETE(entry);
- } else {
- fUnusedSlots.push(entry->fSlot);
}
return INVALID_SLOT;
}
// update the index with the appropriate slot in the heap
- fLookupTable[searchIndex]->fStorageSlot = entry->fSlot;
+ fLookupTable[searchIndex].fStorageSlot = entry->fSlot;
- // compute the space taken by this entry
+ // compute the space taken by this entry
// TODO if there is a shared pixel ref don't count it
// If the SkBitmap does not share an SkPixelRef with an SkBitmap already
// in the SharedHeap, also include the size of its pixels.
// add the bytes from this entry to the total count
fBytesAllocated += entry->fBytesAllocated;
+ entry->fGenerationID = originalBitmap.getGenerationID();
+
if (fOwnerCount != IGNORE_OWNERS) {
entry->addReferences(fOwnerCount);
}
if (fPreferredCount != UNLIMITED_SIZE) {
- this->appendToLRU(fLookupTable[searchIndex]);
+ this->setMostRecentlyUsed(entry);
}
return entry->fSlot;
}
int32_t fSlot;
int32_t fRefCount;
+ uint32_t fGenerationID;
SkBitmap fBitmap;
// Keep track of the bytes allocated for this bitmap. When replacing the
// bitmap or removing this HeapEntry we know how much memory has been
// reclaimed.
size_t fBytesAllocated;
+ // TODO: Generalize the LRU caching mechanism
+ SkBitmapHeapEntry* fMoreRecentlyUsed;
+ SkBitmapHeapEntry* fLessRecentlyUsed;
friend class SkBitmapHeap;
};
* Returns a count of the number of items currently in the heap
*/
int count() const {
- SkASSERT(fExternalStorage != NULL ||
- fStorage.count() - fUnusedSlots.count() == fLookupTable.count());
+ SkASSERT(fExternalStorage != NULL || fStorage.count() == fLookupTable.count());
return fLookupTable.count();
}
private:
struct LookupEntry {
- LookupEntry(const SkBitmap& bm)
- : fGenerationId(bm.getGenerationID())
- , fPixelOffset(bm.pixelRefOffset())
- , fWidth(bm.width())
- , fHeight(bm.height())
- , fMoreRecentlyUsed(NULL)
- , fLessRecentlyUsed(NULL){}
-
- const uint32_t fGenerationId; // SkPixelRef GenerationID.
- const size_t fPixelOffset;
- const uint32_t fWidth;
- const uint32_t fHeight;
-
- // TODO: Generalize the LRU caching mechanism
- LookupEntry* fMoreRecentlyUsed;
- LookupEntry* fLessRecentlyUsed;
+ LookupEntry(const SkBitmap& bm, uint32_t genId = 0) {
+ fGenerationId = 0 == genId ? bm.getGenerationID() : genId;
+ fPixelOffset = bm.pixelRefOffset();
+ fWidth = bm.width();
+ fHeight = bm.height();
+ }
+ uint32_t fGenerationId; // SkPixelRef GenerationID.
+ size_t fPixelOffset;
+ uint32_t fWidth;
+ uint32_t fHeight;
uint32_t fStorageSlot; // slot of corresponding bitmap in fStorage.
- /**
- * Compare two LookupEntry pointers, returning -1, 0, 1 for sorting.
- */
- static int Compare(const LookupEntry* a, const LookupEntry* b);
+ bool operator < (const LookupEntry& other) const {
+ if (this->fGenerationId != other.fGenerationId) {
+ return this->fGenerationId < other.fGenerationId;
+ } else if(this->fPixelOffset != other.fPixelOffset) {
+ return this->fPixelOffset < other.fPixelOffset;
+ } else if(this->fWidth != other.fWidth) {
+ return this->fWidth < other.fWidth;
+ } else {
+ return this->fHeight < other.fHeight;
+ }
+ }
+ bool operator != (const LookupEntry& other) const {
+ return this->fGenerationId != other.fGenerationId
+ || this->fPixelOffset != other.fPixelOffset
+ || this->fWidth != other.fWidth
+ || this->fHeight != other.fHeight;
+ }
};
/**
- * Remove the entry from the lookup table. Also deletes the entry pointed
- * to by the table. Therefore, if a pointer to that one was passed in, the
- * pointer should no longer be used, since the object to which it points has
- * been deleted.
+ * Remove the entry from the lookup table.
* @return The index in the lookup table of the entry before removal.
*/
- int removeEntryFromLookupTable(LookupEntry*);
+ int removeEntryFromLookupTable(const SkBitmapHeapEntry&);
/**
* Searches for the bitmap in the lookup table and returns the bitmaps index within the table.
*/
int findInLookupTable(const LookupEntry& key, SkBitmapHeapEntry** entry);
- LookupEntry* findEntryToReplace(const SkBitmap& replacement);
+ SkBitmapHeapEntry* findEntryToReplace(const SkBitmap& replacement);
bool copyBitmap(const SkBitmap& originalBitmap, SkBitmap& copiedBitmap);
-
- /**
- * Remove a LookupEntry from the LRU, in preparation for either deleting or appending as most
- * recent. Points the LookupEntry's old neighbors at each other, and sets fLeastRecentlyUsed
- * (if there is still an entry left). Sets LookupEntry's fMoreRecentlyUsed to NULL and leaves
- * its fLessRecentlyUsed unmodified.
- */
- void removeFromLRU(LookupEntry* entry);
-
- /**
- * Append a LookupEntry to the end of the LRU cache, marking it as the most
- * recently used. Assumes that the LookupEntry is already in fLookupTable,
- * but is not in the LRU cache. If it is in the cache, removeFromLRU should
- * be called first.
- */
- void appendToLRU(LookupEntry*);
+ void setMostRecentlyUsed(SkBitmapHeapEntry* entry);
// searchable index that maps to entries in the heap
- SkTDArray<LookupEntry*> fLookupTable;
+ SkTDArray<LookupEntry> fLookupTable;
// heap storage
SkTDArray<SkBitmapHeapEntry*> fStorage;
SkTDArray<int> fUnusedSlots;
ExternalStorage* fExternalStorage;
- LookupEntry* fMostRecentlyUsed;
- LookupEntry* fLeastRecentlyUsed;
+ SkBitmapHeapEntry* fMostRecentlyUsed;
+ SkBitmapHeapEntry* fLeastRecentlyUsed;
const int32_t fPreferredCount;
const int32_t fOwnerCount;
}
void addBitmap(int index) {
+ index--;
SkBitmap* bm;
if(fBitmaps.count() == index) {
bm = SkNEW(SkBitmap);
}
SkBitmap* getBitmap(unsigned index) {
- return fBitmaps[index];
+ return fBitmaps[index - 1];
}
void setSharedHeap(SkBitmapHeap* heap) {
if (this->needOpBytes()) {
this->writeOp(kDone_DrawOp);
this->doNotify();
- if (shouldFlattenBitmaps(fFlags)) {
- // In this case, a BitmapShuttle is reffed by the SharedHeap
- // and refs this canvas. Unref the SharedHeap to end the
- // circular reference. When shouldFlattenBitmaps is false,
- // there is no circular reference, so the SharedHeap can be
- // safely unreffed in the destructor.
- fSharedHeap->unref();
- fSharedHeap = NULL;
- }
}
fDone = true;
}
size_t freeMemoryIfPossible(size_t bytesToFree);
size_t storageAllocatedForRecording() {
+ // FIXME: This can be removed once fSharedHeap is used by cross process
+ // case.
+ if (NULL == fSharedHeap) {
+ return 0;
+ }
return fSharedHeap->bytesAllocated();
}
const SkPaint&) SK_OVERRIDE;
virtual void drawData(const void*, size_t) SK_OVERRIDE;
- /**
- * Flatten an SkBitmap to send to the reader, where it will be referenced
- * according to slot.
- */
- bool shuttleBitmap(const SkBitmap&, int32_t slot);
private:
enum {
kNoSaveLayer = -1,
size_t fBlockSize; // amount allocated for writer
size_t fBytesNotified;
bool fDone;
- const uint32_t fFlags;
+ uint32_t fFlags;
SkRefCntSet fTypefaceSet;
// if a new SkFlatData was added when in cross process mode
void flattenFactoryNames();
+ // These are only used when in cross process, but with no shared address
+ // space, so bitmaps are flattened.
+ FlattenableHeap fBitmapHeap;
+ SkBitmapDictionary fBitmapDictionary;
+ int flattenToIndex(const SkBitmap&);
+
FlattenableHeap fFlattenableHeap;
FlatDictionary fFlatDictionary;
int fCurrFlatIndex[kCount_PaintFlats];
int flattenToIndex(SkFlattenable* obj, PaintFlats);
- // Common code used by drawBitmap*. Behaves differently depending on the
- // type of SkBitmapHeap being used, which is determined by the flags used.
- bool commonDrawBitmap(const SkBitmap& bm, DrawOps op, unsigned flags,
- size_t opBytesNeeded, const SkPaint* paint);
+ // Common code used by drawBitmap* when flattening.
+ bool commonDrawBitmapFlatten(const SkBitmap& bm, DrawOps op, unsigned flags,
+ size_t opBytesNeeded, const SkPaint* paint);
+ // Common code used by drawBitmap* when storing in the heap.
+ bool commonDrawBitmapHeap(const SkBitmap& bm, DrawOps op, unsigned flags,
+ size_t opBytesNeeded, const SkPaint* paint);
+ // Convenience type for function pointer
+ typedef bool (SkGPipeCanvas::*BitmapCommonFunction)(const SkBitmap&,
+ DrawOps, unsigned,
+ size_t, const SkPaint*);
SkPaint fPaint;
void writePaint(const SkPaint&);
}
}
-bool SkGPipeCanvas::shuttleBitmap(const SkBitmap& bm, int32_t slot) {
+int SkGPipeCanvas::flattenToIndex(const SkBitmap & bitmap) {
SkASSERT(shouldFlattenBitmaps(fFlags));
- SkOrderedWriteBuffer buffer(1024);
- buffer.setNamedFactoryRecorder(fFactorySet);
- bm.flatten(buffer);
- this->flattenFactoryNames();
- uint32_t size = buffer.size();
- if (this->needOpBytes(size)) {
- this->writeOp(kDef_Bitmap_DrawOp, 0, slot);
- void* dst = static_cast<void*>(fWriter.reserve(size));
- buffer.writeToMemory(dst);
- return true;
+ uint32_t flags = SkFlattenableWriteBuffer::kCrossProcess_Flag;
+ bool added, replaced;
+ const SkFlatData* flat = fBitmapDictionary.findAndReplace(
+ bitmap, flags, fBitmapHeap.flatToReplace(), &added, &replaced);
+
+ int index = flat->index();
+ if (added) {
+ this->flattenFactoryNames();
+ size_t flatSize = flat->flatSize();
+ if (this->needOpBytes(flatSize)) {
+ this->writeOp(kDef_Bitmap_DrawOp, 0, index);
+ fWriter.write(flat->data(), flatSize);
+ }
}
- return false;
+ return index;
}
// return 0 for NULL (or unflattenable obj), or index-base-1
///////////////////////////////////////////////////////////////////////////////
-/**
- * If SkBitmaps are to be flattened to send to the reader, this class is
- * provided to the SkBitmapHeap to tell the SkGPipeCanvas to do so.
- */
-class BitmapShuttle : public SkBitmapHeap::ExternalStorage {
-public:
- BitmapShuttle(SkGPipeCanvas*);
-
- ~BitmapShuttle();
-
- virtual bool insert(const SkBitmap& bitmap, int32_t slot) SK_OVERRIDE;
-
-private:
- SkGPipeCanvas* fCanvas;
-};
-
-///////////////////////////////////////////////////////////////////////////////
-
#define MIN_BLOCK_SIZE (16 * 1024)
#define BITMAPS_TO_KEEP 5
#define FLATTENABLES_TO_KEEP 10
: fFactorySet(isCrossProcess(flags) ? SkNEW(SkNamedFactorySet) : NULL)
, fWriter(*writer)
, fFlags(flags)
+, fBitmapHeap(BITMAPS_TO_KEEP, fFactorySet)
+, fBitmapDictionary(&fBitmapHeap)
, fFlattenableHeap(FLATTENABLES_TO_KEEP, fFactorySet)
, fFlatDictionary(&fFlattenableHeap) {
fController = controller;
}
if (shouldFlattenBitmaps(flags)) {
- BitmapShuttle* shuttle = SkNEW_ARGS(BitmapShuttle, (this));
- fSharedHeap = SkNEW_ARGS(SkBitmapHeap, (shuttle, BITMAPS_TO_KEEP));
- shuttle->unref();
+ // TODO: Use the shared heap for cross process case as well.
+ fSharedHeap = NULL;
} else {
- fSharedHeap = SkNEW_ARGS(SkBitmapHeap,
- (BITMAPS_TO_KEEP, controller->numberOfReaders()));
+ fSharedHeap = SkNEW_ARGS(SkBitmapHeap, (5, controller->numberOfReaders()));
if (this->needOpBytes(sizeof(void*))) {
this->writeOp(kShareHeap_DrawOp);
fWriter.writePtr(static_cast<void*>(fSharedHeap));
SkGPipeCanvas::~SkGPipeCanvas() {
this->finish();
SkSafeUnref(fFactorySet);
+ // FIXME: This can be changed to unref() once fSharedHeap is used by cross
+ // process case.
SkSafeUnref(fSharedHeap);
}
}
}
-bool SkGPipeCanvas::commonDrawBitmap(const SkBitmap& bm, DrawOps op,
- unsigned flags,
- size_t opBytesNeeded,
- const SkPaint* paint) {
+bool SkGPipeCanvas::commonDrawBitmapFlatten(const SkBitmap& bm, DrawOps op,
+ unsigned flags,
+ size_t opBytesNeeded,
+ const SkPaint* paint) {
+ if (paint != NULL) {
+ flags |= kDrawBitmap_HasPaint_DrawOpsFlag;
+ this->writePaint(*paint);
+ }
+ int bitmapIndex = this->flattenToIndex(bm);
+ if (this->needOpBytes(opBytesNeeded)) {
+ this->writeOp(op, flags, bitmapIndex);
+ return true;
+ }
+ return false;
+}
+
+bool SkGPipeCanvas::commonDrawBitmapHeap(const SkBitmap& bm, DrawOps op,
+ unsigned flags,
+ size_t opBytesNeeded,
+ const SkPaint* paint) {
int32_t bitmapIndex = fSharedHeap->insert(bm);
if (SkBitmapHeap::INVALID_SLOT == bitmapIndex) {
return false;
NOTIFY_SETUP(this);
size_t opBytesNeeded = sizeof(SkScalar) * 2;
- if (this->commonDrawBitmap(bm, kDrawBitmap_DrawOp, 0, opBytesNeeded, paint)) {
+ BitmapCommonFunction bitmapCommon = shouldFlattenBitmaps(fFlags) ?
+ &SkGPipeCanvas::commonDrawBitmapFlatten :
+ &SkGPipeCanvas::commonDrawBitmapHeap;
+
+ if ((*this.*bitmapCommon)(bm, kDrawBitmap_DrawOp, 0, opBytesNeeded, paint)) {
fWriter.writeScalar(left);
fWriter.writeScalar(top);
}
} else {
flags = 0;
}
+
+ BitmapCommonFunction bitmapCommon = shouldFlattenBitmaps(fFlags) ?
+ &SkGPipeCanvas::commonDrawBitmapFlatten :
+ &SkGPipeCanvas::commonDrawBitmapHeap;
- if (this->commonDrawBitmap(bm, kDrawBitmapRect_DrawOp, flags, opBytesNeeded, paint)) {
+ if ((*this.*bitmapCommon)(bm, kDrawBitmapRect_DrawOp, flags, opBytesNeeded, paint)) {
if (hasSrc) {
fWriter.write32(src->fLeft);
fWriter.write32(src->fTop);
NOTIFY_SETUP(this);
size_t opBytesNeeded = matrix.writeToMemory(NULL);
- if (this->commonDrawBitmap(bm, kDrawBitmapMatrix_DrawOp, 0, opBytesNeeded, paint)) {
+ BitmapCommonFunction bitmapCommon = shouldFlattenBitmaps(fFlags) ?
+ &SkGPipeCanvas::commonDrawBitmapFlatten :
+ &SkGPipeCanvas::commonDrawBitmapHeap;
+
+ if ((*this.*bitmapCommon)(bm, kDrawBitmapMatrix_DrawOp, 0, opBytesNeeded, paint)) {
fWriter.writeMatrix(matrix);
}
}
NOTIFY_SETUP(this);
size_t opBytesNeeded = sizeof(int32_t) * 4 + sizeof(SkRect);
- if (this->commonDrawBitmap(bm, kDrawBitmapNine_DrawOp, 0, opBytesNeeded, paint)) {
+ BitmapCommonFunction bitmapCommon = shouldFlattenBitmaps(fFlags) ?
+ &SkGPipeCanvas::commonDrawBitmapFlatten :
+ &SkGPipeCanvas::commonDrawBitmapHeap;
+
+ if ((*this.*bitmapCommon)(bm, kDrawBitmapNine_DrawOp, 0, opBytesNeeded, paint)) {
fWriter.write32(center.fLeft);
fWriter.write32(center.fTop);
fWriter.write32(center.fRight);
NOTIFY_SETUP(this);
size_t opBytesNeeded = sizeof(int32_t) * 2;
- if (this->commonDrawBitmap(bm, kDrawSprite_DrawOp, 0, opBytesNeeded, paint)) {
+ BitmapCommonFunction bitmapCommon = shouldFlattenBitmaps(fFlags) ?
+ &SkGPipeCanvas::commonDrawBitmapFlatten :
+ &SkGPipeCanvas::commonDrawBitmapHeap;
+
+ if ((*this.*bitmapCommon)(bm, kDrawSprite_DrawOp, 0, opBytesNeeded, paint)) {
fWriter.write32(left);
fWriter.write32(top);
}
}
size_t SkGPipeCanvas::freeMemoryIfPossible(size_t bytesToFree) {
+ // FIXME: This can be removed once fSharedHeap is used by cross process
+ // case.
+ if (NULL == fSharedHeap) {
+ return 0;
+ }
return fSharedHeap->freeMemoryIfPossible(bytesToFree);
}
return NULL == fCanvas ? 0 : fCanvas->storageAllocatedForRecording();
}
-///////////////////////////////////////////////////////////////////////////////
-
-BitmapShuttle::BitmapShuttle(SkGPipeCanvas* canvas) {
- SkASSERT(canvas != NULL);
- fCanvas = canvas;
- fCanvas->ref();
-}
-
-BitmapShuttle::~BitmapShuttle() {
- fCanvas->unref();
-}
-
-bool BitmapShuttle::insert(const SkBitmap& bitmap, int32_t slot) {
- return fCanvas->shuttleBitmap(bitmap, slot);
-}