/*
 * Copyright 2012 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
8 #include "SkImage_Base.h"
10 #include "SkBitmapProcShader.h"
12 #include "SkColorTable.h"
14 #include "SkImagePriv.h"
15 #include "SkPixelRef.h"
16 #include "SkSurface.h"
19 #include "GrContext.h"
20 #include "GrTextureAdjuster.h"
25 // fixes https://bug.skia.org/5096
26 static bool is_not_subset(const SkBitmap& bm) {
27 SkASSERT(bm.pixelRef());
28 SkISize dim = bm.pixelRef()->info().dimensions();
29 SkASSERT(dim != bm.dimensions() || bm.pixelRefOrigin().isZero());
30 return dim == bm.dimensions();
33 class SkImage_Raster : public SkImage_Base {
35 static bool ValidArgs(const Info& info, size_t rowBytes, bool hasColorTable,
37 const int maxDimension = SK_MaxS32 >> 2;
39 if (info.width() <= 0 || info.height() <= 0) {
42 if (info.width() > maxDimension || info.height() > maxDimension) {
45 if ((unsigned)info.colorType() > (unsigned)kLastEnum_SkColorType) {
48 if ((unsigned)info.alphaType() > (unsigned)kLastEnum_SkAlphaType) {
52 if (kUnknown_SkColorType == info.colorType()) {
56 const bool needsCT = kIndex_8_SkColorType == info.colorType();
57 if (needsCT != hasColorTable) {
61 if (rowBytes < info.minRowBytes()) {
65 size_t size = info.getSafeSize(rowBytes);
76 SkImage_Raster(const SkImageInfo&, sk_sp<SkData>, size_t rb, SkColorTable*);
77 virtual ~SkImage_Raster();
79 SkImageInfo onImageInfo() const override {
80 return fBitmap.info();
82 SkAlphaType onAlphaType() const override {
83 return fBitmap.alphaType();
86 bool onReadPixels(const SkImageInfo&, void*, size_t, int srcX, int srcY, CachingHint) const override;
87 bool onPeekPixels(SkPixmap*) const override;
88 const SkBitmap* onPeekBitmap() const override { return &fBitmap; }
90 bool getROPixels(SkBitmap*, SkColorSpace* dstColorSpace, CachingHint) const override;
91 GrTexture* asTextureRef(GrContext*, const GrSamplerParams&, SkColorSpace*,
92 sk_sp<SkColorSpace>*) const override;
93 sk_sp<SkImage> onMakeSubset(const SkIRect&) const override;
95 // exposed for SkSurface_Raster via SkNewImageFromPixelRef
96 SkImage_Raster(const SkImageInfo&, SkPixelRef*, const SkIPoint& origin, size_t rowBytes);
98 SkPixelRef* getPixelRef() const { return fBitmap.pixelRef(); }
100 bool onAsLegacyBitmap(SkBitmap*, LegacyBitmapMode) const override;
102 SkImage_Raster(const SkBitmap& bm, bool bitmapMayBeMutable = false)
103 : INHERITED(bm.width(), bm.height(),
104 is_not_subset(bm) ? bm.getGenerationID()
105 : (uint32_t)kNeedNewImageUniqueID)
108 if (bm.pixelRef()->isPreLocked()) {
109 // we only preemptively lock if there is no chance of triggering something expensive
110 // like a lazy decode or imagegenerator. PreLocked means it is flat pixels already.
111 fBitmap.lockPixels();
113 SkASSERT(bitmapMayBeMutable || fBitmap.isImmutable());
116 bool onIsLazyGenerated() const override {
117 return fBitmap.pixelRef() && fBitmap.pixelRef()->isLazyGenerated();
121 sk_sp<GrTexture> refPinnedTexture(uint32_t* uniqueID) const override;
122 bool onPinAsTexture(GrContext*) const override;
123 void onUnpinAsTexture(GrContext*) const override;
130 mutable sk_sp<GrTexture> fPinnedTexture;
131 mutable int32_t fPinnedCount = 0;
132 mutable uint32_t fPinnedUniqueID = 0;
135 typedef SkImage_Base INHERITED;
138 ///////////////////////////////////////////////////////////////////////////////
140 static void release_data(void* addr, void* context) {
141 SkData* data = static_cast<SkData*>(context);
145 SkImage_Raster::SkImage_Raster(const Info& info, sk_sp<SkData> data, size_t rowBytes,
146 SkColorTable* ctable)
147 : INHERITED(info.width(), info.height(), kNeedNewImageUniqueID)
149 void* addr = const_cast<void*>(data->data());
151 fBitmap.installPixels(info, addr, rowBytes, ctable, release_data, data.release());
152 fBitmap.setImmutable();
153 fBitmap.lockPixels();
156 SkImage_Raster::SkImage_Raster(const Info& info, SkPixelRef* pr, const SkIPoint& pixelRefOrigin,
158 : INHERITED(info.width(), info.height(), pr->getGenerationID())
160 fBitmap.setInfo(info, rowBytes);
161 fBitmap.setPixelRef(pr, pixelRefOrigin);
162 fBitmap.lockPixels();
163 SkASSERT(fBitmap.isImmutable());
166 SkImage_Raster::~SkImage_Raster() {
168 SkASSERT(nullptr == fPinnedTexture.get()); // want the caller to have manually unpinned
172 bool SkImage_Raster::onReadPixels(const SkImageInfo& dstInfo, void* dstPixels, size_t dstRowBytes,
173 int srcX, int srcY, CachingHint) const {
174 SkBitmap shallowCopy(fBitmap);
175 return shallowCopy.readPixels(dstInfo, dstPixels, dstRowBytes, srcX, srcY);
178 bool SkImage_Raster::onPeekPixels(SkPixmap* pm) const {
179 return fBitmap.peekPixels(pm);
182 bool SkImage_Raster::getROPixels(SkBitmap* dst, SkColorSpace* dstColorSpace, CachingHint) const {
187 GrTexture* SkImage_Raster::asTextureRef(GrContext* ctx, const GrSamplerParams& params,
188 SkColorSpace* dstColorSpace,
189 sk_sp<SkColorSpace>* texColorSpace) const {
196 *texColorSpace = sk_ref_sp(fBitmap.colorSpace());
200 sk_sp<GrTexture> tex = this->refPinnedTexture(&uniqueID);
202 GrTextureAdjuster adjuster(fPinnedTexture.get(), fBitmap.alphaType(), fBitmap.bounds(),
203 fPinnedUniqueID, fBitmap.colorSpace());
204 return adjuster.refTextureSafeForParams(params, nullptr);
207 return GrRefCachedBitmapTexture(ctx, fBitmap, params);
215 sk_sp<GrTexture> SkImage_Raster::refPinnedTexture(uint32_t* uniqueID) const {
216 if (fPinnedTexture) {
217 SkASSERT(fPinnedCount > 0);
218 SkASSERT(fPinnedUniqueID != 0);
219 *uniqueID = fPinnedUniqueID;
220 return fPinnedTexture;
225 bool SkImage_Raster::onPinAsTexture(GrContext* ctx) const {
226 if (fPinnedTexture) {
227 SkASSERT(fPinnedCount > 0);
228 SkASSERT(fPinnedUniqueID != 0);
229 SkASSERT(fPinnedTexture->getContext() == ctx);
231 SkASSERT(fPinnedCount == 0);
232 SkASSERT(fPinnedUniqueID == 0);
233 fPinnedTexture.reset(
234 GrRefCachedBitmapTexture(ctx, fBitmap, GrSamplerParams::ClampNoFilter()));
235 if (!fPinnedTexture) {
238 fPinnedUniqueID = fBitmap.getGenerationID();
240 // Note: we only increment if the texture was successfully pinned
245 void SkImage_Raster::onUnpinAsTexture(GrContext* ctx) const {
246 // Note: we always decrement, even if fPinnedTexture is null
247 SkASSERT(fPinnedCount > 0);
248 SkASSERT(fPinnedUniqueID != 0);
249 if (fPinnedTexture) {
250 SkASSERT(fPinnedTexture->getContext() == ctx);
253 if (0 == --fPinnedCount) {
254 fPinnedTexture.reset(nullptr);
260 sk_sp<SkImage> SkImage_Raster::onMakeSubset(const SkIRect& subset) const {
261 // TODO : could consider heurist of sharing pixels, if subset is pretty close to complete
263 SkImageInfo info = SkImageInfo::MakeN32(subset.width(), subset.height(), fBitmap.alphaType());
264 auto surface(SkSurface::MakeRaster(info));
268 surface->getCanvas()->clear(0);
269 surface->getCanvas()->drawImage(this, SkIntToScalar(-subset.x()), SkIntToScalar(-subset.y()),
271 return surface->makeImageSnapshot();
274 ///////////////////////////////////////////////////////////////////////////////
276 sk_sp<SkImage> SkImage::MakeRasterCopy(const SkPixmap& pmap) {
278 if (!SkImage_Raster::ValidArgs(pmap.info(), pmap.rowBytes(),
279 pmap.ctable() != nullptr, &size) || !pmap.addr()) {
283 // Here we actually make a copy of the caller's pixel data
284 sk_sp<SkData> data(SkData::MakeWithCopy(pmap.addr(), size));
285 return sk_make_sp<SkImage_Raster>(pmap.info(), std::move(data), pmap.rowBytes(), pmap.ctable());
289 sk_sp<SkImage> SkImage::MakeRasterData(const SkImageInfo& info, sk_sp<SkData> data,
292 if (!SkImage_Raster::ValidArgs(info, rowBytes, false, &size) || !data) {
296 // did they give us enough data?
297 if (data->size() < size) {
301 SkColorTable* ctable = nullptr;
302 return sk_make_sp<SkImage_Raster>(info, std::move(data), rowBytes, ctable);
305 sk_sp<SkImage> SkImage::MakeFromRaster(const SkPixmap& pmap, RasterReleaseProc proc,
306 ReleaseContext ctx) {
308 if (!SkImage_Raster::ValidArgs(pmap.info(), pmap.rowBytes(), false, &size) || !pmap.addr()) {
312 sk_sp<SkData> data(SkData::MakeWithProc(pmap.addr(), size, proc, ctx));
313 return sk_make_sp<SkImage_Raster>(pmap.info(), std::move(data), pmap.rowBytes(), pmap.ctable());
316 sk_sp<SkImage> SkMakeImageFromPixelRef(const SkImageInfo& info, SkPixelRef* pr,
317 const SkIPoint& pixelRefOrigin, size_t rowBytes) {
318 if (!SkImage_Raster::ValidArgs(info, rowBytes, false, nullptr)) {
321 return sk_make_sp<SkImage_Raster>(info, pr, pixelRefOrigin, rowBytes);
324 sk_sp<SkImage> SkMakeImageFromRasterBitmap(const SkBitmap& bm, SkCopyPixelsMode cpm,
325 SkTBlitterAllocator* allocator) {
326 bool hasColorTable = false;
327 if (kIndex_8_SkColorType == bm.colorType()) {
328 SkAutoLockPixels autoLockPixels(bm);
329 hasColorTable = bm.getColorTable() != nullptr;
332 if (!SkImage_Raster::ValidArgs(bm.info(), bm.rowBytes(), hasColorTable, nullptr)) {
336 sk_sp<SkImage> image;
337 if (kAlways_SkCopyPixelsMode == cpm || (!bm.isImmutable() && kNever_SkCopyPixelsMode != cpm)) {
341 if (tmp.getPixels() && tmp.peekPixels(&pmap)) {
342 image = SkImage::MakeRasterCopy(pmap);
346 image.reset(allocator->createT<SkImage_Raster>(bm, kNever_SkCopyPixelsMode == cpm));
347 image.get()->ref(); // account for the allocator being an owner
349 image = sk_make_sp<SkImage_Raster>(bm, kNever_SkCopyPixelsMode == cpm);
355 const SkPixelRef* SkBitmapImageGetPixelRef(const SkImage* image) {
356 return ((const SkImage_Raster*)image)->getPixelRef();
359 bool SkImage_Raster::onAsLegacyBitmap(SkBitmap* bitmap, LegacyBitmapMode mode) const {
360 if (kRO_LegacyBitmapMode == mode) {
361 // When we're a snapshot from a surface, our bitmap may not be marked immutable
362 // even though logically always we are, but in that case we can't physically share our
363 // pixelref since the caller might call setImmutable() themselves
364 // (thus changing our state).
365 if (fBitmap.isImmutable()) {
366 bitmap->setInfo(fBitmap.info(), fBitmap.rowBytes());
367 bitmap->setPixelRef(fBitmap.pixelRef(), fBitmap.pixelRefOrigin());
371 return this->INHERITED::onAsLegacyBitmap(bitmap, mode);