2 * Copyright (C) 2010 Google Inc. All rights reserved.
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
8 * 1. Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright
11 * notice, this list of conditions and the following disclaimer in the
12 * documentation and/or other materials provided with the distribution.
13 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
14 * its contributors may be used to endorse or promote products derived
15 * from this software without specific prior written permission.
17 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
18 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
19 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
20 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
21 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
22 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
23 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
24 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 #include "platform/image-decoders/webp/WEBPImageDecoder.h"
32 #include "RuntimeEnabledFeatures.h"
33 #include "platform/PlatformInstrumentation.h"
// Blink's pixel readback code assumes a little-endian target; refuse to build
// on big- or middle-endian CPUs rather than produce swapped channels.
39 #if CPU(BIG_ENDIAN) || CPU(MIDDLE_ENDIAN)
40 #error Blink assumes a little-endian target.
// Map "does this frame have alpha?" to the libwebp output colorspace that
// matches Skia's native 32-bit pixel order on this platform. The lowercase-"A"
// modes (MODE_rgbA / MODE_bgrA) ask libwebp to emit premultiplied alpha.
43 #if SK_B32_SHIFT // Output little-endian RGBA pixels (Android).
44 inline WEBP_CSP_MODE outputMode(bool hasAlpha) { return hasAlpha ? MODE_rgbA : MODE_RGBA; }
45 #else // Output little-endian BGRA pixels.
46 inline WEBP_CSP_MODE outputMode(bool hasAlpha) { return hasAlpha ? MODE_bgrA : MODE_BGRA; }
// Construct a WebP decoder. All parse-state flags start in their
// "nothing parsed yet" values; the demuxer and incremental decoder are
// created lazily (see updateDemuxer() / decode()).
// NOTE: some initializers are not visible in this view (likely m_decoder,
// m_demux, m_transform and similar pointer members) — confirm against the
// full file before assuming they are default-initialized.
51 WEBPImageDecoder::WEBPImageDecoder(ImageSource::AlphaOption alphaOption,
52 ImageSource::GammaAndColorProfileOption gammaAndColorProfileOption,
53 size_t maxDecodedBytes)
54 : ImageDecoder(alphaOption, gammaAndColorProfileOption, maxDecodedBytes)
57 , m_frameBackgroundHasAlpha(false)
59 , m_haveReadProfile(false)
// Demux state starts before the header has been parsed successfully.
63 , m_demuxState(WEBP_DEMUX_PARSING_HEADER)
64 , m_haveAlreadyParsedThisData(false)
65 , m_haveReadAnimationParameters(false)
// Loop count defaults to "play once" until the ANIM chunk is read.
66 , m_repetitionCount(cAnimationLoopOnce)
// Destructor — tears down decoder state (body not fully visible here;
// presumably delegates to clear()).
71 WEBPImageDecoder::~WEBPImageDecoder()
// Release everything this decoder owns: the qcms color transform and the
// libwebp demuxer. (Pointer-reset lines are not visible in this view.)
76 void WEBPImageDecoder::clear()
80 qcms_transform_release(m_transform);
83 WebPDemuxDelete(m_demux);
// Destroy only the incremental libwebp decoder (keeps the demuxer), so a
// partially-decoded frame can be restarted from scratch later.
88 void WEBPImageDecoder::clearDecoder()
90 WebPIDelete(m_decoder);
// Background-alpha bookkeeping belongs to the frame being decoded; reset it.
93 m_frameBackgroundHasAlpha = false;
// Reports whether the canvas dimensions are known, parsing lazily on first
// call. The lines between the check and the return are missing from this
// view — presumably they invoke updateDemuxer() to parse the header; confirm
// against the full file.
96 bool WEBPImageDecoder::isSizeAvailable()
98 if (!ImageDecoder::isSizeAvailable())
101 return ImageDecoder::isSizeAvailable();
// Returns the number of frames discovered so far. updateDemuxer() grows
// m_frameBufferCache as new ANMF chunks arrive, so this count can increase
// as more data is appended.
104 size_t WEBPImageDecoder::frameCount()
106 if (!updateDemuxer())
109 return m_frameBufferCache.size();
// Returns the decoded buffer for |index|, decoding it (and any prior frames
// its pixels depend on) on demand. Returns the cached frame immediately if
// it is already complete.
112 ImageFrame* WEBPImageDecoder::frameBufferAtIndex(size_t index)
114 if (index >= frameCount())
117 ImageFrame& frame = m_frameBufferCache[index];
118 if (frame.status() == ImageFrame::FrameComplete)
// Walk the requiredPreviousFrameIndex() chain backwards, collecting every
// not-yet-complete frame that must be decoded before |index| can be
// composited. The chain ends at kNotFound (independent frame) or at a
// frame that is already FrameComplete.
121 Vector<size_t> framesToDecode;
122 size_t frameToDecode = index;
124 framesToDecode.append(frameToDecode);
125 frameToDecode = m_frameBufferCache[frameToDecode].requiredPreviousFrameIndex();
126 } while (frameToDecode != kNotFound && m_frameBufferCache[frameToDecode].status() != ImageFrame::FrameComplete);
// Decode oldest-to-newest (the vector was built newest-first, hence the
// reverse iteration).
129 for (size_t i = framesToDecode.size(); i > 0; --i) {
130 size_t frameIndex = framesToDecode[i - 1];
// Animated frames need compositing setup (canvas copy / clear) first.
131 if ((m_formatFlags & ANIMATION_FLAG) && !initFrameBuffer(frameIndex))
// libwebp's demux API is 1-based, hence frameIndex + 1.
133 WebPIterator webpFrame;
134 if (!WebPDemuxGetFrame(m_demux, frameIndex + 1, &webpFrame))
136 PlatformInstrumentation::willDecodeImage("WEBP");
137 decode(webpFrame.fragment.bytes, webpFrame.fragment.size, false, frameIndex);
138 PlatformInstrumentation::didDecodeImage();
139 WebPDemuxReleaseIterator(&webpFrame);
144 // We need more data to continue decoding.
145 if (m_frameBufferCache[frameIndex].status() != ImageFrame::FrameComplete)
149 // It is also a fatal error if all data is received and we have decoded all
150 // frames available but the file is truncated.
151 if (index >= m_frameBufferCache.size() - 1 && isAllDataReceived() && m_demux && m_demuxState != WEBP_DEMUX_DONE)
// Tell the bitmap its pixels may have changed so cached generations refresh.
154 frame.notifyBitmapIfPixelsChanged();
// Accept (possibly partial) encoded data. Clearing m_haveAlreadyParsedThisData
// forces the next updateDemuxer() call to re-parse, since new bytes may have
// revealed new frames or chunks.
158 void WEBPImageDecoder::setData(SharedBuffer* data, bool allDataReceived)
162 ImageDecoder::setData(data, allDataReceived);
163 m_haveAlreadyParsedThisData = false;
// Animation loop count as parsed from the ANIM chunk (see updateDemuxer());
// a failed decode always reports "play once".
166 int WEBPImageDecoder::repetitionCount() const
168 return failed() ? cAnimationLoopOnce : m_repetitionCount;
// Whether all encoded data for frame |index| has been received (not whether
// it has been decoded). For animated WebP, the demuxer only reports a frame
// once its ANMF chunk is fully buffered, so mere presence in the cache
// implies the frame data is complete.
171 bool WEBPImageDecoder::frameIsCompleteAtIndex(size_t index) const
173 if (!m_demux || m_demuxState <= WEBP_DEMUX_PARSING_HEADER)
// Still images defer to the base class (all-data-received check).
175 if (!(m_formatFlags & ANIMATION_FLAG))
176 return ImageDecoder::frameIsCompleteAtIndex(index);
177 bool frameIsLoadedAtIndex = index < m_frameBufferCache.size();
178 return frameIsLoadedAtIndex;
// Duration of frame |index| as recorded by updateDemuxer(); 0 for frames we
// have not discovered yet.
181 float WEBPImageDecoder::frameDurationAtIndex(size_t index) const
183 return index < m_frameBufferCache.size() ? m_frameBufferCache[index].duration() : 0;
// (Re)parse the container with libwebp's demuxer over all bytes received so
// far: learns canvas size, format flags, loop count, and per-frame metadata
// (rect, duration, dispose/blend method, dependency on a previous frame).
// Returns false while more data is needed. Several failure branches (setFailed
// calls) fall between the visible lines in this view.
186 bool WEBPImageDecoder::updateDemuxer()
// Parsing is memoized per setData() call — bail if nothing new arrived.
191 if (m_haveAlreadyParsedThisData)
194 m_haveAlreadyParsedThisData = true;
// Minimum bytes for RIFF + VP8X headers; below this WebPDemuxPartial would
// return null, which we could not distinguish from a corrupt file.
196 const unsigned webpHeaderSize = 20;
197 if (m_data->size() < webpHeaderSize)
198 return false; // Wait for headers so that WebPDemuxPartial doesn't return null.
// Throw away the old demuxer and rebuild over the grown buffer.
200 WebPDemuxDelete(m_demux);
201 WebPData inputData = { reinterpret_cast<const uint8_t*>(m_data->data()), m_data->size() };
202 m_demux = WebPDemuxPartial(&inputData, &m_demuxState);
// A complete file must demux to DONE; anything else is corrupt.
203 if (!m_demux || (isAllDataReceived() && m_demuxState != WEBP_DEMUX_DONE))
206 if (m_demuxState <= WEBP_DEMUX_PARSING_HEADER)
207 return false; // Not enough data for parsing canvas width/height yet.
209 bool hasAnimation = (m_formatFlags & ANIMATION_FLAG);
// First successful header parse: latch format flags and canvas size.
210 if (!ImageDecoder::isSizeAvailable()) {
211 m_formatFlags = WebPDemuxGetI(m_demux, WEBP_FF_FORMAT_FLAGS);
212 hasAnimation = (m_formatFlags & ANIMATION_FLAG);
213 if (!setSize(WebPDemuxGetI(m_demux, WEBP_FF_CANVAS_WIDTH), WebPDemuxGetI(m_demux, WEBP_FF_CANVAS_HEIGHT)))
217 ASSERT(ImageDecoder::isSizeAvailable());
218 const size_t newFrameCount = WebPDemuxGetI(m_demux, WEBP_FF_FRAME_COUNT);
219 if (hasAnimation && !m_haveReadAnimationParameters && newFrameCount) {
220 // As we have parsed at least one frame (even if partially),
221 // we must already have parsed the animation properties.
222 // This is because ANIM chunk always precedes ANMF chunks.
223 m_repetitionCount = WebPDemuxGetI(m_demux, WEBP_FF_LOOP_COUNT);
224 ASSERT(m_repetitionCount == (m_repetitionCount & 0xffff)); // Loop count is always <= 16 bits.
// In the WebP container a stored loop count of 0 means "loop forever".
225 if (!m_repetitionCount)
226 m_repetitionCount = cAnimationLoopInfinite;
227 m_haveReadAnimationParameters = true;
// Grow the frame cache for newly-discovered frames and record their
// compositing metadata from the demuxer's (1-based) frame iterator.
230 const size_t oldFrameCount = m_frameBufferCache.size();
231 if (newFrameCount > oldFrameCount) {
232 m_frameBufferCache.resize(newFrameCount);
233 for (size_t i = oldFrameCount; i < newFrameCount; ++i) {
234 m_frameBufferCache[i].setPremultiplyAlpha(m_premultiplyAlpha);
// Non-animated path (lines between are not visible here): a lone frame
// depends on nothing.
237 m_frameBufferCache[i].setRequiredPreviousFrameIndex(kNotFound);
240 WebPIterator animatedFrame;
241 WebPDemuxGetFrame(m_demux, i + 1, &animatedFrame);
242 ASSERT(animatedFrame.complete == 1);
243 m_frameBufferCache[i].setDuration(animatedFrame.duration);
244 m_frameBufferCache[i].setDisposalMethod(animatedFrame.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND ? ImageFrame::DisposeOverwriteBgcolor : ImageFrame::DisposeKeep);
245 m_frameBufferCache[i].setAlphaBlendSource(animatedFrame.blend_method == WEBP_MUX_BLEND ? ImageFrame::BlendAtopPreviousFrame : ImageFrame::BlendAtopBgcolor);
246 IntRect frameRect(animatedFrame.x_offset, animatedFrame.y_offset, animatedFrame.width, animatedFrame.height);
247 // Make sure the frameRect doesn't extend outside the buffer.
248 if (frameRect.maxX() > size().width())
249 frameRect.setWidth(size().width() - animatedFrame.x_offset);
250 if (frameRect.maxY() > size().height())
251 frameRect.setHeight(size().height() - animatedFrame.y_offset);
252 m_frameBufferCache[i].setOriginalFrameRect(frameRect);
// Opaque full-canvas frames break the dependency chain; record which
// earlier frame (if any) this one's pixels must be composited onto.
253 m_frameBufferCache[i].setRequiredPreviousFrameIndex(findRequiredPreviousFrame(i, !animatedFrame.has_alpha));
254 WebPDemuxReleaseIterator(&animatedFrame);
// Prepare the canvas for decoding animated frame |frameIndex|: either start
// from a blank full-size buffer (independent frame) or from a copy of the
// required previous frame, applying that frame's disposal method. Returns
// false on allocation failure (setSize/copyBitmapData failure branches fall
// between the visible lines).
261 bool WEBPImageDecoder::initFrameBuffer(size_t frameIndex)
263 ImageFrame& buffer = m_frameBufferCache[frameIndex];
264 if (buffer.status() != ImageFrame::FrameEmpty) // Already initialized.
267 const size_t requiredPreviousFrameIndex = buffer.requiredPreviousFrameIndex();
268 if (requiredPreviousFrameIndex == kNotFound) {
269 // This frame doesn't rely on any previous data.
270 if (!buffer.setSize(size().width(), size().height()))
// If the frame doesn't cover the whole canvas, the uncovered background
// stays transparent, so the composited result has alpha.
272 m_frameBackgroundHasAlpha = !buffer.originalFrameRect().contains(IntRect(IntPoint(), size()));
274 const ImageFrame& prevBuffer = m_frameBufferCache[requiredPreviousFrameIndex];
275 ASSERT(prevBuffer.status() == ImageFrame::FrameComplete);
277 // Preserve the last frame as the starting state for this frame.
278 if (!buffer.copyBitmapData(prevBuffer))
281 if (prevBuffer.disposalMethod() == ImageFrame::DisposeOverwriteBgcolor) {
282 // We want to clear the previous frame to transparent, without
283 // affecting pixels in the image outside of the frame.
284 const IntRect& prevRect = prevBuffer.originalFrameRect();
285 ASSERT(!prevRect.contains(IntRect(IntPoint(), size())));
286 buffer.zeroFillFrameRect(prevRect);
289 m_frameBackgroundHasAlpha = prevBuffer.hasAlpha() || (prevBuffer.disposalMethod() == ImageFrame::DisposeOverwriteBgcolor);
292 buffer.setStatus(ImageFrame::FramePartial);
293 // The buffer is transparent outside the decoded area while the image is loading.
294 // The correct value of 'hasAlpha' for the frame will be set when it is fully decoded.
295 buffer.setHasAlpha(true);
// Evict cached frame buffers to save memory, keeping the one frame that
// future decoding still needs. Returns the number of bytes freed (via the
// base-class implementation).
299 size_t WEBPImageDecoder::clearCacheExceptFrame(size_t clearExceptFrame)
301 // If |clearExceptFrame| has status FrameComplete, we preserve that frame.
302 // Otherwise, we preserve a previous frame with status FrameComplete whose data is required
303 // to decode |clearExceptFrame|, either in initFrameBuffer() or ApplyPostProcessing().
304 // All other frames can be cleared.
// Walking requiredPreviousFrameIndex() may yield kNotFound; that value is
// out of range, so the loop condition terminates and everything is cleared.
305 while ((clearExceptFrame < m_frameBufferCache.size()) && (m_frameBufferCache[clearExceptFrame].status() != ImageFrame::FrameComplete))
306 clearExceptFrame = m_frameBufferCache[clearExceptFrame].requiredPreviousFrameIndex();
308 return ImageDecoder::clearCacheExceptFrame(clearExceptFrame);
// Drop the pixel data for frame |frameIndex|. If the frame was only
// partially decoded, the incremental decoder state is stale for it, so it
// must be cleared too (the clearDecoder() call falls between the visible
// lines) before delegating to the base class.
311 void WEBPImageDecoder::clearFrameBuffer(size_t frameIndex)
313 if (m_demux && m_demuxState >= WEBP_DEMUX_PARSED_HEADER && m_frameBufferCache[frameIndex].status() == ImageFrame::FramePartial) {
314 // Clear the decoder state so that this partial frame can be decoded again when requested.
317 ImageDecoder::clearFrameBuffer(frameIndex);
// Build the qcms transform from the image's embedded ICC profile (raw bytes
// in |data|/|size|) to the output device profile. Any previously created
// transform is released first. Null-checks on the profiles fall between the
// visible lines.
322 void WEBPImageDecoder::createColorTransform(const char* data, size_t size)
325 qcms_transform_release(m_transform);
328 qcms_profile* deviceProfile = ImageDecoder::qcmsOutputDeviceProfile();
331 qcms_profile* inputProfile = qcms_profile_from_memory(data, size);
335 // We currently only support color profiles for RGB profiled images.
336 ASSERT(icSigRgbData == qcms_profile_get_color_space(inputProfile));
337 // The input image pixels are RGBA format.
338 qcms_data_type format = QCMS_DATA_RGBA_8;
339 // FIXME: Don't force perceptual intent if the image profile contains an intent.
340 m_transform = qcms_transform_create(inputProfile, format, deviceProfile, QCMS_DATA_RGBA_8, QCMS_INTENT_PERCEPTUAL);
// The transform keeps what it needs; the input profile can be released now.
342 qcms_profile_release(inputProfile);
// Locate the ICCP chunk via the demuxer and, if the profile passes the
// sanity checks below, create the color transform from it. Called at most
// once per image (guarded by m_haveReadProfile at the call site).
345 void WEBPImageDecoder::readColorProfile()
347 WebPChunkIterator chunkIterator;
348 if (!WebPDemuxGetChunk(m_demux, "ICCP", 1, &chunkIterator)) {
349 WebPDemuxReleaseChunkIterator(&chunkIterator);
353 const char* profileData = reinterpret_cast<const char*>(chunkIterator.chunk.bytes);
354 size_t profileSize = chunkIterator.chunk.size;
356 // Only accept RGB color profiles from input class devices.
357 bool ignoreProfile = false;
358 if (profileSize < ImageDecoder::iccColorProfileHeaderLength)
359 ignoreProfile = true;
360 else if (!ImageDecoder::rgbColorProfile(profileData, profileSize))
361 ignoreProfile = true;
362 else if (!ImageDecoder::inputDeviceColorProfile(profileData, profileSize))
363 ignoreProfile = true;
// (The branch consuming |ignoreProfile| is not visible in this view;
// presumably createColorTransform is only reached when it is false.)
366 createColorTransform(profileData, profileSize);
368 WebPDemuxReleaseChunkIterator(&chunkIterator);
371 #endif // USE(QCMSLIB)
// Post-process the rows libwebp has decoded so far for frame |frameIndex|:
// apply the ICC color transform (QCMSLIB builds), then fix up alpha-blending
// against the previous frame for animated images. Only rows in
// [m_decodedHeight, decodedHeight) — i.e. rows newly decoded since the last
// call — are touched.
373 void WEBPImageDecoder::applyPostProcessing(size_t frameIndex)
375 ImageFrame& buffer = m_frameBufferCache[frameIndex];
// Ask the incremental decoder how many rows are available.
378 if (!WebPIDecGetRGB(m_decoder, &decodedHeight, &width, 0, 0))
379 return; // See also https://bugs.webkit.org/show_bug.cgi?id=74062
380 if (decodedHeight <= 0)
383 const IntRect& frameRect = buffer.originalFrameRect();
384 ASSERT_WITH_SECURITY_IMPLICATION(width == frameRect.width());
385 ASSERT_WITH_SECURITY_IMPLICATION(decodedHeight <= frameRect.height());
// Frame-local (x, y) map to canvas coordinates offset by the frame rect.
386 const int left = frameRect.x();
387 const int top = frameRect.y();
// Color-correct the newly decoded rows. The decode was done to plain RGBA
// (see decode()), so after the qcms transform each pixel is re-written
// through setRGBA to land in the buffer's native format.
390 if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile()) {
391 if (!m_haveReadProfile) {
393 m_haveReadProfile = true;
395 for (int y = m_decodedHeight; y < decodedHeight; ++y) {
396 const int canvasY = top + y;
397 uint8_t* row = reinterpret_cast<uint8_t*>(buffer.getAddr(left, canvasY));
398 if (qcms_transform* transform = colorTransform())
399 qcms_transform_data_type(transform, row, row, width, QCMS_OUTPUT_RGBX);
400 uint8_t* pixel = row;
401 for (int x = 0; x < width; ++x, pixel += 4) {
402 const int canvasX = left + x;
403 buffer.setRGBA(canvasX, canvasY, pixel[0], pixel[1], pixel[2], pixel[3]);
407 #endif // USE(QCMSLIB)
409 // During the decoding of current frame, we may have set some pixels to be transparent (i.e. alpha < 255).
410 // However, the value of each of these pixels should have been determined by blending it against the value
411 // of that pixel in the previous frame if alpha blend source was 'BlendAtopPreviousFrame'. So, we correct these
412 // pixels based on disposal method of the previous frame and the previous frame buffer.
413 // FIXME: This could be avoided if libwebp decoder had an API that used the previous required frame
414 // to do the alpha-blending by itself.
415 if ((m_formatFlags & ANIMATION_FLAG) && frameIndex && buffer.alphaBlendSource() == ImageFrame::BlendAtopPreviousFrame && buffer.requiredPreviousFrameIndex() != kNotFound) {
// NOTE(review): the guard checks requiredPreviousFrameIndex(), but the
// lookup below uses frameIndex - 1 — these can differ when the required
// frame is not the immediately preceding one. Confirm against upstream
// whether this should index requiredPreviousFrameIndex() instead.
416 ImageFrame& prevBuffer = m_frameBufferCache[frameIndex - 1];
417 ASSERT(prevBuffer.status() == ImageFrame::FrameComplete);
418 ImageFrame::DisposalMethod prevDisposalMethod = prevBuffer.disposalMethod();
419 if (prevDisposalMethod == ImageFrame::DisposeKeep) { // Restore transparent pixels to pixels in previous canvas.
420 for (int y = m_decodedHeight; y < decodedHeight; ++y) {
421 const int canvasY = top + y;
422 for (int x = 0; x < width; ++x) {
423 const int canvasX = left + x;
424 ImageFrame::PixelData& pixel = *buffer.getAddr(canvasX, canvasY);
425 // FIXME: Use alpha-blending when alpha is between 0 and 255.
426 // Alpha-blending is being implemented in: https://bugs.webkit.org/show_bug.cgi?id=17022
// Fully transparent pixel: take the previous canvas's pixel verbatim.
427 if (!((pixel >> SK_A32_SHIFT) & 0xff)) {
428 ImageFrame::PixelData prevPixel = *prevBuffer.getAddr(canvasX, canvasY);
433 } else if (prevDisposalMethod == ImageFrame::DisposeOverwriteBgcolor) {
434 const IntRect& prevRect = prevBuffer.originalFrameRect();
435 // We need to restore transparent pixels to as they were just after initFrame() call. That is:
436 // * Transparent if it belongs to prevRect <-- This is a no-op.
437 // * Pixel in the previous canvas otherwise <-- Need to restore.
438 for (int y = m_decodedHeight; y < decodedHeight; ++y) {
439 const int canvasY = top + y;
440 for (int x = 0; x < width; ++x) {
441 const int canvasX = left + x;
442 ImageFrame::PixelData& pixel = *buffer.getAddr(canvasX, canvasY);
443 // FIXME: Use alpha-blending when alpha is between 0 and 255.
444 if (!((pixel >> SK_A32_SHIFT) & 0xff) && !prevRect.contains(IntPoint(canvasX, canvasY))) {
445 ImageFrame::PixelData prevPixel = *prevBuffer.getAddr(canvasX, canvasY);
// Remember how far we've processed so the next call starts after this row,
// and mark the bitmap dirty for consumers.
453 m_decodedHeight = decodedHeight;
454 buffer.setPixelsChanged(true);
// Core incremental decode of one frame's bitstream bytes. With |onlySize|
// (non-animated path) this only extracts dimensions via WebPGetFeatures.
// Otherwise it feeds the bytes into the (lazily created) incremental decoder
// which writes directly into the ImageFrame's pixel memory, then runs
// applyPostProcessing. Several early-return/failure branches fall between
// the visible lines.
457 bool WEBPImageDecoder::decode(const uint8_t* dataBytes, size_t dataSize, bool onlySize, size_t frameIndex)
// Non-animated images learn their size here instead of via the demuxer.
462 if (!ImageDecoder::isSizeAvailable()) {
// Minimum bytes WebPGetFeatures needs to report dimensions reliably.
463 static const size_t imageHeaderSize = 30;
464 if (dataSize < imageHeaderSize)
467 WebPBitstreamFeatures features;
468 if (WebPGetFeatures(dataBytes, dataSize, &features) != VP8_STATUS_OK)
470 width = features.width;
471 height = features.height;
472 m_formatFlags = features.has_alpha ? ALPHA_FLAG : 0;
473 if (!setSize(width, height))
477 ASSERT(ImageDecoder::isSizeAvailable());
481 ASSERT(m_frameBufferCache.size() > frameIndex);
482 ImageFrame& buffer = m_frameBufferCache[frameIndex];
483 ASSERT(buffer.status() != ImageFrame::FrameComplete);
// Non-animated frames are not initialized by initFrameBuffer(); set up a
// full-canvas, initially-transparent buffer here.
485 if (buffer.status() == ImageFrame::FrameEmpty) {
486 if (!buffer.setSize(size().width(), size().height()))
488 buffer.setStatus(ImageFrame::FramePartial);
489 // The buffer is transparent outside the decoded area while the image is loading.
490 // The correct value of 'hasAlpha' for the frame will be set when it is fully decoded.
491 buffer.setHasAlpha(true);
492 buffer.setOriginalFrameRect(IntRect(IntPoint(), size()));
495 const IntRect& frameRect = buffer.originalFrameRect();
// Pick the output colorspace: premultiplied variant only when the image has
// alpha AND the caller wants premultiplication.
497 WEBP_CSP_MODE mode = outputMode(m_formatFlags & ALPHA_FLAG);
498 if (!m_premultiplyAlpha)
499 mode = outputMode(false);
// Color-managed images must be decoded to plain RGBA so qcms can transform
// them in applyPostProcessing().
501 if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile())
502 mode = MODE_RGBA; // Decode to RGBA for input to libqcms.
// Configure libwebp to decode straight into the frame's pixels
// (external memory) — the stride is the full canvas width so the frame
// rect lands at the right offset.
504 WebPInitDecBuffer(&m_decoderBuffer);
505 m_decoderBuffer.colorspace = mode;
506 m_decoderBuffer.u.RGBA.stride = size().width() * sizeof(ImageFrame::PixelData);
507 m_decoderBuffer.u.RGBA.size = m_decoderBuffer.u.RGBA.stride * frameRect.height();
508 m_decoderBuffer.is_external_memory = 1;
509 m_decoderBuffer.u.RGBA.rgba = reinterpret_cast<uint8_t*>(buffer.getAddr(frameRect.x(), frameRect.y()));
514 m_decoder = WebPINewDecoder(&m_decoderBuffer);
// Feed all bytes seen so far; libwebp tracks how much is new.
516 switch (WebPIUpdate(m_decoder, dataBytes, dataSize)) {
// (VP8_STATUS_OK case label not visible:) frame finished — post-process,
// finalize alpha, and mark complete.
518 applyPostProcessing(frameIndex);
519 buffer.setHasAlpha((m_formatFlags & ALPHA_FLAG) || m_frameBackgroundHasAlpha);
520 buffer.setStatus(ImageFrame::FrameComplete);
523 case VP8_STATUS_SUSPENDED:
// Suspended with more data still expected: post-process the rows decoded
// so far and wait for the next chunk.
524 if (!isAllDataReceived() && !frameIsCompleteAtIndex(frameIndex)) {
525 applyPostProcessing(frameIndex);
535 } // namespace WebCore