1 /*
2 * Copyright (C) 2010 Google Inc. All rights reserved.
3 *
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
6 * are met:
7 *
8 * 1. Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright
11 * notice, this list of conditions and the following disclaimer in the
12 * documentation and/or other materials provided with the distribution.
13 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
14 * its contributors may be used to endorse or promote products derived
15 * from this software without specific prior written permission.
16 *
17 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
18 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
19 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
20 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
21 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
22 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
23 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
24 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
26 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 */
28
29 #include "config.h"
30 #include "platform/image-decoders/webp/WEBPImageDecoder.h"
31
32 #include "platform/PlatformInstrumentation.h"
33 #include "platform/RuntimeEnabledFeatures.h"
34
35 #if USE(QCMSLIB)
36 #include "qcms.h"
37 #endif
38
39 #if CPU(BIG_ENDIAN) || CPU(MIDDLE_ENDIAN)
40 #error Blink assumes a little-endian target.
41 #endif
42
// Selects the libwebp output colorspace matching Skia's native pixel order.
// The lowercase-'a' modes (MODE_rgbA / MODE_bgrA) are libwebp's premultiplied
// variants, used when the frame has alpha.
#if SK_B32_SHIFT // Output little-endian RGBA pixels (Android).
inline WEBP_CSP_MODE outputMode(bool hasAlpha) { return hasAlpha ? MODE_rgbA : MODE_RGBA; }
#else // Output little-endian BGRA pixels.
inline WEBP_CSP_MODE outputMode(bool hasAlpha) { return hasAlpha ? MODE_bgrA : MODE_BGRA; }
#endif
48
blendChannel(uint8_t src,uint8_t srcA,uint8_t dst,uint8_t dstA,unsigned scale)49 inline uint8_t blendChannel(uint8_t src, uint8_t srcA, uint8_t dst, uint8_t dstA, unsigned scale)
50 {
51 unsigned blendUnscaled = src * srcA + dst * dstA;
52 ASSERT(blendUnscaled < (1ULL << 32) / scale);
53 return (blendUnscaled * scale) >> 24;
54 }
55
// Alpha-blends non-premultiplied pixel 'src' over non-premultiplied pixel
// 'dst' (SrcOver), returning a non-premultiplied packed pixel.
inline uint32_t blendSrcOverDstNonPremultiplied(uint32_t src, uint32_t dst)
{
    uint8_t srcA = SkGetPackedA32(src);
    // A fully transparent source leaves the destination untouched.
    if (srcA == 0)
        return dst;

    uint8_t dstA = SkGetPackedA32(dst);
    // Destination's weight in the blend: dstA * (1 - srcA), 8-bit fixed point.
    uint8_t dstFactorA = (dstA * SkAlpha255To256(255 - srcA)) >> 8;
    ASSERT(srcA + dstFactorA < (1U << 8));
    uint8_t blendA = srcA + dstFactorA;
    // 24-bit fixed-point reciprocal of the blended alpha, consumed by
    // blendChannel(). blendA >= srcA > 0, so this never divides by zero.
    unsigned scale = (1UL << 24) / blendA;

    uint8_t blendR = blendChannel(SkGetPackedR32(src), srcA, SkGetPackedR32(dst), dstFactorA, scale);
    uint8_t blendG = blendChannel(SkGetPackedG32(src), srcA, SkGetPackedG32(dst), dstFactorA, scale);
    uint8_t blendB = blendChannel(SkGetPackedB32(src), srcA, SkGetPackedB32(dst), dstFactorA, scale);

    return SkPackARGB32NoCheck(blendA, blendR, blendG, blendB);
}
74
75 // Returns two point ranges (<left, width> pairs) at row 'canvasY', that belong to 'src' but not 'dst'.
76 // A point range is empty if the corresponding width is 0.
findBlendRangeAtRow(const WebCore::IntRect & src,const WebCore::IntRect & dst,int canvasY,int & left1,int & width1,int & left2,int & width2)77 inline void findBlendRangeAtRow(const WebCore::IntRect& src, const WebCore::IntRect& dst, int canvasY, int& left1, int& width1, int& left2, int& width2)
78 {
79 ASSERT_WITH_SECURITY_IMPLICATION(canvasY >= src.y() && canvasY < src.maxY());
80 left1 = -1;
81 width1 = 0;
82 left2 = -1;
83 width2 = 0;
84
85 if (canvasY < dst.y() || canvasY >= dst.maxY() || src.x() >= dst.maxX() || src.maxX() <= dst.x()) {
86 left1 = src.x();
87 width1 = src.width();
88 return;
89 }
90
91 if (src.x() < dst.x()) {
92 left1 = src.x();
93 width1 = dst.x() - src.x();
94 }
95
96 if (src.maxX() > dst.maxX()) {
97 left2 = dst.maxX();
98 width2 = src.maxX() - dst.maxX();
99 }
100 }
101
alphaBlendPremultiplied(WebCore::ImageFrame & src,WebCore::ImageFrame & dst,int canvasY,int left,int width)102 void alphaBlendPremultiplied(WebCore::ImageFrame& src, WebCore::ImageFrame& dst, int canvasY, int left, int width)
103 {
104 for (int x = 0; x < width; ++x) {
105 int canvasX = left + x;
106 WebCore::ImageFrame::PixelData& pixel = *src.getAddr(canvasX, canvasY);
107 if (SkGetPackedA32(pixel) != 0xff) {
108 WebCore::ImageFrame::PixelData prevPixel = *dst.getAddr(canvasX, canvasY);
109 pixel = SkPMSrcOver(pixel, prevPixel);
110 }
111 }
112 }
113
alphaBlendNonPremultiplied(WebCore::ImageFrame & src,WebCore::ImageFrame & dst,int canvasY,int left,int width)114 void alphaBlendNonPremultiplied(WebCore::ImageFrame& src, WebCore::ImageFrame& dst, int canvasY, int left, int width)
115 {
116 for (int x = 0; x < width; ++x) {
117 int canvasX = left + x;
118 WebCore::ImageFrame::PixelData& pixel = *src.getAddr(canvasX, canvasY);
119 if (SkGetPackedA32(pixel) != 0xff) {
120 WebCore::ImageFrame::PixelData prevPixel = *dst.getAddr(canvasX, canvasY);
121 pixel = blendSrcOverDstNonPremultiplied(pixel, prevPixel);
122 }
123 }
124 }
125
126 namespace WebCore {
127
// Constructs a WebP decoder. All demuxer/decoder state starts empty and is
// built lazily by updateDemuxer()/decode() as data arrives.
WEBPImageDecoder::WEBPImageDecoder(ImageSource::AlphaOption alphaOption,
    ImageSource::GammaAndColorProfileOption gammaAndColorProfileOption,
    size_t maxDecodedBytes)
    : ImageDecoder(alphaOption, gammaAndColorProfileOption, maxDecodedBytes)
    , m_decoder(0)
    , m_formatFlags(0)
    , m_frameBackgroundHasAlpha(false)
    , m_hasColorProfile(false)
#if USE(QCMSLIB)
    , m_haveReadProfile(false)
    , m_transform(0)
#endif
    , m_demux(0)
    , m_demuxState(WEBP_DEMUX_PARSING_HEADER)
    , m_haveAlreadyParsedThisData(false)
    , m_haveReadAnimationParameters(false)
    , m_repetitionCount(cAnimationLoopOnce)
    , m_decodedHeight(0)
{
    // Pick the row-blending routine once, based on the premultiply setting,
    // so applyPostProcessing() doesn't have to branch per row.
    m_blendFunction = (alphaOption == ImageSource::AlphaPremultiplied) ? alphaBlendPremultiplied : alphaBlendNonPremultiplied;
}
149
// Releases the demuxer, incremental decoder, and color transform.
WEBPImageDecoder::~WEBPImageDecoder()
{
    clear();
}
154
// Frees all libwebp/qcms state: the color transform (if any), the demuxer,
// and (via clearDecoder) the incremental decoder.
void WEBPImageDecoder::clear()
{
#if USE(QCMSLIB)
    if (m_transform)
        qcms_transform_release(m_transform);
    m_transform = 0;
#endif
    WebPDemuxDelete(m_demux);
    m_demux = 0;
    clearDecoder();
}
166
// Tears down just the incremental decoder and its per-frame bookkeeping,
// leaving the demuxer intact so decoding can restart on the same data.
void WEBPImageDecoder::clearDecoder()
{
    WebPIDelete(m_decoder);
    m_decoder = 0;
    m_decodedHeight = 0;
    m_frameBackgroundHasAlpha = false;
}
174
isSizeAvailable()175 bool WEBPImageDecoder::isSizeAvailable()
176 {
177 if (!ImageDecoder::isSizeAvailable())
178 updateDemuxer();
179
180 return ImageDecoder::isSizeAvailable();
181 }
182
frameCount()183 size_t WEBPImageDecoder::frameCount()
184 {
185 if (!updateDemuxer())
186 return 0;
187
188 return m_frameBufferCache.size();
189 }
190
// Returns the (possibly partially) decoded frame at |index|, decoding it and
// any incomplete frames it depends on first. Returns 0 on failure or if the
// frame does not exist (yet).
ImageFrame* WEBPImageDecoder::frameBufferAtIndex(size_t index)
{
    if (index >= frameCount())
        return 0;

    ImageFrame& frame = m_frameBufferCache[index];
    if (frame.status() == ImageFrame::FrameComplete)
        return &frame;

    // Walk the requiredPreviousFrameIndex() chain, collecting |index| and
    // every not-yet-complete ancestor whose pixels this frame builds on.
    Vector<size_t> framesToDecode;
    size_t frameToDecode = index;
    do {
        framesToDecode.append(frameToDecode);
        frameToDecode = m_frameBufferCache[frameToDecode].requiredPreviousFrameIndex();
    } while (frameToDecode != kNotFound && m_frameBufferCache[frameToDecode].status() != ImageFrame::FrameComplete);

    ASSERT(m_demux);
    // Decode the dependency chain oldest-to-newest.
    for (size_t i = framesToDecode.size(); i > 0; --i) {
        size_t frameIndex = framesToDecode[i - 1];
        if ((m_formatFlags & ANIMATION_FLAG) && !initFrameBuffer(frameIndex))
            return 0;
        WebPIterator webpFrame;
        // WebPDemuxGetFrame uses 1-based frame numbers.
        if (!WebPDemuxGetFrame(m_demux, frameIndex + 1, &webpFrame))
            return 0;
        PlatformInstrumentation::willDecodeImage("WEBP");
        decode(webpFrame.fragment.bytes, webpFrame.fragment.size, false, frameIndex);
        PlatformInstrumentation::didDecodeImage();
        WebPDemuxReleaseIterator(&webpFrame);

        if (failed())
            return 0;

        // We need more data to continue decoding.
        if (m_frameBufferCache[frameIndex].status() != ImageFrame::FrameComplete)
            break;
    }

    // It is also a fatal error if all data is received and we have decoded all
    // frames available but the file is truncated.
    if (index >= m_frameBufferCache.size() - 1 && isAllDataReceived() && m_demux && m_demuxState != WEBP_DEMUX_DONE)
        setFailed();

    frame.notifyBitmapIfPixelsChanged();
    return &frame;
}
236
// Receives (possibly incremental) image data. Once failed, further data is
// ignored. New data invalidates the cached demuxer parse result.
void WEBPImageDecoder::setData(SharedBuffer* data, bool allDataReceived)
{
    if (failed())
        return;
    ImageDecoder::setData(data, allDataReceived);
    // Force updateDemuxer() to re-parse on its next call.
    m_haveAlreadyParsedThisData = false;
}
244
repetitionCount() const245 int WEBPImageDecoder::repetitionCount() const
246 {
247 return failed() ? cAnimationLoopOnce : m_repetitionCount;
248 }
249
frameIsCompleteAtIndex(size_t index) const250 bool WEBPImageDecoder::frameIsCompleteAtIndex(size_t index) const
251 {
252 if (!m_demux || m_demuxState <= WEBP_DEMUX_PARSING_HEADER)
253 return false;
254 if (!(m_formatFlags & ANIMATION_FLAG))
255 return ImageDecoder::frameIsCompleteAtIndex(index);
256 bool frameIsLoadedAtIndex = index < m_frameBufferCache.size();
257 return frameIsLoadedAtIndex;
258 }
259
frameDurationAtIndex(size_t index) const260 float WEBPImageDecoder::frameDurationAtIndex(size_t index) const
261 {
262 return index < m_frameBufferCache.size() ? m_frameBufferCache[index].duration() : 0;
263 }
264
// (Re)parses the received data with libwebp's demuxer: extracts format flags,
// canvas size, animation parameters, and per-frame metadata, growing
// m_frameBufferCache to the demuxer's frame count. Returns true if the data
// parsed far enough to know the canvas size; false (without failing) when
// more data is needed.
bool WEBPImageDecoder::updateDemuxer()
{
    if (failed())
        return false;

    // Skip re-parsing when no new data has arrived since the last call.
    if (m_haveAlreadyParsedThisData)
        return true;

    m_haveAlreadyParsedThisData = true;

    const unsigned webpHeaderSize = 20;
    if (m_data->size() < webpHeaderSize)
        return false; // Wait for headers so that WebPDemuxPartial doesn't return null.

    // Rebuild the demuxer over the (possibly grown) data buffer.
    WebPDemuxDelete(m_demux);
    WebPData inputData = { reinterpret_cast<const uint8_t*>(m_data->data()), m_data->size() };
    m_demux = WebPDemuxPartial(&inputData, &m_demuxState);
    if (!m_demux || (isAllDataReceived() && m_demuxState != WEBP_DEMUX_DONE))
        return setFailed();

    if (m_demuxState <= WEBP_DEMUX_PARSING_HEADER)
        return false; // Not enough data for parsing canvas width/height yet.

    bool hasAnimation = (m_formatFlags & ANIMATION_FLAG);
    if (!ImageDecoder::isSizeAvailable()) {
        // First successful parse: latch format flags and canvas dimensions.
        m_formatFlags = WebPDemuxGetI(m_demux, WEBP_FF_FORMAT_FLAGS);
        hasAnimation = (m_formatFlags & ANIMATION_FLAG);
        if (!hasAnimation)
            m_repetitionCount = cAnimationNone;
        else
            m_formatFlags &= ~ICCP_FLAG; // FIXME: Implement ICC profile support for animated images.
#if USE(QCMSLIB)
        if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile())
            m_hasColorProfile = true;
#endif
        if (!setSize(WebPDemuxGetI(m_demux, WEBP_FF_CANVAS_WIDTH), WebPDemuxGetI(m_demux, WEBP_FF_CANVAS_HEIGHT)))
            return setFailed();
    }

    ASSERT(ImageDecoder::isSizeAvailable());
    const size_t newFrameCount = WebPDemuxGetI(m_demux, WEBP_FF_FRAME_COUNT);
    if (hasAnimation && !m_haveReadAnimationParameters && newFrameCount) {
        // As we have parsed at least one frame (even if partially),
        // we must already have parsed the animation properties.
        // This is because ANIM chunk always precedes ANMF chunks.
        m_repetitionCount = WebPDemuxGetI(m_demux, WEBP_FF_LOOP_COUNT);
        ASSERT(m_repetitionCount == (m_repetitionCount & 0xffff)); // Loop count is always <= 16 bits.
        // |m_repetitionCount| is the total number of animation cycles to show,
        // with 0 meaning "infinite". But ImageSource::repetitionCount()
        // returns -1 for "infinite", and 0 and up for "show the animation one
        // cycle more than this value". By subtracting one here, we convert
        // both finite and infinite cases correctly.
        --m_repetitionCount;
        m_haveReadAnimationParameters = true;
    }

    // Populate metadata for any frames the demuxer discovered this pass.
    const size_t oldFrameCount = m_frameBufferCache.size();
    if (newFrameCount > oldFrameCount) {
        m_frameBufferCache.resize(newFrameCount);
        for (size_t i = oldFrameCount; i < newFrameCount; ++i) {
            m_frameBufferCache[i].setPremultiplyAlpha(m_premultiplyAlpha);
            if (!hasAnimation) {
                // A still image has exactly one frame, with no dependencies.
                ASSERT(!i);
                m_frameBufferCache[i].setRequiredPreviousFrameIndex(kNotFound);
                continue;
            }
            WebPIterator animatedFrame;
            WebPDemuxGetFrame(m_demux, i + 1, &animatedFrame); // 1-based frame numbers.
            ASSERT(animatedFrame.complete == 1);
            m_frameBufferCache[i].setDuration(animatedFrame.duration);
            m_frameBufferCache[i].setDisposalMethod(animatedFrame.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND ? ImageFrame::DisposeOverwriteBgcolor : ImageFrame::DisposeKeep);
            m_frameBufferCache[i].setAlphaBlendSource(animatedFrame.blend_method == WEBP_MUX_BLEND ? ImageFrame::BlendAtopPreviousFrame : ImageFrame::BlendAtopBgcolor);
            IntRect frameRect(animatedFrame.x_offset, animatedFrame.y_offset, animatedFrame.width, animatedFrame.height);
            // Make sure the frameRect doesn't extend outside the buffer.
            if (frameRect.maxX() > size().width())
                frameRect.setWidth(size().width() - animatedFrame.x_offset);
            if (frameRect.maxY() > size().height())
                frameRect.setHeight(size().height() - animatedFrame.y_offset);
            m_frameBufferCache[i].setOriginalFrameRect(frameRect);
            m_frameBufferCache[i].setRequiredPreviousFrameIndex(findRequiredPreviousFrame(i, !animatedFrame.has_alpha));
            WebPDemuxReleaseIterator(&animatedFrame);
        }
    }

    return true;
}
351
// Prepares the canvas-sized pixel buffer for frame |frameIndex|: either a
// fresh buffer (no dependency) or a copy of the required previous frame with
// that frame's disposal applied. Returns false, after marking the decoder
// failed, if buffer allocation fails.
bool WEBPImageDecoder::initFrameBuffer(size_t frameIndex)
{
    ImageFrame& buffer = m_frameBufferCache[frameIndex];
    if (buffer.status() != ImageFrame::FrameEmpty) // Already initialized.
        return true;

    const size_t requiredPreviousFrameIndex = buffer.requiredPreviousFrameIndex();
    if (requiredPreviousFrameIndex == kNotFound) {
        // This frame doesn't rely on any previous data.
        if (!buffer.setSize(size().width(), size().height()))
            return setFailed();
        // The area outside the frame rect starts out transparent.
        m_frameBackgroundHasAlpha = !buffer.originalFrameRect().contains(IntRect(IntPoint(), size()));
    } else {
        const ImageFrame& prevBuffer = m_frameBufferCache[requiredPreviousFrameIndex];
        ASSERT(prevBuffer.status() == ImageFrame::FrameComplete);

        // Preserve the last frame as the starting state for this frame.
        if (!buffer.copyBitmapData(prevBuffer))
            return setFailed();

        if (prevBuffer.disposalMethod() == ImageFrame::DisposeOverwriteBgcolor) {
            // We want to clear the previous frame to transparent, without
            // affecting pixels in the image outside of the frame.
            const IntRect& prevRect = prevBuffer.originalFrameRect();
            ASSERT(!prevRect.contains(IntRect(IntPoint(), size())));
            buffer.zeroFillFrameRect(prevRect);
        }

        m_frameBackgroundHasAlpha = prevBuffer.hasAlpha() || (prevBuffer.disposalMethod() == ImageFrame::DisposeOverwriteBgcolor);
    }

    buffer.setStatus(ImageFrame::FramePartial);
    // The buffer is transparent outside the decoded area while the image is loading.
    // The correct value of 'hasAlpha' for the frame will be set when it is fully decoded.
    buffer.setHasAlpha(true);
    return true;
}
389
// Frees cached frame buffers, keeping only the frame the caller wants (or
// the nearest complete ancestor it depends on). Returns the number of bytes
// freed, as reported by the base class.
size_t WEBPImageDecoder::clearCacheExceptFrame(size_t clearExceptFrame)
{
    // If |clearExceptFrame| has status FrameComplete, we preserve that frame.
    // Otherwise, we preserve a previous frame with status FrameComplete whose data is required
    // to decode |clearExceptFrame|, either in initFrameBuffer() or ApplyPostProcessing().
    // All other frames can be cleared.
    // Note: requiredPreviousFrameIndex() returns kNotFound for independent
    // frames, which fails the bounds check and terminates the walk.
    while ((clearExceptFrame < m_frameBufferCache.size()) && (m_frameBufferCache[clearExceptFrame].status() != ImageFrame::FrameComplete))
        clearExceptFrame = m_frameBufferCache[clearExceptFrame].requiredPreviousFrameIndex();

    return ImageDecoder::clearCacheExceptFrame(clearExceptFrame);
}
401
// Releases the pixel buffer of frame |frameIndex|. If that frame was only
// partially decoded, the incremental decoder state is reset too so the frame
// can be decoded again from scratch later.
void WEBPImageDecoder::clearFrameBuffer(size_t frameIndex)
{
    if (m_demux && m_demuxState >= WEBP_DEMUX_PARSED_HEADER && m_frameBufferCache[frameIndex].status() == ImageFrame::FramePartial) {
        // Clear the decoder state so that this partial frame can be decoded again when requested.
        clearDecoder();
    }
    ImageDecoder::clearFrameBuffer(frameIndex);
}
410
411 #if USE(QCMSLIB)
412
// Builds the qcms transform from the embedded ICC profile bytes to the output
// device profile, replacing any previous transform. On any failure the
// transform is simply left null (decoding proceeds without color correction).
void WEBPImageDecoder::createColorTransform(const char* data, size_t size)
{
    if (m_transform)
        qcms_transform_release(m_transform);
    m_transform = 0;

    qcms_profile* deviceProfile = ImageDecoder::qcmsOutputDeviceProfile();
    if (!deviceProfile)
        return;
    qcms_profile* inputProfile = qcms_profile_from_memory(data, size);
    if (!inputProfile)
        return;

    // We currently only support color profiles for RGB profiled images.
    ASSERT(icSigRgbData == qcms_profile_get_color_space(inputProfile));
    // The input image pixels are RGBA format.
    qcms_data_type format = QCMS_DATA_RGBA_8;
    // FIXME: Don't force perceptual intent if the image profile contains an intent.
    m_transform = qcms_transform_create(inputProfile, format, deviceProfile, QCMS_DATA_RGBA_8, QCMS_INTENT_PERCEPTUAL);

    qcms_profile_release(inputProfile);
}
435
// Locates the ICCP chunk via the demuxer, validates the profile, and (if
// acceptable) creates the color transform used during post-processing.
void WEBPImageDecoder::readColorProfile()
{
    WebPChunkIterator chunkIterator;
    if (!WebPDemuxGetChunk(m_demux, "ICCP", 1, &chunkIterator)) {
        WebPDemuxReleaseChunkIterator(&chunkIterator);
        return;
    }

    const char* profileData = reinterpret_cast<const char*>(chunkIterator.chunk.bytes);
    size_t profileSize = chunkIterator.chunk.size;

    // Only accept RGB color profiles from input class devices.
    bool ignoreProfile = false;
    if (profileSize < ImageDecoder::iccColorProfileHeaderLength)
        ignoreProfile = true;
    else if (!ImageDecoder::rgbColorProfile(profileData, profileSize))
        ignoreProfile = true;
    else if (!ImageDecoder::inputDeviceColorProfile(profileData, profileSize))
        ignoreProfile = true;

    if (!ignoreProfile)
        createColorTransform(profileData, profileSize);

    WebPDemuxReleaseChunkIterator(&chunkIterator);
}
461
462 #endif // USE(QCMSLIB)
463
// Post-processes the rows decoded since the last call (m_decodedHeight up to
// the decoder's current height): applies the qcms color transform if one is
// active, and alpha-blends newly decoded transparent pixels against the
// previous frame's canvas when the frame's blend mode requires it.
void WEBPImageDecoder::applyPostProcessing(size_t frameIndex)
{
    ImageFrame& buffer = m_frameBufferCache[frameIndex];
    int width;
    int decodedHeight;
    if (!WebPIDecGetRGB(m_decoder, &decodedHeight, &width, 0, 0))
        return; // See also https://bugs.webkit.org/show_bug.cgi?id=74062
    if (decodedHeight <= 0)
        return;

    const IntRect& frameRect = buffer.originalFrameRect();
    ASSERT_WITH_SECURITY_IMPLICATION(width == frameRect.width());
    ASSERT_WITH_SECURITY_IMPLICATION(decodedHeight <= frameRect.height());
    const int left = frameRect.x();
    const int top = frameRect.y();

#if USE(QCMSLIB)
    if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile()) {
        // Lazily read the ICC profile on first use.
        if (!m_haveReadProfile) {
            readColorProfile();
            m_haveReadProfile = true;
        }
        // Color-correct only the rows decoded since the last call, then
        // repack them through setRGBA (profile path decodes as MODE_RGBA).
        for (int y = m_decodedHeight; y < decodedHeight; ++y) {
            const int canvasY = top + y;
            uint8_t* row = reinterpret_cast<uint8_t*>(buffer.getAddr(left, canvasY));
            if (qcms_transform* transform = colorTransform())
                qcms_transform_data_type(transform, row, row, width, QCMS_OUTPUT_RGBX);
            uint8_t* pixel = row;
            for (int x = 0; x < width; ++x, pixel += 4) {
                const int canvasX = left + x;
                buffer.setRGBA(canvasX, canvasY, pixel[0], pixel[1], pixel[2], pixel[3]);
            }
        }
    }
#endif // USE(QCMSLIB)

    // During the decoding of current frame, we may have set some pixels to be transparent (i.e. alpha < 255).
    // However, the value of each of these pixels should have been determined by blending it against the value
    // of that pixel in the previous frame if alpha blend source was 'BlendAtopPreviousFrame'. So, we correct these
    // pixels based on disposal method of the previous frame and the previous frame buffer.
    // FIXME: This could be avoided if libwebp decoder had an API that used the previous required frame
    // to do the alpha-blending by itself.
    if ((m_formatFlags & ANIMATION_FLAG) && frameIndex && buffer.alphaBlendSource() == ImageFrame::BlendAtopPreviousFrame && buffer.requiredPreviousFrameIndex() != kNotFound) {
        // NOTE(review): this indexes frameIndex - 1 rather than
        // requiredPreviousFrameIndex(); presumably the two coincide for
        // blending frames — confirm against findRequiredPreviousFrame().
        ImageFrame& prevBuffer = m_frameBufferCache[frameIndex - 1];
        ASSERT(prevBuffer.status() == ImageFrame::FrameComplete);
        ImageFrame::DisposalMethod prevDisposalMethod = prevBuffer.disposalMethod();
        if (prevDisposalMethod == ImageFrame::DisposeKeep) { // Blend transparent pixels with pixels in previous canvas.
            for (int y = m_decodedHeight; y < decodedHeight; ++y) {
                m_blendFunction(buffer, prevBuffer, top + y, left, width);
            }
        } else if (prevDisposalMethod == ImageFrame::DisposeOverwriteBgcolor) {
            const IntRect& prevRect = prevBuffer.originalFrameRect();
            // We need to blend a transparent pixel with its value just after initFrame() call. That is:
            // * Blend with fully transparent pixel if it belongs to prevRect <-- This is a no-op.
            // * Blend with the pixel in the previous canvas otherwise <-- Needs alpha-blending.
            for (int y = m_decodedHeight; y < decodedHeight; ++y) {
                int canvasY = top + y;
                int left1, width1, left2, width2;
                findBlendRangeAtRow(frameRect, prevRect, canvasY, left1, width1, left2, width2);
                if (width1 > 0)
                    m_blendFunction(buffer, prevBuffer, canvasY, left1, width1);
                if (width2 > 0)
                    m_blendFunction(buffer, prevBuffer, canvasY, left2, width2);
            }
        }
    }

    m_decodedHeight = decodedHeight;
    buffer.setPixelsChanged(true);
}
534
// Incrementally decodes one frame's bitstream fragment into its buffer.
// |onlySize| returns after the bitstream features (dimensions) are known.
// Returns true when the frame finished decoding; false when more data is
// needed or on failure (callers check failed() to tell these apart).
bool WEBPImageDecoder::decode(const uint8_t* dataBytes, size_t dataSize, bool onlySize, size_t frameIndex)
{
    if (failed())
        return false;

    if (!ImageDecoder::isSizeAvailable()) {
        static const size_t imageHeaderSize = 30;
        if (dataSize < imageHeaderSize)
            return false;
        int width, height;
        WebPBitstreamFeatures features;
        if (WebPGetFeatures(dataBytes, dataSize, &features) != VP8_STATUS_OK)
            return setFailed();
        width = features.width;
        height = features.height;
        m_formatFlags = features.has_alpha ? ALPHA_FLAG : 0;
        if (!setSize(width, height))
            return setFailed();
    }

    ASSERT(ImageDecoder::isSizeAvailable());
    if (onlySize)
        return true;

    ASSERT(m_frameBufferCache.size() > frameIndex);
    ImageFrame& buffer = m_frameBufferCache[frameIndex];
    ASSERT(buffer.status() != ImageFrame::FrameComplete);

    if (buffer.status() == ImageFrame::FrameEmpty) {
        // Non-animated path: allocate a full-canvas buffer here (animated
        // frames are initialized earlier by initFrameBuffer()).
        if (!buffer.setSize(size().width(), size().height()))
            return setFailed();
        buffer.setStatus(ImageFrame::FramePartial);
        // The buffer is transparent outside the decoded area while the image is loading.
        // The correct value of 'hasAlpha' for the frame will be set when it is fully decoded.
        buffer.setHasAlpha(true);
        buffer.setOriginalFrameRect(IntRect(IntPoint(), size()));
    }

    const IntRect& frameRect = buffer.originalFrameRect();
    if (!m_decoder) {
        // Pick the libwebp output colorspace to match Skia's pixel layout
        // and the premultiply setting.
        WEBP_CSP_MODE mode = outputMode(m_formatFlags & ALPHA_FLAG);
        if (!m_premultiplyAlpha)
            mode = outputMode(false);
#if USE(QCMSLIB)
        if ((m_formatFlags & ICCP_FLAG) && !ignoresGammaAndColorProfile())
            mode = MODE_RGBA; // Decode to RGBA for input to libqcms.
#endif
        // Decode straight into the ImageFrame's pixels (external memory);
        // the stride is the full canvas width so frame offsets line up.
        WebPInitDecBuffer(&m_decoderBuffer);
        m_decoderBuffer.colorspace = mode;
        m_decoderBuffer.u.RGBA.stride = size().width() * sizeof(ImageFrame::PixelData);
        m_decoderBuffer.u.RGBA.size = m_decoderBuffer.u.RGBA.stride * frameRect.height();
        m_decoderBuffer.is_external_memory = 1;
        m_decoder = WebPINewDecoder(&m_decoderBuffer);
        if (!m_decoder)
            return setFailed();
    }

    // Point the decoder at this frame's top-left pixel within the canvas.
    m_decoderBuffer.u.RGBA.rgba = reinterpret_cast<uint8_t*>(buffer.getAddr(frameRect.x(), frameRect.y()));

    switch (WebPIUpdate(m_decoder, dataBytes, dataSize)) {
    case VP8_STATUS_OK:
        applyPostProcessing(frameIndex);
        buffer.setHasAlpha((m_formatFlags & ALPHA_FLAG) || m_frameBackgroundHasAlpha);
        buffer.setStatus(ImageFrame::FrameComplete);
        clearDecoder();
        return true;
    case VP8_STATUS_SUSPENDED:
        // Decoder ran out of input: expected while streaming; fatal if all
        // data has already arrived.
        if (!isAllDataReceived() && !frameIsCompleteAtIndex(frameIndex)) {
            applyPostProcessing(frameIndex);
            return false;
        }
        // FALLTHROUGH
    default:
        clear();
        return setFailed();
    }
}
612
613 } // namespace WebCore
614