// Copyright 2015 Google Inc. All Rights Reserved.
//
// Use of this source code is governed by a BSD-style license
// that can be found in the COPYING file in the root of the source
// tree. An additional intellectual property rights grant can be found
// in the file PATENTS. All contributing project authors may
// be found in the AUTHORS file in the root of the source tree.
// -----------------------------------------------------------------------------
//
// AnimDecoder implementation.
//
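// Typical usage of the animation decoding API implemented here (an
// illustrative sketch; see src/webp/demux.h for the authoritative
// documentation of these entry points):
//
//   WebPAnimDecoderOptions dec_options;
//   WebPAnimDecoderOptionsInit(&dec_options);
//   // Tune 'dec_options' as needed.
//   WebPAnimDecoder* dec = WebPAnimDecoderNew(webp_data, &dec_options);
//   WebPAnimInfo anim_info;
//   WebPAnimDecoderGetInfo(dec, &anim_info);
//   for (uint32_t i = 0; i < anim_info.loop_count; ++i) {
//     while (WebPAnimDecoderHasMoreFrames(dec)) {
//       uint8_t* buf;
//       int timestamp;
//       WebPAnimDecoderGetNext(dec, &buf, &timestamp);
//       // ... (Render 'buf' based on 'timestamp'; do not free it, it is
//       //      owned by 'dec').
//     }
//     WebPAnimDecoderReset(dec);
//   }
//   WebPAnimDecoderDelete(dec);
//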

#ifdef HAVE_CONFIG_H
#include "src/webp/config.h"
#endif

#include <assert.h>
#include <string.h>

#include "src/utils/utils.h"
#include "src/webp/decode.h"
#include "src/webp/demux.h"

#define NUM_CHANNELS 4

typedef void (*BlendRowFunc)(uint32_t* const, const uint32_t* const, int);
static void BlendPixelRowNonPremult(uint32_t* const src,
                                    const uint32_t* const dst, int num_pixels);
static void BlendPixelRowPremult(uint32_t* const src, const uint32_t* const dst,
                                 int num_pixels);

struct WebPAnimDecoder {
  WebPDemuxer* demux_;             // Demuxer created from given WebP bitstream.
  WebPDecoderConfig config_;       // Decoder config.
  // Note: we use a pointer to a function blending multiple pixels at a time to
  // allow possible inlining of the per-pixel blending function.
  BlendRowFunc blend_func_;        // Pointer to the chosen blend row function.
  WebPAnimInfo info_;              // Global info about the animation.
  uint8_t* curr_frame_;            // Current canvas (not disposed).
  uint8_t* prev_frame_disposed_;   // Previous canvas (properly disposed).
  int prev_frame_timestamp_;       // Previous frame timestamp (milliseconds).
  WebPIterator prev_iter_;         // Iterator object for previous frame.
  int prev_frame_was_keyframe_;    // True if previous frame was a keyframe.
  int next_frame_;                 // Index of the next frame to be decoded
                                   // (starting from 1).
};

static void DefaultDecoderOptions(WebPAnimDecoderOptions* const dec_options) {
  dec_options->color_mode = MODE_RGBA;
  dec_options->use_threads = 0;
}

int WebPAnimDecoderOptionsInitInternal(WebPAnimDecoderOptions* dec_options,
                                       int abi_version) {
  if (dec_options == NULL ||
      WEBP_ABI_IS_INCOMPATIBLE(abi_version, WEBP_DEMUX_ABI_VERSION)) {
    return 0;
  }
  DefaultDecoderOptions(dec_options);
  return 1;
}

static int ApplyDecoderOptions(const WebPAnimDecoderOptions* const dec_options,
                               WebPAnimDecoder* const dec) {
  WEBP_CSP_MODE mode;
  WebPDecoderConfig* config = &dec->config_;
  assert(dec_options != NULL);

  mode = dec_options->color_mode;
  if (mode != MODE_RGBA && mode != MODE_BGRA &&
      mode != MODE_rgbA && mode != MODE_bgrA) {
    return 0;
  }
  dec->blend_func_ = (mode == MODE_RGBA || mode == MODE_BGRA)
                         ? &BlendPixelRowNonPremult
                         : &BlendPixelRowPremult;
  WebPInitDecoderConfig(config);
  config->output.colorspace = mode;
  config->output.is_external_memory = 1;
  config->options.use_threads = dec_options->use_threads;
  // Note: config->output.u.RGBA is set at the time of decoding each frame.
  return 1;
}

WebPAnimDecoder* WebPAnimDecoderNewInternal(
    const WebPData* webp_data, const WebPAnimDecoderOptions* dec_options,
    int abi_version) {
  WebPAnimDecoderOptions options;
  WebPAnimDecoder* dec = NULL;
  WebPBitstreamFeatures features;
  if (webp_data == NULL ||
      WEBP_ABI_IS_INCOMPATIBLE(abi_version, WEBP_DEMUX_ABI_VERSION)) {
    return NULL;
  }

  // Validate the bitstream before doing expensive allocations. The demuxer may
  // be more tolerant than the decoder.
  if (WebPGetFeatures(webp_data->bytes, webp_data->size, &features) !=
      VP8_STATUS_OK) {
    return NULL;
  }

  // Note: calloc() so that the pointer members are initialized to NULL.
  dec = (WebPAnimDecoder*)WebPSafeCalloc(1ULL, sizeof(*dec));
  if (dec == NULL) goto Error;

  if (dec_options != NULL) {
    options = *dec_options;
  } else {
    DefaultDecoderOptions(&options);
  }
  if (!ApplyDecoderOptions(&options, dec)) goto Error;

  dec->demux_ = WebPDemux(webp_data);
  if (dec->demux_ == NULL) goto Error;

  dec->info_.canvas_width = WebPDemuxGetI(dec->demux_, WEBP_FF_CANVAS_WIDTH);
  dec->info_.canvas_height = WebPDemuxGetI(dec->demux_, WEBP_FF_CANVAS_HEIGHT);
  dec->info_.loop_count = WebPDemuxGetI(dec->demux_, WEBP_FF_LOOP_COUNT);
  dec->info_.bgcolor = WebPDemuxGetI(dec->demux_, WEBP_FF_BACKGROUND_COLOR);
  dec->info_.frame_count = WebPDemuxGetI(dec->demux_, WEBP_FF_FRAME_COUNT);

  // Note: calloc() because we fill the frame with zeroes as well.
  dec->curr_frame_ = (uint8_t*)WebPSafeCalloc(
      dec->info_.canvas_width * NUM_CHANNELS, dec->info_.canvas_height);
  if (dec->curr_frame_ == NULL) goto Error;
  dec->prev_frame_disposed_ = (uint8_t*)WebPSafeCalloc(
      dec->info_.canvas_width * NUM_CHANNELS, dec->info_.canvas_height);
  if (dec->prev_frame_disposed_ == NULL) goto Error;

  WebPAnimDecoderReset(dec);
  return dec;

 Error:
  WebPAnimDecoderDelete(dec);
  return NULL;
}

int WebPAnimDecoderGetInfo(const WebPAnimDecoder* dec, WebPAnimInfo* info) {
  if (dec == NULL || info == NULL) return 0;
  *info = dec->info_;
  return 1;
}

// Returns true if the frame covers the full canvas.
static int IsFullFrame(int width, int height, int canvas_width,
                       int canvas_height) {
  return (width == canvas_width && height == canvas_height);
}

// Clear the canvas to transparent.
static int ZeroFillCanvas(uint8_t* buf, uint32_t canvas_width,
                          uint32_t canvas_height) {
  const uint64_t size =
      (uint64_t)canvas_width * canvas_height * NUM_CHANNELS * sizeof(*buf);
  if (!CheckSizeOverflow(size)) return 0;
  memset(buf, 0, (size_t)size);
  return 1;
}

// Clear the given frame rectangle to transparent.
static void ZeroFillFrameRect(uint8_t* buf, int buf_stride, int x_offset,
                              int y_offset, int width, int height) {
  int j;
  assert(width * NUM_CHANNELS <= buf_stride);
  buf += y_offset * buf_stride + x_offset * NUM_CHANNELS;
  for (j = 0; j < height; ++j) {
    memset(buf, 0, width * NUM_CHANNELS);
    buf += buf_stride;
  }
}

// Copy width * height pixels from 'src' to 'dst'.
static int CopyCanvas(const uint8_t* src, uint8_t* dst,
                      uint32_t width, uint32_t height) {
  const uint64_t size = (uint64_t)width * height * NUM_CHANNELS;
  if (!CheckSizeOverflow(size)) return 0;
  assert(src != NULL && dst != NULL);
  memcpy(dst, src, (size_t)size);
  return 1;
}

// Returns true if the current frame is a key-frame.
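// That is the case when:
//   * it is the very first frame, or
//   * it covers the full canvas and does not depend on the previous canvas
//     (it has no alpha or uses WEBP_MUX_NO_BLEND), or
//   * the previous frame was disposed to background and either covered the
//     full canvas or was itself a key-frame, which leaves the canvas fully
//     transparent before the current frame is drawn.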
static int IsKeyFrame(const WebPIterator* const curr,
                      const WebPIterator* const prev,
                      int prev_frame_was_key_frame,
                      int canvas_width, int canvas_height) {
  if (curr->frame_num == 1) {
    return 1;
  } else if ((!curr->has_alpha || curr->blend_method == WEBP_MUX_NO_BLEND) &&
             IsFullFrame(curr->width, curr->height,
                         canvas_width, canvas_height)) {
    return 1;
  } else {
    return (prev->dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) &&
           (IsFullFrame(prev->width, prev->height, canvas_width,
                        canvas_height) ||
            prev_frame_was_key_frame);
  }
}

// Blend a single channel of 'src' over 'dst', given their alpha channel
// values. 'src' and 'dst' are assumed to be NOT pre-multiplied by alpha.
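// Note: 'scale' is expected to be (1 << 24) / blended-alpha, so that the
// '>> 24' below amounts to a division by the blended alpha; 'shift' selects
// the channel within the 32-bit pixel (bits 0..7, 8..15 or 16..23).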
static uint8_t BlendChannelNonPremult(uint32_t src, uint8_t src_a,
                                      uint32_t dst, uint8_t dst_a,
                                      uint32_t scale, int shift) {
  const uint8_t src_channel = (src >> shift) & 0xff;
  const uint8_t dst_channel = (dst >> shift) & 0xff;
  const uint32_t blend_unscaled = src_channel * src_a + dst_channel * dst_a;
  assert(blend_unscaled < (1ULL << 32) / scale);
  return (blend_unscaled * scale) >> 24;
}

// Blend 'src' over 'dst' assuming they are NOT pre-multiplied by alpha.
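// This is the standard 'over' operator for straight (non-premultiplied) alpha:
//   blend_a = src_a + dst_a * (1 - src_a / 255)
//   blend_c = (src_c * src_a + dst_c * dst_a * (1 - src_a / 255)) / blend_a
// computed with integer arithmetic: (dst_a * (256 - src_a)) >> 8 approximates
// dst_a * (1 - src_a / 255), and the division by 'blend_a' is replaced by a
// multiplication by 'scale' = (1 << 24) / blend_a followed by a '>> 24'.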
static uint32_t BlendPixelNonPremult(uint32_t src, uint32_t dst) {
  const uint8_t src_a = (src >> 24) & 0xff;

  if (src_a == 0) {
    return dst;
  } else {
    const uint8_t dst_a = (dst >> 24) & 0xff;
    // This is the approximate integer arithmetic for the actual formula:
    // dst_factor_a = (dst_a * (255 - src_a)) / 255.
    const uint8_t dst_factor_a = (dst_a * (256 - src_a)) >> 8;
    const uint8_t blend_a = src_a + dst_factor_a;
    const uint32_t scale = (1UL << 24) / blend_a;

    const uint8_t blend_r =
        BlendChannelNonPremult(src, src_a, dst, dst_factor_a, scale, 0);
    const uint8_t blend_g =
        BlendChannelNonPremult(src, src_a, dst, dst_factor_a, scale, 8);
    const uint8_t blend_b =
        BlendChannelNonPremult(src, src_a, dst, dst_factor_a, scale, 16);
    assert(src_a + dst_factor_a < 256);

    return (blend_r << 0) |
           (blend_g << 8) |
           (blend_b << 16) |
           ((uint32_t)blend_a << 24);
  }
}

// Blend 'num_pixels' in 'src' over 'dst' assuming they are NOT pre-multiplied
// by alpha.
static void BlendPixelRowNonPremult(uint32_t* const src,
                                    const uint32_t* const dst, int num_pixels) {
  int i;
  for (i = 0; i < num_pixels; ++i) {
    const uint8_t src_alpha = (src[i] >> 24) & 0xff;
    if (src_alpha != 0xff) {
      src[i] = BlendPixelNonPremult(src[i], dst[i]);
    }
  }
}

// Individually multiply each channel in 'pix' by 'scale'.
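// The mask 0x00FF00FF isolates the two channels stored in bits 0..7 and
// 16..23, so both can be scaled with a single 32-bit multiplication; since
// 'scale' does not exceed 256 here, each product fits in 16 bits and cannot
// spill into the neighboring channel. The other two channels are handled the
// same way after a '>> 8'; keeping only the high byte of each of their
// products ('& ~mask') puts them back into bits 8..15 and 24..31.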
static WEBP_INLINE uint32_t ChannelwiseMultiply(uint32_t pix, uint32_t scale) {
  uint32_t mask = 0x00FF00FF;
  uint32_t rb = ((pix & mask) * scale) >> 8;
  uint32_t ag = ((pix >> 8) & mask) * scale;
  return (rb & mask) | (ag & ~mask);
}

// Blend 'src' over 'dst' assuming they are pre-multiplied by alpha.
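// With pre-multiplied alpha, the 'over' operator reduces to
//   dst' = src + dst * (1 - src_a / 255),
// where scaling 'dst' by (256 - src_a) / 256 (via ChannelwiseMultiply) is a
// cheap approximation of the (255 - src_a) / 255 factor.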
static uint32_t BlendPixelPremult(uint32_t src, uint32_t dst) {
  const uint8_t src_a = (src >> 24) & 0xff;
  return src + ChannelwiseMultiply(dst, 256 - src_a);
}

// Blend 'num_pixels' in 'src' over 'dst' assuming they are pre-multiplied by
// alpha.
static void BlendPixelRowPremult(uint32_t* const src, const uint32_t* const dst,
                                 int num_pixels) {
  int i;
  for (i = 0; i < num_pixels; ++i) {
    const uint8_t src_alpha = (src[i] >> 24) & 0xff;
    if (src_alpha != 0xff) {
      src[i] = BlendPixelPremult(src[i], dst[i]);
    }
  }
}

// Returns two ranges (<left, width> pairs) at row 'canvas_y' that belong to
// 'src' but not 'dst'. A range is empty if its width is 0.
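// For example, if on this row 'src' covers columns [10, 50) and 'dst' covers
// columns [20, 40), the returned ranges are <10, 10> and <40, 10>. If the row
// lies outside 'dst' or the horizontal extents do not overlap, the whole of
// 'src' is returned as the first range and the second one is empty.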
static void FindBlendRangeAtRow(const WebPIterator* const src,
                                const WebPIterator* const dst, int canvas_y,
                                int* const left1, int* const width1,
                                int* const left2, int* const width2) {
  const int src_max_x = src->x_offset + src->width;
  const int dst_max_x = dst->x_offset + dst->width;
  const int dst_max_y = dst->y_offset + dst->height;
  assert(canvas_y >= src->y_offset && canvas_y < (src->y_offset + src->height));
  *left1 = -1;
  *width1 = 0;
  *left2 = -1;
  *width2 = 0;

  if (canvas_y < dst->y_offset || canvas_y >= dst_max_y ||
      src->x_offset >= dst_max_x || src_max_x <= dst->x_offset) {
    *left1 = src->x_offset;
    *width1 = src->width;
    return;
  }

  if (src->x_offset < dst->x_offset) {
    *left1 = src->x_offset;
    *width1 = dst->x_offset - src->x_offset;
  }

  if (src_max_x > dst_max_x) {
    *left2 = dst_max_x;
    *width2 = src_max_x - dst_max_x;
  }
}

int WebPAnimDecoderGetNext(WebPAnimDecoder* dec,
                           uint8_t** buf_ptr, int* timestamp_ptr) {
  WebPIterator iter;
  uint32_t width;
  uint32_t height;
  int is_key_frame;
  int timestamp;
  BlendRowFunc blend_row;

  if (dec == NULL || buf_ptr == NULL || timestamp_ptr == NULL) return 0;
  if (!WebPAnimDecoderHasMoreFrames(dec)) return 0;

  width = dec->info_.canvas_width;
  height = dec->info_.canvas_height;
  blend_row = dec->blend_func_;

  // Get compressed frame.
  if (!WebPDemuxGetFrame(dec->demux_, dec->next_frame_, &iter)) {
    return 0;
  }
  timestamp = dec->prev_frame_timestamp_ + iter.duration;

  // Initialize.
  is_key_frame = IsKeyFrame(&iter, &dec->prev_iter_,
                            dec->prev_frame_was_keyframe_, width, height);
  if (is_key_frame) {
    if (!ZeroFillCanvas(dec->curr_frame_, width, height)) {
      goto Error;
    }
  } else {
    if (!CopyCanvas(dec->prev_frame_disposed_, dec->curr_frame_,
                    width, height)) {
      goto Error;
    }
  }

  // Decode.
  {
    const uint8_t* in = iter.fragment.bytes;
    const size_t in_size = iter.fragment.size;
    const uint32_t stride = width * NUM_CHANNELS;  // at most 25 + 2 bits
    const uint64_t out_offset = (uint64_t)iter.y_offset * stride +
                                (uint64_t)iter.x_offset * NUM_CHANNELS;  // 53b
    const uint64_t size = (uint64_t)iter.height * stride;  // at most 25 + 27b
    WebPDecoderConfig* const config = &dec->config_;
    WebPRGBABuffer* const buf = &config->output.u.RGBA;
    if ((size_t)size != size) goto Error;
    buf->stride = (int)stride;
    buf->size = (size_t)size;
    buf->rgba = dec->curr_frame_ + out_offset;

    if (WebPDecode(in, in_size, config) != VP8_STATUS_OK) {
      goto Error;
    }
  }

  // During the decoding of the current frame, some pixels may have been left
  // transparent or partially transparent (i.e. alpha < 255). If the blend
  // method is WEBP_MUX_BLEND, the value of each such pixel must be obtained by
  // blending it against the corresponding pixel in the previous canvas.
  if (iter.frame_num > 1 && iter.blend_method == WEBP_MUX_BLEND &&
      !is_key_frame) {
    if (dec->prev_iter_.dispose_method == WEBP_MUX_DISPOSE_NONE) {
      int y;
      // Blend transparent pixels with pixels in previous canvas.
      for (y = 0; y < iter.height; ++y) {
        const size_t offset =
            (iter.y_offset + y) * width + iter.x_offset;
        blend_row((uint32_t*)dec->curr_frame_ + offset,
                  (uint32_t*)dec->prev_frame_disposed_ + offset, iter.width);
      }
    } else {
      int y;
      assert(dec->prev_iter_.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND);
      // We need to blend a transparent pixel with its value just after
      // initialization. That is, blend it with:
      // * Fully transparent pixel if it belongs to prevRect <-- No-op.
      // * The pixel in the previous canvas otherwise <-- Needs alpha-blending.
      for (y = 0; y < iter.height; ++y) {
        const int canvas_y = iter.y_offset + y;
        int left1, width1, left2, width2;
        FindBlendRangeAtRow(&iter, &dec->prev_iter_, canvas_y, &left1, &width1,
                            &left2, &width2);
        if (width1 > 0) {
          const size_t offset1 = canvas_y * width + left1;
          blend_row((uint32_t*)dec->curr_frame_ + offset1,
                    (uint32_t*)dec->prev_frame_disposed_ + offset1, width1);
        }
        if (width2 > 0) {
          const size_t offset2 = canvas_y * width + left2;
          blend_row((uint32_t*)dec->curr_frame_ + offset2,
                    (uint32_t*)dec->prev_frame_disposed_ + offset2, width2);
        }
      }
    }
  }

  // Update info of the previous frame and dispose it for the next iteration.
  dec->prev_frame_timestamp_ = timestamp;
  WebPDemuxReleaseIterator(&dec->prev_iter_);
  dec->prev_iter_ = iter;
  dec->prev_frame_was_keyframe_ = is_key_frame;
  CopyCanvas(dec->curr_frame_, dec->prev_frame_disposed_, width, height);
  if (dec->prev_iter_.dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
    ZeroFillFrameRect(dec->prev_frame_disposed_, width * NUM_CHANNELS,
                      dec->prev_iter_.x_offset, dec->prev_iter_.y_offset,
                      dec->prev_iter_.width, dec->prev_iter_.height);
  }
  ++dec->next_frame_;

  // All OK, fill in the values.
  *buf_ptr = dec->curr_frame_;
  *timestamp_ptr = timestamp;
  return 1;

 Error:
  WebPDemuxReleaseIterator(&iter);
  return 0;
}

int WebPAnimDecoderHasMoreFrames(const WebPAnimDecoder* dec) {
  if (dec == NULL) return 0;
  return (dec->next_frame_ <= (int)dec->info_.frame_count);
}

void WebPAnimDecoderReset(WebPAnimDecoder* dec) {
  if (dec != NULL) {
    dec->prev_frame_timestamp_ = 0;
    WebPDemuxReleaseIterator(&dec->prev_iter_);
    memset(&dec->prev_iter_, 0, sizeof(dec->prev_iter_));
    dec->prev_frame_was_keyframe_ = 0;
    dec->next_frame_ = 1;
  }
}

const WebPDemuxer* WebPAnimDecoderGetDemuxer(const WebPAnimDecoder* dec) {
  if (dec == NULL) return NULL;
  return dec->demux_;
}

void WebPAnimDecoderDelete(WebPAnimDecoder* dec) {
  if (dec != NULL) {
    WebPDemuxReleaseIterator(&dec->prev_iter_);
    WebPDemuxDelete(dec->demux_);
    WebPSafeFree(dec->curr_frame_);
    WebPSafeFree(dec->prev_frame_disposed_);
    WebPSafeFree(dec);
  }
}