1 // Copyright 2014 Google Inc. All Rights Reserved.
2 //
3 // Use of this source code is governed by a BSD-style license
4 // that can be found in the COPYING file in the root of the source
5 // tree. An additional intellectual property rights grant can be found
6 // in the file PATENTS. All contributing project authors may
7 // be found in the AUTHORS file in the root of the source tree.
8 // -----------------------------------------------------------------------------
9 //
10 // AnimEncoder implementation.
11 //
12
13 #include <assert.h>
14 #include <limits.h>
15 #include <math.h> // for pow()
16 #include <stdio.h>
17 #include <stdlib.h> // for abs()
18
19 #include "src/mux/animi.h"
20 #include "src/utils/utils.h"
21 #include "src/webp/decode.h"
22 #include "src/webp/encode.h"
23 #include "src/webp/format_constants.h"
24 #include "src/webp/mux.h"
25
26 #if defined(_MSC_VER) && _MSC_VER < 1900
27 #define snprintf _snprintf
28 #endif
29
30 #define ERROR_STR_MAX_LENGTH 100
31
32 //------------------------------------------------------------------------------
33 // Internal structs.
34
35 // Stores frame rectangle dimensions.
36 typedef struct {
37 int x_offset_, y_offset_, width_, height_;
38 } FrameRectangle;
39
40 // Used to store two candidates of encoded data for an animation frame. One of
41 // the two will be chosen later.
42 typedef struct {
43 WebPMuxFrameInfo sub_frame_; // Encoded frame rectangle.
44 WebPMuxFrameInfo key_frame_; // Encoded frame if it is a key-frame.
45 int is_key_frame_; // True if 'key_frame' has been chosen.
46 } EncodedFrame;
47
48 struct WebPAnimEncoder {
49 const int canvas_width_; // Canvas width.
50 const int canvas_height_; // Canvas height.
51 const WebPAnimEncoderOptions options_; // Global encoding options.
52
53 FrameRectangle prev_rect_; // Previous WebP frame rectangle.
54 WebPConfig last_config_; // Cached in case a re-encode is needed.
55 WebPConfig last_config_reversed_; // If 'last_config_' uses lossless, then
56 // this config uses lossy and vice versa;
57 // only valid if 'options_.allow_mixed'
58 // is true.
59
60 WebPPicture* curr_canvas_; // Only pointer; we don't own memory.
61
62 // Canvas buffers.
63 WebPPicture curr_canvas_copy_; // Possibly modified current canvas.
64 int curr_canvas_copy_modified_; // True if pixels in 'curr_canvas_copy_'
65 // differ from those in 'curr_canvas_'.
66
67 WebPPicture prev_canvas_; // Previous canvas.
68 WebPPicture prev_canvas_disposed_; // Previous canvas disposed to background.
69
70 // Encoded data.
71 EncodedFrame* encoded_frames_; // Array of encoded frames.
72 size_t size_; // Number of allocated frames.
73 size_t start_; // Frame start index.
74 size_t count_; // Number of valid frames.
75 size_t flush_count_; // If >0, 'flush_count' frames starting from
76 // 'start' are ready to be added to mux.
77
78 // key-frame related.
79 int64_t best_delta_; // min(canvas size - frame size) over the frames.
80 // Can be negative in certain cases due to
81 // transparent pixels in a frame.
82 int keyframe_; // Index of selected key-frame relative to 'start_'.
83 int count_since_key_frame_; // Frames seen since the last key-frame.
84
85 int first_timestamp_; // Timestamp of the first frame.
86 int prev_timestamp_; // Timestamp of the last added frame.
87 int prev_candidate_undecided_; // True if it's not yet decided if previous
88 // frame would be a sub-frame or a key-frame.
89
90 // Misc.
91 int is_first_frame_; // True if first frame is yet to be added/being added.
92 int got_null_frame_; // True if WebPAnimEncoderAdd() has already been called
93 // with a NULL frame.
94
95 size_t in_frame_count_; // Number of input frames processed so far.
96 size_t out_frame_count_; // Number of frames added to mux so far. This may be
97 // different from 'in_frame_count_' due to merging.
98
99 WebPMux* mux_; // Muxer to assemble the WebP bitstream.
100 char error_str_[ERROR_STR_MAX_LENGTH]; // Error string. Empty if no error.
101 };
102
103 // -----------------------------------------------------------------------------
104 // Life of WebPAnimEncoder object.
105
106 #define DELTA_INFINITY (1ULL << 32)
107 #define KEYFRAME_NONE (-1)
108
109 // Reset the counters in the WebPAnimEncoder.
static void ResetCounters(WebPAnimEncoder* const enc) {
111 enc->start_ = 0;
112 enc->count_ = 0;
113 enc->flush_count_ = 0;
114 enc->best_delta_ = DELTA_INFINITY;
115 enc->keyframe_ = KEYFRAME_NONE;
116 }
117
static void DisableKeyframes(WebPAnimEncoderOptions* const enc_options) {
119 enc_options->kmax = INT_MAX;
120 enc_options->kmin = enc_options->kmax - 1;
121 }
122
123 #define MAX_CACHED_FRAMES 30
124
static void SanitizeEncoderOptions(WebPAnimEncoderOptions* const enc_options) {
126 int print_warning = enc_options->verbose;
127
128 if (enc_options->minimize_size) {
129 DisableKeyframes(enc_options);
130 }
131
132 if (enc_options->kmax == 1) { // All frames will be key-frames.
133 enc_options->kmin = 0;
134 enc_options->kmax = 0;
135 return;
136 } else if (enc_options->kmax <= 0) {
137 DisableKeyframes(enc_options);
138 print_warning = 0;
139 }
140
141 if (enc_options->kmin >= enc_options->kmax) {
142 enc_options->kmin = enc_options->kmax - 1;
143 if (print_warning) {
144 fprintf(stderr, "WARNING: Setting kmin = %d, so that kmin < kmax.\n",
145 enc_options->kmin);
146 }
147 } else {
148 const int kmin_limit = enc_options->kmax / 2 + 1;
149 if (enc_options->kmin < kmin_limit && kmin_limit < enc_options->kmax) {
150 // This ensures that enc.keyframe + kmin >= kmax is always true. So, we
151 // can flush all the frames in the 'count_since_key_frame == kmax' case.
152 enc_options->kmin = kmin_limit;
153 if (print_warning) {
154 fprintf(stderr,
155 "WARNING: Setting kmin = %d, so that kmin >= kmax / 2 + 1.\n",
156 enc_options->kmin);
157 }
158 }
159 }
160 // Limit the max number of frames that are allocated.
161 if (enc_options->kmax - enc_options->kmin > MAX_CACHED_FRAMES) {
162 enc_options->kmin = enc_options->kmax - MAX_CACHED_FRAMES;
163 if (print_warning) {
164 fprintf(stderr,
165 "WARNING: Setting kmin = %d, so that kmax - kmin <= %d.\n",
166 enc_options->kmin, MAX_CACHED_FRAMES);
167 }
168 }
169 assert(enc_options->kmin < enc_options->kmax);
170 }
171
172 #undef MAX_CACHED_FRAMES
173
static void DefaultEncoderOptions(WebPAnimEncoderOptions* const enc_options) {
175 enc_options->anim_params.loop_count = 0;
176 enc_options->anim_params.bgcolor = 0xffffffff; // White.
177 enc_options->minimize_size = 0;
178 DisableKeyframes(enc_options);
179 enc_options->allow_mixed = 0;
180 enc_options->verbose = 0;
181 }
182
int WebPAnimEncoderOptionsInitInternal(WebPAnimEncoderOptions* enc_options,
184 int abi_version) {
185 if (enc_options == NULL ||
186 WEBP_ABI_IS_INCOMPATIBLE(abi_version, WEBP_MUX_ABI_VERSION)) {
187 return 0;
188 }
189 DefaultEncoderOptions(enc_options);
190 return 1;
191 }
192
// This starting value is better suited to WebPCleanupTransparentAreaLossless().
194 #define TRANSPARENT_COLOR 0x00000000
195
static void ClearRectangle(WebPPicture* const picture,
197 int left, int top, int width, int height) {
198 int j;
199 for (j = top; j < top + height; ++j) {
200 uint32_t* const dst = picture->argb + j * picture->argb_stride;
201 int i;
202 for (i = left; i < left + width; ++i) {
203 dst[i] = TRANSPARENT_COLOR;
204 }
205 }
206 }
207
static void WebPUtilClearPic(WebPPicture* const picture,
209 const FrameRectangle* const rect) {
210 if (rect != NULL) {
211 ClearRectangle(picture, rect->x_offset_, rect->y_offset_,
212 rect->width_, rect->height_);
213 } else {
214 ClearRectangle(picture, 0, 0, picture->width, picture->height);
215 }
216 }
217
static void MarkNoError(WebPAnimEncoder* const enc) {
219 enc->error_str_[0] = '\0'; // Empty string.
220 }
221
static void MarkError(WebPAnimEncoder* const enc, const char* str) {
223 if (snprintf(enc->error_str_, ERROR_STR_MAX_LENGTH, "%s.", str) < 0) {
224 assert(0); // FIX ME!
225 }
226 }
227
static void MarkError2(WebPAnimEncoder* const enc,
229 const char* str, int error_code) {
230 if (snprintf(enc->error_str_, ERROR_STR_MAX_LENGTH, "%s: %d.", str,
231 error_code) < 0) {
232 assert(0); // FIX ME!
233 }
234 }
235
WebPAnimEncoder* WebPAnimEncoderNewInternal(
237 int width, int height, const WebPAnimEncoderOptions* enc_options,
238 int abi_version) {
239 WebPAnimEncoder* enc;
240
241 if (WEBP_ABI_IS_INCOMPATIBLE(abi_version, WEBP_MUX_ABI_VERSION)) {
242 return NULL;
243 }
244 if (width <= 0 || height <= 0 ||
245 (width * (uint64_t)height) >= MAX_IMAGE_AREA) {
246 return NULL;
247 }
248
249 enc = (WebPAnimEncoder*)WebPSafeCalloc(1, sizeof(*enc));
250 if (enc == NULL) return NULL;
251 MarkNoError(enc);
252
253 // Dimensions and options.
254 *(int*)&enc->canvas_width_ = width;
255 *(int*)&enc->canvas_height_ = height;
256 if (enc_options != NULL) {
257 *(WebPAnimEncoderOptions*)&enc->options_ = *enc_options;
258 SanitizeEncoderOptions((WebPAnimEncoderOptions*)&enc->options_);
259 } else {
260 DefaultEncoderOptions((WebPAnimEncoderOptions*)&enc->options_);
261 }
262
263 // Canvas buffers.
264 if (!WebPPictureInit(&enc->curr_canvas_copy_) ||
265 !WebPPictureInit(&enc->prev_canvas_) ||
266 !WebPPictureInit(&enc->prev_canvas_disposed_)) {
267 goto Err;
268 }
269 enc->curr_canvas_copy_.width = width;
270 enc->curr_canvas_copy_.height = height;
271 enc->curr_canvas_copy_.use_argb = 1;
272 if (!WebPPictureAlloc(&enc->curr_canvas_copy_) ||
273 !WebPPictureCopy(&enc->curr_canvas_copy_, &enc->prev_canvas_) ||
274 !WebPPictureCopy(&enc->curr_canvas_copy_, &enc->prev_canvas_disposed_)) {
275 goto Err;
276 }
277 WebPUtilClearPic(&enc->prev_canvas_, NULL);
278 enc->curr_canvas_copy_modified_ = 1;
279
280 // Encoded frames.
281 ResetCounters(enc);
  // Note: one extra slot is reserved for the previous frame.
283 enc->size_ = enc->options_.kmax - enc->options_.kmin + 1;
284 // We need space for at least 2 frames. But when kmin, kmax are both zero,
285 // enc->size_ will be 1. So we handle that special case below.
286 if (enc->size_ < 2) enc->size_ = 2;
287 enc->encoded_frames_ =
288 (EncodedFrame*)WebPSafeCalloc(enc->size_, sizeof(*enc->encoded_frames_));
289 if (enc->encoded_frames_ == NULL) goto Err;
290
291 enc->mux_ = WebPMuxNew();
292 if (enc->mux_ == NULL) goto Err;
293
294 enc->count_since_key_frame_ = 0;
295 enc->first_timestamp_ = 0;
296 enc->prev_timestamp_ = 0;
297 enc->prev_candidate_undecided_ = 0;
298 enc->is_first_frame_ = 1;
299 enc->got_null_frame_ = 0;
300
301 return enc; // All OK.
302
303 Err:
304 WebPAnimEncoderDelete(enc);
305 return NULL;
306 }
307
308 // Release the data contained by 'encoded_frame'.
static void FrameRelease(EncodedFrame* const encoded_frame) {
310 if (encoded_frame != NULL) {
311 WebPDataClear(&encoded_frame->sub_frame_.bitstream);
312 WebPDataClear(&encoded_frame->key_frame_.bitstream);
313 memset(encoded_frame, 0, sizeof(*encoded_frame));
314 }
315 }
316
void WebPAnimEncoderDelete(WebPAnimEncoder* enc) {
318 if (enc != NULL) {
319 WebPPictureFree(&enc->curr_canvas_copy_);
320 WebPPictureFree(&enc->prev_canvas_);
321 WebPPictureFree(&enc->prev_canvas_disposed_);
322 if (enc->encoded_frames_ != NULL) {
323 size_t i;
324 for (i = 0; i < enc->size_; ++i) {
325 FrameRelease(&enc->encoded_frames_[i]);
326 }
327 WebPSafeFree(enc->encoded_frames_);
328 }
329 WebPMuxDelete(enc->mux_);
330 WebPSafeFree(enc);
331 }
332 }
333
334 // -----------------------------------------------------------------------------
335 // Frame addition.
336
337 // Returns cached frame at the given 'position'.
static EncodedFrame* GetFrame(const WebPAnimEncoder* const enc,
339 size_t position) {
340 assert(enc->start_ + position < enc->size_);
341 return &enc->encoded_frames_[enc->start_ + position];
342 }
343
344 typedef int (*ComparePixelsFunc)(const uint32_t*, int, const uint32_t*, int,
345 int, int);
346
347 // Returns true if 'length' number of pixels in 'src' and 'dst' are equal,
348 // assuming the given step sizes between pixels.
349 // 'max_allowed_diff' is unused and only there to allow function pointer use.
static WEBP_INLINE int ComparePixelsLossless(const uint32_t* src, int src_step,
351 const uint32_t* dst, int dst_step,
352 int length, int max_allowed_diff) {
353 (void)max_allowed_diff;
354 assert(length > 0);
355 while (length-- > 0) {
356 if (*src != *dst) {
357 return 0;
358 }
359 src += src_step;
360 dst += dst_step;
361 }
362 return 1;
363 }
364
365 // Helper to check if each channel in 'src' and 'dst' is at most off by
366 // 'max_allowed_diff'.
static WEBP_INLINE int PixelsAreSimilar(uint32_t src, uint32_t dst,
368 int max_allowed_diff) {
369 const int src_a = (src >> 24) & 0xff;
370 const int src_r = (src >> 16) & 0xff;
371 const int src_g = (src >> 8) & 0xff;
372 const int src_b = (src >> 0) & 0xff;
373 const int dst_a = (dst >> 24) & 0xff;
374 const int dst_r = (dst >> 16) & 0xff;
375 const int dst_g = (dst >> 8) & 0xff;
376 const int dst_b = (dst >> 0) & 0xff;
377
378 return (src_a == dst_a) &&
379 (abs(src_r - dst_r) * dst_a <= (max_allowed_diff * 255)) &&
380 (abs(src_g - dst_g) * dst_a <= (max_allowed_diff * 255)) &&
381 (abs(src_b - dst_b) * dst_a <= (max_allowed_diff * 255));
382 }
383
384 // Returns true if 'length' number of pixels in 'src' and 'dst' are within an
385 // error bound, assuming the given step sizes between pixels.
static WEBP_INLINE int ComparePixelsLossy(const uint32_t* src, int src_step,
387 const uint32_t* dst, int dst_step,
388 int length, int max_allowed_diff) {
389 assert(length > 0);
390 while (length-- > 0) {
391 if (!PixelsAreSimilar(*src, *dst, max_allowed_diff)) {
392 return 0;
393 }
394 src += src_step;
395 dst += dst_step;
396 }
397 return 1;
398 }
399
static int IsEmptyRect(const FrameRectangle* const rect) {
401 return (rect->width_ == 0) || (rect->height_ == 0);
402 }
403
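// Maps 'quality' in [0, 100] to the maximum per-channel difference allowed by
// the lossy pixel comparison: roughly 31 at quality 0 down to 1 at quality 100.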
static int QualityToMaxDiff(float quality) {
405 const double val = pow(quality / 100., 0.5);
406 const double max_diff = 31 * (1 - val) + 1 * val;
407 return (int)(max_diff + 0.5);
408 }
409
410 // Assumes that an initial valid guess of change rectangle 'rect' is passed.
static void MinimizeChangeRectangle(const WebPPicture* const src,
412 const WebPPicture* const dst,
413 FrameRectangle* const rect,
414 int is_lossless, float quality) {
415 int i, j;
416 const ComparePixelsFunc compare_pixels =
417 is_lossless ? ComparePixelsLossless : ComparePixelsLossy;
418 const int max_allowed_diff_lossy = QualityToMaxDiff(quality);
419 const int max_allowed_diff = is_lossless ? 0 : max_allowed_diff_lossy;
420
421 // Assumption/correctness checks.
422 assert(src->width == dst->width && src->height == dst->height);
423 assert(rect->x_offset_ + rect->width_ <= dst->width);
424 assert(rect->y_offset_ + rect->height_ <= dst->height);
425
426 // Left boundary.
427 for (i = rect->x_offset_; i < rect->x_offset_ + rect->width_; ++i) {
428 const uint32_t* const src_argb =
429 &src->argb[rect->y_offset_ * src->argb_stride + i];
430 const uint32_t* const dst_argb =
431 &dst->argb[rect->y_offset_ * dst->argb_stride + i];
432 if (compare_pixels(src_argb, src->argb_stride, dst_argb, dst->argb_stride,
433 rect->height_, max_allowed_diff)) {
434 --rect->width_; // Redundant column.
435 ++rect->x_offset_;
436 } else {
437 break;
438 }
439 }
440 if (rect->width_ == 0) goto NoChange;
441
442 // Right boundary.
443 for (i = rect->x_offset_ + rect->width_ - 1; i >= rect->x_offset_; --i) {
444 const uint32_t* const src_argb =
445 &src->argb[rect->y_offset_ * src->argb_stride + i];
446 const uint32_t* const dst_argb =
447 &dst->argb[rect->y_offset_ * dst->argb_stride + i];
448 if (compare_pixels(src_argb, src->argb_stride, dst_argb, dst->argb_stride,
449 rect->height_, max_allowed_diff)) {
450 --rect->width_; // Redundant column.
451 } else {
452 break;
453 }
454 }
455 if (rect->width_ == 0) goto NoChange;
456
457 // Top boundary.
458 for (j = rect->y_offset_; j < rect->y_offset_ + rect->height_; ++j) {
459 const uint32_t* const src_argb =
460 &src->argb[j * src->argb_stride + rect->x_offset_];
461 const uint32_t* const dst_argb =
462 &dst->argb[j * dst->argb_stride + rect->x_offset_];
463 if (compare_pixels(src_argb, 1, dst_argb, 1, rect->width_,
464 max_allowed_diff)) {
465 --rect->height_; // Redundant row.
466 ++rect->y_offset_;
467 } else {
468 break;
469 }
470 }
471 if (rect->height_ == 0) goto NoChange;
472
473 // Bottom boundary.
474 for (j = rect->y_offset_ + rect->height_ - 1; j >= rect->y_offset_; --j) {
475 const uint32_t* const src_argb =
476 &src->argb[j * src->argb_stride + rect->x_offset_];
477 const uint32_t* const dst_argb =
478 &dst->argb[j * dst->argb_stride + rect->x_offset_];
479 if (compare_pixels(src_argb, 1, dst_argb, 1, rect->width_,
480 max_allowed_diff)) {
481 --rect->height_; // Redundant row.
482 } else {
483 break;
484 }
485 }
486 if (rect->height_ == 0) goto NoChange;
487
488 if (IsEmptyRect(rect)) {
489 NoChange:
490 rect->x_offset_ = 0;
491 rect->y_offset_ = 0;
492 rect->width_ = 0;
493 rect->height_ = 0;
494 }
495 }
496
497 // Snap rectangle to even offsets (and adjust dimensions if needed).
static WEBP_INLINE void SnapToEvenOffsets(FrameRectangle* const rect) {
499 rect->width_ += (rect->x_offset_ & 1);
500 rect->height_ += (rect->y_offset_ & 1);
501 rect->x_offset_ &= ~1;
502 rect->y_offset_ &= ~1;
503 }
504
505 typedef struct {
506 int should_try_; // Should try this set of parameters.
507 int empty_rect_allowed_; // Frame with empty rectangle can be skipped.
508 FrameRectangle rect_ll_; // Frame rectangle for lossless compression.
509 WebPPicture sub_frame_ll_; // Sub-frame pic for lossless compression.
510 FrameRectangle rect_lossy_; // Frame rectangle for lossy compression.
511 // Could be smaller than rect_ll_ as pixels
512 // with small diffs can be ignored.
  WebPPicture sub_frame_lossy_;   // Sub-frame pic for lossy compression.
514 } SubFrameParams;
515
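// Initializes 'params', including its lossless and lossy sub-frame pictures.
// Returns false on picture-initialization failure.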
static int SubFrameParamsInit(SubFrameParams* const params,
517 int should_try, int empty_rect_allowed) {
518 params->should_try_ = should_try;
519 params->empty_rect_allowed_ = empty_rect_allowed;
520 if (!WebPPictureInit(¶ms->sub_frame_ll_) ||
521 !WebPPictureInit(¶ms->sub_frame_lossy_)) {
522 return 0;
523 }
524 return 1;
525 }
526
static void SubFrameParamsFree(SubFrameParams* const params) {
528 WebPPictureFree(¶ms->sub_frame_ll_);
529 WebPPictureFree(¶ms->sub_frame_lossy_);
530 }
531
532 // Given previous and current canvas, picks the optimal rectangle for the
533 // current frame based on 'is_lossless' and other parameters. Assumes that the
534 // initial guess 'rect' is valid.
static int GetSubRect(const WebPPicture* const prev_canvas,
536 const WebPPicture* const curr_canvas, int is_key_frame,
537 int is_first_frame, int empty_rect_allowed,
538 int is_lossless, float quality,
539 FrameRectangle* const rect,
540 WebPPicture* const sub_frame) {
541 if (!is_key_frame || is_first_frame) { // Optimize frame rectangle.
    // Note: This behaves as expected for the first frame, as 'prev_canvas' is
    // initialized to a fully transparent canvas at the beginning.
544 MinimizeChangeRectangle(prev_canvas, curr_canvas, rect,
545 is_lossless, quality);
546 }
547
548 if (IsEmptyRect(rect)) {
549 if (empty_rect_allowed) { // No need to get 'sub_frame'.
550 return 1;
551 } else { // Force a 1x1 rectangle.
552 rect->width_ = 1;
553 rect->height_ = 1;
554 assert(rect->x_offset_ == 0);
555 assert(rect->y_offset_ == 0);
556 }
557 }
558
559 SnapToEvenOffsets(rect);
560 return WebPPictureView(curr_canvas, rect->x_offset_, rect->y_offset_,
561 rect->width_, rect->height_, sub_frame);
562 }
563
564 // Picks optimal frame rectangle for both lossless and lossy compression. The
565 // initial guess for frame rectangles will be the full canvas.
static int GetSubRects(const WebPPicture* const prev_canvas,
567 const WebPPicture* const curr_canvas, int is_key_frame,
568 int is_first_frame, float quality,
569 SubFrameParams* const params) {
570 // Lossless frame rectangle.
571 params->rect_ll_.x_offset_ = 0;
572 params->rect_ll_.y_offset_ = 0;
573 params->rect_ll_.width_ = curr_canvas->width;
574 params->rect_ll_.height_ = curr_canvas->height;
575 if (!GetSubRect(prev_canvas, curr_canvas, is_key_frame, is_first_frame,
576 params->empty_rect_allowed_, 1, quality,
577 ¶ms->rect_ll_, ¶ms->sub_frame_ll_)) {
578 return 0;
579 }
580 // Lossy frame rectangle.
581 params->rect_lossy_ = params->rect_ll_; // seed with lossless rect.
582 return GetSubRect(prev_canvas, curr_canvas, is_key_frame, is_first_frame,
583 params->empty_rect_allowed_, 0, quality,
584 ¶ms->rect_lossy_, ¶ms->sub_frame_lossy_);
585 }
586
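// Clamps 'v' to the range [min_v, max_v].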
static WEBP_INLINE int clip(int v, int min_v, int max_v) {
588 return (v < min_v) ? min_v : (v > max_v) ? max_v : v;
589 }
590
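// Clips the given rectangle to the canvas, then shrinks it to the minimal
// (even-offset-snapped) area that differs between 'prev_canvas' and
// 'curr_canvas'. Returns false on invalid input.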
int WebPAnimEncoderRefineRect(
592 const WebPPicture* const prev_canvas, const WebPPicture* const curr_canvas,
593 int is_lossless, float quality, int* const x_offset, int* const y_offset,
594 int* const width, int* const height) {
595 FrameRectangle rect;
596 const int right = clip(*x_offset + *width, 0, curr_canvas->width);
597 const int left = clip(*x_offset, 0, curr_canvas->width - 1);
598 const int bottom = clip(*y_offset + *height, 0, curr_canvas->height);
599 const int top = clip(*y_offset, 0, curr_canvas->height - 1);
600 if (prev_canvas == NULL || curr_canvas == NULL ||
601 prev_canvas->width != curr_canvas->width ||
602 prev_canvas->height != curr_canvas->height ||
603 !prev_canvas->use_argb || !curr_canvas->use_argb) {
604 return 0;
605 }
606 rect.x_offset_ = left;
607 rect.y_offset_ = top;
608 rect.width_ = clip(right - left, 0, curr_canvas->width - rect.x_offset_);
609 rect.height_ = clip(bottom - top, 0, curr_canvas->height - rect.y_offset_);
610 MinimizeChangeRectangle(prev_canvas, curr_canvas, &rect, is_lossless,
611 quality);
612 SnapToEvenOffsets(&rect);
613 *x_offset = rect.x_offset_;
614 *y_offset = rect.y_offset_;
615 *width = rect.width_;
616 *height = rect.height_;
617 return 1;
618 }
619
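// Applies 'dispose_method' to 'rect' on 'curr_canvas': only
// WEBP_MUX_DISPOSE_BACKGROUND actually clears the rectangle to transparent.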
static void DisposeFrameRectangle(int dispose_method,
621 const FrameRectangle* const rect,
622 WebPPicture* const curr_canvas) {
623 assert(rect != NULL);
624 if (dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
625 WebPUtilClearPic(curr_canvas, rect);
626 }
627 }
628
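// Returns the area of 'rect' in pixels.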
static uint32_t RectArea(const FrameRectangle* const rect) {
630 return (uint32_t)rect->width_ * rect->height_;
631 }
632
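// Returns true if blending a frame covering 'rect' over 'src' can reproduce
// 'dst' exactly, i.e. every pixel in 'rect' is either fully opaque in 'dst' or
// identical in 'src' and 'dst'.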
static int IsLosslessBlendingPossible(const WebPPicture* const src,
634 const WebPPicture* const dst,
635 const FrameRectangle* const rect) {
636 int i, j;
637 assert(src->width == dst->width && src->height == dst->height);
638 assert(rect->x_offset_ + rect->width_ <= dst->width);
639 assert(rect->y_offset_ + rect->height_ <= dst->height);
640 for (j = rect->y_offset_; j < rect->y_offset_ + rect->height_; ++j) {
641 for (i = rect->x_offset_; i < rect->x_offset_ + rect->width_; ++i) {
642 const uint32_t src_pixel = src->argb[j * src->argb_stride + i];
643 const uint32_t dst_pixel = dst->argb[j * dst->argb_stride + i];
644 const uint32_t dst_alpha = dst_pixel >> 24;
645 if (dst_alpha != 0xff && src_pixel != dst_pixel) {
646 // In this case, if we use blending, we can't attain the desired
647 // 'dst_pixel' value for this pixel. So, blending is not possible.
648 return 0;
649 }
650 }
651 }
652 return 1;
653 }
654
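// Same as IsLosslessBlendingPossible(), except a pixel only needs to be
// similar (within the error bound derived from 'quality') rather than
// identical.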
static int IsLossyBlendingPossible(const WebPPicture* const src,
656 const WebPPicture* const dst,
657 const FrameRectangle* const rect,
658 float quality) {
659 const int max_allowed_diff_lossy = QualityToMaxDiff(quality);
660 int i, j;
661 assert(src->width == dst->width && src->height == dst->height);
662 assert(rect->x_offset_ + rect->width_ <= dst->width);
663 assert(rect->y_offset_ + rect->height_ <= dst->height);
664 for (j = rect->y_offset_; j < rect->y_offset_ + rect->height_; ++j) {
665 for (i = rect->x_offset_; i < rect->x_offset_ + rect->width_; ++i) {
666 const uint32_t src_pixel = src->argb[j * src->argb_stride + i];
667 const uint32_t dst_pixel = dst->argb[j * dst->argb_stride + i];
668 const uint32_t dst_alpha = dst_pixel >> 24;
669 if (dst_alpha != 0xff &&
670 !PixelsAreSimilar(src_pixel, dst_pixel, max_allowed_diff_lossy)) {
671 // In this case, if we use blending, we can't attain the desired
672 // 'dst_pixel' value for this pixel. So, blending is not possible.
673 return 0;
674 }
675 }
676 }
677 return 1;
678 }
679
// For pixels in 'rect', replaces the pixels in 'dst' that are the same as in
// 'src' with transparent pixels.
682 // Returns true if at least one pixel gets modified.
static int IncreaseTransparency(const WebPPicture* const src,
684 const FrameRectangle* const rect,
685 WebPPicture* const dst) {
686 int i, j;
687 int modified = 0;
688 assert(src != NULL && dst != NULL && rect != NULL);
689 assert(src->width == dst->width && src->height == dst->height);
690 for (j = rect->y_offset_; j < rect->y_offset_ + rect->height_; ++j) {
691 const uint32_t* const psrc = src->argb + j * src->argb_stride;
692 uint32_t* const pdst = dst->argb + j * dst->argb_stride;
693 for (i = rect->x_offset_; i < rect->x_offset_ + rect->width_; ++i) {
694 if (psrc[i] == pdst[i] && pdst[i] != TRANSPARENT_COLOR) {
695 pdst[i] = TRANSPARENT_COLOR;
696 modified = 1;
697 }
698 }
699 }
700 return modified;
701 }
702
703 #undef TRANSPARENT_COLOR
704
705 // Replace similar blocks of pixels by a 'see-through' transparent block
706 // with uniform average color.
707 // Assumes lossy compression is being used.
708 // Returns true if at least one pixel gets modified.
static int FlattenSimilarBlocks(const WebPPicture* const src,
710 const FrameRectangle* const rect,
711 WebPPicture* const dst, float quality) {
712 const int max_allowed_diff_lossy = QualityToMaxDiff(quality);
713 int i, j;
714 int modified = 0;
715 const int block_size = 8;
716 const int y_start = (rect->y_offset_ + block_size) & ~(block_size - 1);
717 const int y_end = (rect->y_offset_ + rect->height_) & ~(block_size - 1);
718 const int x_start = (rect->x_offset_ + block_size) & ~(block_size - 1);
719 const int x_end = (rect->x_offset_ + rect->width_) & ~(block_size - 1);
720 assert(src != NULL && dst != NULL && rect != NULL);
721 assert(src->width == dst->width && src->height == dst->height);
722 assert((block_size & (block_size - 1)) == 0); // must be a power of 2
723 // Iterate over each block and count similar pixels.
724 for (j = y_start; j < y_end; j += block_size) {
725 for (i = x_start; i < x_end; i += block_size) {
726 int cnt = 0;
727 int avg_r = 0, avg_g = 0, avg_b = 0;
728 int x, y;
729 const uint32_t* const psrc = src->argb + j * src->argb_stride + i;
730 uint32_t* const pdst = dst->argb + j * dst->argb_stride + i;
731 for (y = 0; y < block_size; ++y) {
732 for (x = 0; x < block_size; ++x) {
733 const uint32_t src_pixel = psrc[x + y * src->argb_stride];
734 const int alpha = src_pixel >> 24;
735 if (alpha == 0xff &&
736 PixelsAreSimilar(src_pixel, pdst[x + y * dst->argb_stride],
737 max_allowed_diff_lossy)) {
738 ++cnt;
739 avg_r += (src_pixel >> 16) & 0xff;
740 avg_g += (src_pixel >> 8) & 0xff;
741 avg_b += (src_pixel >> 0) & 0xff;
742 }
743 }
744 }
745 // If we have a fully similar block, we replace it with an
746 // average transparent block. This compresses better in lossy mode.
747 if (cnt == block_size * block_size) {
748 const uint32_t color = (0x00 << 24) |
749 ((avg_r / cnt) << 16) |
750 ((avg_g / cnt) << 8) |
751 ((avg_b / cnt) << 0);
752 for (y = 0; y < block_size; ++y) {
753 for (x = 0; x < block_size; ++x) {
754 pdst[x + y * dst->argb_stride] = color;
755 }
756 }
757 modified = 1;
758 }
759 }
760 }
761 return modified;
762 }
763
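// Encodes 'pic' using 'config', writing the bitstream through 'memory'.
// Returns false on encoding error.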
static int EncodeFrame(const WebPConfig* const config, WebPPicture* const pic,
765 WebPMemoryWriter* const memory) {
766 pic->use_argb = 1;
767 pic->writer = WebPMemoryWrite;
768 pic->custom_ptr = memory;
769 if (!WebPEncode(config, pic)) {
770 return 0;
771 }
772 return 1;
773 }
774
775 // Struct representing a candidate encoded frame including its metadata.
776 typedef struct {
777 WebPMemoryWriter mem_;
778 WebPMuxFrameInfo info_;
779 FrameRectangle rect_;
780 int evaluate_; // True if this candidate should be evaluated.
781 } Candidate;
782
783 // Generates a candidate encoded frame given a picture and metadata.
static WebPEncodingError EncodeCandidate(WebPPicture* const sub_frame,
785 const FrameRectangle* const rect,
786 const WebPConfig* const encoder_config,
787 int use_blending,
788 Candidate* const candidate) {
789 WebPConfig config = *encoder_config;
790 WebPEncodingError error_code = VP8_ENC_OK;
791 assert(candidate != NULL);
792 memset(candidate, 0, sizeof(*candidate));
793
794 // Set frame rect and info.
795 candidate->rect_ = *rect;
796 candidate->info_.id = WEBP_CHUNK_ANMF;
797 candidate->info_.x_offset = rect->x_offset_;
798 candidate->info_.y_offset = rect->y_offset_;
799 candidate->info_.dispose_method = WEBP_MUX_DISPOSE_NONE; // Set later.
800 candidate->info_.blend_method =
801 use_blending ? WEBP_MUX_BLEND : WEBP_MUX_NO_BLEND;
802 candidate->info_.duration = 0; // Set in next call to WebPAnimEncoderAdd().
803
804 // Encode picture.
805 WebPMemoryWriterInit(&candidate->mem_);
806
807 if (!config.lossless && use_blending) {
808 // Disable filtering to avoid blockiness in reconstructed frames at the
809 // time of decoding.
810 config.autofilter = 0;
811 config.filter_strength = 0;
812 }
813 if (!EncodeFrame(&config, sub_frame, &candidate->mem_)) {
814 error_code = sub_frame->error_code;
815 goto Err;
816 }
817
818 candidate->evaluate_ = 1;
819 return error_code;
820
821 Err:
822 WebPMemoryWriterClear(&candidate->mem_);
823 return error_code;
824 }
825
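// Re-syncs 'curr_canvas_copy_' with 'curr_canvas_' if the copy has been
// modified since the last call.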
static void CopyCurrentCanvas(WebPAnimEncoder* const enc) {
827 if (enc->curr_canvas_copy_modified_) {
828 WebPCopyPixels(enc->curr_canvas_, &enc->curr_canvas_copy_);
829 enc->curr_canvas_copy_.progress_hook = enc->curr_canvas_->progress_hook;
830 enc->curr_canvas_copy_.user_data = enc->curr_canvas_->user_data;
831 enc->curr_canvas_copy_modified_ = 0;
832 }
833 }
834
835 enum {
836 LL_DISP_NONE = 0,
837 LL_DISP_BG,
838 LOSSY_DISP_NONE,
839 LOSSY_DISP_BG,
840 CANDIDATE_COUNT
841 };
842
843 #define MIN_COLORS_LOSSY 31 // Don't try lossy below this threshold.
844 #define MAX_COLORS_LOSSLESS 194 // Don't try lossless above this threshold.
845
846 // Generates candidates for a given dispose method given pre-filled sub-frame
847 // 'params'.
static WebPEncodingError GenerateCandidates(
849 WebPAnimEncoder* const enc, Candidate candidates[CANDIDATE_COUNT],
850 WebPMuxAnimDispose dispose_method, int is_lossless, int is_key_frame,
851 SubFrameParams* const params,
852 const WebPConfig* const config_ll, const WebPConfig* const config_lossy) {
853 WebPEncodingError error_code = VP8_ENC_OK;
854 const int is_dispose_none = (dispose_method == WEBP_MUX_DISPOSE_NONE);
855 Candidate* const candidate_ll =
856 is_dispose_none ? &candidates[LL_DISP_NONE] : &candidates[LL_DISP_BG];
857 Candidate* const candidate_lossy = is_dispose_none
858 ? &candidates[LOSSY_DISP_NONE]
859 : &candidates[LOSSY_DISP_BG];
860 WebPPicture* const curr_canvas = &enc->curr_canvas_copy_;
861 const WebPPicture* const prev_canvas =
862 is_dispose_none ? &enc->prev_canvas_ : &enc->prev_canvas_disposed_;
863 int use_blending_ll, use_blending_lossy;
864 int evaluate_ll, evaluate_lossy;
865
866 CopyCurrentCanvas(enc);
867 use_blending_ll =
868 !is_key_frame &&
869 IsLosslessBlendingPossible(prev_canvas, curr_canvas, ¶ms->rect_ll_);
870 use_blending_lossy =
871 !is_key_frame &&
872 IsLossyBlendingPossible(prev_canvas, curr_canvas, ¶ms->rect_lossy_,
873 config_lossy->quality);
874
875 // Pick candidates to be tried.
876 if (!enc->options_.allow_mixed) {
877 evaluate_ll = is_lossless;
878 evaluate_lossy = !is_lossless;
879 } else if (enc->options_.minimize_size) {
880 evaluate_ll = 1;
881 evaluate_lossy = 1;
882 } else { // Use a heuristic for trying lossless and/or lossy compression.
883 const int num_colors = WebPGetColorPalette(¶ms->sub_frame_ll_, NULL);
884 evaluate_ll = (num_colors < MAX_COLORS_LOSSLESS);
885 evaluate_lossy = (num_colors >= MIN_COLORS_LOSSY);
886 }
887
888 // Generate candidates.
889 if (evaluate_ll) {
890 CopyCurrentCanvas(enc);
891 if (use_blending_ll) {
892 enc->curr_canvas_copy_modified_ =
893 IncreaseTransparency(prev_canvas, ¶ms->rect_ll_, curr_canvas);
894 }
895 error_code = EncodeCandidate(¶ms->sub_frame_ll_, ¶ms->rect_ll_,
896 config_ll, use_blending_ll, candidate_ll);
897 if (error_code != VP8_ENC_OK) return error_code;
898 }
899 if (evaluate_lossy) {
900 CopyCurrentCanvas(enc);
901 if (use_blending_lossy) {
902 enc->curr_canvas_copy_modified_ =
903 FlattenSimilarBlocks(prev_canvas, ¶ms->rect_lossy_, curr_canvas,
904 config_lossy->quality);
905 }
906 error_code =
907 EncodeCandidate(¶ms->sub_frame_lossy_, ¶ms->rect_lossy_,
908 config_lossy, use_blending_lossy, candidate_lossy);
909 if (error_code != VP8_ENC_OK) return error_code;
910 enc->curr_canvas_copy_modified_ = 1;
911 }
912 return error_code;
913 }
914
915 #undef MIN_COLORS_LOSSY
916 #undef MAX_COLORS_LOSSLESS
917
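// Exposes the bytes accumulated in 'memory' as a WebPData (no copy is made).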
static void GetEncodedData(const WebPMemoryWriter* const memory,
919 WebPData* const encoded_data) {
920 encoded_data->bytes = memory->mem;
921 encoded_data->size = memory->size;
922 }
923
924 // Sets dispose method of the previous frame to be 'dispose_method'.
static void SetPreviousDisposeMethod(WebPAnimEncoder* const enc,
926 WebPMuxAnimDispose dispose_method) {
927 const size_t position = enc->count_ - 2;
928 EncodedFrame* const prev_enc_frame = GetFrame(enc, position);
929 assert(enc->count_ >= 2); // As current and previous frames are in enc.
930
931 if (enc->prev_candidate_undecided_) {
932 assert(dispose_method == WEBP_MUX_DISPOSE_NONE);
933 prev_enc_frame->sub_frame_.dispose_method = dispose_method;
934 prev_enc_frame->key_frame_.dispose_method = dispose_method;
935 } else {
936 WebPMuxFrameInfo* const prev_info = prev_enc_frame->is_key_frame_
937 ? &prev_enc_frame->key_frame_
938 : &prev_enc_frame->sub_frame_;
939 prev_info->dispose_method = dispose_method;
940 }
941 }
942
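// Extends the duration of the previously added frame by 'duration'. If the
// total would reach MAX_DURATION, a 1x1 transparent filler frame carrying
// 'duration' is appended instead. Returns false on error.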
static int IncreasePreviousDuration(WebPAnimEncoder* const enc, int duration) {
944 const size_t position = enc->count_ - 1;
945 EncodedFrame* const prev_enc_frame = GetFrame(enc, position);
946 int new_duration;
947
948 assert(enc->count_ >= 1);
949 assert(!prev_enc_frame->is_key_frame_ ||
950 prev_enc_frame->sub_frame_.duration ==
951 prev_enc_frame->key_frame_.duration);
952 assert(prev_enc_frame->sub_frame_.duration ==
953 (prev_enc_frame->sub_frame_.duration & (MAX_DURATION - 1)));
954 assert(duration == (duration & (MAX_DURATION - 1)));
955
956 new_duration = prev_enc_frame->sub_frame_.duration + duration;
957 if (new_duration >= MAX_DURATION) { // Special case.
958 // Separate out previous frame from earlier merged frames to avoid overflow.
959 // We add a 1x1 transparent frame for the previous frame, with blending on.
960 const FrameRectangle rect = { 0, 0, 1, 1 };
961 const uint8_t lossless_1x1_bytes[] = {
962 0x52, 0x49, 0x46, 0x46, 0x14, 0x00, 0x00, 0x00, 0x57, 0x45, 0x42, 0x50,
963 0x56, 0x50, 0x38, 0x4c, 0x08, 0x00, 0x00, 0x00, 0x2f, 0x00, 0x00, 0x00,
964 0x10, 0x88, 0x88, 0x08
965 };
966 const WebPData lossless_1x1 = {
967 lossless_1x1_bytes, sizeof(lossless_1x1_bytes)
968 };
969 const uint8_t lossy_1x1_bytes[] = {
970 0x52, 0x49, 0x46, 0x46, 0x40, 0x00, 0x00, 0x00, 0x57, 0x45, 0x42, 0x50,
971 0x56, 0x50, 0x38, 0x58, 0x0a, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
972 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x41, 0x4c, 0x50, 0x48, 0x02, 0x00,
973 0x00, 0x00, 0x00, 0x00, 0x56, 0x50, 0x38, 0x20, 0x18, 0x00, 0x00, 0x00,
974 0x30, 0x01, 0x00, 0x9d, 0x01, 0x2a, 0x01, 0x00, 0x01, 0x00, 0x02, 0x00,
975 0x34, 0x25, 0xa4, 0x00, 0x03, 0x70, 0x00, 0xfe, 0xfb, 0xfd, 0x50, 0x00
976 };
977 const WebPData lossy_1x1 = { lossy_1x1_bytes, sizeof(lossy_1x1_bytes) };
978 const int can_use_lossless =
979 (enc->last_config_.lossless || enc->options_.allow_mixed);
980 EncodedFrame* const curr_enc_frame = GetFrame(enc, enc->count_);
981 curr_enc_frame->is_key_frame_ = 0;
982 curr_enc_frame->sub_frame_.id = WEBP_CHUNK_ANMF;
983 curr_enc_frame->sub_frame_.x_offset = 0;
984 curr_enc_frame->sub_frame_.y_offset = 0;
985 curr_enc_frame->sub_frame_.dispose_method = WEBP_MUX_DISPOSE_NONE;
986 curr_enc_frame->sub_frame_.blend_method = WEBP_MUX_BLEND;
987 curr_enc_frame->sub_frame_.duration = duration;
988 if (!WebPDataCopy(can_use_lossless ? &lossless_1x1 : &lossy_1x1,
989 &curr_enc_frame->sub_frame_.bitstream)) {
990 return 0;
991 }
992 ++enc->count_;
993 ++enc->count_since_key_frame_;
994 enc->flush_count_ = enc->count_ - 1;
995 enc->prev_candidate_undecided_ = 0;
996 enc->prev_rect_ = rect;
997 } else { // Regular case.
998 // Increase duration of the previous frame by 'duration'.
999 prev_enc_frame->sub_frame_.duration = new_duration;
1000 prev_enc_frame->key_frame_.duration = new_duration;
1001 }
1002 return 1;
1003 }
1004
1005 // Pick the candidate encoded frame with smallest size and release other
1006 // candidates.
// TODO(later): Perhaps a rough SSIM/PSNR produced by the encoder should
// also be a criterion, in addition to size.
static void PickBestCandidate(WebPAnimEncoder* const enc,
1010 Candidate* const candidates, int is_key_frame,
1011 EncodedFrame* const encoded_frame) {
1012 int i;
1013 int best_idx = -1;
1014 size_t best_size = ~0;
1015 for (i = 0; i < CANDIDATE_COUNT; ++i) {
1016 if (candidates[i].evaluate_) {
1017 const size_t candidate_size = candidates[i].mem_.size;
1018 if (candidate_size < best_size) {
1019 best_idx = i;
1020 best_size = candidate_size;
1021 }
1022 }
1023 }
1024 assert(best_idx != -1);
1025 for (i = 0; i < CANDIDATE_COUNT; ++i) {
1026 if (candidates[i].evaluate_) {
1027 if (i == best_idx) {
1028 WebPMuxFrameInfo* const dst = is_key_frame
1029 ? &encoded_frame->key_frame_
1030 : &encoded_frame->sub_frame_;
1031 *dst = candidates[i].info_;
1032 GetEncodedData(&candidates[i].mem_, &dst->bitstream);
1033 if (!is_key_frame) {
1034 // Note: Previous dispose method only matters for non-keyframes.
1035 // Also, we don't want to modify previous dispose method that was
1036 // selected when a non key-frame was assumed.
1037 const WebPMuxAnimDispose prev_dispose_method =
1038 (best_idx == LL_DISP_NONE || best_idx == LOSSY_DISP_NONE)
1039 ? WEBP_MUX_DISPOSE_NONE
1040 : WEBP_MUX_DISPOSE_BACKGROUND;
1041 SetPreviousDisposeMethod(enc, prev_dispose_method);
1042 }
1043 enc->prev_rect_ = candidates[i].rect_; // save for next frame.
1044 } else {
1045 WebPMemoryWriterClear(&candidates[i].mem_);
1046 candidates[i].evaluate_ = 0;
1047 }
1048 }
1049 }
1050 }
1051
// Depending on the configuration, tries different compression methods
// (lossy/lossless), dispose methods, blending methods, etc., to encode the
// current frame and outputs the best one in 'encoded_frame'.
// 'frame_skipped' will be set to true if this frame should actually be skipped.
static WebPEncodingError SetFrame(WebPAnimEncoder* const enc,
1057 const WebPConfig* const config,
1058 int is_key_frame,
1059 EncodedFrame* const encoded_frame,
1060 int* const frame_skipped) {
1061 int i;
1062 WebPEncodingError error_code = VP8_ENC_OK;
1063 const WebPPicture* const curr_canvas = &enc->curr_canvas_copy_;
1064 const WebPPicture* const prev_canvas = &enc->prev_canvas_;
1065 Candidate candidates[CANDIDATE_COUNT];
1066 const int is_lossless = config->lossless;
1067 const int consider_lossless = is_lossless || enc->options_.allow_mixed;
1068 const int consider_lossy = !is_lossless || enc->options_.allow_mixed;
1069 const int is_first_frame = enc->is_first_frame_;
1070
1071 // First frame cannot be skipped as there is no 'previous frame' to merge it
1072 // to. So, empty rectangle is not allowed for the first frame.
1073 const int empty_rect_allowed_none = !is_first_frame;
1074
1075 // Even if there is exact pixel match between 'disposed previous canvas' and
1076 // 'current canvas', we can't skip current frame, as there may not be exact
1077 // pixel match between 'previous canvas' and 'current canvas'. So, we don't
1078 // allow empty rectangle in this case.
1079 const int empty_rect_allowed_bg = 0;
1080
1081 // If current frame is a key-frame, dispose method of previous frame doesn't
1082 // matter, so we don't try dispose to background.
1083 // Also, if key-frame insertion is on, and previous frame could be picked as
1084 // either a sub-frame or a key-frame, then we can't be sure about what frame
1085 // rectangle would be disposed. In that case too, we don't try dispose to
1086 // background.
1087 const int dispose_bg_possible =
1088 !is_key_frame && !enc->prev_candidate_undecided_;
1089
1090 SubFrameParams dispose_none_params;
1091 SubFrameParams dispose_bg_params;
1092
1093 WebPConfig config_ll = *config;
1094 WebPConfig config_lossy = *config;
1095 config_ll.lossless = 1;
1096 config_lossy.lossless = 0;
1097 enc->last_config_ = *config;
1098 enc->last_config_reversed_ = config->lossless ? config_lossy : config_ll;
1099 *frame_skipped = 0;
1100
1101 if (!SubFrameParamsInit(&dispose_none_params, 1, empty_rect_allowed_none) ||
1102 !SubFrameParamsInit(&dispose_bg_params, 0, empty_rect_allowed_bg)) {
1103 return VP8_ENC_ERROR_INVALID_CONFIGURATION;
1104 }
1105
1106 memset(candidates, 0, sizeof(candidates));
1107
1108 // Change-rectangle assuming previous frame was DISPOSE_NONE.
1109 if (!GetSubRects(prev_canvas, curr_canvas, is_key_frame, is_first_frame,
1110 config_lossy.quality, &dispose_none_params)) {
1111 error_code = VP8_ENC_ERROR_INVALID_CONFIGURATION;
1112 goto Err;
1113 }
1114
1115 if ((consider_lossless && IsEmptyRect(&dispose_none_params.rect_ll_)) ||
1116 (consider_lossy && IsEmptyRect(&dispose_none_params.rect_lossy_))) {
1117 // Don't encode the frame at all. Instead, the duration of the previous
1118 // frame will be increased later.
1119 assert(empty_rect_allowed_none);
1120 *frame_skipped = 1;
1121 goto End;
1122 }
1123
1124 if (dispose_bg_possible) {
1125 // Change-rectangle assuming previous frame was DISPOSE_BACKGROUND.
1126 WebPPicture* const prev_canvas_disposed = &enc->prev_canvas_disposed_;
1127 WebPCopyPixels(prev_canvas, prev_canvas_disposed);
1128 DisposeFrameRectangle(WEBP_MUX_DISPOSE_BACKGROUND, &enc->prev_rect_,
1129 prev_canvas_disposed);
1130
1131 if (!GetSubRects(prev_canvas_disposed, curr_canvas, is_key_frame,
1132 is_first_frame, config_lossy.quality,
1133 &dispose_bg_params)) {
1134 error_code = VP8_ENC_ERROR_INVALID_CONFIGURATION;
1135 goto Err;
1136 }
1137 assert(!IsEmptyRect(&dispose_bg_params.rect_ll_));
1138 assert(!IsEmptyRect(&dispose_bg_params.rect_lossy_));
1139
1140 if (enc->options_.minimize_size) { // Try both dispose methods.
1141 dispose_bg_params.should_try_ = 1;
1142 dispose_none_params.should_try_ = 1;
1143 } else if ((is_lossless &&
1144 RectArea(&dispose_bg_params.rect_ll_) <
1145 RectArea(&dispose_none_params.rect_ll_)) ||
1146 (!is_lossless &&
1147 RectArea(&dispose_bg_params.rect_lossy_) <
1148 RectArea(&dispose_none_params.rect_lossy_))) {
1149 dispose_bg_params.should_try_ = 1; // Pick DISPOSE_BACKGROUND.
1150 dispose_none_params.should_try_ = 0;
1151 }
1152 }
1153
1154 if (dispose_none_params.should_try_) {
1155 error_code = GenerateCandidates(
1156 enc, candidates, WEBP_MUX_DISPOSE_NONE, is_lossless, is_key_frame,
1157 &dispose_none_params, &config_ll, &config_lossy);
1158 if (error_code != VP8_ENC_OK) goto Err;
1159 }
1160
1161 if (dispose_bg_params.should_try_) {
1162 assert(!enc->is_first_frame_);
1163 assert(dispose_bg_possible);
1164 error_code = GenerateCandidates(
1165 enc, candidates, WEBP_MUX_DISPOSE_BACKGROUND, is_lossless, is_key_frame,
1166 &dispose_bg_params, &config_ll, &config_lossy);
1167 if (error_code != VP8_ENC_OK) goto Err;
1168 }
1169
1170 PickBestCandidate(enc, candidates, is_key_frame, encoded_frame);
1171
1172 goto End;
1173
1174 Err:
1175 for (i = 0; i < CANDIDATE_COUNT; ++i) {
1176 if (candidates[i].evaluate_) {
1177 WebPMemoryWriterClear(&candidates[i].mem_);
1178 }
1179 }
1180
1181 End:
1182 SubFrameParamsFree(&dispose_none_params);
1183 SubFrameParamsFree(&dispose_bg_params);
1184 return error_code;
1185 }
1186
1187 // Calculate the penalty incurred if we encode given frame as a key frame
1188 // instead of a sub-frame.
static int64_t KeyFramePenalty(const EncodedFrame* const encoded_frame) {
1190 return ((int64_t)encoded_frame->key_frame_.bitstream.size -
1191 encoded_frame->sub_frame_.bitstream.size);
1192 }
1193
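// Encodes the current canvas as a sub-frame and/or key-frame candidate,
// caches it in 'encoded_frames_', and updates key-frame selection and flush
// bookkeeping. Returns false on error.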
static int CacheFrame(WebPAnimEncoder* const enc,
1195 const WebPConfig* const config) {
1196 int ok = 0;
1197 int frame_skipped = 0;
1198 WebPEncodingError error_code = VP8_ENC_OK;
1199 const size_t position = enc->count_;
1200 EncodedFrame* const encoded_frame = GetFrame(enc, position);
1201
1202 ++enc->count_;
1203
1204 if (enc->is_first_frame_) { // Add this as a key-frame.
1205 error_code = SetFrame(enc, config, 1, encoded_frame, &frame_skipped);
1206 if (error_code != VP8_ENC_OK) goto End;
1207 assert(frame_skipped == 0); // First frame can't be skipped, even if empty.
1208 assert(position == 0 && enc->count_ == 1);
1209 encoded_frame->is_key_frame_ = 1;
1210 enc->flush_count_ = 0;
1211 enc->count_since_key_frame_ = 0;
1212 enc->prev_candidate_undecided_ = 0;
1213 } else {
1214 ++enc->count_since_key_frame_;
1215 if (enc->count_since_key_frame_ <= enc->options_.kmin) {
1216 // Add this as a frame rectangle.
1217 error_code = SetFrame(enc, config, 0, encoded_frame, &frame_skipped);
1218 if (error_code != VP8_ENC_OK) goto End;
1219 if (frame_skipped) goto Skip;
1220 encoded_frame->is_key_frame_ = 0;
1221 enc->flush_count_ = enc->count_ - 1;
1222 enc->prev_candidate_undecided_ = 0;
1223 } else {
1224 int64_t curr_delta;
1225 FrameRectangle prev_rect_key, prev_rect_sub;
1226
1227 // Add this as a frame rectangle to enc.
1228 error_code = SetFrame(enc, config, 0, encoded_frame, &frame_skipped);
1229 if (error_code != VP8_ENC_OK) goto End;
1230 if (frame_skipped) goto Skip;
1231 prev_rect_sub = enc->prev_rect_;
1232
1233
1234 // Add this as a key-frame to enc, too.
1235 error_code = SetFrame(enc, config, 1, encoded_frame, &frame_skipped);
1236 if (error_code != VP8_ENC_OK) goto End;
1237 assert(frame_skipped == 0); // Key-frame cannot be an empty rectangle.
1238 prev_rect_key = enc->prev_rect_;
1239
1240 // Analyze size difference of the two variants.
1241 curr_delta = KeyFramePenalty(encoded_frame);
1242 if (curr_delta <= enc->best_delta_) { // Pick this as the key-frame.
1243 if (enc->keyframe_ != KEYFRAME_NONE) {
1244 EncodedFrame* const old_keyframe = GetFrame(enc, enc->keyframe_);
1245 assert(old_keyframe->is_key_frame_);
1246 old_keyframe->is_key_frame_ = 0;
1247 }
1248 encoded_frame->is_key_frame_ = 1;
1249 enc->prev_candidate_undecided_ = 1;
1250 enc->keyframe_ = (int)position;
1251 enc->best_delta_ = curr_delta;
1252 enc->flush_count_ = enc->count_ - 1; // We can flush previous frames.
1253 } else {
1254 encoded_frame->is_key_frame_ = 0;
1255 enc->prev_candidate_undecided_ = 0;
1256 }
1257 // Note: We need '>=' below because when kmin and kmax are both zero,
1258 // count_since_key_frame will always be > kmax.
1259 if (enc->count_since_key_frame_ >= enc->options_.kmax) {
1260 enc->flush_count_ = enc->count_ - 1;
1261 enc->count_since_key_frame_ = 0;
1262 enc->keyframe_ = KEYFRAME_NONE;
1263 enc->best_delta_ = DELTA_INFINITY;
1264 }
1265 if (!enc->prev_candidate_undecided_) {
1266 enc->prev_rect_ =
1267 encoded_frame->is_key_frame_ ? prev_rect_key : prev_rect_sub;
1268 }
1269 }
1270 }
1271
  // Update the previous canvas for the next call.
1273 WebPCopyPixels(enc->curr_canvas_, &enc->prev_canvas_);
1274 enc->is_first_frame_ = 0;
1275
1276 Skip:
1277 ok = 1;
1278 ++enc->in_frame_count_;
1279
1280 End:
1281 if (!ok || frame_skipped) {
1282 FrameRelease(encoded_frame);
1283 // We reset some counters, as the frame addition failed/was skipped.
1284 --enc->count_;
1285 if (!enc->is_first_frame_) --enc->count_since_key_frame_;
1286 if (!ok) {
1287 MarkError2(enc, "ERROR adding frame. WebPEncodingError", error_code);
1288 }
1289 }
1290 enc->curr_canvas_->error_code = error_code; // report error_code
1291 assert(ok || error_code != VP8_ENC_OK);
1292 return ok;
1293 }
1294
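// Pushes the first 'flush_count_' cached frames to the muxer, releasing them
// and advancing 'start_'. Returns false on mux error.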
static int FlushFrames(WebPAnimEncoder* const enc) {
1296 while (enc->flush_count_ > 0) {
1297 WebPMuxError err;
1298 EncodedFrame* const curr = GetFrame(enc, 0);
1299 const WebPMuxFrameInfo* const info =
1300 curr->is_key_frame_ ? &curr->key_frame_ : &curr->sub_frame_;
1301 assert(enc->mux_ != NULL);
1302 err = WebPMuxPushFrame(enc->mux_, info, 1);
1303 if (err != WEBP_MUX_OK) {
1304 MarkError2(enc, "ERROR adding frame. WebPMuxError", err);
1305 return 0;
1306 }
1307 if (enc->options_.verbose) {
1308 fprintf(stderr, "INFO: Added frame. offset:%d,%d dispose:%d blend:%d\n",
1309 info->x_offset, info->y_offset, info->dispose_method,
1310 info->blend_method);
1311 }
1312 ++enc->out_frame_count_;
1313 FrameRelease(curr);
1314 ++enc->start_;
1315 --enc->flush_count_;
1316 --enc->count_;
1317 if (enc->keyframe_ != KEYFRAME_NONE) --enc->keyframe_;
1318 }
1319
1320 if (enc->count_ == 1 && enc->start_ != 0) {
1321 // Move enc->start to index 0.
1322 const int enc_start_tmp = (int)enc->start_;
1323 EncodedFrame temp = enc->encoded_frames_[0];
1324 enc->encoded_frames_[0] = enc->encoded_frames_[enc_start_tmp];
1325 enc->encoded_frames_[enc_start_tmp] = temp;
1326 FrameRelease(&enc->encoded_frames_[enc_start_tmp]);
1327 enc->start_ = 0;
1328 }
1329 return 1;
1330 }
1331
1332 #undef DELTA_INFINITY
1333 #undef KEYFRAME_NONE
1334
int WebPAnimEncoderAdd(WebPAnimEncoder* enc, WebPPicture* frame, int timestamp,
1336 const WebPConfig* encoder_config) {
1337 WebPConfig config;
1338 int ok;
1339
1340 if (enc == NULL) {
1341 return 0;
1342 }
1343 MarkNoError(enc);
1344
1345 if (!enc->is_first_frame_) {
1346 // Make sure timestamps are non-decreasing (integer wrap-around is OK).
1347 const uint32_t prev_frame_duration =
1348 (uint32_t)timestamp - enc->prev_timestamp_;
1349 if (prev_frame_duration >= MAX_DURATION) {
1350 if (frame != NULL) {
1351 frame->error_code = VP8_ENC_ERROR_INVALID_CONFIGURATION;
1352 }
1353 MarkError(enc, "ERROR adding frame: timestamps must be non-decreasing");
1354 return 0;
1355 }
1356 if (!IncreasePreviousDuration(enc, (int)prev_frame_duration)) {
1357 return 0;
1358 }
    // IncreasePreviousDuration() may add a frame to avoid exceeding
    // MAX_DURATION, which could otherwise cause CacheFrame() to read past the
    // end of 'encoded_frames_' before the next flush.
1362 if (enc->count_ == enc->size_ && !FlushFrames(enc)) {
1363 return 0;
1364 }
1365 } else {
1366 enc->first_timestamp_ = timestamp;
1367 }
1368
1369 if (frame == NULL) { // Special: last call.
1370 enc->got_null_frame_ = 1;
1371 enc->prev_timestamp_ = timestamp;
1372 return 1;
1373 }
1374
1375 if (frame->width != enc->canvas_width_ ||
1376 frame->height != enc->canvas_height_) {
1377 frame->error_code = VP8_ENC_ERROR_INVALID_CONFIGURATION;
1378 MarkError(enc, "ERROR adding frame: Invalid frame dimensions");
1379 return 0;
1380 }
1381
1382 if (!frame->use_argb) { // Convert frame from YUV(A) to ARGB.
1383 if (enc->options_.verbose) {
1384 fprintf(stderr, "WARNING: Converting frame from YUV(A) to ARGB format; "
1385 "this incurs a small loss.\n");
1386 }
1387 if (!WebPPictureYUVAToARGB(frame)) {
1388 MarkError(enc, "ERROR converting frame from YUV(A) to ARGB");
1389 return 0;
1390 }
1391 }
1392
1393 if (encoder_config != NULL) {
1394 if (!WebPValidateConfig(encoder_config)) {
1395 MarkError(enc, "ERROR adding frame: Invalid WebPConfig");
1396 return 0;
1397 }
1398 config = *encoder_config;
1399 } else {
1400 WebPConfigInit(&config);
1401 config.lossless = 1;
1402 }
1403 assert(enc->curr_canvas_ == NULL);
1404 enc->curr_canvas_ = frame; // Store reference.
1405 assert(enc->curr_canvas_copy_modified_ == 1);
1406 CopyCurrentCanvas(enc);
1407
1408 ok = CacheFrame(enc, &config) && FlushFrames(enc);
1409
1410 enc->curr_canvas_ = NULL;
1411 enc->curr_canvas_copy_modified_ = 1;
1412 if (ok) {
1413 enc->prev_timestamp_ = timestamp;
1414 }
1415 return ok;
1416 }
1417
1418 // -----------------------------------------------------------------------------
1419 // Bitstream assembly.
1420
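// Decodes 'frame' onto 'canvas' (which is cleared to transparent first) at the
// frame's offset. Returns false on failure.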
static int DecodeFrameOntoCanvas(const WebPMuxFrameInfo* const frame,
1422 WebPPicture* const canvas) {
1423 const WebPData* const image = &frame->bitstream;
1424 WebPPicture sub_image;
1425 WebPDecoderConfig config;
1426 WebPInitDecoderConfig(&config);
1427 WebPUtilClearPic(canvas, NULL);
1428 if (WebPGetFeatures(image->bytes, image->size, &config.input) !=
1429 VP8_STATUS_OK) {
1430 return 0;
1431 }
1432 if (!WebPPictureView(canvas, frame->x_offset, frame->y_offset,
1433 config.input.width, config.input.height, &sub_image)) {
1434 return 0;
1435 }
1436 config.output.is_external_memory = 1;
1437 config.output.colorspace = MODE_BGRA;
1438 config.output.u.RGBA.rgba = (uint8_t*)sub_image.argb;
1439 config.output.u.RGBA.stride = sub_image.argb_stride * 4;
1440 config.output.u.RGBA.size = config.output.u.RGBA.stride * sub_image.height;
1441
1442 if (WebPDecode(image->bytes, image->size, &config) != VP8_STATUS_OK) {
1443 return 0;
1444 }
1445 return 1;
1446 }
1447
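// Re-encodes 'frame' as a full-canvas image in 'full_image', trying both the
// last-used config and its lossless/lossy counterpart when mixed mode is
// allowed, and keeping the smaller of the two encodings.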
static int FrameToFullCanvas(WebPAnimEncoder* const enc,
1449 const WebPMuxFrameInfo* const frame,
1450 WebPData* const full_image) {
1451 WebPPicture* const canvas_buf = &enc->curr_canvas_copy_;
1452 WebPMemoryWriter mem1, mem2;
1453 WebPMemoryWriterInit(&mem1);
1454 WebPMemoryWriterInit(&mem2);
1455
1456 if (!DecodeFrameOntoCanvas(frame, canvas_buf)) goto Err;
1457 if (!EncodeFrame(&enc->last_config_, canvas_buf, &mem1)) goto Err;
1458 GetEncodedData(&mem1, full_image);
1459
1460 if (enc->options_.allow_mixed) {
1461 if (!EncodeFrame(&enc->last_config_reversed_, canvas_buf, &mem2)) goto Err;
1462 if (mem2.size < mem1.size) {
1463 GetEncodedData(&mem2, full_image);
1464 WebPMemoryWriterClear(&mem1);
1465 } else {
1466 WebPMemoryWriterClear(&mem2);
1467 }
1468 }
1469 return 1;
1470
1471 Err:
1472 WebPMemoryWriterClear(&mem1);
1473 WebPMemoryWriterClear(&mem2);
1474 return 0;
1475 }
1476
1477 // Convert a single-frame animation to a non-animated image if appropriate.
1478 // TODO(urvang): Can we pick one of the two heuristically (based on frame
1479 // rectangle and/or presence of alpha)?
static WebPMuxError OptimizeSingleFrame(WebPAnimEncoder* const enc,
1481 WebPData* const webp_data) {
1482 WebPMuxError err = WEBP_MUX_OK;
1483 int canvas_width, canvas_height;
1484 WebPMuxFrameInfo frame;
1485 WebPData full_image;
1486 WebPData webp_data2;
1487 WebPMux* const mux = WebPMuxCreate(webp_data, 0);
1488 if (mux == NULL) return WEBP_MUX_BAD_DATA;
1489 assert(enc->out_frame_count_ == 1);
1490 WebPDataInit(&frame.bitstream);
1491 WebPDataInit(&full_image);
1492 WebPDataInit(&webp_data2);
1493
1494 err = WebPMuxGetFrame(mux, 1, &frame);
1495 if (err != WEBP_MUX_OK) goto End;
1496 if (frame.id != WEBP_CHUNK_ANMF) goto End; // Non-animation: nothing to do.
1497 err = WebPMuxGetCanvasSize(mux, &canvas_width, &canvas_height);
1498 if (err != WEBP_MUX_OK) goto End;
1499 if (!FrameToFullCanvas(enc, &frame, &full_image)) {
1500 err = WEBP_MUX_BAD_DATA;
1501 goto End;
1502 }
1503 err = WebPMuxSetImage(mux, &full_image, 1);
1504 if (err != WEBP_MUX_OK) goto End;
1505 err = WebPMuxAssemble(mux, &webp_data2);
1506 if (err != WEBP_MUX_OK) goto End;
1507
1508 if (webp_data2.size < webp_data->size) { // Pick 'webp_data2' if smaller.
1509 WebPDataClear(webp_data);
1510 *webp_data = webp_data2;
1511 WebPDataInit(&webp_data2);
1512 }
1513
1514 End:
1515 WebPDataClear(&frame.bitstream);
1516 WebPDataClear(&full_image);
1517 WebPMuxDelete(mux);
1518 WebPDataClear(&webp_data2);
1519 return err;
1520 }
1521
int WebPAnimEncoderAssemble(WebPAnimEncoder* enc, WebPData* webp_data) {
1523 WebPMux* mux;
1524 WebPMuxError err;
1525
1526 if (enc == NULL) {
1527 return 0;
1528 }
1529 MarkNoError(enc);
1530
1531 if (webp_data == NULL) {
1532 MarkError(enc, "ERROR assembling: NULL input");
1533 return 0;
1534 }
1535
1536 if (enc->in_frame_count_ == 0) {
1537 MarkError(enc, "ERROR: No frames to assemble");
1538 return 0;
1539 }
1540
1541 if (!enc->got_null_frame_ && enc->in_frame_count_ > 1 && enc->count_ > 0) {
    // Set the duration of the last frame to the average duration of the
    // previous frames.
1543 const double delta_time =
1544 (uint32_t)enc->prev_timestamp_ - enc->first_timestamp_;
1545 const int average_duration = (int)(delta_time / (enc->in_frame_count_ - 1));
1546 if (!IncreasePreviousDuration(enc, average_duration)) {
1547 return 0;
1548 }
1549 }
1550
1551 // Flush any remaining frames.
1552 enc->flush_count_ = enc->count_;
1553 if (!FlushFrames(enc)) {
1554 return 0;
1555 }
1556
1557 // Set definitive canvas size.
1558 mux = enc->mux_;
1559 err = WebPMuxSetCanvasSize(mux, enc->canvas_width_, enc->canvas_height_);
1560 if (err != WEBP_MUX_OK) goto Err;
1561
1562 err = WebPMuxSetAnimationParams(mux, &enc->options_.anim_params);
1563 if (err != WEBP_MUX_OK) goto Err;
1564
1565 // Assemble into a WebP bitstream.
1566 err = WebPMuxAssemble(mux, webp_data);
1567 if (err != WEBP_MUX_OK) goto Err;
1568
1569 if (enc->out_frame_count_ == 1) {
1570 err = OptimizeSingleFrame(enc, webp_data);
1571 if (err != WEBP_MUX_OK) goto Err;
1572 }
1573 return 1;
1574
1575 Err:
1576 MarkError2(enc, "ERROR assembling WebP", err);
1577 return 0;
1578 }
1579
const char* WebPAnimEncoderGetError(WebPAnimEncoder* enc) {
1581 if (enc == NULL) return NULL;
1582 return enc->error_str_;
1583 }
1584
1585 // -----------------------------------------------------------------------------
1586