• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2012 Google Inc. All Rights Reserved.
2 //
3 // Use of this source code is governed by a BSD-style license
4 // that can be found in the COPYING file in the root of the source
5 // tree. An additional intellectual property rights grant can be found
6 // in the file PATENTS. All contributing project authors may
7 // be found in the AUTHORS file in the root of the source tree.
8 // -----------------------------------------------------------------------------
9 //
10 // Author: Jyrki Alakuijala (jyrki@google.com)
11 //
12 
13 #include "src/enc/backward_references_enc.h"
14 
15 #include <assert.h>
16 #include <float.h>
17 #include <math.h>
18 
19 #include "src/dsp/dsp.h"
20 #include "src/dsp/lossless.h"
21 #include "src/dsp/lossless_common.h"
22 #include "src/enc/histogram_enc.h"
23 #include "src/enc/vp8i_enc.h"
24 #include "src/utils/color_cache_utils.h"
25 #include "src/utils/utils.h"
26 #include "src/webp/encode.h"
27 
#define MIN_BLOCK_SIZE 256  // minimum block size for backward references

#define MAX_ENTROPY    (1e30f)  // sentinel: larger than any real entropy value

// 1M window (4M bytes) minus 120 special codes for short distances.
#define WINDOW_SIZE ((1 << WINDOW_SIZE_BITS) - 120)

// Minimum number of pixels for which it is cheaper to encode a
// distance + length instead of each pixel as a literal.
#define MIN_LENGTH 4

// -----------------------------------------------------------------------------
40 
// Look-up table mapping a small 2-D offset to a short distance code. Row r,
// column c corresponds to the offset (c - 8, r); 255 marks unused entries.
static const uint8_t plane_to_code_lut[128] = {
 96,   73,  55,  39,  23,  13,   5,  1,  255, 255, 255, 255, 255, 255, 255, 255,
 101,  78,  58,  42,  26,  16,   8,  2,    0,   3,  9,   17,  27,  43,  59,  79,
 102,  86,  62,  46,  32,  20,  10,  6,    4,   7,  11,  21,  33,  47,  63,  87,
 105,  90,  70,  52,  37,  28,  18,  14,  12,  15,  19,  29,  38,  53,  71,  91,
 110,  99,  82,  66,  48,  35,  30,  24,  22,  25,  31,  36,  49,  67,  83, 100,
 115, 108,  94,  76,  64,  50,  44,  40,  34,  41,  45,  51,  65,  77,  95, 109,
 118, 113, 103,  92,  80,  68,  60,  56,  54,  57,  61,  69,  81,  93, 104, 114,
 119, 116, 111, 106,  97,  88,  84,  74,  72,  75,  85,  89,  98, 107, 112, 117
};

extern int VP8LDistanceToPlaneCode(int xsize, int dist);
// Converts a linear backward distance into a plane code: offsets close to the
// current pixel in 2-D get small codes (via plane_to_code_lut), everything
// else is encoded as the raw distance shifted by the 120 reserved codes.
int VP8LDistanceToPlaneCode(int xsize, int dist) {
  const int dy = dist / xsize;
  const int dx = dist - dy * xsize;
  if (dx <= 8 && dy < 8) {
    // Offset lies within the 8-pixel neighborhood to the left/above.
    return plane_to_code_lut[dy * 16 + 8 - dx] + 1;
  }
  if (dx > xsize - 8 && dy < 7) {
    // Offset wraps around to the right edge of the row above.
    return plane_to_code_lut[(dy + 1) * 16 + 8 + (xsize - dx)] + 1;
  }
  return dist + 120;  // generic long distance
}
63 
// Returns the exact index where array1 and array2 are different. For an index
// inferior or equal to best_len_match, the return value just has to be strictly
// inferior to best_len_match. The current behavior is to return 0 if this index
// is best_len_match, and the index itself otherwise.
// If no two elements are the same, it returns max_limit.
static WEBP_INLINE int FindMatchLength(const uint32_t* const array1,
                                       const uint32_t* const array2,
                                       int best_len_match, int max_limit) {
  // Before 'expensive' linear match, check if the two arrays match at the
  // current best length index. If they differ there, the match cannot beat
  // 'best_len_match', so returning 0 is sufficient for the caller.
  if (array1[best_len_match] != array2[best_len_match]) return 0;

  return VP8LVectorMismatch(array1, array2, max_limit);
}
78 
// -----------------------------------------------------------------------------
//  VP8LBackwardRefs

// One link of the chained storage used by VP8LBackwardRefs: a fixed-capacity
// array of PixOrCopy tokens plus a pointer to the next block. Chaining blocks
// lets the token stream grow without reallocating/copying existing tokens.
struct PixOrCopyBlock {
  PixOrCopyBlock* next_;   // next block (or NULL)
  PixOrCopy* start_;       // data start
  int size_;               // currently used size
};
87 
extern void VP8LClearBackwardRefs(VP8LBackwardRefs* const refs);
// Empties 'refs' without releasing memory: the whole live chain is spliced
// onto the free list so later additions can reuse the blocks.
void VP8LClearBackwardRefs(VP8LBackwardRefs* const refs) {
  assert(refs != NULL);
  if (refs->tail_ != NULL) {
    // tail_ points at the terminating next_ slot of the live chain; writing
    // the current free list there appends it after the live blocks.
    *refs->tail_ = refs->free_blocks_;  // recycle all blocks at once
  }
  // The combined list becomes the new free list; the live chain is reset.
  refs->free_blocks_ = refs->refs_;
  refs->tail_ = &refs->refs_;
  refs->last_block_ = NULL;
  refs->refs_ = NULL;
}
99 
VP8LBackwardRefsClear(VP8LBackwardRefs * const refs)100 void VP8LBackwardRefsClear(VP8LBackwardRefs* const refs) {
101   assert(refs != NULL);
102   VP8LClearBackwardRefs(refs);
103   while (refs->free_blocks_ != NULL) {
104     PixOrCopyBlock* const next = refs->free_blocks_->next_;
105     WebPSafeFree(refs->free_blocks_);
106     refs->free_blocks_ = next;
107   }
108 }
109 
110 // Swaps the content of two VP8LBackwardRefs.
BackwardRefsSwap(VP8LBackwardRefs * const refs1,VP8LBackwardRefs * const refs2)111 static void BackwardRefsSwap(VP8LBackwardRefs* const refs1,
112                              VP8LBackwardRefs* const refs2) {
113   const int point_to_refs1 =
114       (refs1->tail_ != NULL && refs1->tail_ == &refs1->refs_);
115   const int point_to_refs2 =
116       (refs2->tail_ != NULL && refs2->tail_ == &refs2->refs_);
117   const VP8LBackwardRefs tmp = *refs1;
118   *refs1 = *refs2;
119   *refs2 = tmp;
120   if (point_to_refs2) refs1->tail_ = &refs1->refs_;
121   if (point_to_refs1) refs2->tail_ = &refs2->refs_;
122 }
123 
VP8LBackwardRefsInit(VP8LBackwardRefs * const refs,int block_size)124 void VP8LBackwardRefsInit(VP8LBackwardRefs* const refs, int block_size) {
125   assert(refs != NULL);
126   memset(refs, 0, sizeof(*refs));
127   refs->tail_ = &refs->refs_;
128   refs->block_size_ =
129       (block_size < MIN_BLOCK_SIZE) ? MIN_BLOCK_SIZE : block_size;
130 }
131 
VP8LRefsCursorInit(const VP8LBackwardRefs * const refs)132 VP8LRefsCursor VP8LRefsCursorInit(const VP8LBackwardRefs* const refs) {
133   VP8LRefsCursor c;
134   c.cur_block_ = refs->refs_;
135   if (refs->refs_ != NULL) {
136     c.cur_pos = c.cur_block_->start_;
137     c.last_pos_ = c.cur_pos + c.cur_block_->size_;
138   } else {
139     c.cur_pos = NULL;
140     c.last_pos_ = NULL;
141   }
142   return c;
143 }
144 
VP8LRefsCursorNextBlock(VP8LRefsCursor * const c)145 void VP8LRefsCursorNextBlock(VP8LRefsCursor* const c) {
146   PixOrCopyBlock* const b = c->cur_block_->next_;
147   c->cur_pos = (b == NULL) ? NULL : b->start_;
148   c->last_pos_ = (b == NULL) ? NULL : b->start_ + b->size_;
149   c->cur_block_ = b;
150 }
151 
152 // Create a new block, either from the free list or allocated
BackwardRefsNewBlock(VP8LBackwardRefs * const refs)153 static PixOrCopyBlock* BackwardRefsNewBlock(VP8LBackwardRefs* const refs) {
154   PixOrCopyBlock* b = refs->free_blocks_;
155   if (b == NULL) {   // allocate new memory chunk
156     const size_t total_size =
157         sizeof(*b) + refs->block_size_ * sizeof(*b->start_);
158     b = (PixOrCopyBlock*)WebPSafeMalloc(1ULL, total_size);
159     if (b == NULL) {
160       refs->error_ |= 1;
161       return NULL;
162     }
163     b->start_ = (PixOrCopy*)((uint8_t*)b + sizeof(*b));  // not always aligned
164   } else {  // recycle from free-list
165     refs->free_blocks_ = b->next_;
166   }
167   *refs->tail_ = b;
168   refs->tail_ = &b->next_;
169   refs->last_block_ = b;
170   b->next_ = NULL;
171   b->size_ = 0;
172   return b;
173 }
174 
175 // Return 1 on success, 0 on error.
BackwardRefsClone(const VP8LBackwardRefs * const from,VP8LBackwardRefs * const to)176 static int BackwardRefsClone(const VP8LBackwardRefs* const from,
177                              VP8LBackwardRefs* const to) {
178   const PixOrCopyBlock* block_from = from->refs_;
179   VP8LClearBackwardRefs(to);
180   while (block_from != NULL) {
181     PixOrCopyBlock* const block_to = BackwardRefsNewBlock(to);
182     if (block_to == NULL) return 0;
183     memcpy(block_to->start_, block_from->start_,
184            block_from->size_ * sizeof(PixOrCopy));
185     block_to->size_ = block_from->size_;
186     block_from = block_from->next_;
187   }
188   return 1;
189 }
190 
extern void VP8LBackwardRefsCursorAdd(VP8LBackwardRefs* const refs,
                                      const PixOrCopy v);
// Appends token 'v' to 'refs', starting a new block when the current one is
// full. On allocation failure refs->error_ is set and 'v' is dropped.
void VP8LBackwardRefsCursorAdd(VP8LBackwardRefs* const refs,
                               const PixOrCopy v) {
  PixOrCopyBlock* block = refs->last_block_;
  const int needs_new_block =
      (block == NULL || block->size_ == refs->block_size_);
  if (needs_new_block) {
    block = BackwardRefsNewBlock(refs);
    if (block == NULL) return;   // refs->error_ is set
  }
  block->start_[block->size_++] = v;
}
202 
203 // -----------------------------------------------------------------------------
204 // Hash chains
205 
VP8LHashChainInit(VP8LHashChain * const p,int size)206 int VP8LHashChainInit(VP8LHashChain* const p, int size) {
207   assert(p->size_ == 0);
208   assert(p->offset_length_ == NULL);
209   assert(size > 0);
210   p->offset_length_ =
211       (uint32_t*)WebPSafeMalloc(size, sizeof(*p->offset_length_));
212   if (p->offset_length_ == NULL) return 0;
213   p->size_ = size;
214 
215   return 1;
216 }
217 
VP8LHashChainClear(VP8LHashChain * const p)218 void VP8LHashChainClear(VP8LHashChain* const p) {
219   assert(p != NULL);
220   WebPSafeFree(p->offset_length_);
221 
222   p->size_ = 0;
223   p->offset_length_ = NULL;
224 }
225 
226 // -----------------------------------------------------------------------------
227 
228 static const uint32_t kHashMultiplierHi = 0xc6a4a793u;
229 static const uint32_t kHashMultiplierLo = 0x5bd1e996u;
230 
231 static WEBP_UBSAN_IGNORE_UNSIGNED_OVERFLOW WEBP_INLINE
GetPixPairHash64(const uint32_t * const argb)232 uint32_t GetPixPairHash64(const uint32_t* const argb) {
233   uint32_t key;
234   key  = argb[1] * kHashMultiplierHi;
235   key += argb[0] * kHashMultiplierLo;
236   key = key >> (32 - HASH_BITS);
237   return key;
238 }
239 
// Returns the maximum number of hash chain lookups to do for a
// given compression quality. Return value in range [8, 86].
static int GetMaxItersForQuality(int quality) {
  const int extra_iters = (quality * quality) / 128;  // quadratic ramp
  return 8 + extra_iters;
}
245 
GetWindowSizeForHashChain(int quality,int xsize)246 static int GetWindowSizeForHashChain(int quality, int xsize) {
247   const int max_window_size = (quality > 75) ? WINDOW_SIZE
248                             : (quality > 50) ? (xsize << 8)
249                             : (quality > 25) ? (xsize << 6)
250                             : (xsize << 4);
251   assert(xsize > 0);
252   return (max_window_size > WINDOW_SIZE) ? WINDOW_SIZE : max_window_size;
253 }
254 
MaxFindCopyLength(int len)255 static WEBP_INLINE int MaxFindCopyLength(int len) {
256   return (len < MAX_LENGTH) ? len : MAX_LENGTH;
257 }
258 
VP8LHashChainFill(VP8LHashChain * const p,int quality,const uint32_t * const argb,int xsize,int ysize,int low_effort,const WebPPicture * const pic,int percent_range,int * const percent)259 int VP8LHashChainFill(VP8LHashChain* const p, int quality,
260                       const uint32_t* const argb, int xsize, int ysize,
261                       int low_effort, const WebPPicture* const pic,
262                       int percent_range, int* const percent) {
263   const int size = xsize * ysize;
264   const int iter_max = GetMaxItersForQuality(quality);
265   const uint32_t window_size = GetWindowSizeForHashChain(quality, xsize);
266   int remaining_percent = percent_range;
267   int percent_start = *percent;
268   int pos;
269   int argb_comp;
270   uint32_t base_position;
271   int32_t* hash_to_first_index;
272   // Temporarily use the p->offset_length_ as a hash chain.
273   int32_t* chain = (int32_t*)p->offset_length_;
274   assert(size > 0);
275   assert(p->size_ != 0);
276   assert(p->offset_length_ != NULL);
277 
278   if (size <= 2) {
279     p->offset_length_[0] = p->offset_length_[size - 1] = 0;
280     return 1;
281   }
282 
283   hash_to_first_index =
284       (int32_t*)WebPSafeMalloc(HASH_SIZE, sizeof(*hash_to_first_index));
285   if (hash_to_first_index == NULL) {
286     WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);
287     return 0;
288   }
289 
290   percent_range = remaining_percent / 2;
291   remaining_percent -= percent_range;
292 
293   // Set the int32_t array to -1.
294   memset(hash_to_first_index, 0xff, HASH_SIZE * sizeof(*hash_to_first_index));
295   // Fill the chain linking pixels with the same hash.
296   argb_comp = (argb[0] == argb[1]);
297   for (pos = 0; pos < size - 2;) {
298     uint32_t hash_code;
299     const int argb_comp_next = (argb[pos + 1] == argb[pos + 2]);
300     if (argb_comp && argb_comp_next) {
301       // Consecutive pixels with the same color will share the same hash.
302       // We therefore use a different hash: the color and its repetition
303       // length.
304       uint32_t tmp[2];
305       uint32_t len = 1;
306       tmp[0] = argb[pos];
307       // Figure out how far the pixels are the same.
308       // The last pixel has a different 64 bit hash, as its next pixel does
309       // not have the same color, so we just need to get to the last pixel equal
310       // to its follower.
311       while (pos + (int)len + 2 < size && argb[pos + len + 2] == argb[pos]) {
312         ++len;
313       }
314       if (len > MAX_LENGTH) {
315         // Skip the pixels that match for distance=1 and length>MAX_LENGTH
316         // because they are linked to their predecessor and we automatically
317         // check that in the main for loop below. Skipping means setting no
318         // predecessor in the chain, hence -1.
319         memset(chain + pos, 0xff, (len - MAX_LENGTH) * sizeof(*chain));
320         pos += len - MAX_LENGTH;
321         len = MAX_LENGTH;
322       }
323       // Process the rest of the hash chain.
324       while (len) {
325         tmp[1] = len--;
326         hash_code = GetPixPairHash64(tmp);
327         chain[pos] = hash_to_first_index[hash_code];
328         hash_to_first_index[hash_code] = pos++;
329       }
330       argb_comp = 0;
331     } else {
332       // Just move one pixel forward.
333       hash_code = GetPixPairHash64(argb + pos);
334       chain[pos] = hash_to_first_index[hash_code];
335       hash_to_first_index[hash_code] = pos++;
336       argb_comp = argb_comp_next;
337     }
338 
339     if (!WebPReportProgress(
340             pic, percent_start + percent_range * pos / (size - 2), percent)) {
341       WebPSafeFree(hash_to_first_index);
342       return 0;
343     }
344   }
345   // Process the penultimate pixel.
346   chain[pos] = hash_to_first_index[GetPixPairHash64(argb + pos)];
347 
348   WebPSafeFree(hash_to_first_index);
349 
350   percent_start += percent_range;
351   if (!WebPReportProgress(pic, percent_start, percent)) return 0;
352   percent_range = remaining_percent;
353 
354   // Find the best match interval at each pixel, defined by an offset to the
355   // pixel and a length. The right-most pixel cannot match anything to the right
356   // (hence a best length of 0) and the left-most pixel nothing to the left
357   // (hence an offset of 0).
358   assert(size > 2);
359   p->offset_length_[0] = p->offset_length_[size - 1] = 0;
360   for (base_position = size - 2; base_position > 0;) {
361     const int max_len = MaxFindCopyLength(size - 1 - base_position);
362     const uint32_t* const argb_start = argb + base_position;
363     int iter = iter_max;
364     int best_length = 0;
365     uint32_t best_distance = 0;
366     uint32_t best_argb;
367     const int min_pos =
368         (base_position > window_size) ? base_position - window_size : 0;
369     const int length_max = (max_len < 256) ? max_len : 256;
370     uint32_t max_base_position;
371 
372     pos = chain[base_position];
373     if (!low_effort) {
374       int curr_length;
375       // Heuristic: use the comparison with the above line as an initialization.
376       if (base_position >= (uint32_t)xsize) {
377         curr_length = FindMatchLength(argb_start - xsize, argb_start,
378                                       best_length, max_len);
379         if (curr_length > best_length) {
380           best_length = curr_length;
381           best_distance = xsize;
382         }
383         --iter;
384       }
385       // Heuristic: compare to the previous pixel.
386       curr_length =
387           FindMatchLength(argb_start - 1, argb_start, best_length, max_len);
388       if (curr_length > best_length) {
389         best_length = curr_length;
390         best_distance = 1;
391       }
392       --iter;
393       // Skip the for loop if we already have the maximum.
394       if (best_length == MAX_LENGTH) pos = min_pos - 1;
395     }
396     best_argb = argb_start[best_length];
397 
398     for (; pos >= min_pos && --iter; pos = chain[pos]) {
399       int curr_length;
400       assert(base_position > (uint32_t)pos);
401 
402       if (argb[pos + best_length] != best_argb) continue;
403 
404       curr_length = VP8LVectorMismatch(argb + pos, argb_start, max_len);
405       if (best_length < curr_length) {
406         best_length = curr_length;
407         best_distance = base_position - pos;
408         best_argb = argb_start[best_length];
409         // Stop if we have reached a good enough length.
410         if (best_length >= length_max) break;
411       }
412     }
413     // We have the best match but in case the two intervals continue matching
414     // to the left, we have the best matches for the left-extended pixels.
415     max_base_position = base_position;
416     while (1) {
417       assert(best_length <= MAX_LENGTH);
418       assert(best_distance <= WINDOW_SIZE);
419       p->offset_length_[base_position] =
420           (best_distance << MAX_LENGTH_BITS) | (uint32_t)best_length;
421       --base_position;
422       // Stop if we don't have a match or if we are out of bounds.
423       if (best_distance == 0 || base_position == 0) break;
424       // Stop if we cannot extend the matching intervals to the left.
425       if (base_position < best_distance ||
426           argb[base_position - best_distance] != argb[base_position]) {
427         break;
428       }
429       // Stop if we are matching at its limit because there could be a closer
430       // matching interval with the same maximum length. Then again, if the
431       // matching interval is as close as possible (best_distance == 1), we will
432       // never find anything better so let's continue.
433       if (best_length == MAX_LENGTH && best_distance != 1 &&
434           base_position + MAX_LENGTH < max_base_position) {
435         break;
436       }
437       if (best_length < MAX_LENGTH) {
438         ++best_length;
439         max_base_position = base_position;
440       }
441     }
442 
443     if (!WebPReportProgress(pic,
444                             percent_start + percent_range *
445                                                 (size - 2 - base_position) /
446                                                 (size - 2),
447                             percent)) {
448       return 0;
449     }
450   }
451 
452   return WebPReportProgress(pic, percent_start + percent_range, percent);
453 }
454 
AddSingleLiteral(uint32_t pixel,int use_color_cache,VP8LColorCache * const hashers,VP8LBackwardRefs * const refs)455 static WEBP_INLINE void AddSingleLiteral(uint32_t pixel, int use_color_cache,
456                                          VP8LColorCache* const hashers,
457                                          VP8LBackwardRefs* const refs) {
458   PixOrCopy v;
459   if (use_color_cache) {
460     const uint32_t key = VP8LColorCacheGetIndex(hashers, pixel);
461     if (VP8LColorCacheLookup(hashers, key) == pixel) {
462       v = PixOrCopyCreateCacheIdx(key);
463     } else {
464       v = PixOrCopyCreateLiteral(pixel);
465       VP8LColorCacheSet(hashers, key, pixel);
466     }
467   } else {
468     v = PixOrCopyCreateLiteral(pixel);
469   }
470   VP8LBackwardRefsCursorAdd(refs, v);
471 }
472 
BackwardReferencesRle(int xsize,int ysize,const uint32_t * const argb,int cache_bits,VP8LBackwardRefs * const refs)473 static int BackwardReferencesRle(int xsize, int ysize,
474                                  const uint32_t* const argb,
475                                  int cache_bits, VP8LBackwardRefs* const refs) {
476   const int pix_count = xsize * ysize;
477   int i, k;
478   const int use_color_cache = (cache_bits > 0);
479   VP8LColorCache hashers;
480 
481   if (use_color_cache && !VP8LColorCacheInit(&hashers, cache_bits)) {
482     return 0;
483   }
484   VP8LClearBackwardRefs(refs);
485   // Add first pixel as literal.
486   AddSingleLiteral(argb[0], use_color_cache, &hashers, refs);
487   i = 1;
488   while (i < pix_count) {
489     const int max_len = MaxFindCopyLength(pix_count - i);
490     const int rle_len = FindMatchLength(argb + i, argb + i - 1, 0, max_len);
491     const int prev_row_len = (i < xsize) ? 0 :
492         FindMatchLength(argb + i, argb + i - xsize, 0, max_len);
493     if (rle_len >= prev_row_len && rle_len >= MIN_LENGTH) {
494       VP8LBackwardRefsCursorAdd(refs, PixOrCopyCreateCopy(1, rle_len));
495       // We don't need to update the color cache here since it is always the
496       // same pixel being copied, and that does not change the color cache
497       // state.
498       i += rle_len;
499     } else if (prev_row_len >= MIN_LENGTH) {
500       VP8LBackwardRefsCursorAdd(refs, PixOrCopyCreateCopy(xsize, prev_row_len));
501       if (use_color_cache) {
502         for (k = 0; k < prev_row_len; ++k) {
503           VP8LColorCacheInsert(&hashers, argb[i + k]);
504         }
505       }
506       i += prev_row_len;
507     } else {
508       AddSingleLiteral(argb[i], use_color_cache, &hashers, refs);
509       i++;
510     }
511   }
512   if (use_color_cache) VP8LColorCacheClear(&hashers);
513   return !refs->error_;
514 }
515 
BackwardReferencesLz77(int xsize,int ysize,const uint32_t * const argb,int cache_bits,const VP8LHashChain * const hash_chain,VP8LBackwardRefs * const refs)516 static int BackwardReferencesLz77(int xsize, int ysize,
517                                   const uint32_t* const argb, int cache_bits,
518                                   const VP8LHashChain* const hash_chain,
519                                   VP8LBackwardRefs* const refs) {
520   int i;
521   int i_last_check = -1;
522   int ok = 0;
523   int cc_init = 0;
524   const int use_color_cache = (cache_bits > 0);
525   const int pix_count = xsize * ysize;
526   VP8LColorCache hashers;
527 
528   if (use_color_cache) {
529     cc_init = VP8LColorCacheInit(&hashers, cache_bits);
530     if (!cc_init) goto Error;
531   }
532   VP8LClearBackwardRefs(refs);
533   for (i = 0; i < pix_count;) {
534     // Alternative#1: Code the pixels starting at 'i' using backward reference.
535     int offset = 0;
536     int len = 0;
537     int j;
538     VP8LHashChainFindCopy(hash_chain, i, &offset, &len);
539     if (len >= MIN_LENGTH) {
540       const int len_ini = len;
541       int max_reach = 0;
542       const int j_max =
543           (i + len_ini >= pix_count) ? pix_count - 1 : i + len_ini;
544       // Only start from what we have not checked already.
545       i_last_check = (i > i_last_check) ? i : i_last_check;
546       // We know the best match for the current pixel but we try to find the
547       // best matches for the current pixel AND the next one combined.
548       // The naive method would use the intervals:
549       // [i,i+len) + [i+len, length of best match at i+len)
550       // while we check if we can use:
551       // [i,j) (where j<=i+len) + [j, length of best match at j)
552       for (j = i_last_check + 1; j <= j_max; ++j) {
553         const int len_j = VP8LHashChainFindLength(hash_chain, j);
554         const int reach =
555             j + (len_j >= MIN_LENGTH ? len_j : 1);  // 1 for single literal.
556         if (reach > max_reach) {
557           len = j - i;
558           max_reach = reach;
559           if (max_reach >= pix_count) break;
560         }
561       }
562     } else {
563       len = 1;
564     }
565     // Go with literal or backward reference.
566     assert(len > 0);
567     if (len == 1) {
568       AddSingleLiteral(argb[i], use_color_cache, &hashers, refs);
569     } else {
570       VP8LBackwardRefsCursorAdd(refs, PixOrCopyCreateCopy(offset, len));
571       if (use_color_cache) {
572         for (j = i; j < i + len; ++j) VP8LColorCacheInsert(&hashers, argb[j]);
573       }
574     }
575     i += len;
576   }
577 
578   ok = !refs->error_;
579  Error:
580   if (cc_init) VP8LColorCacheClear(&hashers);
581   return ok;
582 }
583 
// Compute an LZ77 by forcing matches to happen within a given distance cost.
// We therefore limit the algorithm to the lowest 32 values in the PlaneCode
// definition.
#define WINDOW_OFFSETS_SIZE_MAX 32
// Fills 'hash_chain' with the best match per pixel restricted to a small 2-D
// window of offsets (those with the cheapest plane codes), then emits the
// references via BackwardReferencesLz77. 'hash_chain_best' holds the
// unconstrained matches and is used as a shortcut when a maximal match
// already lies inside the window. Returns 0 on error, 1 on success.
static int BackwardReferencesLz77Box(int xsize, int ysize,
                                     const uint32_t* const argb, int cache_bits,
                                     const VP8LHashChain* const hash_chain_best,
                                     VP8LHashChain* hash_chain,
                                     VP8LBackwardRefs* const refs) {
  int i;
  const int pix_count = xsize * ysize;
  uint16_t* counts;
  int window_offsets[WINDOW_OFFSETS_SIZE_MAX] = {0};
  int window_offsets_new[WINDOW_OFFSETS_SIZE_MAX] = {0};
  int window_offsets_size = 0;
  int window_offsets_new_size = 0;
  uint16_t* const counts_ini =
      (uint16_t*)WebPSafeMalloc(xsize * ysize, sizeof(*counts_ini));
  int best_offset_prev = -1, best_length_prev = -1;
  if (counts_ini == NULL) return 0;

  // counts[i] counts how many times a pixel is repeated starting at position i.
  i = pix_count - 2;
  counts = counts_ini + i;
  counts[1] = 1;  // the last pixel trivially repeats once
  for (; i >= 0; --i, --counts) {
    if (argb[i] == argb[i + 1]) {
      // Max out the counts to MAX_LENGTH.
      counts[0] = counts[1] + (counts[1] != MAX_LENGTH);
    } else {
      counts[0] = 1;
    }
  }

  // Figure out the window offsets around a pixel. They are stored in a
  // spiraling order around the pixel as defined by VP8LDistanceToPlaneCode.
  {
    int x, y;
    for (y = 0; y <= 6; ++y) {
      for (x = -6; x <= 6; ++x) {
        const int offset = y * xsize + x;
        int plane_code;
        // Ignore offsets that bring us after the pixel.
        if (offset <= 0) continue;
        plane_code = VP8LDistanceToPlaneCode(xsize, offset) - 1;
        if (plane_code >= WINDOW_OFFSETS_SIZE_MAX) continue;
        window_offsets[plane_code] = offset;
      }
    }
    // For narrow images, not all plane codes are reached, so remove those.
    for (i = 0; i < WINDOW_OFFSETS_SIZE_MAX; ++i) {
      if (window_offsets[i] == 0) continue;
      window_offsets[window_offsets_size++] = window_offsets[i];
    }
    // Given a pixel P, find the offsets that reach pixels unreachable from P-1
    // with any of the offsets in window_offsets[].
    for (i = 0; i < window_offsets_size; ++i) {
      int j;
      int is_reachable = 0;
      for (j = 0; j < window_offsets_size && !is_reachable; ++j) {
        is_reachable |= (window_offsets[i] == window_offsets[j] + 1);
      }
      if (!is_reachable) {
        window_offsets_new[window_offsets_new_size] = window_offsets[i];
        ++window_offsets_new_size;
      }
    }
  }

  hash_chain->offset_length_[0] = 0;
  for (i = 1; i < pix_count; ++i) {
    int ind;
    int best_length = VP8LHashChainFindLength(hash_chain_best, i);
    int best_offset;
    int do_compute = 1;

    if (best_length >= MAX_LENGTH) {
      // Do not recompute the best match if we already have a maximal one in the
      // window.
      best_offset = VP8LHashChainFindOffset(hash_chain_best, i);
      for (ind = 0; ind < window_offsets_size; ++ind) {
        if (best_offset == window_offsets[ind]) {
          do_compute = 0;
          break;
        }
      }
    }
    if (do_compute) {
      // Figure out if we should use the offset/length from the previous pixel
      // as an initial guess and therefore only inspect the offsets in
      // window_offsets_new[].
      const int use_prev =
          (best_length_prev > 1) && (best_length_prev < MAX_LENGTH);
      const int num_ind =
          use_prev ? window_offsets_new_size : window_offsets_size;
      best_length = use_prev ? best_length_prev - 1 : 0;
      best_offset = use_prev ? best_offset_prev : 0;
      // Find the longest match in a window around the pixel.
      for (ind = 0; ind < num_ind; ++ind) {
        int curr_length = 0;
        int j = i;
        int j_offset =
            use_prev ? i - window_offsets_new[ind] : i - window_offsets[ind];
        if (j_offset < 0 || argb[j_offset] != argb[i]) continue;
        // The longest match is the sum of how many times each pixel is
        // repeated.
        do {
          const int counts_j_offset = counts_ini[j_offset];
          const int counts_j = counts_ini[j];
          if (counts_j_offset != counts_j) {
            // The runs diverge: only the shorter run still matches.
            curr_length +=
                (counts_j_offset < counts_j) ? counts_j_offset : counts_j;
            break;
          }
          // The same color is repeated counts_pos times at j_offset and j.
          curr_length += counts_j_offset;
          j_offset += counts_j_offset;
          j += counts_j_offset;
        } while (curr_length <= MAX_LENGTH && j < pix_count &&
                 argb[j_offset] == argb[j]);
        if (best_length < curr_length) {
          best_offset =
              use_prev ? window_offsets_new[ind] : window_offsets[ind];
          if (curr_length >= MAX_LENGTH) {
            best_length = MAX_LENGTH;
            break;
          } else {
            best_length = curr_length;
          }
        }
      }
    }

    assert(i + best_length <= pix_count);
    assert(best_length <= MAX_LENGTH);
    if (best_length <= MIN_LENGTH) {
      // Too short to be worth a copy: leave the position unmatched.
      hash_chain->offset_length_[i] = 0;
      best_offset_prev = 0;
      best_length_prev = 0;
    } else {
      hash_chain->offset_length_[i] =
          (best_offset << MAX_LENGTH_BITS) | (uint32_t)best_length;
      best_offset_prev = best_offset;
      best_length_prev = best_length;
    }
  }
  hash_chain->offset_length_[0] = 0;
  WebPSafeFree(counts_ini);

  return BackwardReferencesLz77(xsize, ysize, argb, cache_bits, hash_chain,
                                refs);
}
736 
737 // -----------------------------------------------------------------------------
738 
BackwardReferences2DLocality(int xsize,const VP8LBackwardRefs * const refs)739 static void BackwardReferences2DLocality(int xsize,
740                                          const VP8LBackwardRefs* const refs) {
741   VP8LRefsCursor c = VP8LRefsCursorInit(refs);
742   while (VP8LRefsCursorOk(&c)) {
743     if (PixOrCopyIsCopy(c.cur_pos)) {
744       const int dist = c.cur_pos->argb_or_distance;
745       const int transformed_dist = VP8LDistanceToPlaneCode(xsize, dist);
746       c.cur_pos->argb_or_distance = transformed_dist;
747     }
748     VP8LRefsCursorNext(&c);
749   }
750 }
751 
752 // Evaluate optimal cache bits for the local color cache.
753 // The input *best_cache_bits sets the maximum cache bits to use (passing 0
754 // implies disabling the local color cache). The local color cache is also
755 // disabled for the lower (<= 25) quality.
756 // Returns 0 in case of memory error.
static int CalculateBestCacheSize(const uint32_t* argb, int quality,
                                  const VP8LBackwardRefs* const refs,
                                  int* const best_cache_bits) {
  int i;
  // Low quality (<= 25) forces the cache off (cache_bits_max == 0).
  const int cache_bits_max = (quality <= 25) ? 0 : *best_cache_bits;
  float entropy_min = MAX_ENTROPY;
  // cc_init[i] records whether hashers[i] was successfully initialized, so
  // the cleanup below only clears the caches that were actually created.
  int cc_init[MAX_COLOR_CACHE_BITS + 1] = { 0 };
  VP8LColorCache hashers[MAX_COLOR_CACHE_BITS + 1];
  VP8LRefsCursor c = VP8LRefsCursorInit(refs);
  // One histogram per candidate cache size (index == number of cache bits).
  VP8LHistogram* histos[MAX_COLOR_CACHE_BITS + 1] = { NULL };
  int ok = 0;

  assert(cache_bits_max >= 0 && cache_bits_max <= MAX_COLOR_CACHE_BITS);

  if (cache_bits_max == 0) {
    *best_cache_bits = 0;
    // Local color cache is disabled.
    return 1;
  }

  // Allocate data.
  for (i = 0; i <= cache_bits_max; ++i) {
    histos[i] = VP8LAllocateHistogram(i);
    if (histos[i] == NULL) goto Error;
    VP8LHistogramInit(histos[i], i, /*init_arrays=*/ 1);
    if (i == 0) continue;  // cache_bits == 0 uses no color cache.
    cc_init[i] = VP8LColorCacheInit(&hashers[i], i);
    if (!cc_init[i]) goto Error;
  }

  // Find the cache_bits giving the lowest entropy. The search is done in a
  // brute-force way as the function (entropy w.r.t cache_bits) can be
  // anything in practice.
  while (VP8LRefsCursorOk(&c)) {
    const PixOrCopy* const v = c.cur_pos;
    if (PixOrCopyIsLiteral(v)) {
      const uint32_t pix = *argb++;
      const uint32_t a = (pix >> 24) & 0xff;
      const uint32_t r = (pix >> 16) & 0xff;
      const uint32_t g = (pix >>  8) & 0xff;
      const uint32_t b = (pix >>  0) & 0xff;
      // The keys of the caches can be derived from the longest one.
      int key = VP8LHashPix(pix, 32 - cache_bits_max);
      // Do not use the color cache for cache_bits = 0.
      ++histos[0]->blue_[b];
      ++histos[0]->literal_[g];
      ++histos[0]->red_[r];
      ++histos[0]->alpha_[a];
      // Deal with cache_bits > 0: dropping one bit of 'key' per iteration
      // yields the key for the next smaller cache size.
      for (i = cache_bits_max; i >= 1; --i, key >>= 1) {
        if (VP8LColorCacheLookup(&hashers[i], key) == pix) {
          // Cache hit: counted as a cache-index symbol in literal_.
          ++histos[i]->literal_[NUM_LITERAL_CODES + NUM_LENGTH_CODES + key];
        } else {
          // Cache miss: insert the color and count the four ARGB literals.
          VP8LColorCacheSet(&hashers[i], key, pix);
          ++histos[i]->blue_[b];
          ++histos[i]->literal_[g];
          ++histos[i]->red_[r];
          ++histos[i]->alpha_[a];
        }
      }
    } else {
      int code, extra_bits, extra_bits_value;
      // We should compute the contribution of the (distance,length)
      // histograms but those are the same independently from the cache size.
      // As those constant contributions are in the end added to the other
      // histogram contributions, we can ignore them, except for the length
      // prefix that is part of the literal_ histogram.
      int len = PixOrCopyLength(v);
      // Initialized to the complement of *argb so the first pixel of the
      // copy always differs and triggers a cache update below.
      uint32_t argb_prev = *argb ^ 0xffffffffu;
      VP8LPrefixEncode(len, &code, &extra_bits, &extra_bits_value);
      for (i = 0; i <= cache_bits_max; ++i) {
        ++histos[i]->literal_[NUM_LITERAL_CODES + code];
      }
      // Update the color caches.
      do {
        if (*argb != argb_prev) {
          // Efficiency: insert only if the color changes.
          int key = VP8LHashPix(*argb, 32 - cache_bits_max);
          for (i = cache_bits_max; i >= 1; --i, key >>= 1) {
            hashers[i].colors_[key] = *argb;
          }
          argb_prev = *argb;
        }
        argb++;
      } while (--len != 0);
    }
    VP8LRefsCursorNext(&c);
  }

  // Pick the cache size whose histogram has the lowest estimated bit cost.
  for (i = 0; i <= cache_bits_max; ++i) {
    const float entropy = VP8LHistogramEstimateBits(histos[i]);
    if (i == 0 || entropy < entropy_min) {
      entropy_min = entropy;
      *best_cache_bits = i;
    }
  }
  ok = 1;
 Error:
  for (i = 0; i <= cache_bits_max; ++i) {
    if (cc_init[i]) VP8LColorCacheClear(&hashers[i]);
    VP8LFreeHistogram(histos[i]);
  }
  return ok;
}
861 
862 // Update (in-place) backward references for specified cache_bits.
// Re-encodes the literals of 'refs' in place using a local color cache of
// 'cache_bits' bits: a literal whose color is already cached becomes a
// cache-index token, otherwise its color is inserted. Copy tokens feed all
// of their covered pixels (read from 'argb') into the cache so it stays in
// sync with the pixel stream. Returns 0 on memory error, 1 otherwise.
static int BackwardRefsWithLocalCache(const uint32_t* const argb,
                                      int cache_bits,
                                      VP8LBackwardRefs* const refs) {
  VP8LColorCache cache;
  int pos = 0;  // index of the next pixel in 'argb'
  VP8LRefsCursor cursor = VP8LRefsCursorInit(refs);
  if (!VP8LColorCacheInit(&cache, cache_bits)) return 0;

  for (; VP8LRefsCursorOk(&cursor); VP8LRefsCursorNext(&cursor)) {
    PixOrCopy* const token = cursor.cur_pos;
    if (PixOrCopyIsLiteral(token)) {
      const uint32_t color = token->argb_or_distance;
      const int idx = VP8LColorCacheContains(&cache, color);
      if (idx >= 0) {
        // The color is already in the cache: emit its index instead.
        *token = PixOrCopyCreateCacheIdx(idx);
      } else {
        VP8LColorCacheInsert(&cache, color);
      }
      ++pos;
    } else {
      // 'refs' was created without a local cache, so it cannot contain
      // cache-index tokens: anything non-literal must be a copy.
      int k;
      assert(PixOrCopyIsCopy(token));
      for (k = 0; k < token->len; ++k) {
        VP8LColorCacheInsert(&cache, argb[pos++]);
      }
    }
  }
  VP8LColorCacheClear(&cache);
  return 1;
}
896 
// Low-effort path: plain LZ77 with the color cache disabled, followed by the
// 2D-locality distance transform. Returns 'refs_lz77' on success or NULL on
// memory error; '*cache_bits' is always reset to 0.
static VP8LBackwardRefs* GetBackwardReferencesLowEffort(
    int width, int height, const uint32_t* const argb,
    int* const cache_bits, const VP8LHashChain* const hash_chain,
    VP8LBackwardRefs* const refs_lz77) {
  *cache_bits = 0;  // No local color cache in low-effort mode.
  if (BackwardReferencesLz77(width, height, argb, 0, hash_chain, refs_lz77)) {
    BackwardReferences2DLocality(width, refs_lz77);
    return refs_lz77;
  }
  return NULL;
}
908 
909 extern int VP8LBackwardReferencesTraceBackwards(
910     int xsize, int ysize, const uint32_t* const argb, int cache_bits,
911     const VP8LHashChain* const hash_chain,
912     const VP8LBackwardRefs* const refs_src, VP8LBackwardRefs* const refs_dst);
// Computes the best backward references for 'argb' by trying every LZ77
// variant set in 'lz77_types_to_try', each evaluated both with a color cache
// (result kept in refs[0], its size in *cache_bits_best) and, when
// 'do_no_cache' is set, without one (result kept in refs[1]). The last refs
// slot (refs[2], or refs[1] when !do_no_cache) is used as scratch.
// Returns 0 on memory error.
static int GetBackwardReferences(int width, int height,
                                 const uint32_t* const argb, int quality,
                                 int lz77_types_to_try, int cache_bits_max,
                                 int do_no_cache,
                                 const VP8LHashChain* const hash_chain,
                                 VP8LBackwardRefs* const refs,
                                 int* const cache_bits_best) {
  VP8LHistogram* histo = NULL;
  int i, lz77_type;
  // Index 0 is for a color cache, index 1 for no cache (if needed).
  int lz77_types_best[2] = {0, 0};
  float bit_costs_best[2] = {FLT_MAX, FLT_MAX};
  VP8LHashChain hash_chain_box;
  // Scratch refs slot, placed after the one or two result slots.
  VP8LBackwardRefs* const refs_tmp = &refs[do_no_cache ? 2 : 1];
  int status = 0;
  // Zeroed so VP8LHashChainClear at Error is safe even if kLZ77Box never ran.
  memset(&hash_chain_box, 0, sizeof(hash_chain_box));

  histo = VP8LAllocateHistogram(MAX_COLOR_CACHE_BITS);
  if (histo == NULL) goto Error;

  // Iterate over the requested LZ77 variants (kLZ77* are single-bit flags).
  for (lz77_type = 1; lz77_types_to_try;
       lz77_types_to_try &= ~lz77_type, lz77_type <<= 1) {
    int res = 0;
    float bit_cost = 0.f;
    if ((lz77_types_to_try & lz77_type) == 0) continue;
    switch (lz77_type) {
      case kLZ77RLE:
        res = BackwardReferencesRle(width, height, argb, 0, refs_tmp);
        break;
      case kLZ77Standard:
        // Compute LZ77 with no cache (0 bits), as the ideal LZ77 with a color
        // cache is not that different in practice.
        res = BackwardReferencesLz77(width, height, argb, 0, hash_chain,
                                     refs_tmp);
        break;
      case kLZ77Box:
        if (!VP8LHashChainInit(&hash_chain_box, width * height)) goto Error;
        res = BackwardReferencesLz77Box(width, height, argb, 0, hash_chain,
                                        &hash_chain_box, refs_tmp);
        break;
      default:
        assert(0);
    }
    if (!res) goto Error;

    // Start with the no color cache case.
    for (i = 1; i >= 0; --i) {
      int cache_bits = (i == 1) ? 0 : cache_bits_max;

      if (i == 1 && !do_no_cache) continue;

      if (i == 0) {
        // Try with a color cache.
        if (!CalculateBestCacheSize(argb, quality, refs_tmp, &cache_bits)) {
          goto Error;
        }
        if (cache_bits > 0) {
          if (!BackwardRefsWithLocalCache(argb, cache_bits, refs_tmp)) {
            goto Error;
          }
        }
      }

      if (i == 0 && do_no_cache && cache_bits == 0) {
        // No need to re-compute bit_cost as it was computed at i == 1.
      } else {
        VP8LHistogramCreate(histo, refs_tmp, cache_bits);
        bit_cost = VP8LHistogramEstimateBits(histo);
      }

      // Keep the cheapest encoding seen so far for this slot.
      if (bit_cost < bit_costs_best[i]) {
        if (i == 1) {
          // Do not swap as the full cache analysis would have the wrong
          // VP8LBackwardRefs to start with.
          if (!BackwardRefsClone(refs_tmp, &refs[1])) goto Error;
        } else {
          BackwardRefsSwap(refs_tmp, &refs[0]);
        }
        bit_costs_best[i] = bit_cost;
        lz77_types_best[i] = lz77_type;
        if (i == 0) *cache_bits_best = cache_bits;
      }
    }
  }
  assert(lz77_types_best[0] > 0);
  assert(!do_no_cache || lz77_types_best[1] > 0);

  // Improve on simple LZ77 but only for high quality (TraceBackwards is
  // costly).
  for (i = 1; i >= 0; --i) {
    if (i == 1 && !do_no_cache) continue;
    if ((lz77_types_best[i] == kLZ77Standard ||
         lz77_types_best[i] == kLZ77Box) &&
        quality >= 25) {
      const VP8LHashChain* const hash_chain_tmp =
          (lz77_types_best[i] == kLZ77Standard) ? hash_chain : &hash_chain_box;
      const int cache_bits = (i == 1) ? 0 : *cache_bits_best;
      float bit_cost_trace;
      if (!VP8LBackwardReferencesTraceBackwards(width, height, argb, cache_bits,
                                                hash_chain_tmp, &refs[i],
                                                refs_tmp)) {
        goto Error;
      }
      VP8LHistogramCreate(histo, refs_tmp, cache_bits);
      bit_cost_trace = VP8LHistogramEstimateBits(histo);
      // Keep the traced result only when it actually beats the current best.
      if (bit_cost_trace < bit_costs_best[i]) {
        BackwardRefsSwap(refs_tmp, &refs[i]);
      }
    }

    BackwardReferences2DLocality(width, &refs[i]);

    if (i == 1 && lz77_types_best[0] == lz77_types_best[1] &&
        *cache_bits_best == 0) {
      // If the best cache size is 0 and we have the same best LZ77, just copy
      // the data over and stop here.
      if (!BackwardRefsClone(&refs[1], &refs[0])) goto Error;
      break;
    }
  }
  status = 1;

 Error:
  VP8LHashChainClear(&hash_chain_box);
  VP8LFreeHistogram(histo);
  return status;
}
1040 
// Public entry point for the backward-reference search. Dispatches to the
// low-effort path or to the full search (GetBackwardReferences); the best
// result ends up in refs[0]. On memory error, records
// VP8_ENC_ERROR_OUT_OF_MEMORY in 'pic' and returns 0; otherwise reports
// encoding progress and returns its result.
int VP8LGetBackwardReferences(
    int width, int height, const uint32_t* const argb, int quality,
    int low_effort, int lz77_types_to_try, int cache_bits_max, int do_no_cache,
    const VP8LHashChain* const hash_chain, VP8LBackwardRefs* const refs,
    int* const cache_bits_best, const WebPPicture* const pic, int percent_range,
    int* const percent) {
  if (!low_effort) {
    const int ok = GetBackwardReferences(width, height, argb, quality,
                                         lz77_types_to_try, cache_bits_max,
                                         do_no_cache, hash_chain, refs,
                                         cache_bits_best);
    if (!ok) {
      WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);
      return 0;
    }
  } else {
    VP8LBackwardRefs* best;
    *cache_bits_best = cache_bits_max;
    best = GetBackwardReferencesLowEffort(width, height, argb, cache_bits_best,
                                          hash_chain, refs);
    if (best == NULL) {
      WebPEncodingSetError(pic, VP8_ENC_ERROR_OUT_OF_MEMORY);
      return 0;
    }
    // Set it in first position.
    BackwardRefsSwap(best, &refs[0]);
  }

  return WebPReportProgress(pic, *percent + percent_range, percent);
}
1069