1 /*
2  *
3  * Copyright (C) 2015 Amlogic, Inc. All rights reserved.
4  *
5  * This program is free software; you can redistribute it and/or modify
6  * it under the terms of the GNU General Public License as published by
7  * the Free Software Foundation; either version 2 of the License, or
8  * (at your option) any later version.
9  *
10  * This program is distributed in the hope that it will be useful, but WITHOUT
11  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
13  * more details.
14  *
15  */
16 
17 #ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
18 #include <stdio.h>
19 #include <stdlib.h>
20 #include <string.h>
21 #else
22 #include <linux/kernel.h>
23 #include <linux/module.h>
24 #include <linux/types.h>
25 #include <linux/errno.h>
26 #include <linux/spinlock.h>
27 #include <linux/slab.h>
28 #include <linux/vmalloc.h>
29 #include <linux/amlogic/media/canvas/canvas.h>
30 
31 #undef pr_info
32 #define pr_info printk
33 
34 #define __COMPARE(context, p1, p2) comp(p1, p2)
35 #define __SHORTSORT(lo, hi, width, comp, context) \
36   shortsort(lo, hi, width, comp)
37 #define CUTOFF 8            /* testing shows that this is a good value */
38 #define STKSIZ (8*sizeof(void *) - 2)
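/* Note (illustrative reasoning): because qsort() below always iterates on the
 * smaller partition and pushes the larger one onto lostk/histk, the pending
 * sub-array stack never grows beyond roughly log2(num) entries, so STKSIZ
 * entries are ample for any array that fits in memory.
 */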
39 
40 #undef swap
41 static void swap(char *a, char *b, size_t width)
42 {
43 	char tmp;
44 
45 	if (a != b)
46 	/* Do the swap one character at a time to avoid potential
47 	*   alignment problems.
48 	*/
49 	while (width--) {
50 		tmp = *a;
51 		*a++ = *b;
52 		*b++ = tmp;
53 	}
54 }
55 
56 static void shortsort(char *lo, char *hi, size_t width,
57   int (*comp)(const void *, const void *))
58 {
59 	char *p, *max;
60 
61 	/* Note: in the assertions below, i and j are always inside the
62 	*   original bounds of the array to sort.
63 	*/
64 	while (hi > lo) {
65 		/* A[i] <= A[j] for i <= j, j > hi */
66 		max = lo;
67 		for (p = lo + width; p <= hi; p += width) {
68 			/* A[i] <= A[max] for lo <= i < p */
69 			if (__COMPARE(context, p, max) > 0)
70 				max = p;
71 				/* A[i] <= A[max] for lo <= i <= p */
72 		}
73 		/* A[i] <= A[max] for lo <= i <= hi */
74 		swap(max, hi, width);
75 
76 		/* A[i] <= A[hi] for i <= hi, so A[i] <= A[j] for i <= j,
77 		*   j >= hi
78 		*/
79 		hi -= width;
80 
81 		/* A[i] <= A[j] for i <= j, j > hi, loop top condition
82 		*   established
83 		*/
84 	}
85 }
86 
87 static void qsort(void *base, size_t num, size_t width,
88   int (*comp)(const void *, const void *))
89 {
90   char *lo, *hi;              /* ends of sub-array currently sorting */
91   char *mid;                  /* points to middle of subarray */
92   char *loguy, *higuy;        /* traveling pointers for partition step */
93   size_t size;                /* size of the sub-array */
94   char *lostk[STKSIZ], *histk[STKSIZ];
95   int stkptr;
96 
97 /*  stack for saving sub-array to be
98  *          processed
99  */
100 #if 0
101   /* validation section */
102   _VALIDATE_RETURN_VOID(base != NULL || num == 0, EINVAL);
103   _VALIDATE_RETURN_VOID(width > 0, EINVAL);
104   _VALIDATE_RETURN_VOID(comp != NULL, EINVAL);
105 #endif
106   if (num < 2)
107     return;                 /* nothing to do */
108 
109   stkptr = 0;                 /* initialize stack */
110   lo = (char *)base;
111   hi = (char *)base + width * (num - 1);      /* initialize limits */
112 
113   /* this entry point is for pseudo-recursion calling: setting
114    * lo and hi and jumping to here is like recursion, but stkptr is
115    * preserved, locals aren't, so we preserve stuff on the stack
116    */
117 recurse:
118 
119   size = (hi - lo) / width + 1;        /* number of el's to sort */
120 
121   /* below a certain size, it is faster to use an O(n^2) sorting method */
122   if (size <= CUTOFF) {
123     __SHORTSORT(lo, hi, width, comp, context);
124   } else {
125     /* First we pick a partitioning element.  The efficiency of
126      * the algorithm demands that we find one that is approximately
127      * the median of the values, but also that we select one fast.
128      * We choose the median of the first, middle, and last
129      * elements, to avoid bad performance in the face of already
130      * sorted data, or data that is made up of multiple sorted
131      * runs appended together.  Testing shows that a
132      * median-of-three algorithm provides better performance than
133      * simply picking the middle element for the latter case.
134      */
135 
136     mid = lo + (size / 2) * width;      /* find middle element */
137 
138     /* Sort the first, middle, last elements into order */
139     if (__COMPARE(context, lo, mid) > 0)
140       swap(lo, mid, width);
141     if (__COMPARE(context, lo, hi) > 0)
142       swap(lo, hi, width);
143     if (__COMPARE(context, mid, hi) > 0)
144       swap(mid, hi, width);
145 
146     /* We now wish to partition the array into three pieces, one
147      * consisting of elements <= partition element, one of elements
148      * equal to the partition element, and one of elements > than
149      * it. This is done below; comments indicate conditions
150      * established at every step.
151      */
152 
153     loguy = lo;
154     higuy = hi;
155 
156     /* Note that higuy decreases and loguy increases on every
157      *   iteration, so loop must terminate.
158      */
159     for (;;) {
160       /* lo <= loguy < hi, lo < higuy <= hi,
161        *   A[i] <= A[mid] for lo <= i <= loguy,
162        *   A[i] > A[mid] for higuy <= i < hi,
163        *   A[hi] >= A[mid]
164        */
165 
166       /* The doubled loop is to avoid calling comp(mid,mid),
167        *   since some existing comparison funcs don't work
168        *   when passed the same value for both pointers.
169        */
170 
171       if (mid > loguy) {
172         do  {
173           loguy += width;
174         } while (loguy < mid &&
175           __COMPARE(context, loguy, mid) <= 0);
176       }
177       if (mid <= loguy) {
178         do  {
179           loguy += width;
180         } while (loguy <= hi &&
181           __COMPARE(context, loguy, mid) <= 0);
182       }
183 
184       /* lo < loguy <= hi+1, A[i] <= A[mid] for
185        *   lo <= i < loguy,
186        *   either loguy > hi or A[loguy] > A[mid]
187        */
188 
189       do  {
190         higuy -= width;
191       } while (higuy > mid &&
192           __COMPARE(context, higuy, mid) > 0);
193 
194       /* lo <= higuy < hi, A[i] > A[mid] for higuy < i < hi,
195        *   either higuy == lo or A[higuy] <= A[mid]
196        */
197 
198       if (higuy < loguy)
199         break;
200 
201       /* if loguy > hi or higuy == lo, then we would have
202        *   exited, so A[loguy] > A[mid], A[higuy] <= A[mid],
203        *   loguy <= hi, higuy > lo
204        */
205 
206       swap(loguy, higuy, width);
207 
208       /* If the partition element was moved, follow it.
209        *   Only need to check for mid == higuy, since before
210        *   the swap, A[loguy] > A[mid] implies loguy != mid.
211        */
212 
213       if (mid == higuy)
214         mid = loguy;
215 
216       /* A[loguy] <= A[mid], A[higuy] > A[mid]; so condition
217        *   at top of loop is re-established
218        */
219     }
220 
221     /*     A[i] <= A[mid] for lo <= i < loguy,
222      *       A[i] > A[mid] for higuy < i < hi,
223      *       A[hi] >= A[mid]
224      *       higuy < loguy
225      *   implying:
226      *       higuy == loguy-1
227      *       or higuy == hi - 1, loguy == hi + 1, A[hi] == A[mid]
228      */
229 
230     /* Find adjacent elements equal to the partition element.  The
231      *   doubled loop is to avoid calling comp(mid,mid), since some
232      *   existing comparison funcs don't work when passed the same
233      *   value for both pointers.
234      */
235 
236     higuy += width;
237     if (mid < higuy) {
238       do  {
239         higuy -= width;
240       } while (higuy > mid &&
241         __COMPARE(context, higuy, mid) == 0);
242     }
243     if (mid >= higuy) {
244       do  {
245         higuy -= width;
246       } while (higuy > lo &&
247         __COMPARE(context, higuy, mid) == 0);
248     }
249 
250     /* OK, now we have the following:
251      *      higuy < loguy
252      *      lo <= higuy <= hi
253      *      A[i]  <= A[mid] for lo <= i <= higuy
254      *      A[i]  == A[mid] for higuy < i < loguy
255      *      A[i]  >  A[mid] for loguy <= i < hi
256      *      A[hi] >= A[mid]
257      */
258 
259     /* We've finished the partition, now we want to sort the
260      *   subarrays [lo, higuy] and [loguy, hi].
261      *   We do the smaller one first to minimize stack usage.
262      *   We only sort arrays of length 2 or more.
263      */
264 
265     if (higuy - lo >= hi - loguy) {
266       if (lo < higuy) {
267         lostk[stkptr] = lo;
268         histk[stkptr] = higuy;
269         ++stkptr;
270       }                    /* save big recursion for later */
271 
272       if (loguy < hi) {
273         lo = loguy;
274         goto recurse;          /* do small recursion */
275       }
276     } else {
277       if (loguy < hi) {
278         lostk[stkptr] = loguy;
279         histk[stkptr] = hi;
280         ++stkptr;    /* save big recursion for later */
281       }
282 
283       if (lo < higuy) {
284         hi = higuy;
285         goto recurse;          /* do small recursion */
286       }
287     }
288   }
289 
290   /* We have sorted the array, except for any pending sorts on the stack.
291    *   Check if there are any, and do them.
292    */
293 
294   --stkptr;
295   if (stkptr >= 0) {
296     lo = lostk[stkptr];
297     hi = histk[stkptr];
298     goto recurse;           /* pop subarray from stack */
299   } else
300     return;                 /* all subarrays done */
301 }
302 
303 #endif
304 
305 #include "av1_global.h"
306 int aom_realloc_frame_buffer(AV1_COMMON *cm, PIC_BUFFER_CONFIG *pic,
307   int width, int height, unsigned int order_hint);
308 void dump_params(AV1Decoder *pbi, union param_u *params);
309 
310 #define assert(a)
311 #define IMPLIES(a)
312 
313 int new_compressed_data_count = 0;
314 
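/* A reference frame is usable for prediction only if it is at most twice the
 * current frame's width/height and at least 1/16 of them, which is what the
 * checks below enforce.
 */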
315 static int valid_ref_frame_size(int ref_width, int ref_height,
316                                        int this_width, int this_height) {
317   return 2 * this_width >= ref_width && 2 * this_height >= ref_height &&
318          this_width <= 16 * ref_width && this_height <= 16 * ref_height;
319 }
320 
321 #ifdef SUPPORT_SCALE_FACTOR
322 // Note: Expect val to be in q4 precision
323 static inline int scaled_x(int val, const struct scale_factors *sf) {
324   const int off =
325       (sf->x_scale_fp - (1 << REF_SCALE_SHIFT)) * (1 << (SUBPEL_BITS - 1));
326   const int64_t tval = (int64_t)val * sf->x_scale_fp + off;
327   return (int)ROUND_POWER_OF_TWO_SIGNED_64(tval,
328                                            REF_SCALE_SHIFT - SCALE_EXTRA_BITS);
329 }
330 
331 // Note: Expect val to be in q4 precision
332 static inline int scaled_y(int val, const struct scale_factors *sf) {
333   const int off =
334       (sf->y_scale_fp - (1 << REF_SCALE_SHIFT)) * (1 << (SUBPEL_BITS - 1));
335   const int64_t tval = (int64_t)val * sf->y_scale_fp + off;
336   return (int)ROUND_POWER_OF_TWO_SIGNED_64(tval,
337                                            REF_SCALE_SHIFT - SCALE_EXTRA_BITS);
338 }
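/* Illustrative check of the two helpers above: when x_scale_fp (or y_scale_fp)
 * equals 1 << REF_SCALE_SHIFT, i.e. no scaling, 'off' is 0 and the rounded
 * shift reduces to val << SCALE_EXTRA_BITS, matching unscaled_value() below.
 */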
339 
340 // Note: Expect val to be in q4 precision
341 static int unscaled_value(int val, const struct scale_factors *sf) {
342   (void)sf;
343   return val << SCALE_EXTRA_BITS;
344 }
345 
346 static int get_fixed_point_scale_factor(int other_size, int this_size) {
347   // Calculate scaling factor once for each reference frame
348   // and use fixed point scaling factors in decoding and encoding routines.
349   // Hardware implementations can calculate scale factor in device driver
350   // and use multiplication and shifting on hardware instead of division.
351   return ((other_size << REF_SCALE_SHIFT) + this_size / 2) / this_size;
352 }
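/* For example (illustrative): an exact 2:1 downscale, other_size == 2 *
 * this_size, yields a fixed-point factor of 2 << REF_SCALE_SHIFT, and a 1:1
 * ratio yields 1 << REF_SCALE_SHIFT.
 */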
353 
354 // Given the fixed point scale, calculate coarse point scale.
355 static int fixed_point_scale_to_coarse_point_scale(int scale_fp) {
356   return ROUND_POWER_OF_TWO(scale_fp, REF_SCALE_SHIFT - SCALE_SUBPEL_BITS);
357 }
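/* E.g. an unscaled factor of 1 << REF_SCALE_SHIFT maps to
 * 1 << SCALE_SUBPEL_BITS in the coarse (step_q4) representation.
 */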
358 
359 
360 void av1_setup_scale_factors_for_frame(struct scale_factors *sf, int other_w,
361                                        int other_h, int this_w, int this_h) {
362   if (!valid_ref_frame_size(other_w, other_h, this_w, this_h)) {
363     sf->x_scale_fp = REF_INVALID_SCALE;
364     sf->y_scale_fp = REF_INVALID_SCALE;
365     return;
366   }
367 
368   sf->x_scale_fp = get_fixed_point_scale_factor(other_w, this_w);
369   sf->y_scale_fp = get_fixed_point_scale_factor(other_h, this_h);
370 
371   sf->x_step_q4 = fixed_point_scale_to_coarse_point_scale(sf->x_scale_fp);
372   sf->y_step_q4 = fixed_point_scale_to_coarse_point_scale(sf->y_scale_fp);
373 
374   if (av1_is_scaled(sf)) {
375     sf->scale_value_x = scaled_x;
376     sf->scale_value_y = scaled_y;
377   } else {
378     sf->scale_value_x = unscaled_value;
379     sf->scale_value_y = unscaled_value;
380   }
381 #ifdef ORI_CODE
382   // AV1 convolve functions
383   // Special case convolve functions should produce the same result as
384   // av1_convolve_2d.
385   // subpel_x_qn == 0 && subpel_y_qn == 0
386   sf->convolve[0][0][0] = av1_convolve_2d_copy_sr;
387   // subpel_x_qn == 0
388   sf->convolve[0][1][0] = av1_convolve_y_sr;
389   // subpel_y_qn == 0
390   sf->convolve[1][0][0] = av1_convolve_x_sr;
391   // subpel_x_qn != 0 && subpel_y_qn != 0
392   sf->convolve[1][1][0] = av1_convolve_2d_sr;
393   // subpel_x_qn == 0 && subpel_y_qn == 0
394   sf->convolve[0][0][1] = av1_dist_wtd_convolve_2d_copy;
395   // subpel_x_qn == 0
396   sf->convolve[0][1][1] = av1_dist_wtd_convolve_y;
397   // subpel_y_qn == 0
398   sf->convolve[1][0][1] = av1_dist_wtd_convolve_x;
399   // subpel_x_qn != 0 && subpel_y_qn != 0
400   sf->convolve[1][1][1] = av1_dist_wtd_convolve_2d;
401   // AV1 High BD convolve functions
402   // Special case convolve functions should produce the same result as
403   // av1_highbd_convolve_2d.
404   // subpel_x_qn == 0 && subpel_y_qn == 0
405   sf->highbd_convolve[0][0][0] = av1_highbd_convolve_2d_copy_sr;
406   // subpel_x_qn == 0
407   sf->highbd_convolve[0][1][0] = av1_highbd_convolve_y_sr;
408   // subpel_y_qn == 0
409   sf->highbd_convolve[1][0][0] = av1_highbd_convolve_x_sr;
410   // subpel_x_qn != 0 && subpel_y_qn != 0
411   sf->highbd_convolve[1][1][0] = av1_highbd_convolve_2d_sr;
412   // subpel_x_qn == 0 && subpel_y_qn == 0
413   sf->highbd_convolve[0][0][1] = av1_highbd_dist_wtd_convolve_2d_copy;
414   // subpel_x_qn == 0
415   sf->highbd_convolve[0][1][1] = av1_highbd_dist_wtd_convolve_y;
416   // subpel_y_qn == 0
417   sf->highbd_convolve[1][0][1] = av1_highbd_dist_wtd_convolve_x;
418   // subpel_x_qn != 0 && subpel_y_qn != 0
419   sf->highbd_convolve[1][1][1] = av1_highbd_dist_wtd_convolve_2d;
420 #endif
421 }
422 #endif
423 
424 
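/* Scan the pool for a frame buffer whose ref_count (and, for multi-instance
 * decoding, vf_ref) has dropped to zero, claim it with ref_count = 1 and
 * return its index, or INVALID_IDX if every buffer is still referenced.
 */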
425 static int get_free_fb(AV1_COMMON *cm) {
426   RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs;
427   int i;
428   unsigned long flags;
429   lock_buffer_pool(cm->buffer_pool, flags);
430   for (i = 0; i < FRAME_BUFFERS; ++i)
431     if (frame_bufs[i].ref_count == 0
432 #ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
433         && frame_bufs[i].buf.vf_ref == 0
434 #endif
435       )
436       break;
437 
438   if (i != FRAME_BUFFERS) {
439     if (frame_bufs[i].buf.use_external_reference_buffers) {
440       // This frame buffer's y_buffer, u_buffer, and v_buffer point to
441       // external reference buffers. Restore the buffer pointers to point to
442       // the internally allocated memory.
443       PIC_BUFFER_CONFIG *ybf = &frame_bufs[i].buf;
444       ybf->y_buffer = ybf->store_buf_adr[0];
445       ybf->u_buffer = ybf->store_buf_adr[1];
446       ybf->v_buffer = ybf->store_buf_adr[2];
447       ybf->use_external_reference_buffers = 0;
448     }
449 
450     frame_bufs[i].ref_count = 1;
451   } else {
452     // We should never run out of free buffers. If this assertion fails, there
453     // is a reference leak.
454     assert(0 && "Ran out of free frame buffers. Likely a reference leak.");
455     // Reset i to be INVALID_IDX to indicate no free buffer found.
456     i = INVALID_IDX;
457   }
458 
459   unlock_buffer_pool(cm->buffer_pool, flags);
460   return i;
461 }
462 
463 static RefCntBuffer *assign_cur_frame_new_fb(AV1_COMMON *const cm) {
464   // Release the previously-used frame-buffer
465   int new_fb_idx;
466   if (cm->cur_frame != NULL) {
467     --cm->cur_frame->ref_count;
468     cm->cur_frame = NULL;
469   }
470 
471   // Assign a new framebuffer
472   new_fb_idx = get_free_fb(cm);
473   if (new_fb_idx == INVALID_IDX) return NULL;
474 
475   cm->cur_frame = &cm->buffer_pool->frame_bufs[new_fb_idx];
476   cm->cur_frame->buf.buf_8bit_valid = 0;
477 #ifdef AML
478   cm->cur_frame->buf.index = new_fb_idx;
479 #endif
480 #ifdef ORI_CODE
481   av1_zero(cm->cur_frame->interp_filter_selected);
482 #endif
483   return cm->cur_frame;
484 }
485 
486 // Modify 'lhs_ptr' to reference the buffer at 'rhs_ptr', and update the ref
487 // counts accordingly.
488 static void assign_frame_buffer_p(RefCntBuffer **lhs_ptr,
489                                        RefCntBuffer *rhs_ptr) {
490   RefCntBuffer *const old_ptr = *lhs_ptr;
491   if (old_ptr != NULL) {
492     assert(old_ptr->ref_count > 0);
493     // One less reference to the buffer at 'old_ptr', so decrease ref count.
494     --old_ptr->ref_count;
495   }
496 
497   *lhs_ptr = rhs_ptr;
498   // One more reference to the buffer at 'rhs_ptr', so increase ref count.
499   ++rhs_ptr->ref_count;
500 }
501 
502 AV1Decoder *av1_decoder_create(BufferPool *const pool) {
503   int i;
504   AV1_COMMON *cm;
505 
506 #ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
507   AV1Decoder *pbi = (AV1Decoder *)malloc(sizeof(*pbi));
508 #else
509   AV1Decoder *pbi = (AV1Decoder *)vmalloc(sizeof(AV1Decoder));
510 #endif
511   if (!pbi) return NULL;
512   memset(pbi, 0, sizeof(*pbi));
513 
514   cm = &pbi->common;
515 
516   // The jmp_buf is valid only for the duration of the function that calls
517   // setjmp(). Therefore, this function must reset the 'setjmp' field to 0
518   // before it returns.
519 
520   cm->error.setjmp = 1;
521 
522 #ifdef ORI_CODE
523   memset(cm->fc, 0, sizeof(*cm->fc));
524   memset(cm->default_frame_context, 0, sizeof(*cm->default_frame_context));
525 #endif
526   pbi->need_resync = 1;
527 
528   // Initialize the references to not point to any frame buffers.
529   for (i = 0; i < REF_FRAMES; i++) {
530     cm->ref_frame_map[i] = NULL;
531     cm->next_ref_frame_map[i] = NULL;
532 #ifdef AML
533     cm->next_used_ref_frame_map[i] = NULL;
534 #endif
535   }
536 
537   cm->current_frame.frame_number = 0;
538   pbi->decoding_first_frame = 1;
539   pbi->common.buffer_pool = pool;
540 
541   cm->seq_params.bit_depth = AOM_BITS_8;
542 
543 #ifdef ORI_CODE
544   cm->alloc_mi = dec_alloc_mi;
545   cm->free_mi = dec_free_mi;
546   cm->setup_mi = dec_setup_mi;
547 
548   av1_loop_filter_init(cm);
549 
550   av1_qm_init(cm);
551   av1_loop_restoration_precal();
552 #if CONFIG_ACCOUNTING
553   pbi->acct_enabled = 1;
554   aom_accounting_init(&pbi->accounting);
555 #endif
556 #endif
557   cm->error.setjmp = 0;
558 
559 #ifdef ORI_CODE
560   aom_get_worker_interface()->init(&pbi->lf_worker);
561   pbi->lf_worker.thread_name = "aom lf worker";
562 #endif
563 
564   return pbi;
565 }
566 
567 int release_fb_cb(void *cb_priv, aom_codec_frame_buffer_t *fb) {
568 #if 0
569   InternalFrameBuffer *const int_fb = (InternalFrameBuffer *)fb->priv;
570   (void)cb_priv;
571   if (int_fb) int_fb->in_use = 0;
572 #endif
573   return 0;
574 }
575 
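/* Drop one reference to 'buf' (if non-NULL); once the count reaches zero the
 * underlying raw frame buffer is released back to the pool via
 * release_fb_cb().
 */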
576 static void decrease_ref_count(AV1Decoder *pbi, RefCntBuffer *const buf,
577                                       BufferPool *const pool) {
578   if (buf != NULL) {
579     --buf->ref_count;
580     // Reference counts should never become negative. If this assertion fails,
581     // there is a bug in our reference count management.
582     assert(buf->ref_count >= 0);
583     // A worker may only get a free framebuffer index when calling get_free_fb.
584     // But the raw frame buffer is not set up until we finish decoding header.
585     // So if any error happens during decoding header, frame_bufs[idx] will not
586     // have a valid raw frame buffer.
587     if (buf->ref_count == 0
588 #ifdef ORI_CODE
589      && buf->raw_frame_buffer.data
590 #endif
591      ) {
592 #ifdef AML
593       av1_release_buf(pbi, buf);
594 #endif
595       release_fb_cb(pool->cb_priv, &buf->raw_frame_buffer);
596       buf->raw_frame_buffer.data = NULL;
597       buf->raw_frame_buffer.size = 0;
598       buf->raw_frame_buffer.priv = NULL;
599     }
600   }
601 }
602 
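/* Called once per frame: when a frame was decoded, transfers the references
 * held in cm->next_ref_frame_map into cm->ref_frame_map and hands
 * cm->cur_frame to the output queue; otherwise just drops the working
 * reference. cm->cur_frame is cleared either way.
 */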
603 static void swap_frame_buffers(AV1Decoder *pbi, int frame_decoded) {
604   int ref_index = 0, mask;
605   AV1_COMMON *const cm = &pbi->common;
606   BufferPool *const pool = cm->buffer_pool;
607   unsigned long flags;
608 
609   if (frame_decoded) {
610     int check_on_show_existing_frame;
611     lock_buffer_pool(pool, flags);
612 
613     // In ext-tile decoding, the camera frame header is only decoded once. So,
614     // we don't release the references here.
615     if (!pbi->camera_frame_header_ready) {
616       // If we are not holding reference buffers in cm->next_ref_frame_map,
617       // assert that the following two for loops are no-ops.
618       assert(IMPLIES(!pbi->hold_ref_buf,
619                      cm->current_frame.refresh_frame_flags == 0));
620       assert(IMPLIES(!pbi->hold_ref_buf,
621                      cm->show_existing_frame && !pbi->reset_decoder_state));
622 
623       // The following two for loops need to release the reference stored in
624       // cm->ref_frame_map[ref_index] before transferring the reference stored
625       // in cm->next_ref_frame_map[ref_index] to cm->ref_frame_map[ref_index].
626       for (mask = cm->current_frame.refresh_frame_flags; mask; mask >>= 1) {
627         decrease_ref_count(pbi, cm->ref_frame_map[ref_index], pool);
628         cm->ref_frame_map[ref_index] = cm->next_ref_frame_map[ref_index];
629         cm->next_ref_frame_map[ref_index] = NULL;
630         ++ref_index;
631       }
632 
633       check_on_show_existing_frame =
634           !cm->show_existing_frame || pbi->reset_decoder_state;
635       for (; ref_index < REF_FRAMES && check_on_show_existing_frame;
636            ++ref_index) {
637         decrease_ref_count(pbi, cm->ref_frame_map[ref_index], pool);
638         cm->ref_frame_map[ref_index] = cm->next_ref_frame_map[ref_index];
639         cm->next_ref_frame_map[ref_index] = NULL;
640       }
641     }
642 
643     if (cm->show_existing_frame || cm->show_frame) {
644       if (pbi->output_all_layers) {
645         // Append this frame to the output queue
646         if (pbi->num_output_frames >= MAX_NUM_SPATIAL_LAYERS) {
647           // We can't store the new frame anywhere, so drop it and return an
648           // error
649           cm->cur_frame->buf.corrupted = 1;
650           decrease_ref_count(pbi, cm->cur_frame, pool);
651           cm->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
652         } else {
653           pbi->output_frames[pbi->num_output_frames] = cm->cur_frame;
654           pbi->num_output_frames++;
655         }
656       } else {
657         // Replace any existing output frame
658         assert(pbi->num_output_frames == 0 || pbi->num_output_frames == 1);
659         if (pbi->num_output_frames > 0) {
660           decrease_ref_count(pbi, pbi->output_frames[0], pool);
661         }
662         pbi->output_frames[0] = cm->cur_frame;
663         pbi->num_output_frames = 1;
664       }
665     } else {
666       decrease_ref_count(pbi, cm->cur_frame, pool);
667     }
668 
669     unlock_buffer_pool(pool, flags);
670   } else {
671     // The code here assumes we are not holding reference buffers in
672     // cm->next_ref_frame_map. If this assertion fails, we are leaking the
673     // frame buffer references in cm->next_ref_frame_map.
674     assert(IMPLIES(!pbi->camera_frame_header_ready, !pbi->hold_ref_buf));
675     // Nothing was decoded, so just drop this frame buffer
676     lock_buffer_pool(pool, flags);
677     decrease_ref_count(pbi, cm->cur_frame, pool);
678     unlock_buffer_pool(pool, flags);
679   }
680   cm->cur_frame = NULL;
681 
682   if (!pbi->camera_frame_header_ready) {
683     pbi->hold_ref_buf = 0;
684 
685     // Invalidate these references until the next frame starts.
686     for (ref_index = 0; ref_index < INTER_REFS_PER_FRAME; ref_index++) {
687       cm->remapped_ref_idx[ref_index] = INVALID_IDX;
688     }
689   }
690 }
691 
692 void aom_internal_error(struct aom_internal_error_info *info,
693                         aom_codec_err_t error, const char *fmt, ...) {
694   va_list ap;
695 
696   info->error_code = error;
697   info->has_detail = 0;
698 
699   if (fmt) {
700     size_t sz = sizeof(info->detail);
701 
702     info->has_detail = 1;
703     va_start(ap, fmt);
704     vsnprintf(info->detail, sz - 1, fmt, ap);
705     va_end(ap);
706     info->detail[sz - 1] = '\0';
707   }
708 #ifdef ORI_CODE
709   if (info->setjmp) longjmp(info->jmp, info->error_code);
710 #endif
711 }
712 
713 #ifdef ORI_CODE
714 void av1_zero_unused_internal_frame_buffers(InternalFrameBufferList *list) {
715   int i;
716 
717   assert(list != NULL);
718 
719   for (i = 0; i < list->num_internal_frame_buffers; ++i) {
720     if (list->int_fb[i].data && !list->int_fb[i].in_use)
721       memset(list->int_fb[i].data, 0, list->int_fb[i].size);
722   }
723 }
724 #endif
725 
726 // Release the references to the frame buffers in cm->ref_frame_map and reset
727 // all elements of cm->ref_frame_map to NULL.
728 static void reset_ref_frame_map(AV1Decoder *const pbi) {
729   AV1_COMMON *const cm = &pbi->common;
730   BufferPool *const pool = cm->buffer_pool;
731   int i;
732 
733   for (i = 0; i < REF_FRAMES; i++) {
734     decrease_ref_count(pbi, cm->ref_frame_map[i], pool);
735     cm->ref_frame_map[i] = NULL;
736 #ifdef AML
737     cm->next_used_ref_frame_map[i] = NULL;
738 #endif
739   }
740 }
741 
742 // Generate next_ref_frame_map.
743 static void generate_next_ref_frame_map(AV1Decoder *const pbi) {
744   AV1_COMMON *const cm = &pbi->common;
745   BufferPool *const pool = cm->buffer_pool;
746   unsigned long flags;
747   int ref_index = 0;
748   int mask;
749 
750   lock_buffer_pool(pool, flags);
751   // cm->next_ref_frame_map holds references to frame buffers. After storing a
752   // frame buffer index in cm->next_ref_frame_map, we need to increase the
753   // frame buffer's ref_count.
754   for (mask = cm->current_frame.refresh_frame_flags; mask; mask >>= 1) {
755     if (mask & 1) {
756       cm->next_ref_frame_map[ref_index] = cm->cur_frame;
757     } else {
758       cm->next_ref_frame_map[ref_index] = cm->ref_frame_map[ref_index];
759     }
760     if (cm->next_ref_frame_map[ref_index] != NULL)
761       ++cm->next_ref_frame_map[ref_index]->ref_count;
762     ++ref_index;
763   }
764 
765   for (; ref_index < REF_FRAMES; ++ref_index) {
766     cm->next_ref_frame_map[ref_index] = cm->ref_frame_map[ref_index];
767     if (cm->next_ref_frame_map[ref_index] != NULL)
768       ++cm->next_ref_frame_map[ref_index]->ref_count;
769   }
770   unlock_buffer_pool(pool, flags);
771   pbi->hold_ref_buf = 1;
772 }
773 
774 // If the refresh_frame_flags bitmask is set, update reference frame id values
775 // and mark frames as valid for reference.
776 static void update_ref_frame_id(AV1_COMMON *const cm, int frame_id) {
777   int i;
778   int refresh_frame_flags = cm->current_frame.refresh_frame_flags;
779   assert(cm->seq_params.frame_id_numbers_present_flag);
780   for (i = 0; i < REF_FRAMES; i++) {
781     if ((refresh_frame_flags >> i) & 1) {
782       cm->ref_frame_id[i] = frame_id;
783       cm->valid_for_referencing[i] = 1;
784     }
785   }
786 }
787 
788 static void show_existing_frame_reset(AV1Decoder *const pbi,
789                                       int existing_frame_idx) {
790   AV1_COMMON *const cm = &pbi->common;
791   int i;
792   assert(cm->show_existing_frame);
793 
794   cm->current_frame.frame_type = KEY_FRAME;
795 
796   cm->current_frame.refresh_frame_flags = (1 << REF_FRAMES) - 1;
797 
798   for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
799     cm->remapped_ref_idx[i] = INVALID_IDX;
800   }
801 
802   if (pbi->need_resync) {
803     reset_ref_frame_map(pbi);
804     pbi->need_resync = 0;
805   }
806 
807   // Note that the displayed frame must be valid for referencing in order to
808   // have been selected.
809   if (cm->seq_params.frame_id_numbers_present_flag) {
810     cm->current_frame_id = cm->ref_frame_id[existing_frame_idx];
811     update_ref_frame_id(cm, cm->current_frame_id);
812   }
813 
814   cm->refresh_frame_context = REFRESH_FRAME_CONTEXT_DISABLED;
815 
816   generate_next_ref_frame_map(pbi);
817 
818 #ifdef ORI_CODE
819   // Reload the adapted CDFs from when we originally coded this keyframe
820   *cm->fc = cm->next_ref_frame_map[existing_frame_idx]->frame_context;
821 #endif
822 }
823 
824 static void reset_frame_buffers(AV1Decoder *const pbi) {
825   AV1_COMMON *const cm = &pbi->common;
826   RefCntBuffer *const frame_bufs = cm->buffer_pool->frame_bufs;
827   int i;
828   unsigned long flags;
829 
830   // We have not stored any references to frame buffers in
831   // cm->next_ref_frame_map, so we can directly reset it to all NULL.
832   for (i = 0; i < REF_FRAMES; ++i) {
833     cm->next_ref_frame_map[i] = NULL;
834   }
835 
836   lock_buffer_pool(cm->buffer_pool, flags);
837   reset_ref_frame_map(pbi);
838   assert(cm->cur_frame->ref_count == 1);
839   for (i = 0; i < FRAME_BUFFERS; ++i) {
840     // Reset all unreferenced frame buffers. We can also reset cm->cur_frame
841     // because we are the sole owner of cm->cur_frame.
842     if (frame_bufs[i].ref_count > 0 && &frame_bufs[i] != cm->cur_frame) {
843       continue;
844     }
845     frame_bufs[i].order_hint = 0;
846     av1_zero(frame_bufs[i].ref_order_hints);
847   }
848 #ifdef ORI_CODE
849   av1_zero_unused_internal_frame_buffers(&cm->buffer_pool->int_frame_buffers);
850 #endif
851   unlock_buffer_pool(cm->buffer_pool, flags);
852 }
853 
854 static int frame_is_intra_only(const AV1_COMMON *const cm) {
855   return cm->current_frame.frame_type == KEY_FRAME ||
856       cm->current_frame.frame_type == INTRA_ONLY_FRAME;
857 }
858 
859 static int frame_is_sframe(const AV1_COMMON *cm) {
860   return cm->current_frame.frame_type == S_FRAME;
861 }
862 
863 // These functions take a reference frame label between LAST_FRAME and
864 // EXTREF_FRAME inclusive.  Note that this is different from the indexing
865 // previously used by the frame_refs[] array.
866 static int get_ref_frame_map_idx(const AV1_COMMON *const cm,
867                                         const MV_REFERENCE_FRAME ref_frame) {
868   return (ref_frame >= LAST_FRAME && ref_frame <= EXTREF_FRAME)
869              ? cm->remapped_ref_idx[ref_frame - LAST_FRAME]
870              : INVALID_IDX;
871 }
872 
873 static RefCntBuffer *get_ref_frame_buf(
874     const AV1_COMMON *const cm, const MV_REFERENCE_FRAME ref_frame) {
875   const int map_idx = get_ref_frame_map_idx(cm, ref_frame);
876   return (map_idx != INVALID_IDX) ? cm->ref_frame_map[map_idx] : NULL;
877 }
878 #ifdef SUPPORT_SCALE_FACTOR
879 static struct scale_factors *get_ref_scale_factors(
880     AV1_COMMON *const cm, const MV_REFERENCE_FRAME ref_frame) {
881   const int map_idx = get_ref_frame_map_idx(cm, ref_frame);
882   return (map_idx != INVALID_IDX) ? &cm->ref_scale_factors[map_idx] : NULL;
883 }
884 #endif
885 static RefCntBuffer *get_primary_ref_frame_buf(
886     const AV1_COMMON *const cm) {
887   int map_idx;
888   if (cm->primary_ref_frame == PRIMARY_REF_NONE) return NULL;
889   map_idx = get_ref_frame_map_idx(cm, cm->primary_ref_frame + 1);
890   return (map_idx != INVALID_IDX) ? cm->ref_frame_map[map_idx] : NULL;
891 }
892 
893 static int get_relative_dist(const OrderHintInfo *oh, int a, int b) {
894   int bits;
895   int m;
896   int diff;
897   if (!oh->enable_order_hint) return 0;
898 
899   bits = oh->order_hint_bits_minus_1 + 1;
900 
901   assert(bits >= 1);
902   assert(a >= 0 && a < (1 << bits));
903   assert(b >= 0 && b < (1 << bits));
904 
905   diff = a - b;
906   m = 1 << (bits - 1);
907   diff = (diff & (m - 1)) - (diff & m);
908   return diff;
909 }
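/* Worked example (illustrative): with order_hint_bits_minus_1 == 2 the hints
 * wrap modulo 8; for a == 1, b == 6 we get diff == -5, m == 4 and
 * (diff & 3) - (diff & 4) == 3, i.e. frame 'a' is treated as 3 frames after
 * 'b' rather than 5 frames before it.
 */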
910 
911 
912 void av1_read_frame_size(union param_u *params, int num_bits_width,
913                          int num_bits_height, int *width, int *height, int* dec_width) {
914   *width = params->p.frame_width;
915   *height = params->p.frame_height;//aom_rb_read_literal(rb, num_bits_height) + 1;
916 #ifdef AML
917   *dec_width = params->p.dec_frame_width;
918 #endif
919 }
920 
921 static REFERENCE_MODE read_frame_reference_mode(
922     const AV1_COMMON *cm, union param_u *params) {
923   if (frame_is_intra_only(cm)) {
924     return SINGLE_REFERENCE;
925   } else {
926     return params->p.reference_mode ? REFERENCE_MODE_SELECT : SINGLE_REFERENCE;
927   }
928 }
929 
930 static inline int calc_mi_size(int len) {
931   // len is in mi units. Align to a multiple of SBs.
932   return ALIGN_POWER_OF_TWO(len, MAX_MIB_SIZE_LOG2);
933 }
934 
935 void av1_set_mb_mi(AV1_COMMON *cm, int width, int height) {
936   // Ensure that the decoded width and height are both multiples of
937   // 8 luma pixels (note: this may only be a multiple of 4 chroma pixels if
938   // subsampling is used).
939   // This simplifies the implementation of various experiments,
940   // eg. cdef, which operates on units of 8x8 luma pixels.
941   const int aligned_width = ALIGN_POWER_OF_TWO(width, 3);
942   const int aligned_height = ALIGN_POWER_OF_TWO(height, 3);
943   av1_print2(AV1_DEBUG_BUFMGR_DETAIL, " [PICTURE] av1_set_mb_mi (%d X %d)\n", width, height);
944 
945   cm->mi_cols = aligned_width >> MI_SIZE_LOG2;
946   cm->mi_rows = aligned_height >> MI_SIZE_LOG2;
947   cm->mi_stride = calc_mi_size(cm->mi_cols);
948 
949   cm->mb_cols = (cm->mi_cols + 2) >> 2;
950   cm->mb_rows = (cm->mi_rows + 2) >> 2;
951   cm->MBs = cm->mb_rows * cm->mb_cols;
952 
953 #if CONFIG_LPF_MASK
954   alloc_loop_filter_mask(cm);
955 #endif
956 }
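/* Rough example, assuming the usual 4x4 mi unit (MI_SIZE_LOG2 == 2): a
 * 1920x1080 frame gives mi_cols == 480, mi_rows == 270, mb_cols == 120 and
 * mb_rows == 68.
 */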
957 
958 int av1_alloc_context_buffers(AV1_COMMON *cm, int width, int height) {
959 #ifdef ORI_CODE
960   int new_mi_size;
961 #endif
962   av1_set_mb_mi(cm, width, height);
963 #ifdef ORI_CODE
964   new_mi_size = cm->mi_stride * calc_mi_size(cm->mi_rows);
965   if (cm->mi_alloc_size < new_mi_size) {
966     cm->free_mi(cm);
967     if (cm->alloc_mi(cm, new_mi_size)) goto fail;
968   }
969 #endif
970   return 0;
971 
972 #ifdef ORI_CODE
973 fail:
974 #endif
975   // clear the mi_* values to force a realloc on resync
976   av1_set_mb_mi(cm, 0, 0);
977 #ifdef ORI_CODE
978   av1_free_context_buffers(cm);
979 #endif
980   return 1;
981 }
982 
983 #ifndef USE_SCALED_WIDTH_FROM_UCODE
984 static void calculate_scaled_size_helper(int *dim, int denom) {
985   if (denom != SCALE_NUMERATOR) {
986     // We need to ensure the constraint in "Appendix A" of the spec:
987     // * FrameWidth is greater than or equal to 16
988     // * FrameHeight is greater than or equal to 16
989     // For this, we clamp the downscaled dimension to at least 16. One
990     // exception: if original dimension itself was < 16, then we keep the
991     // downscaled dimension to be same as the original, to ensure that resizing
992     // is valid.
993     const int min_dim = AOMMIN(16, *dim);
994     // Use this version if we need *dim to be even
995     // *width = (*width * SCALE_NUMERATOR + denom) / (2 * denom);
996     // *width <<= 1;
997     *dim = (*dim * SCALE_NUMERATOR + denom / 2) / (denom);
998     *dim = AOMMAX(*dim, min_dim);
999   }
1000 }
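/* Example (illustrative, assuming SCALE_NUMERATOR == 8 as in libaom): a width
 * of 1920 with denom == 16 becomes (1920 * 8 + 8) / 16 == 960, i.e. a 2:1
 * resize.
 */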
1001 #ifdef ORI_CODE
1002 void av1_calculate_scaled_size(int *width, int *height, int resize_denom) {
1003   calculate_scaled_size_helper(width, resize_denom);
1004   calculate_scaled_size_helper(height, resize_denom);
1005 }
1006 #endif
1007 void av1_calculate_scaled_superres_size(int *width, int *height,
1008                                         int superres_denom) {
1009   (void)height;
1010   calculate_scaled_size_helper(width, superres_denom);
1011 }
1012 #endif
1013 
1014 static void setup_superres(AV1_COMMON *const cm, union param_u *params,
1015                            int *width, int *height) {
1016 #ifdef USE_SCALED_WIDTH_FROM_UCODE
1017   cm->superres_upscaled_width = params->p.frame_width_scaled;
1018   cm->superres_upscaled_height = params->p.frame_height;
1019 
1020 
1021   *width = params->p.dec_frame_width;
1022   *height = params->p.frame_height;
1023   av1_print2(AV1_DEBUG_BUFMGR_DETAIL, " [PICTURE] set decoding size to (%d X %d) scaled size to (%d X %d)\n",
1024 	*width, *height,
1025 	cm->superres_upscaled_width,
1026 	cm->superres_upscaled_height);
1027 #else
1028   cm->superres_upscaled_width = *width;
1029   cm->superres_upscaled_height = *height;
1030 
1031   const SequenceHeader *const seq_params = &cm->seq_params;
1032   if (!seq_params->enable_superres) return;
1033 
1034   //if (aom_rb_read_bit(-1, defmark, rb)) {
1035   if (params->p.superres_scale_denominator != SCALE_NUMERATOR) {
1036 #ifdef ORI_CODE
1037     cm->superres_scale_denominator =
1038         (uint8_t)aom_rb_read_literal(-1, defmark, rb, SUPERRES_SCALE_BITS);
1039     cm->superres_scale_denominator += SUPERRES_SCALE_DENOMINATOR_MIN;
1040 #else
1041     cm->superres_scale_denominator = params->p.superres_scale_denominator;
1042 #endif
1043     // Don't edit cm->width or cm->height directly, or the buffers won't get
1044     // resized correctly
1045     av1_calculate_scaled_superres_size(width, height,
1046                                        cm->superres_scale_denominator);
1047   } else {
1048     // 1:1 scaling - ie. no scaling, scale not provided
1049     cm->superres_scale_denominator = SCALE_NUMERATOR;
1050   }
1051 /*!USE_SCALED_WIDTH_FROM_UCODE*/
1052 #endif
1053 }
1054 
1055 static void resize_context_buffers(AV1_COMMON *cm, int width, int height) {
1056 #if CONFIG_SIZE_LIMIT
1057   if (width > DECODE_WIDTH_LIMIT || height > DECODE_HEIGHT_LIMIT)
1058     aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
1059                        "Dimensions of %dx%d beyond allowed size of %dx%d.",
1060                        width, height, DECODE_WIDTH_LIMIT, DECODE_HEIGHT_LIMIT);
1061 #endif
1062   if (cm->width != width || cm->height != height) {
1063     const int new_mi_rows =
1064         ALIGN_POWER_OF_TWO(height, MI_SIZE_LOG2) >> MI_SIZE_LOG2;
1065     const int new_mi_cols =
1066         ALIGN_POWER_OF_TWO(width, MI_SIZE_LOG2) >> MI_SIZE_LOG2;
1067 
1068     // Allocations in av1_alloc_context_buffers() depend on individual
1069     // dimensions as well as the overall size.
1070     if (new_mi_cols > cm->mi_cols || new_mi_rows > cm->mi_rows) {
1071       if (av1_alloc_context_buffers(cm, width, height)) {
1072         // The cm->mi_* values have been cleared and any existing context
1073         // buffers have been freed. Clear cm->width and cm->height to be
1074         // consistent and to force a realloc next time.
1075         cm->width = 0;
1076         cm->height = 0;
1077         aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
1078                            "Failed to allocate context buffers");
1079       }
1080     } else {
1081       av1_set_mb_mi(cm, width, height);
1082     }
1083 #ifdef ORI_CODE
1084     av1_init_context_buffers(cm);
1085 #endif
1086     cm->width = width;
1087     cm->height = height;
1088   }
1089 
1090 #ifdef ORI_CODE
1091   ensure_mv_buffer(cm->cur_frame, cm);
1092 #endif
1093   cm->cur_frame->width = cm->width;
1094   cm->cur_frame->height = cm->height;
1095 }
1096 
1097 static void setup_buffer_pool(AV1_COMMON *cm) {
1098   BufferPool *const pool = cm->buffer_pool;
1099   const SequenceHeader *const seq_params = &cm->seq_params;
1100   unsigned long flags;
1101 
1102   lock_buffer_pool(pool, flags);
1103   if (aom_realloc_frame_buffer(cm, &cm->cur_frame->buf,
1104     cm->width, cm->height, cm->cur_frame->order_hint)) {
1105     unlock_buffer_pool(pool, flags);
1106     aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
1107                        "Failed to allocate frame buffer");
1108   }
1109   unlock_buffer_pool(pool, flags);
1110 
1111   cm->cur_frame->buf.bit_depth = (unsigned int)seq_params->bit_depth;
1112   cm->cur_frame->buf.color_primaries = seq_params->color_primaries;
1113   cm->cur_frame->buf.transfer_characteristics =
1114       seq_params->transfer_characteristics;
1115   cm->cur_frame->buf.matrix_coefficients = seq_params->matrix_coefficients;
1116   cm->cur_frame->buf.monochrome = seq_params->monochrome;
1117   cm->cur_frame->buf.chroma_sample_position =
1118       seq_params->chroma_sample_position;
1119   cm->cur_frame->buf.color_range = seq_params->color_range;
1120   cm->cur_frame->buf.render_width = cm->render_width;
1121   cm->cur_frame->buf.render_height = cm->render_height;
1122 }
1123 
1124 static void setup_frame_size(AV1_COMMON *cm, int frame_size_override_flag, union param_u *params) {
1125   const SequenceHeader *const seq_params = &cm->seq_params;
1126   int width, height, dec_width;
1127 
1128   if (frame_size_override_flag) {
1129     int num_bits_width = seq_params->num_bits_width;
1130     int num_bits_height = seq_params->num_bits_height;
1131     av1_read_frame_size(params, num_bits_width, num_bits_height, &width, &height, &dec_width);
1132 #ifdef AML
1133     cm->dec_width = dec_width;
1134 #endif
1135     if (width > seq_params->max_frame_width ||
1136         height > seq_params->max_frame_height) {
1137       aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
1138                          "Frame dimensions are larger than the maximum values");
1139     }
1140   } else {
1141     width = seq_params->max_frame_width;
1142     height = seq_params->max_frame_height;
1143 #ifdef AML
1144     cm->dec_width = dec_width = params->p.dec_frame_width;
1145 #endif
1146   }
1147   setup_superres(cm, params, &width, &height);
1148   resize_context_buffers(cm, width, height);
1149 #ifdef ORI_CODE
1150   setup_render_size(cm, params);
1151 #endif
1152   setup_buffer_pool(cm);
1153 }
1154 
1155 static int valid_ref_frame_img_fmt(aom_bit_depth_t ref_bit_depth,
1156                                           int ref_xss, int ref_yss,
1157                                           aom_bit_depth_t this_bit_depth,
1158                                           int this_xss, int this_yss) {
1159   return ref_bit_depth == this_bit_depth && ref_xss == this_xss &&
1160          ref_yss == this_yss;
1161 }
1162 
1163 static void setup_frame_size_with_refs(AV1_COMMON *cm, union param_u *params) {
1164   int width, height, dec_width;
1165   int found = 0;
1166   int has_valid_ref_frame = 0;
1167   int i;
1168   SequenceHeader *seq_params;
1169   for (i = LAST_FRAME; i <= ALTREF_FRAME; ++i) {
1170     /*if (aom_rb_read_bit(rb)) {*/
1171     if (params->p.valid_ref_frame_bits & (1<<i)) {
1172       const RefCntBuffer *const ref_buf = get_ref_frame_buf(cm, i);
1173       // This will never be NULL in a normal stream, as streams are required to
1174       // have a shown keyframe before any inter frames, which would refresh all
1175       // the reference buffers. However, it might be null if we're starting in
1176       // the middle of a stream, and static analysis will error if we don't do
1177       // a null check here.
1178       if (ref_buf == NULL) {
1179         aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
1180                            "Invalid condition: invalid reference buffer");
1181       } else {
1182         const PIC_BUFFER_CONFIG *const buf = &ref_buf->buf;
1183         width = buf->y_crop_width;
1184         height = buf->y_crop_height;
1185         cm->render_width = buf->render_width;
1186         cm->render_height = buf->render_height;
1187         setup_superres(cm, params, &width, &height);
1188         resize_context_buffers(cm, width, height);
1189         found = 1;
1190         break;
1191       }
1192     }
1193   }
1194 
1195   seq_params = &cm->seq_params;
1196   if (!found) {
1197     int num_bits_width = seq_params->num_bits_width;
1198     int num_bits_height = seq_params->num_bits_height;
1199 
1200     av1_read_frame_size(params, num_bits_width, num_bits_height, &width, &height, &dec_width);
1201 #ifdef AML
1202     cm->dec_width = dec_width;
1203 #endif
1204     setup_superres(cm, params, &width, &height);
1205     resize_context_buffers(cm, width, height);
1206 #ifdef ORI_CODE
1207     setup_render_size(cm, rb);
1208 #endif
1209   }
1210 
1211   if (width <= 0 || height <= 0)
1212     aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
1213                        "Invalid frame size");
1214 
1215   // Check to make sure at least one of frames that this frame references
1216   // has valid dimensions.
1217   for (i = LAST_FRAME; i <= ALTREF_FRAME; ++i) {
1218     const RefCntBuffer *const ref_frame = get_ref_frame_buf(cm, i);
1219     if (ref_frame != NULL) {
1220       has_valid_ref_frame |=
1221         valid_ref_frame_size(ref_frame->buf.y_crop_width,
1222                              ref_frame->buf.y_crop_height, width, height);
1223     }
1224   }
1225   if (!has_valid_ref_frame)
1226     aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
1227                        "Referenced frame has invalid size");
1228   for (i = LAST_FRAME; i <= ALTREF_FRAME; ++i) {
1229     const RefCntBuffer *const ref_frame = get_ref_frame_buf(cm, i);
1230     if (ref_frame != NULL) {
1231       if (!valid_ref_frame_img_fmt(
1232             ref_frame->buf.bit_depth, ref_frame->buf.subsampling_x,
1233             ref_frame->buf.subsampling_y, seq_params->bit_depth,
1234             seq_params->subsampling_x, seq_params->subsampling_y))
1235       aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
1236                          "Referenced frame has incompatible color format");
1237     }
1238   }
1239   setup_buffer_pool(cm);
1240 }
1241 
1242 typedef struct {
1243   int map_idx;        // frame map index
1244   RefCntBuffer *buf;  // frame buffer
1245   int sort_idx;       // index based on the offset to be used for sorting
1246 } REF_FRAME_INFO;
1247 
1248 // Compares the sort_idx fields. If they are equal, then compares the map_idx
1249 // fields to break the tie. This ensures a stable sort.
1250 static int compare_ref_frame_info(const void *arg_a, const void *arg_b) {
1251   const REF_FRAME_INFO *info_a = (REF_FRAME_INFO *)arg_a;
1252   const REF_FRAME_INFO *info_b = (REF_FRAME_INFO *)arg_b;
1253 
1254   const int sort_idx_diff = info_a->sort_idx - info_b->sort_idx;
1255   if (sort_idx_diff != 0) return sort_idx_diff;
1256   return info_a->map_idx - info_b->map_idx;
1257 }
1258 
1259 
1260 /*
1261 for av1_setup_motion_field()
1262 */
1263 static int motion_field_projection(AV1_COMMON *cm,
1264                                    MV_REFERENCE_FRAME start_frame, int dir) {
1265 #ifdef ORI_CODE
1266   TPL_MV_REF *tpl_mvs_base = cm->tpl_mvs;
1267   int ref_offset[REF_FRAMES] = { 0 };
1268 #endif
1269   MV_REFERENCE_FRAME rf;
1270   const RefCntBuffer *const start_frame_buf =
1271       get_ref_frame_buf(cm, start_frame);
1272   int start_frame_order_hint;
1273   unsigned int const *ref_order_hints;
1274   int cur_order_hint;
1275   int start_to_current_frame_offset;
1276 
1277 #ifdef AML
1278   int i;
1279   //av1_print2(AV1_DEBUG_BUFMGR_DETAIL, "$$$$$$$$$$$%s:cm->mv_ref_id_index = %d, start_frame=%d\n", __func__, cm->mv_ref_id_index, start_frame);
1280   cm->mv_ref_id[cm->mv_ref_id_index] = start_frame;
1281   for (i = 0; i < REF_FRAMES; i++) {
1282       cm->mv_ref_offset[cm->mv_ref_id_index][i]=0;
1283   }
1284   cm->mv_cal_tpl_mvs[cm->mv_ref_id_index]=0;
1285   cm->mv_ref_id_index++;
1286 #endif
1287   if (start_frame_buf == NULL) return 0;
1288 
1289   if (start_frame_buf->frame_type == KEY_FRAME ||
1290       start_frame_buf->frame_type == INTRA_ONLY_FRAME)
1291     return 0;
1292 
1293   if (start_frame_buf->mi_rows != cm->mi_rows ||
1294       start_frame_buf->mi_cols != cm->mi_cols)
1295     return 0;
1296 
1297   start_frame_order_hint = start_frame_buf->order_hint;
1298   ref_order_hints =
1299       &start_frame_buf->ref_order_hints[0];
1300   cur_order_hint = cm->cur_frame->order_hint;
1301   start_to_current_frame_offset = get_relative_dist(
1302       &cm->seq_params.order_hint_info, start_frame_order_hint, cur_order_hint);
1303 
1304   for (rf = LAST_FRAME; rf <= INTER_REFS_PER_FRAME; ++rf) {
1305     cm->mv_ref_offset[cm->mv_ref_id_index-1][rf] = get_relative_dist(&cm->seq_params.order_hint_info,
1306                                        start_frame_order_hint,
1307                                        ref_order_hints[rf - LAST_FRAME]);
1308   }
1309 #ifdef AML
1310   cm->mv_cal_tpl_mvs[cm->mv_ref_id_index-1]=1;
1311 #endif
1312   if (dir == 2) start_to_current_frame_offset = -start_to_current_frame_offset;
1313 #ifdef ORI_CODE
1314   MV_REF *mv_ref_base = start_frame_buf->mvs;
1315   const int mvs_rows = (cm->mi_rows + 1) >> 1;
1316   const int mvs_cols = (cm->mi_cols + 1) >> 1;
1317 
1318   for (int blk_row = 0; blk_row < mvs_rows; ++blk_row) {
1319     for (int blk_col = 0; blk_col < mvs_cols; ++blk_col) {
1320       MV_REF *mv_ref = &mv_ref_base[blk_row * mvs_cols + blk_col];
1321       MV fwd_mv = mv_ref->mv.as_mv;
1322 
1323       if (mv_ref->ref_frame > INTRA_FRAME) {
1324         int_mv this_mv;
1325         int mi_r, mi_c;
1326         const int ref_frame_offset = ref_offset[mv_ref->ref_frame];
1327 
1328         int pos_valid =
1329             abs(ref_frame_offset) <= MAX_FRAME_DISTANCE &&
1330             ref_frame_offset > 0 &&
1331             abs(start_to_current_frame_offset) <= MAX_FRAME_DISTANCE;
1332 
1333         if (pos_valid) {
1334           get_mv_projection(&this_mv.as_mv, fwd_mv,
1335                             start_to_current_frame_offset, ref_frame_offset);
1336           pos_valid = get_block_position(cm, &mi_r, &mi_c, blk_row, blk_col,
1337                                          this_mv.as_mv, dir >> 1);
1338         }
1339 
1340         if (pos_valid) {
1341           const int mi_offset = mi_r * (cm->mi_stride >> 1) + mi_c;
1342 
1343           tpl_mvs_base[mi_offset].mfmv0.as_mv.row = fwd_mv.row;
1344           tpl_mvs_base[mi_offset].mfmv0.as_mv.col = fwd_mv.col;
1345           tpl_mvs_base[mi_offset].ref_frame_offset = ref_frame_offset;
1346         }
1347       }
1348     }
1349   }
1350 #endif
1351   return 1;
1352 }
1353 
1354 #ifdef AML
1355 static int setup_motion_field_debug_count = 0;
1356 #endif
1357 void av1_setup_motion_field(AV1_COMMON *cm) {
1358   const OrderHintInfo *const order_hint_info = &cm->seq_params.order_hint_info;
1359   int ref_frame;
1360   int size;
1361   int cur_order_hint;
1362   const RefCntBuffer *ref_buf[INTER_REFS_PER_FRAME];
1363   int ref_order_hint[INTER_REFS_PER_FRAME];
1364   int ref_stamp;
1365   memset(cm->ref_frame_side, 0, sizeof(cm->ref_frame_side));
1366   if (!order_hint_info->enable_order_hint) return;
1367 #ifdef ORI_CODE
1368   TPL_MV_REF *tpl_mvs_base = cm->tpl_mvs;
1369 #endif
1370   size = ((cm->mi_rows + MAX_MIB_SIZE) >> 1) * (cm->mi_stride >> 1);
1371 #ifdef ORI_CODE
1372   for (int idx = 0; idx < size; ++idx) {
1373     tpl_mvs_base[idx].mfmv0.as_int = INVALID_MV;
1374     tpl_mvs_base[idx].ref_frame_offset = 0;
1375   }
1376 #endif
1377   cur_order_hint = cm->cur_frame->order_hint;
1378 
1379   for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ref_frame++) {
1380     const int ref_idx = ref_frame - LAST_FRAME;
1381     const RefCntBuffer *const buf = get_ref_frame_buf(cm, ref_frame);
1382     int order_hint = 0;
1383 
1384     if (buf != NULL) order_hint = buf->order_hint;
1385 
1386     ref_buf[ref_idx] = buf;
1387     ref_order_hint[ref_idx] = order_hint;
1388 
1389     if (get_relative_dist(order_hint_info, order_hint, cur_order_hint) > 0)
1390       cm->ref_frame_side[ref_frame] = 1;
1391     else if (order_hint == cur_order_hint)
1392       cm->ref_frame_side[ref_frame] = -1;
1393   }
1394   ref_stamp = MFMV_STACK_SIZE - 1;
1395 #ifdef AML
1396   cm->mv_ref_id_index = 0;
1397   av1_print2(AV1_DEBUG_BUFMGR_DETAIL, "%s(%d) mi_cols %d mi_rows %d\n",
1398       __func__, setup_motion_field_debug_count++,
1399       cm->mi_cols,
1400       cm->mi_rows
1401       );
1402 #endif
1403   if (ref_buf[LAST_FRAME - LAST_FRAME] != NULL) {
1404     const int alt_of_lst_order_hint =
1405         ref_buf[LAST_FRAME - LAST_FRAME]
1406             ->ref_order_hints[ALTREF_FRAME - LAST_FRAME];
1407 
1408     const int is_lst_overlay =
1409         (alt_of_lst_order_hint == ref_order_hint[GOLDEN_FRAME - LAST_FRAME]);
1410     if (!is_lst_overlay) motion_field_projection(cm, LAST_FRAME, 2);
1411     --ref_stamp;
1412   }
1413 
1414   if (get_relative_dist(order_hint_info,
1415                         ref_order_hint[BWDREF_FRAME - LAST_FRAME],
1416                         cur_order_hint) > 0) {
1417     if (motion_field_projection(cm, BWDREF_FRAME, 0)) --ref_stamp;
1418   }
1419 
1420   if (get_relative_dist(order_hint_info,
1421                         ref_order_hint[ALTREF2_FRAME - LAST_FRAME],
1422                         cur_order_hint) > 0) {
1423     if (motion_field_projection(cm, ALTREF2_FRAME, 0)) --ref_stamp;
1424   }
1425 
1426   if (get_relative_dist(order_hint_info,
1427                         ref_order_hint[ALTREF_FRAME - LAST_FRAME],
1428                         cur_order_hint) > 0 &&
1429       ref_stamp >= 0)
1430     if (motion_field_projection(cm, ALTREF_FRAME, 0)) --ref_stamp;
1431 
1432   if (ref_stamp >= 0) motion_field_projection(cm, LAST2_FRAME, 2);
1433 }
1434 
1435 
1436 static void set_ref_frame_info(int *remapped_ref_idx, int frame_idx,
1437                                REF_FRAME_INFO *ref_info) {
1438   assert(frame_idx >= 0 && frame_idx < INTER_REFS_PER_FRAME);
1439 
1440   remapped_ref_idx[frame_idx] = ref_info->map_idx;
1441   av1_print2(AV1_DEBUG_BUFMGR_DETAIL, "+++++++++++++%s:remapped_ref_idx[%d]=0x%x\n", __func__, frame_idx, ref_info->map_idx);
1442 }
1443 
1444 
1445 void av1_set_frame_refs(AV1_COMMON *const cm, int *remapped_ref_idx,
1446                         int lst_map_idx, int gld_map_idx) {
1447   int lst_frame_sort_idx = -1;
1448   int gld_frame_sort_idx = -1;
1449   int i;
1450   //assert(cm->seq_params.order_hint_info.enable_order_hint);
1451   //assert(cm->seq_params.order_hint_info.order_hint_bits_minus_1 >= 0);
1452   const int cur_order_hint = (int)cm->current_frame.order_hint;
1453   const int cur_frame_sort_idx =
1454       1 << cm->seq_params.order_hint_info.order_hint_bits_minus_1;
1455 
1456   REF_FRAME_INFO ref_frame_info[REF_FRAMES];
1457   int ref_flag_list[INTER_REFS_PER_FRAME] = { 0, 0, 0, 0, 0, 0, 0 };
1458   int bwd_start_idx;
1459   int bwd_end_idx;
1460   int fwd_start_idx, fwd_end_idx;
1461   int ref_idx;
1462   static const MV_REFERENCE_FRAME ref_frame_list[INTER_REFS_PER_FRAME - 2] = {
1463     LAST2_FRAME, LAST3_FRAME, BWDREF_FRAME, ALTREF2_FRAME, ALTREF_FRAME
1464   };
1465 
1466   for (i = 0; i < REF_FRAMES; ++i) {
1467     const int map_idx = i;
1468     RefCntBuffer *buf;
1469     int offset;
1470 
1471     ref_frame_info[i].map_idx = map_idx;
1472     ref_frame_info[i].sort_idx = -1;
1473 
1474     buf = cm->ref_frame_map[map_idx];
1475     ref_frame_info[i].buf = buf;
1476 
1477     if (buf == NULL) continue;
1478     // If this assertion fails, there is a reference leak.
1479     assert(buf->ref_count > 0);
1480 
1481     offset = (int)buf->order_hint;
1482     ref_frame_info[i].sort_idx =
1483         (offset == -1) ? -1
1484                        : cur_frame_sort_idx +
1485                              get_relative_dist(&cm->seq_params.order_hint_info,
1486                                                offset, cur_order_hint);
1487     assert(ref_frame_info[i].sort_idx >= -1);
1488 
1489     if (map_idx == lst_map_idx) lst_frame_sort_idx = ref_frame_info[i].sort_idx;
1490     if (map_idx == gld_map_idx) gld_frame_sort_idx = ref_frame_info[i].sort_idx;
1491   }
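  /*
   * Illustrative example (assuming order_hint_bits_minus_1 == 6):
   * cur_frame_sort_idx == 64.  A reference two frames behind the current
   * frame gets sort_idx 64 + (-2) == 62, one three frames ahead gets
   * 64 + 3 == 67, and empty slots keep sort_idx == -1, which sorts to the
   * front in the qsort() below.
   */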
1492 
1493   // Confirm both LAST_FRAME and GOLDEN_FRAME are valid forward reference
1494   // frames.
1495   if (lst_frame_sort_idx == -1 || lst_frame_sort_idx >= cur_frame_sort_idx) {
1496     aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
1497                        "Inter frame requests a look-ahead frame as LAST");
1498   }
1499   if (gld_frame_sort_idx == -1 || gld_frame_sort_idx >= cur_frame_sort_idx) {
1500     aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
1501                        "Inter frame requests a look-ahead frame as GOLDEN");
1502   }
1503 
1504   // Sort ref frames based on their frame_offset values.
1505   qsort(ref_frame_info, REF_FRAMES, sizeof(REF_FRAME_INFO),
1506         compare_ref_frame_info);
1507 
1508   // Identify forward and backward reference frames.
1509   // Forward  reference: offset < order_hint
1510   // Backward reference: offset >= order_hint
1511   fwd_start_idx = 0;
1512   fwd_end_idx = REF_FRAMES - 1;
1513 
1514   for (i = 0; i < REF_FRAMES; i++) {
1515     if (ref_frame_info[i].sort_idx == -1) {
1516       fwd_start_idx++;
1517       continue;
1518     }
1519 
1520     if (ref_frame_info[i].sort_idx >= cur_frame_sort_idx) {
1521       fwd_end_idx = i - 1;
1522       break;
1523     }
1524   }
1525 
1526   bwd_start_idx = fwd_end_idx + 1;
1527   bwd_end_idx = REF_FRAMES - 1;
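  /*
   * Illustrative example: with sorted sort_idx values
   * { -1, -1, 60, 62, 63, 64, 66, 68 } and cur_frame_sort_idx == 64, the two
   * empty slots are skipped (fwd_start_idx == 2), the last forward reference
   * is 63 (fwd_end_idx == 4), and the backward references occupy
   * indices 5..7.
   */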
1528 
1529   // === Backward Reference Frames ===
1530 
1531   // == ALTREF_FRAME ==
1532   if (bwd_start_idx <= bwd_end_idx) {
1533     set_ref_frame_info(remapped_ref_idx, ALTREF_FRAME - LAST_FRAME,
1534                        &ref_frame_info[bwd_end_idx]);
1535     ref_flag_list[ALTREF_FRAME - LAST_FRAME] = 1;
1536     bwd_end_idx--;
1537   }
1538 
1539   // == BWDREF_FRAME ==
1540   if (bwd_start_idx <= bwd_end_idx) {
1541     set_ref_frame_info(remapped_ref_idx, BWDREF_FRAME - LAST_FRAME,
1542                        &ref_frame_info[bwd_start_idx]);
1543     ref_flag_list[BWDREF_FRAME - LAST_FRAME] = 1;
1544     bwd_start_idx++;
1545   }
1546 
1547   // == ALTREF2_FRAME ==
1548   if (bwd_start_idx <= bwd_end_idx) {
1549     set_ref_frame_info(remapped_ref_idx, ALTREF2_FRAME - LAST_FRAME,
1550                        &ref_frame_info[bwd_start_idx]);
1551     ref_flag_list[ALTREF2_FRAME - LAST_FRAME] = 1;
1552   }
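  /*
   * Continuing the example above: ALTREF_FRAME maps to the farthest backward
   * reference (sort_idx 68), BWDREF_FRAME to the nearest one (sort_idx 64),
   * and ALTREF2_FRAME to the remaining one (sort_idx 66).
   */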
1553 
1554   // === Forward Reference Frames ===
1555 
1556   for (i = fwd_start_idx; i <= fwd_end_idx; ++i) {
1557     // == LAST_FRAME ==
1558     if (ref_frame_info[i].map_idx == lst_map_idx) {
1559       set_ref_frame_info(remapped_ref_idx, LAST_FRAME - LAST_FRAME,
1560                          &ref_frame_info[i]);
1561       ref_flag_list[LAST_FRAME - LAST_FRAME] = 1;
1562     }
1563 
1564     // == GOLDEN_FRAME ==
1565     if (ref_frame_info[i].map_idx == gld_map_idx) {
1566       set_ref_frame_info(remapped_ref_idx, GOLDEN_FRAME - LAST_FRAME,
1567                          &ref_frame_info[i]);
1568       ref_flag_list[GOLDEN_FRAME - LAST_FRAME] = 1;
1569     }
1570   }
1571 
1572   assert(ref_flag_list[LAST_FRAME - LAST_FRAME] == 1 &&
1573          ref_flag_list[GOLDEN_FRAME - LAST_FRAME] == 1);
1574 
1575   // == LAST2_FRAME ==
1576   // == LAST3_FRAME ==
1577   // == BWDREF_FRAME ==
1578   // == ALTREF2_FRAME ==
1579   // == ALTREF_FRAME ==
1580 
1581   // Set up the reference frames in the anti-chronological order.
1582   for (ref_idx = 0; ref_idx < (INTER_REFS_PER_FRAME - 2); ref_idx++) {
1583     const MV_REFERENCE_FRAME ref_frame = ref_frame_list[ref_idx];
1584 
1585     if (ref_flag_list[ref_frame - LAST_FRAME] == 1) continue;
1586 
1587     while (fwd_start_idx <= fwd_end_idx &&
1588            (ref_frame_info[fwd_end_idx].map_idx == lst_map_idx ||
1589             ref_frame_info[fwd_end_idx].map_idx == gld_map_idx)) {
1590       fwd_end_idx--;
1591     }
1592     if (fwd_start_idx > fwd_end_idx) break;
1593 
1594     set_ref_frame_info(remapped_ref_idx, ref_frame - LAST_FRAME,
1595                        &ref_frame_info[fwd_end_idx]);
1596     ref_flag_list[ref_frame - LAST_FRAME] = 1;
1597 
1598     fwd_end_idx--;
1599   }
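  /*
   * Continuing the example above: the forward references not already claimed
   * by LAST_FRAME or GOLDEN_FRAME are handed out from the most recent
   * (largest sort_idx) backwards to whichever slots in ref_frame_list are
   * still empty.
   */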
1600 
1601   // Assign all the remaining frame(s), if any, to the earliest reference frame.
1602   for (; ref_idx < (INTER_REFS_PER_FRAME - 2); ref_idx++) {
1603     const MV_REFERENCE_FRAME ref_frame = ref_frame_list[ref_idx];
1604     if (ref_flag_list[ref_frame - LAST_FRAME] == 1) continue;
1605     set_ref_frame_info(remapped_ref_idx, ref_frame - LAST_FRAME,
1606                        &ref_frame_info[fwd_start_idx]);
1607     ref_flag_list[ref_frame - LAST_FRAME] = 1;
1608   }
1609 
1610   for (i = 0; i < INTER_REFS_PER_FRAME; i++) {
1611     assert(ref_flag_list[i] == 1);
1612   }
1613 }
1614 
1615 void av1_setup_frame_buf_refs(AV1_COMMON *cm) {
1616   MV_REFERENCE_FRAME ref_frame;
1617   cm->cur_frame->order_hint = cm->current_frame.order_hint;
1618 
1619   for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
1620     const RefCntBuffer *const buf = get_ref_frame_buf(cm, ref_frame);
1621     if (buf != NULL)
1622       cm->cur_frame->ref_order_hints[ref_frame - LAST_FRAME] = buf->order_hint;
1623   }
1624 }
1625 
1626 void av1_setup_frame_sign_bias(AV1_COMMON *cm) {
1627   MV_REFERENCE_FRAME ref_frame;
1628   for (ref_frame = LAST_FRAME; ref_frame <= ALTREF_FRAME; ++ref_frame) {
1629     const RefCntBuffer *const buf = get_ref_frame_buf(cm, ref_frame);
1630     if (cm->seq_params.order_hint_info.enable_order_hint && buf != NULL) {
1631       const int ref_order_hint = buf->order_hint;
1632       cm->ref_frame_sign_bias[ref_frame] =
1633           (get_relative_dist(&cm->seq_params.order_hint_info, ref_order_hint,
1634                              (int)cm->current_frame.order_hint) <= 0)
1635               ? 0
1636               : 1;
1637     } else {
1638       cm->ref_frame_sign_bias[ref_frame] = 0;
1639     }
1640   }
1641 }
1642 
1643 
1644 void av1_setup_skip_mode_allowed(AV1_COMMON *cm)
1645 {
1646 	const OrderHintInfo *const order_hint_info = &cm->seq_params.order_hint_info;
1647 	SkipModeInfo *const skip_mode_info = &cm->current_frame.skip_mode_info;
1648 	int i;
1649 	int cur_order_hint;
1650 	int ref_order_hints[2] = { -1, INT_MAX };
1651 	int ref_idx[2] = { INVALID_IDX, INVALID_IDX };
1652 
1653 	skip_mode_info->skip_mode_allowed = 0;
1654 	skip_mode_info->ref_frame_idx_0 = INVALID_IDX;
1655 	skip_mode_info->ref_frame_idx_1 = INVALID_IDX;
1656 	av1_print2(AV1_DEBUG_BUFMGR_DETAIL, "av1_setup_skip_mode_allowed %d %d %d\n", order_hint_info->enable_order_hint,
1657 		frame_is_intra_only(cm),
1658 		cm->current_frame.reference_mode);
1659 	if (!order_hint_info->enable_order_hint || frame_is_intra_only(cm) ||
1660 		cm->current_frame.reference_mode == SINGLE_REFERENCE)
1661 		return;
1662 
1663 	cur_order_hint = cm->current_frame.order_hint;
1664 
1665 	// Identify the nearest forward and backward references.
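	/*
	 * Illustrative example: with reference order hints { 10, 12, 15, 18 }
	 * and cur_order_hint == 14, the nearest forward reference is 12 and the
	 * nearest backward reference is 15, so skip mode (when allowed) pairs
	 * those two references.
	 */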
1666 	for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
1667 		const RefCntBuffer *const buf = get_ref_frame_buf(cm, LAST_FRAME + i);
1668 		int ref_order_hint;
1669 		if (buf == NULL) continue;
1670 
1671 		ref_order_hint = buf->order_hint;
1672 		if (get_relative_dist(order_hint_info, ref_order_hint, cur_order_hint) < 0) {
1673 			// Forward reference
1674 			if (ref_order_hints[0] == -1 ||
1675 				get_relative_dist(order_hint_info, ref_order_hint,
1676 				ref_order_hints[0]) > 0) {
1677 				ref_order_hints[0] = ref_order_hint;
1678 				ref_idx[0] = i;
1679 			}
1680 		} else if (get_relative_dist(order_hint_info, ref_order_hint,
1681 		cur_order_hint) > 0) {
1682 			// Backward reference
1683 			if (ref_order_hints[1] == INT_MAX ||
1684 				get_relative_dist(order_hint_info, ref_order_hint,
1685 				ref_order_hints[1]) < 0) {
1686 				ref_order_hints[1] = ref_order_hint;
1687 				ref_idx[1] = i;
1688 			}
1689 		}
1690 	}
1691 
1692 	if (ref_idx[0] != INVALID_IDX && ref_idx[1] != INVALID_IDX) {
1693 		// == Bi-directional prediction ==
1694 		skip_mode_info->skip_mode_allowed = 1;
1695 		skip_mode_info->ref_frame_idx_0 = AOMMIN(ref_idx[0], ref_idx[1]);
1696 		skip_mode_info->ref_frame_idx_1 = AOMMAX(ref_idx[0], ref_idx[1]);
1697 	} else if (ref_idx[0] != INVALID_IDX && ref_idx[1] == INVALID_IDX) {
1698 		// == Forward prediction only ==
1699 		// Identify the second nearest forward reference.
1700 		ref_order_hints[1] = -1;
1701 		for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
1702 			const RefCntBuffer *const buf = get_ref_frame_buf(cm, LAST_FRAME + i);
1703 			int ref_order_hint;
1704 			if (buf == NULL) continue;
1705 
1706 			ref_order_hint = buf->order_hint;
1707 			if ((ref_order_hints[0] != -1 &&
1708 			get_relative_dist(order_hint_info, ref_order_hint, ref_order_hints[0]) < 0) &&
1709 			(ref_order_hints[1] == -1 ||
1710 			get_relative_dist(order_hint_info, ref_order_hint, ref_order_hints[1]) > 0)) {
1711 				// Second closest forward reference
1712 				ref_order_hints[1] = ref_order_hint;
1713 				ref_idx[1] = i;
1714 			}
1715 		}
1716 		if (ref_order_hints[1] != -1) {
1717 			skip_mode_info->skip_mode_allowed = 1;
1718 			skip_mode_info->ref_frame_idx_0 = AOMMIN(ref_idx[0], ref_idx[1]);
1719 			skip_mode_info->ref_frame_idx_1 = AOMMAX(ref_idx[0], ref_idx[1]);
1720 		}
1721 	}
1722 	av1_print2(AV1_DEBUG_BUFMGR_DETAIL,
1723 		"skip_mode_info: skip_mode_allowed 0x%x 0x%x 0x%x\n",
1724 	cm->current_frame.skip_mode_info.skip_mode_allowed,
1725 	cm->current_frame.skip_mode_info.ref_frame_idx_0,
1726 	cm->current_frame.skip_mode_info.ref_frame_idx_1);
1727 }
1728 
1729 static inline int frame_might_allow_ref_frame_mvs(const AV1_COMMON *cm) {
1730   return !cm->error_resilient_mode &&
1731     cm->seq_params.order_hint_info.enable_ref_frame_mvs &&
1732     cm->seq_params.order_hint_info.enable_order_hint &&
1733     !frame_is_intra_only(cm);
1734 }
1735 
1736 #ifdef ORI_CODE
1737 /*
1738 * segmentation
1739 */
1740 static const int seg_feature_data_signed[SEG_LVL_MAX] = {
1741   1, 1, 1, 1, 1, 0, 0, 0
1742 };
1743 
1744 static const int seg_feature_data_max[SEG_LVL_MAX] = { MAXQ,
1745                                                        MAX_LOOP_FILTER,
1746                                                        MAX_LOOP_FILTER,
1747                                                        MAX_LOOP_FILTER,
1748                                                        MAX_LOOP_FILTER,
1749                                                        7,
1750                                                        0,
1751                                                        0 };
1752 
1753 
1754 static inline void segfeatures_copy(struct segmentation *dst,
1755                                     const struct segmentation *src) {
1756   int i, j;
1757   for (i = 0; i < MAX_SEGMENTS; i++) {
1758     dst->feature_mask[i] = src->feature_mask[i];
1759     for (j = 0; j < SEG_LVL_MAX; j++) {
1760       dst->feature_data[i][j] = src->feature_data[i][j];
1761     }
1762   }
1763   dst->segid_preskip = src->segid_preskip;
1764   dst->last_active_segid = src->last_active_segid;
1765 }
1766 
1767 static void av1_clearall_segfeatures(struct segmentation *seg) {
1768   av1_zero(seg->feature_data);
1769   av1_zero(seg->feature_mask);
1770 }
1771 
1772 static void av1_enable_segfeature(struct segmentation *seg, int segment_id,
1773     int feature_id) {
1774   seg->feature_mask[segment_id] |= 1 << feature_id;
1775 }
1776 
1777 void av1_calculate_segdata(struct segmentation *seg) {
1778   seg->segid_preskip = 0;
1779   seg->last_active_segid = 0;
1780   for (int i = 0; i < MAX_SEGMENTS; i++) {
1781     for (int j = 0; j < SEG_LVL_MAX; j++) {
1782       if (seg->feature_mask[i] & (1 << j)) {
1783         seg->segid_preskip |= (j >= SEG_LVL_REF_FRAME);
1784         seg->last_active_segid = i;
1785       }
1786     }
1787   }
1788 }
1789 
1790 static int av1_seg_feature_data_max(int feature_id) {
1791   return seg_feature_data_max[feature_id];
1792 }
1793 
1794 static int av1_is_segfeature_signed(int feature_id) {
1795   return seg_feature_data_signed[feature_id];
1796 }
1797 
1798 static void av1_set_segdata(struct segmentation *seg, int segment_id,
1799                      int feature_id, int seg_data) {
1800   if (seg_data < 0) {
1801     assert(seg_feature_data_signed[feature_id]);
1802     assert(-seg_data <= seg_feature_data_max[feature_id]);
1803   } else {
1804     assert(seg_data <= seg_feature_data_max[feature_id]);
1805   }
1806 
1807   seg->feature_data[segment_id][feature_id] = seg_data;
1808 }
1809 
1810 static inline int clamp(int value, int low, int high) {
1811   return value < low ? low : (value > high ? high : value);
1812 }
1813 
1814 static void setup_segmentation(AV1_COMMON *const cm,
1815                                union param_u *params) {
1816   struct segmentation *const seg = &cm->seg;
1817 
1818   seg->update_map = 0;
1819   seg->update_data = 0;
1820   seg->temporal_update = 0;
1821 
1822   seg->enabled = params->p.seg_enabled; //aom_rb_read_bit(-1, defmark, rb);
1823   if (!seg->enabled) {
1824     if (cm->cur_frame->seg_map)
1825       memset(cm->cur_frame->seg_map, 0, (cm->mi_rows * cm->mi_cols));
1826 
1827     memset(seg, 0, sizeof(*seg));
1828     segfeatures_copy(&cm->cur_frame->seg, seg);
1829     return;
1830   }
1831   if (cm->seg.enabled && cm->prev_frame &&
1832       (cm->mi_rows == cm->prev_frame->mi_rows) &&
1833       (cm->mi_cols == cm->prev_frame->mi_cols)) {
1834     cm->last_frame_seg_map = cm->prev_frame->seg_map;
1835   } else {
1836     cm->last_frame_seg_map = NULL;
1837   }
1838   // Read update flags
1839   if (cm->primary_ref_frame == PRIMARY_REF_NONE) {
1840     // These frames can't use previous frames, so must signal map + features
1841     seg->update_map = 1;
1842     seg->temporal_update = 0;
1843     seg->update_data = 1;
1844   } else {
1845     seg->update_map = params->p.seg_update_map; // aom_rb_read_bit(-1, defmark, rb);
1846     if (seg->update_map) {
1847       seg->temporal_update = params->p.seg_temporal_update; //aom_rb_read_bit(-1, defmark, rb);
1848     } else {
1849       seg->temporal_update = 0;
1850     }
1851     seg->update_data = params->p.seg_update_data; //aom_rb_read_bit(-1, defmark, rb);
1852   }
1853 
1854   // Segmentation data update
1855   if (seg->update_data) {
1856     av1_clearall_segfeatures(seg);
1857 
1858     for (int i = 0; i < MAX_SEGMENTS; i++) {
1859       for (int j = 0; j < SEG_LVL_MAX; j++) {
1860         int data = 0;
1861         const int feature_enabled = params->p.seg_feature_enabled; //aom_rb_read_bit(-1, defmark, rb);
1862         if (feature_enabled) {
1863           av1_enable_segfeature(seg, i, j);
1864 
1865           const int data_max = av1_seg_feature_data_max(j);
1866           const int data_min = -data_max;
1867           /*
1868           const int ubits = get_unsigned_bits(data_max);
1869 
1870           if (av1_is_segfeature_signed(j)) {
1871             data = aom_rb_read_inv_signed_literal(-1, defmark, rb, ubits);
1872           } else {
1873             data = aom_rb_read_literal(-1, defmark, rb, ubits);
1874           }*/
1875           data = params->p.seg_data;
1876           data = clamp(data, data_min, data_max);
1877         }
1878         av1_set_segdata(seg, i, j, data);
1879       }
1880     }
1881     av1_calculate_segdata(seg);
1882   } else if (cm->prev_frame) {
1883     segfeatures_copy(seg, &cm->prev_frame->seg);
1884   }
1885   segfeatures_copy(&cm->cur_frame->seg, seg);
1886 }
1887 #endif
1888 
1889 /**/
1890 
1891 
1892 int av1_decode_frame_headers_and_setup(AV1Decoder *pbi, int trailing_bits_present, union param_u *params)
1893 {
1894   AV1_COMMON *const cm = &pbi->common;
1895   /*
1896   read_uncompressed_header()
1897   */
1898   const SequenceHeader *const seq_params = &cm->seq_params;
1899   CurrentFrame *const current_frame = &cm->current_frame;
1900   //MACROBLOCKD *const xd = &pbi->mb;
1901   BufferPool *const pool = cm->buffer_pool;
1902   RefCntBuffer *const frame_bufs = pool->frame_bufs;
1903   int i;
1904   int frame_size_override_flag;
1905   unsigned long flags;
1906 
1907   if (!pbi->sequence_header_ready) {
1908     aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
1909                        "No sequence header");
1910   }
1911   cm->last_frame_type = current_frame->frame_type;
1912 
1913   if (seq_params->reduced_still_picture_hdr) {
1914     cm->show_existing_frame = 0;
1915     cm->show_frame = 1;
1916     current_frame->frame_type = KEY_FRAME;
1917     if (pbi->sequence_header_changed) {
1918       // This is the start of a new coded video sequence.
1919       pbi->sequence_header_changed = 0;
1920       pbi->decoding_first_frame = 1;
1921       reset_frame_buffers(pbi);
1922     }
1923     cm->error_resilient_mode = 1;
1924   } else {
1925     cm->show_existing_frame = params->p.show_existing_frame;
1926     pbi->reset_decoder_state = 0;
1927     if (cm->show_existing_frame) {
1928       int existing_frame_idx;
1929       RefCntBuffer *frame_to_show;
1930       if (pbi->sequence_header_changed) {
1931         aom_internal_error(
1932             &cm->error, AOM_CODEC_CORRUPT_FRAME,
1933             "New sequence header starts with a show_existing_frame.");
1934       }
1935       // Show an existing frame directly.
1936       existing_frame_idx = params->p.existing_frame_idx; //aom_rb_read_literal(rb, 3);
1937       frame_to_show = cm->ref_frame_map[existing_frame_idx];
1938       if (frame_to_show == NULL) {
1939         aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
1940                            "Buffer does not contain a decoded frame");
1941       }
1942       if (seq_params->decoder_model_info_present_flag &&
1943           cm->timing_info.equal_picture_interval == 0) {
1944         cm->frame_presentation_time = params->p.frame_presentation_time;
1945         //read_temporal_point_info(cm);
1946       }
1947       if (seq_params->frame_id_numbers_present_flag) {
1948         //int frame_id_length = seq_params->frame_id_length;
1949         int display_frame_id = params->p.display_frame_id; //aom_rb_read_literal(rb, frame_id_length);
1950         /* Compare display_frame_id with ref_frame_id and check valid for
1951          * referencing */
1952         if (display_frame_id != cm->ref_frame_id[existing_frame_idx] ||
1953             cm->valid_for_referencing[existing_frame_idx] == 0)
1954           aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
1955                              "Reference buffer frame ID mismatch");
1956       }
1957       lock_buffer_pool(pool, flags);
1958       assert(frame_to_show->ref_count > 0);
1959       // cm->cur_frame should be the buffer referenced by the return value
1960       // of the get_free_fb() call in av1_receive_compressed_data(), and
1961       // generate_next_ref_frame_map() has not been called, so ref_count
1962       // should still be 1.
1963       assert(cm->cur_frame->ref_count == 1);
1964       // assign_frame_buffer_p() decrements ref_count directly rather than
1965       // calling decrease_ref_count(). If cm->cur_frame->raw_frame_buffer has
1966       // already been allocated, it will not be released by
1967       // assign_frame_buffer_p()!
1968       assert(!cm->cur_frame->raw_frame_buffer.data);
1969       assign_frame_buffer_p(&cm->cur_frame, frame_to_show);
1970       pbi->reset_decoder_state = frame_to_show->frame_type == KEY_FRAME;
1971       unlock_buffer_pool(pool, flags);
1972 
1973 #ifdef ORI_CODE
1974       cm->lf.filter_level[0] = 0;
1975       cm->lf.filter_level[1] = 0;
1976 #endif
1977       cm->show_frame = 1;
1978 
1979       // Section 6.8.2: It is a requirement of bitstream conformance that, when
1980       // show_existing_frame is used to show a previous frame, the value
1981       // of showable_frame for the previous frame was equal to 1.
1982       if (!frame_to_show->showable_frame) {
1983         aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
1984                            "Buffer does not contain a showable frame");
1985       }
1986       // Section 6.8.2: It is a requirement of bitstream conformance that, when
1987       // show_existing_frame is used to show a previous frame with
1988       // RefFrameType[ frame_to_show_map_idx ] equal to KEY_FRAME, the
1989       // frame is output via the show_existing_frame mechanism at most once.
1990       if (pbi->reset_decoder_state) frame_to_show->showable_frame = 0;
1991 
1992 #ifdef ORI_CODE
1993       cm->film_grain_params = frame_to_show->film_grain_params;
1994 #endif
1995       if (pbi->reset_decoder_state) {
1996         show_existing_frame_reset(pbi, existing_frame_idx);
1997       } else {
1998         current_frame->refresh_frame_flags = 0;
1999       }
2000 
2001       return 0;
2002     }
2003 
2004     current_frame->frame_type = (FRAME_TYPE)params->p.frame_type; //aom_rb_read_literal(rb, 2);
2005     if (pbi->sequence_header_changed) {
2006       if (current_frame->frame_type == KEY_FRAME) {
2007         // This is the start of a new coded video sequence.
2008         pbi->sequence_header_changed = 0;
2009         pbi->decoding_first_frame = 1;
2010         reset_frame_buffers(pbi);
2011       } else {
2012         aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
2013                            "Sequence header has changed without a keyframe.");
2014       }
2015     }
2016     cm->show_frame = params->p.show_frame; //aom_rb_read_bit(rb);
2017     if (seq_params->still_picture &&
2018         (current_frame->frame_type != KEY_FRAME || !cm->show_frame)) {
2019       aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
2020                          "Still pictures must be coded as shown keyframes");
2021     }
2022     cm->showable_frame = current_frame->frame_type != KEY_FRAME;
2023     if (cm->show_frame) {
2024       if (seq_params->decoder_model_info_present_flag &&
2025           cm->timing_info.equal_picture_interval == 0)
2026         cm->frame_presentation_time = params->p.frame_presentation_time;
2027         //read_temporal_point_info(cm);
2028     } else {
2029       // See if this frame can be used as show_existing_frame in future
2030       cm->showable_frame = params->p.showable_frame;//aom_rb_read_bit(rb);
2031     }
2032     cm->cur_frame->showable_frame = cm->showable_frame;
2033     cm->error_resilient_mode =
2034         frame_is_sframe(cm) ||
2035                 (current_frame->frame_type == KEY_FRAME && cm->show_frame)
2036             ? 1
2037             : params->p.error_resilient_mode; //aom_rb_read_bit(rb);
2038   }
2039 
2040 #ifdef ORI_CODE
2041   cm->disable_cdf_update = aom_rb_read_bit(rb);
2042   if (seq_params->force_screen_content_tools == 2) {
2043     cm->allow_screen_content_tools = aom_rb_read_bit(rb);
2044   } else {
2045     cm->allow_screen_content_tools = seq_params->force_screen_content_tools;
2046   }
2047 
2048   if (cm->allow_screen_content_tools) {
2049     if (seq_params->force_integer_mv == 2) {
2050       cm->cur_frame_force_integer_mv = aom_rb_read_bit(rb);
2051     } else {
2052       cm->cur_frame_force_integer_mv = seq_params->force_integer_mv;
2053     }
2054   } else {
2055     cm->cur_frame_force_integer_mv = 0;
2056   }
2057 #endif
2058 
2059   frame_size_override_flag = 0;
2060   cm->allow_intrabc = 0;
2061   cm->primary_ref_frame = PRIMARY_REF_NONE;
2062 
2063   if (!seq_params->reduced_still_picture_hdr) {
2064     if (seq_params->frame_id_numbers_present_flag) {
2065       int frame_id_length = seq_params->frame_id_length;
2066       int diff_len = seq_params->delta_frame_id_length;
2067       int prev_frame_id = 0;
2068       int have_prev_frame_id =
2069           !pbi->decoding_first_frame &&
2070           !(current_frame->frame_type == KEY_FRAME && cm->show_frame);
2071       if (have_prev_frame_id) {
2072         prev_frame_id = cm->current_frame_id;
2073       }
2074       cm->current_frame_id = params->p.current_frame_id; //aom_rb_read_literal(rb, frame_id_length);
2075 
2076       if (have_prev_frame_id) {
2077         int diff_frame_id;
2078         if (cm->current_frame_id > prev_frame_id) {
2079           diff_frame_id = cm->current_frame_id - prev_frame_id;
2080         } else {
2081           diff_frame_id =
2082               (1 << frame_id_length) + cm->current_frame_id - prev_frame_id;
2083         }
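        /*
         * Illustrative example (assuming frame_id_length == 15, so frame IDs
         * wrap modulo 32768): prev_frame_id == 32760 and
         * cm->current_frame_id == 5 give diff_frame_id == 32768 + 5 - 32760
         * == 13, which passes the conformance check below (nonzero and less
         * than 1 << 14).
         */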
2084         /* Check current_frame_id for conformance */
2085         if (prev_frame_id == cm->current_frame_id ||
2086             diff_frame_id >= (1 << (frame_id_length - 1))) {
2087           aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
2088                              "Invalid value of current_frame_id");
2089         }
2090       }
2091       /* Check if some frames need to be marked as not valid for referencing */
2092       for (i = 0; i < REF_FRAMES; i++) {
2093         if (current_frame->frame_type == KEY_FRAME && cm->show_frame) {
2094           cm->valid_for_referencing[i] = 0;
2095         } else if (cm->current_frame_id - (1 << diff_len) > 0) {
2096           if (cm->ref_frame_id[i] > cm->current_frame_id ||
2097               cm->ref_frame_id[i] < cm->current_frame_id - (1 << diff_len))
2098             cm->valid_for_referencing[i] = 0;
2099         } else {
2100           if (cm->ref_frame_id[i] > cm->current_frame_id &&
2101               cm->ref_frame_id[i] < (1 << frame_id_length) +
2102                                         cm->current_frame_id - (1 << diff_len))
2103             cm->valid_for_referencing[i] = 0;
2104         }
2105       }
2106     }
2107 
2108     frame_size_override_flag = frame_is_sframe(cm) ? 1 : params->p.frame_size_override_flag; //aom_rb_read_bit(rb);
2109 
2110     current_frame->order_hint = params->p.order_hint; /*aom_rb_read_literal(
2111         rb, seq_params->order_hint_info.order_hint_bits_minus_1 + 1);*/
2112     current_frame->frame_number = current_frame->order_hint;
2113 
2114     if (!cm->error_resilient_mode && !frame_is_intra_only(cm)) {
2115       cm->primary_ref_frame = params->p.primary_ref_frame;//aom_rb_read_literal(rb, PRIMARY_REF_BITS);
2116     }
2117   }
2118 
2119   if (seq_params->decoder_model_info_present_flag) {
2120     cm->buffer_removal_time_present = params->p.buffer_removal_time_present; //aom_rb_read_bit(rb);
2121     if (cm->buffer_removal_time_present) {
2122       int op_num;
2123       for (op_num = 0;
2124            op_num < seq_params->operating_points_cnt_minus_1 + 1; op_num++) {
2125         if (cm->op_params[op_num].decoder_model_param_present_flag) {
2126           if ((((seq_params->operating_point_idc[op_num] >>
2127                  cm->temporal_layer_id) &
2128                 0x1) &&
2129                ((seq_params->operating_point_idc[op_num] >>
2130                  (cm->spatial_layer_id + 8)) &
2131                 0x1)) ||
2132               seq_params->operating_point_idc[op_num] == 0) {
2133             cm->op_frame_timing[op_num].buffer_removal_time =
2134                 params->p.op_frame_timing[op_num];
2135                 /*aom_rb_read_unsigned_literal(
2136                     rb, cm->buffer_model.buffer_removal_time_length);*/
2137           } else {
2138             cm->op_frame_timing[op_num].buffer_removal_time = 0;
2139           }
2140         } else {
2141           cm->op_frame_timing[op_num].buffer_removal_time = 0;
2142         }
2143       }
2144     }
2145   }
2146   if (current_frame->frame_type == KEY_FRAME) {
2147     if (!cm->show_frame) {  // unshown keyframe (forward keyframe)
2148       current_frame->refresh_frame_flags = params->p.refresh_frame_flags; //aom_rb_read_literal(rb, REF_FRAMES);
2149     } else {  // shown keyframe
2150       current_frame->refresh_frame_flags = (1 << REF_FRAMES) - 1;
2151     }
2152 
2153     for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
2154       cm->remapped_ref_idx[i] = INVALID_IDX;
2155     }
2156     if (pbi->need_resync) {
2157       reset_ref_frame_map(pbi);
2158       pbi->need_resync = 0;
2159     }
2160   } else {
2161     if (current_frame->frame_type == INTRA_ONLY_FRAME) {
2162       current_frame->refresh_frame_flags = params->p.refresh_frame_flags; //aom_rb_read_literal(rb, REF_FRAMES);
2163       if (current_frame->refresh_frame_flags == 0xFF) {
2164         aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
2165                            "Intra only frames cannot have refresh flags 0xFF");
2166       }
2167       if (pbi->need_resync) {
2168         reset_ref_frame_map(pbi);
2169         pbi->need_resync = 0;
2170       }
2171     } else if (pbi->need_resync != 1) { /* Skip if need resync */
2172       current_frame->refresh_frame_flags =
2173           frame_is_sframe(cm) ? 0xFF : params->p.refresh_frame_flags; //aom_rb_read_literal(rb, REF_FRAMES);
2174     }
2175   }
2176 
2177   if (!frame_is_intra_only(cm) || current_frame->refresh_frame_flags != 0xFF) {
2178     // Read all ref frame order hints if error_resilient_mode == 1
2179     if (cm->error_resilient_mode &&
2180         seq_params->order_hint_info.enable_order_hint) {
2181       int ref_idx;
2182       for (ref_idx = 0; ref_idx < REF_FRAMES; ref_idx++) {
2183         // Read order hint from bit stream
2184         unsigned int order_hint = params->p.ref_order_hint[ref_idx];/*aom_rb_read_literal(
2185             rb, seq_params->order_hint_info.order_hint_bits_minus_1 + 1);*/
2186         // Get buffer
2187         RefCntBuffer *buf = cm->ref_frame_map[ref_idx];
2188         int buf_idx;
2189         if (buf == NULL || order_hint != buf->order_hint) {
2190           if (buf != NULL) {
2191             lock_buffer_pool(pool, flags);
2192             decrease_ref_count(pbi, buf, pool);
2193             unlock_buffer_pool(pool, flags);
2194           }
2195           // If no corresponding buffer exists, allocate a new buffer with all
2196           // pixels set to neutral grey.
2197           buf_idx = get_free_fb(cm);
2198           if (buf_idx == INVALID_IDX) {
2199             aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
2200                                "Unable to find free frame buffer");
2201           }
2202           buf = &frame_bufs[buf_idx];
2203           lock_buffer_pool(pool, flags);
2204           if (aom_realloc_frame_buffer(cm, &buf->buf, seq_params->max_frame_width,
2205                   seq_params->max_frame_height, buf->order_hint)) {
2206             decrease_ref_count(pbi, buf, pool);
2207             unlock_buffer_pool(pool, flags);
2208             aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
2209                                "Failed to allocate frame buffer");
2210           }
2211           unlock_buffer_pool(pool, flags);
2212 #ifdef ORI_CODE
2213           set_planes_to_neutral_grey(seq_params, &buf->buf, 0);
2214 #endif
2215           cm->ref_frame_map[ref_idx] = buf;
2216           buf->order_hint = order_hint;
2217         }
2218       }
2219     }
2220   }
2221 
2222   if (current_frame->frame_type == KEY_FRAME) {
2223     setup_frame_size(cm, frame_size_override_flag, params);
2224 #ifdef ORI_CODE
2225     if (cm->allow_screen_content_tools && !av1_superres_scaled(cm))
2226       cm->allow_intrabc = aom_rb_read_bit(rb);
2227 #endif
2228     cm->allow_ref_frame_mvs = 0;
2229     cm->prev_frame = NULL;
2230   } else {
2231     cm->allow_ref_frame_mvs = 0;
2232 
2233     if (current_frame->frame_type == INTRA_ONLY_FRAME) {
2234 #ifdef ORI_CODE
2235       cm->cur_frame->film_grain_params_present =
2236           seq_params->film_grain_params_present;
2237 #endif
2238       setup_frame_size(cm, frame_size_override_flag, params);
2239 #ifdef ORI_CODE
2240       if (cm->allow_screen_content_tools && !av1_superres_scaled(cm))
2241         cm->allow_intrabc = aom_rb_read_bit(rb);
2242 #endif
2243     } else if (pbi->need_resync != 1) { /* Skip if need resync */
2244       int frame_refs_short_signaling = 0;
2245       // Frame refs short signaling is off when error resilient mode is on.
2246       if (seq_params->order_hint_info.enable_order_hint)
2247         frame_refs_short_signaling = params->p.frame_refs_short_signaling;//aom_rb_read_bit(rb);
2248 
2249       if (frame_refs_short_signaling) {
2250         // == LAST_FRAME ==
2251         const int lst_ref = params->p.lst_ref; //aom_rb_read_literal(rb, REF_FRAMES_LOG2);
2252         const RefCntBuffer *const lst_buf = cm->ref_frame_map[lst_ref];
2253 
2254         // == GOLDEN_FRAME ==
2255         const int gld_ref = params->p.gld_ref; //aom_rb_read_literal(rb, REF_FRAMES_LOG2);
2256         const RefCntBuffer *const gld_buf = cm->ref_frame_map[gld_ref];
2257 
2258         // Most of the time, streams start with a keyframe. In that case,
2259         // ref_frame_map will have been filled in at that point and will not
2260         // contain any NULLs. However, streams are explicitly allowed to start
2261         // with an intra-only frame, so long as they don't then signal a
2262         // reference to a slot that hasn't been set yet. That's what we are
2263         // checking here.
2264         if (lst_buf == NULL)
2265           aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
2266                              "Inter frame requests nonexistent reference");
2267         if (gld_buf == NULL)
2268           aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
2269                              "Inter frame requests nonexistent reference");
2270 
2271         av1_set_frame_refs(cm, cm->remapped_ref_idx, lst_ref, gld_ref);
2272       }
2273 
2274       for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
2275         int ref = 0;
2276         if (!frame_refs_short_signaling) {
2277           ref = params->p.remapped_ref_idx[i];//aom_rb_read_literal(rb, REF_FRAMES_LOG2);
2278 
2279           // Most of the time, streams start with a keyframe. In that case,
2280           // ref_frame_map will have been filled in at that point and will not
2281           // contain any NULLs. However, streams are explicitly allowed to start
2282           // with an intra-only frame, so long as they don't then signal a
2283           // reference to a slot that hasn't been set yet. That's what we are
2284           // checking here.
2285           if (cm->ref_frame_map[ref] == NULL)
2286             aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
2287                                "Inter frame requests nonexistent reference");
2288           cm->remapped_ref_idx[i] = ref;
2289         } else {
2290           ref = cm->remapped_ref_idx[i];
2291         }
2292 
2293         cm->ref_frame_sign_bias[LAST_FRAME + i] = 0;
2294 
2295         if (seq_params->frame_id_numbers_present_flag) {
2296           int frame_id_length = seq_params->frame_id_length;
2297           //int diff_len = seq_params->delta_frame_id_length;
2298           int delta_frame_id_minus_1 = params->p.delta_frame_id_minus_1[i];//aom_rb_read_literal(rb, diff_len);
2299           int ref_frame_id =
2300               ((cm->current_frame_id - (delta_frame_id_minus_1 + 1) +
2301                 (1 << frame_id_length)) %
2302                (1 << frame_id_length));
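          /*
           * Illustrative example (assuming frame_id_length == 15):
           * cm->current_frame_id == 3 with delta_frame_id_minus_1 == 5 gives
           * ref_frame_id == (3 - 6 + 32768) % 32768 == 32765, i.e. the delta
           * is applied modulo 2^frame_id_length.
           */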
2303           // Compare values derived from delta_frame_id_minus_1 and
2304           // refresh_frame_flags. Also, check valid for referencing
2305           if (ref_frame_id != cm->ref_frame_id[ref] ||
2306               cm->valid_for_referencing[ref] == 0)
2307             aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
2308                                "Reference buffer frame ID mismatch");
2309         }
2310       }
2311 
2312       if (!cm->error_resilient_mode && frame_size_override_flag) {
2313         setup_frame_size_with_refs(cm, params);
2314       } else {
2315         setup_frame_size(cm, frame_size_override_flag, params);
2316       }
2317 #ifdef ORI_CODE
2318       if (cm->cur_frame_force_integer_mv) {
2319         cm->allow_high_precision_mv = 0;
2320       } else {
2321         cm->allow_high_precision_mv = aom_rb_read_bit(rb);
2322       }
2323       cm->interp_filter = read_frame_interp_filter(rb);
2324       cm->switchable_motion_mode = aom_rb_read_bit(rb);
2325 #endif
2326     }
2327 
2328     cm->prev_frame = get_primary_ref_frame_buf(cm);
2329     if (cm->primary_ref_frame != PRIMARY_REF_NONE &&
2330         get_primary_ref_frame_buf(cm) == NULL) {
2331       aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
2332                          "Reference frame containing this frame's initial "
2333                          "frame context is unavailable.");
2334     }
2335 #if 0
2336     av1_print2(AV1_DEBUG_BUFMGR_DETAIL, "%d,%d,%d,%d\n",cm->error_resilient_mode,
2337       cm->seq_params.order_hint_info.enable_ref_frame_mvs,
2338       cm->seq_params.order_hint_info.enable_order_hint,frame_is_intra_only(cm));
2339 
2340     printf("frame_might_allow_ref_frame_mvs()=>%d, current_frame->frame_type=%d, pbi->need_resync=%d, params->p.allow_ref_frame_mvs=%d\n",
2341         frame_might_allow_ref_frame_mvs(cm), current_frame->frame_type, pbi->need_resync,
2342         params->p.allow_ref_frame_mvs);
2343 #endif
2344     if (!(current_frame->frame_type == INTRA_ONLY_FRAME) &&
2345         pbi->need_resync != 1) {
2346       if (frame_might_allow_ref_frame_mvs(cm))
2347         cm->allow_ref_frame_mvs = params->p.allow_ref_frame_mvs; //aom_rb_read_bit(-1, "<allow_ref_frame_mvs>", rb);
2348       else
2349         cm->allow_ref_frame_mvs = 0;
2350 
2351 #ifdef SUPPORT_SCALE_FACTOR
2352       for (i = LAST_FRAME; i <= ALTREF_FRAME; ++i) {
2353         const RefCntBuffer *const ref_buf = get_ref_frame_buf(cm, i);
2354         struct scale_factors *const ref_scale_factors =
2355             get_ref_scale_factors(cm, i);
2356        if (ref_buf != NULL) {
2357 #ifdef AML
2358         av1_setup_scale_factors_for_frame(
2359             ref_scale_factors, ref_buf->buf.y_crop_width,
2360             ref_buf->buf.y_crop_height, cm->dec_width, cm->height);
2361 #else
2362         av1_setup_scale_factors_for_frame(
2363             ref_scale_factors, ref_buf->buf.y_crop_width,
2364             ref_buf->buf.y_crop_height, cm->width, cm->height);
2365 #endif
2366       }
2367        if (ref_scale_factors) {
2368         if ((!av1_is_valid_scale(ref_scale_factors)))
2369           aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
2370                              "Reference frame has invalid dimensions");
2371        }
2372       }
2373 #endif
2374     }
2375   }
2376 
2377   av1_setup_frame_buf_refs(cm);
2378 
2379   av1_setup_frame_sign_bias(cm);
2380 
2381   cm->cur_frame->frame_type = current_frame->frame_type;
2382 
2383   if (seq_params->frame_id_numbers_present_flag) {
2384     update_ref_frame_id(cm, cm->current_frame_id);
2385   }
2386 #ifdef ORI_CODE
2387   const int might_bwd_adapt =
2388       !(seq_params->reduced_still_picture_hdr) && !(cm->disable_cdf_update);
2389   if (might_bwd_adapt) {
2390     cm->refresh_frame_context = aom_rb_read_bit(rb)
2391                                     ? REFRESH_FRAME_CONTEXT_DISABLED
2392                                     : REFRESH_FRAME_CONTEXT_BACKWARD;
2393   } else {
2394     cm->refresh_frame_context = REFRESH_FRAME_CONTEXT_DISABLED;
2395   }
2396 #endif
2397 
2398   cm->cur_frame->buf.bit_depth = seq_params->bit_depth;
2399   cm->cur_frame->buf.color_primaries = seq_params->color_primaries;
2400   cm->cur_frame->buf.transfer_characteristics =
2401       seq_params->transfer_characteristics;
2402   cm->cur_frame->buf.matrix_coefficients = seq_params->matrix_coefficients;
2403   cm->cur_frame->buf.monochrome = seq_params->monochrome;
2404   cm->cur_frame->buf.chroma_sample_position =
2405       seq_params->chroma_sample_position;
2406   cm->cur_frame->buf.color_range = seq_params->color_range;
2407   cm->cur_frame->buf.render_width = cm->render_width;
2408   cm->cur_frame->buf.render_height = cm->render_height;
2409 
2410   if (pbi->need_resync) {
2411     aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
2412                        "Keyframe / intra-only frame required to reset decoder"
2413                        " state");
2414   }
2415 
2416   generate_next_ref_frame_map(pbi);
2417 
2418 #ifdef ORI_CODE
2419   if (cm->allow_intrabc) {
2420     // Set parameters corresponding to no filtering.
2421     struct loopfilter *lf = &cm->lf;
2422     lf->filter_level[0] = 0;
2423     lf->filter_level[1] = 0;
2424     cm->cdef_info.cdef_bits = 0;
2425     cm->cdef_info.cdef_strengths[0] = 0;
2426     cm->cdef_info.nb_cdef_strengths = 1;
2427     cm->cdef_info.cdef_uv_strengths[0] = 0;
2428     cm->rst_info[0].frame_restoration_type = RESTORE_NONE;
2429     cm->rst_info[1].frame_restoration_type = RESTORE_NONE;
2430     cm->rst_info[2].frame_restoration_type = RESTORE_NONE;
2431   }
2432 
2433   read_tile_info(pbi, rb);
2434   if (!av1_is_min_tile_width_satisfied(cm)) {
2435     aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
2436                        "Minimum tile width requirement not satisfied");
2437   }
2438 
2439   setup_quantization(cm, rb);
2440   xd->bd = (int)seq_params->bit_depth;
2441 
2442   if (cm->num_allocated_above_context_planes < av1_num_planes(cm) ||
2443       cm->num_allocated_above_context_mi_col < cm->mi_cols ||
2444       cm->num_allocated_above_contexts < cm->tile_rows) {
2445     av1_free_above_context_buffers(cm, cm->num_allocated_above_contexts);
2446     if (av1_alloc_above_context_buffers(cm, cm->tile_rows))
2447       aom_internal_error(&cm->error, AOM_CODEC_MEM_ERROR,
2448                          "Failed to allocate context buffers");
2449   }
2450 
2451   if (cm->primary_ref_frame == PRIMARY_REF_NONE) {
2452     av1_setup_past_independence(cm);
2453   }
2454 
2455   setup_segmentation(cm, params);
2456 
2457   cm->delta_q_info.delta_q_res = 1;
2458   cm->delta_q_info.delta_lf_res = 1;
2459   cm->delta_q_info.delta_lf_present_flag = 0;
2460   cm->delta_q_info.delta_lf_multi = 0;
2461   cm->delta_q_info.delta_q_present_flag =
2462       cm->base_qindex > 0 ? aom_rb_read_bit(-1, defmark, rb) : 0;
2463   if (cm->delta_q_info.delta_q_present_flag) {
2464     xd->current_qindex = cm->base_qindex;
2465     cm->delta_q_info.delta_q_res = 1 << aom_rb_read_literal(-1, defmark, rb, 2);
2466     if (!cm->allow_intrabc)
2467       cm->delta_q_info.delta_lf_present_flag = aom_rb_read_bit(-1, defmark, rb);
2468     if (cm->delta_q_info.delta_lf_present_flag) {
2469       cm->delta_q_info.delta_lf_res = 1 << aom_rb_read_literal(-1, defmark, rb, 2);
2470       cm->delta_q_info.delta_lf_multi = aom_rb_read_bit(-1, defmark, rb);
2471       av1_reset_loop_filter_delta(xd, av1_num_planes(cm));
2472     }
2473   }
2474 
2475   xd->cur_frame_force_integer_mv = cm->cur_frame_force_integer_mv;
2476 
2477   for (int i = 0; i < MAX_SEGMENTS; ++i) {
2478     const int qindex = av1_get_qindex(&cm->seg, i, cm->base_qindex);
2479     xd->lossless[i] = qindex == 0 && cm->y_dc_delta_q == 0 &&
2480                       cm->u_dc_delta_q == 0 && cm->u_ac_delta_q == 0 &&
2481                       cm->v_dc_delta_q == 0 && cm->v_ac_delta_q == 0;
2482     xd->qindex[i] = qindex;
2483   }
2484   cm->coded_lossless = is_coded_lossless(cm, xd);
2485   cm->all_lossless = cm->coded_lossless && !av1_superres_scaled(cm);
2486   setup_segmentation_dequant(cm, xd);
2487   if (cm->coded_lossless) {
2488     cm->lf.filter_level[0] = 0;
2489     cm->lf.filter_level[1] = 0;
2490   }
2491   if (cm->coded_lossless || !seq_params->enable_cdef) {
2492     cm->cdef_info.cdef_bits = 0;
2493     cm->cdef_info.cdef_strengths[0] = 0;
2494     cm->cdef_info.cdef_uv_strengths[0] = 0;
2495   }
2496   if (cm->all_lossless || !seq_params->enable_restoration) {
2497     cm->rst_info[0].frame_restoration_type = RESTORE_NONE;
2498     cm->rst_info[1].frame_restoration_type = RESTORE_NONE;
2499     cm->rst_info[2].frame_restoration_type = RESTORE_NONE;
2500   }
2501   setup_loopfilter(cm, rb);
2502 
2503   if (!cm->coded_lossless && seq_params->enable_cdef) {
2504     setup_cdef(cm, rb);
2505   }
2506   if (!cm->all_lossless && seq_params->enable_restoration) {
2507     decode_restoration_mode(cm, rb);
2508   }
2509 
2510   cm->tx_mode = read_tx_mode(cm, rb);
2511 #endif
2512 
2513   current_frame->reference_mode = read_frame_reference_mode(cm, params);
2514 
2515 #ifdef ORI_CODE
2516   if (current_frame->reference_mode != SINGLE_REFERENCE)
2517     setup_compound_reference_mode(cm);
2518 
2519 
2520 #endif
2521 
2522   av1_setup_skip_mode_allowed(cm);
2523 
2524   /*
2525     This is the point where the ucode sends send_bufmgr_info
2526     and waits for the bufmgr code to return is_skip_mode_allowed.
2527   */
2528 
2529   /*
2530   read_uncompressed_header() end
2531   */
2532 
2533   av1_setup_motion_field(cm);
2534 #ifdef AML
2535   cm->cur_frame->mi_cols = cm->mi_cols;
2536   cm->cur_frame->mi_rows = cm->mi_rows;
2537   cm->cur_frame->dec_width = cm->dec_width;
2538 
2539   /*
2540   superres_post_decode(AV1Decoder *pbi) =>
2541     av1_superres_upscale(cm, pool); =>
2542       aom_realloc_frame_buffer(
2543             frame_to_show, cm->superres_upscaled_width,
2544             cm->superres_upscaled_height, seq_params->subsampling_x,
2545             seq_params->subsampling_y, seq_params->use_highbitdepth,
2546             AOM_BORDER_IN_PIXELS, cm->byte_alignment, fb, cb, cb_priv)
2547   */
2548   aom_realloc_frame_buffer(cm, &cm->cur_frame->buf,
2549     cm->superres_upscaled_width, cm->superres_upscaled_height,
2550     cm->cur_frame->order_hint);
2551 #endif
2552   return 0;
2553 }
2554 
2555 static int are_seq_headers_consistent(const SequenceHeader *seq_params_old,
2556                                       const SequenceHeader *seq_params_new) {
2557   return !memcmp(seq_params_old, seq_params_new, sizeof(SequenceHeader));
2558 }
2559 
2560 aom_codec_err_t aom_get_num_layers_from_operating_point_idc(
2561     int operating_point_idc, unsigned int *number_spatial_layers,
2562     unsigned int *number_temporal_layers) {
2563   // derive number of spatial/temporal layers from operating_point_idc
2564 
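  /*
   * Illustrative example (assuming MAX_NUM_TEMPORAL_LAYERS == 8 as in
   * libaom): the low 8 bits of operating_point_idc flag the temporal layers
   * and the bits above them flag the spatial layers, so
   * operating_point_idc == 0x103 (bits 0, 1 and 8 set) yields 2 temporal
   * layers and 1 spatial layer.
   */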
2565   if (!number_spatial_layers || !number_temporal_layers)
2566     return AOM_CODEC_INVALID_PARAM;
2567 
2568   if (operating_point_idc == 0) {
2569     *number_temporal_layers = 1;
2570     *number_spatial_layers = 1;
2571   } else {
2572     int j;
2573     *number_spatial_layers = 0;
2574     *number_temporal_layers = 0;
2575     for (j = 0; j < MAX_NUM_SPATIAL_LAYERS; j++) {
2576       *number_spatial_layers +=
2577           (operating_point_idc >> (j + MAX_NUM_TEMPORAL_LAYERS)) & 0x1;
2578     }
2579     for (j = 0; j < MAX_NUM_TEMPORAL_LAYERS; j++) {
2580       *number_temporal_layers += (operating_point_idc >> j) & 0x1;
2581     }
2582   }
2583 
2584   return AOM_CODEC_OK;
2585 }
2586 
2587 void av1_read_sequence_header(AV1_COMMON *cm, union param_u *params,
2588                               SequenceHeader *seq_params) {
2589 #ifdef ORI_CODE
2590   const int num_bits_width = aom_rb_read_literal(-1, "<num_bits_width>", rb, 4) + 1;
2591   const int num_bits_height = aom_rb_read_literal(-1, "<num_bits_height>", rb, 4) + 1;
2592   const int max_frame_width = aom_rb_read_literal(-1, "<max_frame_width>", rb, num_bits_width) + 1;
2593   const int max_frame_height = aom_rb_read_literal(-1, "<max_frame_height>", rb, num_bits_height) + 1;
2594 
2595   seq_params->num_bits_width = num_bits_width;
2596   seq_params->num_bits_height = num_bits_height;
2597 #endif
2598   seq_params->max_frame_width = params->p.max_frame_width; //max_frame_width;
2599   seq_params->max_frame_height = params->p.max_frame_height; //max_frame_height;
2600 
2601   if (seq_params->reduced_still_picture_hdr) {
2602     seq_params->frame_id_numbers_present_flag = 0;
2603   } else {
2604     seq_params->frame_id_numbers_present_flag = params->p.frame_id_numbers_present_flag; //aom_rb_read_bit(-1, "<frame_id_numbers_present_flag>", rb);
2605   }
2606   if (seq_params->frame_id_numbers_present_flag) {
2607     // We must always have delta_frame_id_length < frame_id_length,
2608     // in order for a frame to be referenced with a unique delta.
2609     // Avoid wasting bits by using a coding that enforces this restriction.
2610 #ifdef ORI_CODE
2611     seq_params->delta_frame_id_length = aom_rb_read_literal(-1, "<delta_frame_id_length>", rb, 4) + 2;
2612     seq_params->frame_id_length = params->p.frame_id_length  + aom_rb_read_literal(-1, "<frame_id_length>", rb, 3) + seq_params->delta_frame_id_length + 1;
2613 #else
2614     seq_params->delta_frame_id_length = params->p.delta_frame_id_length;
2615     seq_params->frame_id_length = params->p.frame_id_length  + seq_params->delta_frame_id_length + 1;
2616 #endif
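    /*
     * Illustrative example: frame_id_length is derived here as
     * params->p.frame_id_length + delta_frame_id_length + 1, so e.g.
     * 2 + 14 + 1 == 17 exceeds 16 and is rejected by the check below,
     * while 5 + 9 + 1 == 15 is accepted.
     */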
2617     if (seq_params->frame_id_length > 16)
2618       aom_internal_error(&cm->error, AOM_CODEC_CORRUPT_FRAME,
2619                          "Invalid frame_id_length");
2620   }
2621 #ifdef ORI_CODE
2622   setup_sb_size(seq_params, rb);
2623   seq_params->enable_filter_intra = aom_rb_read_bit(-1, "<enable_filter_intra>", rb);
2624   seq_params->enable_intra_edge_filter = aom_rb_read_bit(-1, "<enable_intra_edge_filter>", rb);
2625 #endif
2626 
2627   if (seq_params->reduced_still_picture_hdr) {
2628     seq_params->enable_interintra_compound = 0;
2629     seq_params->enable_masked_compound = 0;
2630     seq_params->enable_warped_motion = 0;
2631     seq_params->enable_dual_filter = 0;
2632     seq_params->order_hint_info.enable_order_hint = 0;
2633     seq_params->order_hint_info.enable_dist_wtd_comp = 0;
2634     seq_params->order_hint_info.enable_ref_frame_mvs = 0;
2635     seq_params->force_screen_content_tools = 2;  // SELECT_SCREEN_CONTENT_TOOLS
2636     seq_params->force_integer_mv = 2;            // SELECT_INTEGER_MV
2637     seq_params->order_hint_info.order_hint_bits_minus_1 = -1;
2638   } else {
2639 #ifdef ORI_CODE
2640     seq_params->enable_interintra_compound = aom_rb_read_bit(-1, "<enable_interintra_compound>", rb);
2641     seq_params->enable_masked_compound = aom_rb_read_bit(-1, "<enable_masked_compound>", rb);
2642     seq_params->enable_warped_motion = aom_rb_read_bit(-1, "<enable_warped_motion>", rb);
2643     seq_params->enable_dual_filter = aom_rb_read_bit(-1, "<enable_dual_filter>", rb);
2644 #endif
2645     seq_params->order_hint_info.enable_order_hint = params->p.enable_order_hint; //aom_rb_read_bit(-1, "<order_hint_info.enable_order_hint>", rb);
2646     seq_params->order_hint_info.enable_dist_wtd_comp =
2647         seq_params->order_hint_info.enable_order_hint ? params->p.enable_dist_wtd_comp : 0; //aom_rb_read_bit(-1, "<order_hint_info.enable_dist_wtd_comp>", rb) : 0;
2648     seq_params->order_hint_info.enable_ref_frame_mvs =
2649         seq_params->order_hint_info.enable_order_hint ? params->p.enable_ref_frame_mvs : 0; //aom_rb_read_bit(-1, "<order_hint_info.enable_ref_frame_mvs>", rb) : 0;
2650 
2651 #ifdef ORI_CODE
2652     if (aom_rb_read_bit(-1, defmark, rb)) {
2653       seq_params->force_screen_content_tools =
2654           2;  // SELECT_SCREEN_CONTENT_TOOLS
2655     } else {
2656       seq_params->force_screen_content_tools = aom_rb_read_bit(-1, defmark, rb);
2657     }
2658 
2659     if (seq_params->force_screen_content_tools > 0) {
2660       if (aom_rb_read_bit(-1, defmark, rb)) {
2661         seq_params->force_integer_mv = 2;  // SELECT_INTEGER_MV
2662       } else {
2663         seq_params->force_integer_mv = aom_rb_read_bit(-1, defmark, rb);
2664       }
2665     } else {
2666       seq_params->force_integer_mv = 2;  // SELECT_INTEGER_MV
2667     }
2668 #endif
2669     seq_params->order_hint_info.order_hint_bits_minus_1 =
2670         seq_params->order_hint_info.enable_order_hint
2671             ? params->p.order_hint_bits_minus_1 /*aom_rb_read_literal(-1, "<order_hint_info.order_hint_bits_minus_1>", rb, 3)*/
2672             : -1;
2673   }
2674   seq_params->enable_superres = params->p.enable_superres; //aom_rb_read_bit(-1, defmark, rb);
2675 
2676 #ifdef ORI_CODE
2677   seq_params->enable_cdef = aom_rb_read_bit(-1, defmark, rb);
2678   seq_params->enable_restoration = aom_rb_read_bit(-1, defmark, rb);
2679 #endif
2680 }
2681 
2682 #ifdef ORI_CODE
2683 void av1_read_op_parameters_info(AV1_COMMON *const cm,
2684                                  struct aom_read_bit_buffer *rb, int op_num) {
2685   // The cm->op_params array has MAX_NUM_OPERATING_POINTS + 1 elements.
2686   if (op_num > MAX_NUM_OPERATING_POINTS) {
2687     aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
2688                        "AV1 does not support %d decoder model operating points",
2689                        op_num + 1);
2690   }
2691 
2692   cm->op_params[op_num].decoder_buffer_delay = aom_rb_read_unsigned_literal(-1, defmark,
2693       rb, cm->buffer_model.encoder_decoder_buffer_delay_length);
2694 
2695   cm->op_params[op_num].encoder_buffer_delay = aom_rb_read_unsigned_literal(-1, defmark,
2696       rb, cm->buffer_model.encoder_decoder_buffer_delay_length);
2697 
2698   cm->op_params[op_num].low_delay_mode_flag = aom_rb_read_bit(-1, defmark, rb);
2699 }
2700 #endif
2701 
is_valid_seq_level_idx(AV1_LEVEL seq_level_idx)2702 static int is_valid_seq_level_idx(AV1_LEVEL seq_level_idx) {
2703 	return seq_level_idx < SEQ_LEVELS || seq_level_idx == SEQ_LEVEL_MAX;
2704 }
2705 
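/* Parse a sequence-header OBU. Unlike the reference libaom parser this does
 * not consume a bit buffer: the syntax elements arrive pre-parsed in the
 * param_u union (filled in by the caller), so the aom_rb_read_*() calls are
 * kept only as comments. The header is decoded into a local SequenceHeader,
 * committed to cm->seq_params at the end, and sequence_header_changed is
 * flagged if it differs from a previously decoded header. In this port the
 * function always returns 0; failures are reported via cm->error.error_code.
 */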
read_sequence_header_obu(AV1Decoder * pbi,union param_u * params)2706 static uint32_t read_sequence_header_obu(AV1Decoder *pbi,
2707                                          union param_u *params) {
2708   AV1_COMMON *const cm = &pbi->common;
2709   int i;
2710   int operating_point;
2711   // Verify rb has been configured to report errors.
2712   //assert(rb->error_handler);
2713 
2714   // Use a local variable to store the information as we decode. At the end,
2715   // if no errors have occurred, cm->seq_params is updated.
2716   SequenceHeader sh = cm->seq_params;
2717   SequenceHeader *const seq_params = &sh;
2718 
2719   seq_params->profile = params->p.profile; //av1_read_profile(rb);
2720   if (seq_params->profile > CONFIG_MAX_DECODE_PROFILE) {
2721     cm->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
2722     return 0;
2723   }
2724 
2725   // Still picture or not
2726   seq_params->still_picture = params->p.still_picture; //aom_rb_read_bit(-1, "<still_picture>", rb);
2727   seq_params->reduced_still_picture_hdr = params->p.reduced_still_picture_hdr; //aom_rb_read_bit(-1, "<reduced_still_picture_hdr>", rb);
2728   // Video must have reduced_still_picture_hdr = 0
2729   if (!seq_params->still_picture && seq_params->reduced_still_picture_hdr) {
2730     cm->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
2731     return 0;
2732   }
2733 
2734   if (seq_params->reduced_still_picture_hdr) {
2735     cm->timing_info_present = 0;
2736     seq_params->decoder_model_info_present_flag = 0;
2737     seq_params->display_model_info_present_flag = 0;
2738     seq_params->operating_points_cnt_minus_1 = 0;
2739     seq_params->operating_point_idc[0] = 0;
2740     //if (!read_bitstream_level(0, "<seq_level_idx>", &seq_params->seq_level_idx[0], rb)) {
2741     if (!is_valid_seq_level_idx(params->p.seq_level_idx[0])) {
2742       cm->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
2743       return 0;
2744     }
2745     seq_params->tier[0] = 0;
2746     cm->op_params[0].decoder_model_param_present_flag = 0;
2747     cm->op_params[0].display_model_param_present_flag = 0;
2748   } else {
2749     cm->timing_info_present = params->p.timing_info_present; //aom_rb_read_bit(-1, "<timing_info_present>", rb);  // timing_info_present_flag
2750     if (cm->timing_info_present) {
2751 #ifdef ORI_CODE
2752       av1_read_timing_info_header(cm, rb);
2753 #endif
2754       seq_params->decoder_model_info_present_flag = params->p.decoder_model_info_present_flag; //aom_rb_read_bit(-1, "<decoder_model_info_present_flag>", rb);
2755 #ifdef ORI_CODE
2756       if (seq_params->decoder_model_info_present_flag)
2757         av1_read_decoder_model_info(cm, rb);
2758 #endif
2759     } else {
2760       seq_params->decoder_model_info_present_flag = 0;
2761     }
2762 #ifdef ORI_CODE
2763     seq_params->display_model_info_present_flag = aom_rb_read_bit(-1, "<display_model_info_present_flag>", rb);
2764 #endif
2765     seq_params->operating_points_cnt_minus_1 = params->p.operating_points_cnt_minus_1;
2766         //aom_rb_read_literal(-1, "<operating_points_cnt_minus_1>", rb, OP_POINTS_CNT_MINUS_1_BITS);
2767     for (i = 0; i < seq_params->operating_points_cnt_minus_1 + 1; i++) {
2768       seq_params->operating_point_idc[i] = params->p.operating_point_idc[i];
2769           //aom_rb_read_literal(i, "<operating_point_idc>", rb, OP_POINTS_IDC_BITS);
2770       //if (!read_bitstream_level(i, "<seq_level_idx>", &seq_params->seq_level_idx[i], rb)) {
2771       if (!is_valid_seq_level_idx(params->p.seq_level_idx[i])) {
2772         cm->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
2773         return 0;
2774       }
2775       // This is the seq_level_idx[i] > 7 check in the spec. seq_level_idx 7
2776       // is equivalent to level 3.3.
2777 #ifdef ORI_CODE
2778       if (seq_params->seq_level_idx[i] >= SEQ_LEVEL_4_0)
2779         seq_params->tier[i] = aom_rb_read_bit(i, "<tier>", rb);
2780       else
2781         seq_params->tier[i] = 0;
2782 #endif
2783       if (seq_params->decoder_model_info_present_flag) {
2784         cm->op_params[i].decoder_model_param_present_flag = params->p.decoder_model_param_present_flag[i]; //aom_rb_read_bit(-1, defmark, rb);
2785 #ifdef ORI_CODE
2786         if (cm->op_params[i].decoder_model_param_present_flag)
2787           av1_read_op_parameters_info(cm, rb, i);
2788 #endif
2789       } else {
2790         cm->op_params[i].decoder_model_param_present_flag = 0;
2791       }
2792 #ifdef ORI_CODE
2793       if (cm->timing_info_present &&
2794           (cm->timing_info.equal_picture_interval ||
2795            cm->op_params[i].decoder_model_param_present_flag)) {
2796         cm->op_params[i].bitrate = av1_max_level_bitrate(
2797             seq_params->profile, seq_params->seq_level_idx[i],
2798             seq_params->tier[i]);
2799         // Level with seq_level_idx = 31 returns a high "dummy" bitrate to pass
2800         // the check
2801         if (cm->op_params[i].bitrate == 0)
2802           aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
2803                              "AV1 does not support this combination of "
2804                              "profile, level, and tier.");
2805         // Buffer size in bits/s is bitrate in bits/s * 1 s
2806         cm->op_params[i].buffer_size = cm->op_params[i].bitrate;
2807       }
2808 #endif
2809       if (cm->timing_info_present && cm->timing_info.equal_picture_interval &&
2810           !cm->op_params[i].decoder_model_param_present_flag) {
2811         // When the decoder_model_parameters are not sent for this op, set
2812         // the default ones that can be used with the resource availability mode
2813         cm->op_params[i].decoder_buffer_delay = 70000;
2814         cm->op_params[i].encoder_buffer_delay = 20000;
2815         cm->op_params[i].low_delay_mode_flag = 0;
2816       }
2817 
2818 #ifdef ORI_CODE
2819       if (seq_params->display_model_info_present_flag) {
2820         cm->op_params[i].display_model_param_present_flag = aom_rb_read_bit(-1, defmark, rb);
2821         if (cm->op_params[i].display_model_param_present_flag) {
2822           cm->op_params[i].initial_display_delay =
2823               aom_rb_read_literal(-1, defmark, rb, 4) + 1;
2824           if (cm->op_params[i].initial_display_delay > 10)
2825             aom_internal_error(
2826                 &cm->error, AOM_CODEC_UNSUP_BITSTREAM,
2827                 "AV1 does not support more than 10 decoded frames delay");
2828         } else {
2829           cm->op_params[i].initial_display_delay = 10;
2830         }
2831       } else {
2832         cm->op_params[i].display_model_param_present_flag = 0;
2833         cm->op_params[i].initial_display_delay = 10;
2834       }
2835 #endif
2836     }
2837   }
2838   // This decoder supports all levels.  Choose operating point provided by
2839   // external means
2840   operating_point = pbi->operating_point;
2841   if (operating_point < 0 ||
2842       operating_point > seq_params->operating_points_cnt_minus_1)
2843     operating_point = 0;
2844   pbi->current_operating_point =
2845       seq_params->operating_point_idc[operating_point];
2846   if (aom_get_num_layers_from_operating_point_idc(
2847           pbi->current_operating_point, &cm->number_spatial_layers,
2848           &cm->number_temporal_layers) != AOM_CODEC_OK) {
2849     cm->error.error_code = AOM_CODEC_ERROR;
2850     return 0;
2851   }
2852 
2853   av1_read_sequence_header(cm, params, seq_params);
2854 #ifdef ORI_CODE
2855   av1_read_color_config(rb, pbi->allow_lowbitdepth, seq_params, &cm->error);
2856   if (!(seq_params->subsampling_x == 0 && seq_params->subsampling_y == 0) &&
2857       !(seq_params->subsampling_x == 1 && seq_params->subsampling_y == 1) &&
2858       !(seq_params->subsampling_x == 1 && seq_params->subsampling_y == 0)) {
2859     aom_internal_error(&cm->error, AOM_CODEC_UNSUP_BITSTREAM,
2860                        "Only 4:4:4, 4:2:2 and 4:2:0 are currently supported, "
2861                        "%d %d subsampling is not supported.\n",
2862                        seq_params->subsampling_x, seq_params->subsampling_y);
2863   }
2864   seq_params->film_grain_params_present = aom_rb_read_bit(-1, "<film_grain_params_present>", rb);
2865 
2866   if (av1_check_trailing_bits(pbi, rb) != 0) {
2867     // cm->error.error_code is already set.
2868     return 0;
2869   }
2870 #endif
2871 
2872   // If a sequence header has been decoded before, we check if the new
2873   // one is consistent with the old one.
2874   if (pbi->sequence_header_ready) {
2875     if (!are_seq_headers_consistent(&cm->seq_params, seq_params))
2876       pbi->sequence_header_changed = 1;
2877   }
2878 
2879   cm->seq_params = *seq_params;
2880   pbi->sequence_header_ready = 1;
2881   return 0;
2882 
2883 }
2884 
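/* Dispatch one already-framed OBU to the buffer manager. The OBU framing is
 * done outside this file, so only obu_header.type is filled in from the
 * obu_type argument. Returns 1 when frame decoding is finished for the
 * current frame, 0 when more data is needed, and -1 on error (with
 * cm->error.error_code set).
 */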
aom_decode_frame_from_obus(AV1Decoder * pbi,union param_u * params,int obu_type)2885 int aom_decode_frame_from_obus(AV1Decoder *pbi, union param_u *params, int obu_type)
2886 {
2887   AV1_COMMON *const cm = &pbi->common;
2888   ObuHeader obu_header;
2889   int frame_decoding_finished = 0;
2890   uint32_t frame_header_size = 0;
2891 
2892   //struct aom_read_bit_buffer rb;
2893   size_t payload_size = 0;
2894   size_t decoded_payload_size = 0;
2895   size_t obu_payload_offset = 0;
2896   //size_t bytes_read = 0;
2897 
2898   memset(&obu_header, 0, sizeof(obu_header));
2899 #ifdef ORI_CODE
2900   pbi->seen_frame_header = 0;
2901 #else
2902   /* set in the test.c*/
2903 #endif
2904 
2905   obu_header.type = obu_type;
2906   pbi->cur_obu_type = obu_header.type;
2907   if (av1_is_debug(AOM_DEBUG_PRINT_LIST_INFO))
2908     dump_params(pbi, params);
2909   switch (obu_header.type) {
2910     case OBU_SEQUENCE_HEADER:
2911         decoded_payload_size = read_sequence_header_obu(pbi, params);
2912         if (cm->error.error_code != AOM_CODEC_OK) return -1;
2913         break;
2914 
2915     case OBU_FRAME_HEADER:
2916     case OBU_REDUNDANT_FRAME_HEADER:
2917     case OBU_FRAME:
2918         if (obu_header.type == OBU_REDUNDANT_FRAME_HEADER) {
2919           if (!pbi->seen_frame_header) {
2920             cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
2921             return -1;
2922           }
2923         } else {
2924           // OBU_FRAME_HEADER or OBU_FRAME.
2925           if (pbi->seen_frame_header) {
2926             cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
2927             return -1;
2928           }
2929         }
2930         // Only decode first frame header received
2931         if (!pbi->seen_frame_header ||
2932             (cm->large_scale_tile && !pbi->camera_frame_header_ready)) {
2933           frame_header_size = av1_decode_frame_headers_and_setup(
2934               pbi, /*&rb, data, p_data_end,*/obu_header.type != OBU_FRAME, params);
2935           pbi->seen_frame_header = 1;
2936           if (!pbi->ext_tile_debug && cm->large_scale_tile)
2937             pbi->camera_frame_header_ready = 1;
2938         } else {
2939           // TODO(wtc): Verify that the frame_header_obu is identical to the
2940           // original frame_header_obu. For now just skip frame_header_size
2941           // bytes in the bit buffer.
2942           if (frame_header_size > payload_size) {
2943             cm->error.error_code = AOM_CODEC_CORRUPT_FRAME;
2944             return -1;
2945           }
2946 #ifdef ORI_CODE
2947           assert(rb.bit_offset == 0);
2948           rb.bit_offset = 8 * frame_header_size;
2949 #endif
2950         }
2951 
2952         decoded_payload_size = frame_header_size;
2953         pbi->frame_header_size = frame_header_size;
2954 
2955         if (cm->show_existing_frame) {
2956           if (obu_header.type == OBU_FRAME) {
2957             cm->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
2958             return -1;
2959           }
2960           frame_decoding_finished = 1;
2961           pbi->seen_frame_header = 0;
2962           break;
2963         }
2964 
2965         // In large scale tile coding, decode the common camera frame header
2966         // before any tile list OBU.
2967         if (!pbi->ext_tile_debug && pbi->camera_frame_header_ready) {
2968           frame_decoding_finished = 1;
2969           // Skip the rest of the frame data.
2970           decoded_payload_size = payload_size;
2971           // Update data_end.
2972 #ifdef ORI_CODE
2973           *p_data_end = data_end;
2974 #endif
2975           break;
2976         }
2977 #if 0 //def AML
2978         frame_decoding_finished = 1;
2979 #endif
2980         if (obu_header.type != OBU_FRAME) break;
2981         obu_payload_offset = frame_header_size;
2982         // Byte align the reader before reading the tile group.
2983         // byte_alignment() has set cm->error.error_code if it returns -1.
2984 #ifdef ORI_CODE
2985         if (byte_alignment(cm, &rb)) return -1;
2986         AOM_FALLTHROUGH_INTENDED;  // fall through to read tile group.
2987 #endif
2988     default:
2989       break;
2990   }
2991   return frame_decoding_finished;
2992 }
2993 
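/* Map a RefCntBuffer pointer back to its index in the buffer pool's
 * frame_bufs[] array. Returns -1 for a NULL buffer (and FRAME_BUFFERS if the
 * pointer is not in the pool). Used by the dump helpers below so buffers can
 * be reported by index.
 */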
get_buffer_index(AV1Decoder * pbi,RefCntBuffer * buffer)2994 int get_buffer_index(AV1Decoder *pbi, RefCntBuffer *buffer)
2995 {
2996 	AV1_COMMON *const cm = &pbi->common;
2997 	int i = -1;
2998 
2999 	if (buffer) {
3000 		for (i = 0; i < FRAME_BUFFERS; i++) {
3001 			RefCntBuffer *buf =
3002 				&cm->buffer_pool->frame_bufs[i];
3003 			if (buf == buffer) {
3004 				break;
3005 			}
3006 		}
3007 	}
3008 	return i;
3009 }
3010 
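/* Print the fields of one RefCntBuffer (reference counts, geometry, canvas
 * indices, compressed-header address, ref_deltas and ref_order_hints) for
 * debugging. Callers print the index prefix and the trailing newline.
 */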
dump_buffer(RefCntBuffer * buf)3011 void dump_buffer(RefCntBuffer *buf)
3012 {
3013 	int i;
3014 	pr_info("ref_count %d, vf_ref %d, order_hint %d, w/h(%d,%d) showable_frame %d frame_type %d canvas(%d,%d) w/h(%d,%d) mi_c/r(%d,%d) header 0x%x ref_deltas(",
3015 	buf->ref_count, buf->buf.vf_ref, buf->order_hint, buf->width, buf->height, buf->showable_frame, buf->frame_type,
3016 	buf->buf.mc_canvas_y, buf->buf.mc_canvas_u_v,
3017 	buf->buf.y_crop_width, buf->buf.y_crop_height,
3018 	buf->mi_cols, buf->mi_rows,
3019 	buf->buf.header_adr);
3020 	for (i = 0; i < REF_FRAMES; i++)
3021 		pr_info("%d,", buf->ref_deltas[i]);
3022 	pr_info("), ref_order_hints(");
3023 
3024 	for (i = 0; i < INTER_REFS_PER_FRAME; i++)
3025 		pr_info("%d ", buf->ref_order_hints[i]);
3026 	pr_info(")");
3027 }
3028 
dump_ref_buffer_info(AV1Decoder * pbi,int i)3029 void dump_ref_buffer_info(AV1Decoder *pbi, int i)
3030 {
3031 	AV1_COMMON *const cm = &pbi->common;
3032 	pr_info("remapped_ref_idx %d, ref_frame_sign_bias %d, ref_frame_id %d, valid_for_referencing %d ref_frame_side %d ref_frame_map idx %d, next_ref_frame_map idx %d",
3033 		cm->remapped_ref_idx[i],
3034 		cm->ref_frame_sign_bias[i],
3035 		cm->ref_frame_id[i],
3036 		cm->valid_for_referencing[i],
3037 		cm->ref_frame_side[i],
3038 		get_buffer_index(pbi, cm->ref_frame_map[i]),
3039 		get_buffer_index(pbi, cm->next_ref_frame_map[i]));
3040 }
3041 
dump_mv_refs(AV1Decoder * pbi)3042 void dump_mv_refs(AV1Decoder *pbi)
3043 {
3044   int i, j;
3045   AV1_COMMON *const cm = &pbi->common;
3046   for (i = 0; i < cm->mv_ref_id_index; i++) {
3047     pr_info("%d: ref_id %d cal_tpl_mvs %d mv_ref_offset: ",
3048       i, cm->mv_ref_id[i], cm->mv_cal_tpl_mvs[i]);
3049     for (j = 0; j < REF_FRAMES; j++)
3050         pr_info("%d ", cm->mv_ref_offset[i][j]);
3051     pr_info("\n");
3052   }
3053 }
3054 
dump_ref_spec_bufs(AV1Decoder * pbi)3055 void dump_ref_spec_bufs(AV1Decoder *pbi)
3056 {
3057   int i;
3058   AV1_COMMON *const cm = &pbi->common;
3059   for (i = 0; i < INTER_REFS_PER_FRAME; ++i) {
3060     PIC_BUFFER_CONFIG *pic_config = av1_get_ref_frame_spec_buf(cm, LAST_FRAME + i);
3061     if (pic_config == NULL) continue;
3062     pr_info("%d: index %d order_hint %d header 0x%x dw_header 0x%x canvas(%d,%d) mv_wr_start 0x%x lcu_total %d\n",
3063       i, pic_config->index,
3064       pic_config->order_hint,
3065       pic_config->header_adr,
3066 #ifdef AOM_AV1_MMU_DW
3067       pic_config->header_dw_adr,
3068 #else
3069       0,
3070 #endif
3071       pic_config->mc_canvas_y,
3072       pic_config->mc_canvas_u_v,
3073       pic_config->mpred_mv_wr_start_addr,
3074       pic_config->lcu_total
3075       );
3076   }
3077 }
3078 
3079 #ifdef SUPPORT_SCALE_FACTOR
dump_scale_factors(AV1Decoder * pbi)3080 void dump_scale_factors(AV1Decoder *pbi)
3081 {
3082   int i;
3083   AV1_COMMON *const cm = &pbi->common;
3084   for (i = LAST_FRAME; i <= ALTREF_FRAME; ++i) {
3085     struct scale_factors *const sf =
3086         get_ref_scale_factors(cm, i);
3087     if (sf)
3088       pr_info("%d: is_scaled %d x_scale_fp %d, y_scale_fp %d\n",
3089         i, av1_is_scaled(sf),
3090         sf->x_scale_fp, sf->y_scale_fp);
3091     else
3092       pr_info("%d: sf null\n", i);
3093   }
3094 }
3095 
3096 #endif
3097 
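/* Debug dump of the complete buffer-manager state: every pool entry, the
 * prev/cur frames, the REF_FRAMES mapping, the per-reference spec buffers,
 * the MV refs and (when enabled) the scale factors. The buffer pool lock is
 * held for the duration of the dump.
 */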
dump_buffer_status(AV1Decoder * pbi)3098 void dump_buffer_status(AV1Decoder *pbi)
3099 {
3100 	int i;
3101 	AV1_COMMON *const cm = &pbi->common;
3102 	BufferPool *const pool = cm->buffer_pool;
3103 	unsigned long flags;
3104 
3105 	lock_buffer_pool(pool, flags);
3106 
3107 	pr_info("%s: pbi %p cm %p cur_frame %p\n", __func__, pbi, cm, cm->cur_frame);
3108 
3109 	pr_info("Buffer Pool:\n");
3110 	for (i = 0; i < FRAME_BUFFERS; i++) {
3111 		RefCntBuffer *buf =
3112 			&cm->buffer_pool->frame_bufs[i];
3113 		pr_info("%d: ", i);
3114 		if (buf)
3115 			dump_buffer(buf);
3116 		pr_info("\n");
3117 	}
3118 
3119 	if (cm->prev_frame) {
3120 		pr_info("prev_frame (%d): ",
3121 			get_buffer_index(pbi, cm->prev_frame));
3122 		dump_buffer(cm->prev_frame);
3123 		pr_info("\n");
3124 	}
3125 	if (cm->cur_frame) {
3126 		pr_info("cur_frame (%d): ",
3127 			get_buffer_index(pbi, cm->cur_frame));
3128 		dump_buffer(cm->cur_frame);
3129 		pr_info("\n");
3130 	}
3131 	pr_info("REF_FRAMES Info(ref buf is ref_frame_map[remapped_ref_idx[i-1]], i=1~7):\n");
3132 	for (i = 0; i < REF_FRAMES; i++) {
3133 		pr_info("%d: ", i);
3134 		dump_ref_buffer_info(pbi, i);
3135 		pr_info("\n");
3136 	}
3137 	pr_info("Ref Spec Buffers:\n");
3138 	dump_ref_spec_bufs(pbi);
3139 
3140 	pr_info("MV refs:\n");
3141 	dump_mv_refs(pbi);
3142 
3143 #ifdef SUPPORT_SCALE_FACTOR
3144 	pr_info("Scale factors:\n");
3145 	dump_scale_factors(pbi);
3146 #endif
3147 	unlock_buffer_pool(pool, flags);
3148 }
3149 
3150 
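/* Table driving dump_params(): each entry records a field name, its element
 * count and its byte offset inside union param_u (computed with the classic
 * null-pointer offsetof idiom). dump_params() treats every element as a
 * 16-bit value, hence the "* 2" stride when it walks an array entry.
 */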
3151 struct param_dump_item_s {
3152 	unsigned int size;
3153 	char* name;
3154 	unsigned int adr_off;
3155 } param_dump_items[] = {
3156 	{1, "profile",                                   (unsigned long)&(((union param_u *)0)->p.profile                        )},
3157 	{1, "still_picture",                             (unsigned long)&(((union param_u *)0)->p.still_picture                  )},
3158 	{1, "reduced_still_picture_hdr",                 (unsigned long)&(((union param_u *)0)->p.reduced_still_picture_hdr      )},
3159 	{1, "decoder_model_info_present_flag",           (unsigned long)&(((union param_u *)0)->p.decoder_model_info_present_flag)},
3160 	{1, "max_frame_width",                           (unsigned long)&(((union param_u *)0)->p.max_frame_width                )},
3161 	{1, "max_frame_height",                          (unsigned long)&(((union param_u *)0)->p.max_frame_height               )},
3162 	{1, "frame_id_numbers_present_flag",             (unsigned long)&(((union param_u *)0)->p.frame_id_numbers_present_flag  )},
3163 	{1, "delta_frame_id_length",                     (unsigned long)&(((union param_u *)0)->p.delta_frame_id_length          )},
3164 	{1, "frame_id_length",                           (unsigned long)&(((union param_u *)0)->p.frame_id_length                )},
3165 	{1, "order_hint_bits_minus_1",                   (unsigned long)&(((union param_u *)0)->p.order_hint_bits_minus_1        )},
3166 	{1, "enable_order_hint",                         (unsigned long)&(((union param_u *)0)->p.enable_order_hint              )},
3167 	{1, "enable_dist_wtd_comp",                      (unsigned long)&(((union param_u *)0)->p.enable_dist_wtd_comp           )},
3168 	{1, "enable_ref_frame_mvs",                      (unsigned long)&(((union param_u *)0)->p.enable_ref_frame_mvs           )},
3169 	{1, "enable_superres",                           (unsigned long)&(((union param_u *)0)->p.enable_superres                )},
3170 	{1, "superres_scale_denominator",                (unsigned long)&(((union param_u *)0)->p.superres_scale_denominator     )},
3171 	{1, "show_existing_frame",                       (unsigned long)&(((union param_u *)0)->p.show_existing_frame            )},
3172 	{1, "frame_type",                                (unsigned long)&(((union param_u *)0)->p.frame_type                     )},
3173 	{1, "show_frame",                                (unsigned long)&(((union param_u *)0)->p.show_frame                     )},
3174 	{1, "e.r.r.o.r_resilient_mode",                  (unsigned long)&(((union param_u *)0)->p.error_resilient_mode           )},
3175 	{1, "refresh_frame_flags",                       (unsigned long)&(((union param_u *)0)->p.refresh_frame_flags            )},
3176 	{1, "showable_frame",                            (unsigned long)&(((union param_u *)0)->p.showable_frame                 )},
3177 	{1, "current_frame_id",                          (unsigned long)&(((union param_u *)0)->p.current_frame_id               )},
3178 	{1, "frame_size_override_flag",                  (unsigned long)&(((union param_u *)0)->p.frame_size_override_flag       )},
3179 	{1, "order_hint",                                (unsigned long)&(((union param_u *)0)->p.order_hint                     )},
3180 	{1, "primary_ref_frame",                         (unsigned long)&(((union param_u *)0)->p.primary_ref_frame              )},
3181 	{1, "frame_refs_short_signaling",                (unsigned long)&(((union param_u *)0)->p.frame_refs_short_signaling     )},
3182 	{1, "frame_width",                               (unsigned long)&(((union param_u *)0)->p.frame_width                    )},
3183 	{1, "dec_frame_width",                           (unsigned long)&(((union param_u *)0)->p.dec_frame_width                )},
3184 	{1, "frame_width_scaled",                        (unsigned long)&(((union param_u *)0)->p.frame_width_scaled             )},
3185 	{1, "frame_height",                              (unsigned long)&(((union param_u *)0)->p.frame_height                   )},
3186 	{1, "reference_mode",                            (unsigned long)&(((union param_u *)0)->p.reference_mode                 )},
3187 	{1, "update_parameters",                         (unsigned long)&(((union param_u *)0)->p.update_parameters              )},
3188 	{1, "film_grain_params_ref_idx",                 (unsigned long)&(((union param_u *)0)->p.film_grain_params_ref_idx      )},
3189 	{1, "allow_ref_frame_mvs",                       (unsigned long)&(((union param_u *)0)->p.allow_ref_frame_mvs            )},
3190 	{1, "lst_ref",                                   (unsigned long)&(((union param_u *)0)->p.lst_ref                        )},
3191 	{1, "gld_ref",                                   (unsigned long)&(((union param_u *)0)->p.gld_ref                        )},
3192 	{INTER_REFS_PER_FRAME, "remapped_ref_idx",       (unsigned long)&(((union param_u *)0)->p.remapped_ref_idx[0]            )},
3193 	{INTER_REFS_PER_FRAME, "delta_frame_id_minus_1", (unsigned long)&(((union param_u *)0)->p.delta_frame_id_minus_1[0]      )},
3194 	{REF_FRAMES, "ref_order_hint",                   (unsigned long)&(((union param_u *)0)->p.ref_order_hint[0]              )},
3195 };
3196 
dump_params(AV1Decoder * pbi,union param_u * params)3197 void dump_params(AV1Decoder *pbi, union param_u *params)
3198 {
3199 	int i, j;
3200 	unsigned char *start_adr = (unsigned char*)params;
3201 
3202 	pr_info("============ params:\n");
3203 	for (i = 0; i < sizeof(param_dump_items) / sizeof(param_dump_items[0]); i++) {
3204 		for (j = 0; j < param_dump_items[i].size; j++) {
3205 			if (param_dump_items[i].size > 1)
3206 				pr_info("%s(%d): 0x%x\n",
3207 				param_dump_items[i].name, j,
3208 				*((unsigned short*)(start_adr + param_dump_items[i].adr_off + j * 2)));
3209 			else
3210 				pr_info("%s: 0x%x\n", param_dump_items[i].name,
3211 				*((unsigned short*)(start_adr + param_dump_items[i].adr_off + j * 2)));
3212 		}
3213 	}
3214 }
3215 
3216 /*static void raw_write_image(AV1Decoder *pbi, PIC_BUFFER_CONFIG *sd)
3217 {
3218   printf("$$$$$$$ output image\n");
3219 }*/
3220 
3221 /*
3222   return 0, need decoding data
3223   1, decoding done
3224   -1, decoding error
3225 
3226 */
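/*
 * A minimal sketch of how these entry points appear intended to be driven.
 * The real caller lives outside this file (e.g. in the test/driver code);
 * parse_next_obu() and hw_params are placeholders, not APIs defined here,
 * and pbi is assumed to have been set up elsewhere:
 *
 *   union param_u hw_params;
 *   int obu_type;
 *   unsigned char new_data = 1;   // first OBU of a new compressed frame
 *
 *   while (parse_next_obu(&hw_params, &obu_type)) {
 *     int ret = av1_bufmgr_process(pbi, &hw_params, new_data, obu_type);
 *     new_data = 0;
 *     if (ret < 0)
 *       break;                       // decoding error
 *     if (ret == 1)
 *       av1_bufmgr_postproc(pbi, 1); // frame done: swap buffers, output pics
 *   }
 */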
av1_bufmgr_process(AV1Decoder * pbi,union param_u * params,unsigned char new_compressed_data,int obu_type)3227 int av1_bufmgr_process(AV1Decoder *pbi, union param_u *params,
3228   unsigned char new_compressed_data, int obu_type)
3229 {
3230   AV1_COMMON *const cm = &pbi->common;
3231   int j;
3232   // Release any pending output frames from the previous decoder_decode call.
3233   // We need to do this even if the decoder is being flushed or the input
3234   // arguments are invalid.
3235   BufferPool *const pool = cm->buffer_pool;
3236   int frame_decoded;
3237   av1_print2(AV1_DEBUG_BUFMGR_DETAIL, "%s: pbi %p cm %p cur_frame %p\n", __func__, pbi, cm, cm->cur_frame);
3238   av1_print2(AV1_DEBUG_BUFMGR_DETAIL, "%s: new_compressed_data= %d\n", __func__, new_compressed_data);
3239   for (j = 0; j < pbi->num_output_frames; j++) {
3240     decrease_ref_count(pbi, pbi->output_frames[j], pool);
3241   }
3242   pbi->num_output_frames = 0;
3243   //
3244   if (new_compressed_data) {
3245     if (assign_cur_frame_new_fb(cm) == NULL) {
3246       cm->error.error_code = AOM_CODEC_MEM_ERROR;
3247       return -1;
3248     }
3249     pbi->seen_frame_header = 0;
3250     av1_print2(AV1_DEBUG_BUFMGR_DETAIL, "New_compressed_data (%d)\n", new_compressed_data_count++);
3251 
3252   }
3253 
3254   frame_decoded =
3255       aom_decode_frame_from_obus(pbi, params, obu_type);
3256 
3257   if (pbi->cur_obu_type == OBU_FRAME_HEADER ||
3258           pbi->cur_obu_type == OBU_REDUNDANT_FRAME_HEADER ||
3259           pbi->cur_obu_type == OBU_FRAME) {
3260       if (av1_is_debug(AOM_DEBUG_PRINT_LIST_INFO)) {
3261         pr_info("after bufmgr (frame_decoded %d seen_frame_header %d): ",
3262           frame_decoded, pbi->seen_frame_header);
3263         dump_buffer_status(pbi);
3264       }
3265   }
3266   av1_print2(AV1_DEBUG_BUFMGR_DETAIL, "%s: pbi %p cm %p cur_frame %p\n", __func__, pbi, cm, cm->cur_frame);
3267 
3268   return frame_decoded;
3269 
3270 }
3271 
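/* Fetch the index-th frame queued for output by the last bufmgr call.
 * Returns 0 and sets *sd on success, or -1 once index runs past
 * num_output_frames (used as the loop terminator in av1_bufmgr_postproc()).
 */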
av1_get_raw_frame(AV1Decoder * pbi,size_t index,PIC_BUFFER_CONFIG ** sd)3272 int av1_get_raw_frame(AV1Decoder *pbi, size_t index, PIC_BUFFER_CONFIG **sd) {
3273   if (index >= pbi->num_output_frames) return -1;
3274   *sd = &pbi->output_frames[index]->buf;
3275   //*grain_params = &pbi->output_frames[index]->film_grain_params;
3276   //aom_clear_system_state();
3277   return 0;
3278 }
3279 
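/* Post-processing after an OBU has been handled: swap the frame buffers,
 * optionally dump the resulting buffer state, clear decoding_first_frame
 * once a frame has been decoded, then walk the output-frame list and hand
 * each picture to av1_raw_write_image().
 */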
av1_bufmgr_postproc(AV1Decoder * pbi,unsigned char frame_decoded)3280 int av1_bufmgr_postproc(AV1Decoder *pbi, unsigned char frame_decoded)
3281 {
3282     PIC_BUFFER_CONFIG *sd;
3283     int index;
3284 #if 0
3285     if (frame_decoded) {
3286       printf("before swap_frame_buffers: ");
3287       dump_buffer_status(pbi);
3288     }
3289 #endif
3290     swap_frame_buffers(pbi, frame_decoded);
3291     if (frame_decoded) {
3292       if (av1_is_debug(AOM_DEBUG_PRINT_LIST_INFO)) {
3293         pr_info("after swap_frame_buffers: ");
3294         dump_buffer_status(pbi);
3295       }
3296     }
3297     if (frame_decoded) {
3298       pbi->decoding_first_frame = 0;
3299     }
3300 
3301 
3302     for (index = 0;;index++) {
3303       if (av1_get_raw_frame(pbi, index, &sd) < 0)
3304           break;
3305       av1_raw_write_image(pbi, sd);
3306     }
3307     return 0;
3308 }
3309 
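/* In this buffer-manager port no pixel memory is (re)allocated here; the
 * call only records the crop size and order hint on the picture buffer.
 * The actual surfaces are presumably managed by the driver/hardware.
 */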
aom_realloc_frame_buffer(AV1_COMMON * cm,PIC_BUFFER_CONFIG * pic,int width,int height,unsigned int order_hint)3310 int aom_realloc_frame_buffer(AV1_COMMON *cm, PIC_BUFFER_CONFIG *pic,
3311   int width, int height, unsigned int order_hint)
3312 {
3313   av1_print2(AV1_DEBUG_BUFMGR_DETAIL, "%s, index 0x%x, width 0x%x, height 0x%x order_hint 0x%x\n",
3314     __func__, pic->index, width, height, order_hint);
3315   pic->y_crop_width = width;
3316   pic->y_crop_height = height;
3317   pic->order_hint = order_hint;
3318   return 0;
3319 }
3320 
3321 
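/* True when there is a current frame and it is neither a KEY_FRAME (checked
 * on cm->cur_frame) nor an INTRA_ONLY_FRAME (checked on cm->current_frame).
 */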
av1_frame_is_inter(const AV1_COMMON * const cm)3322 unsigned char av1_frame_is_inter(const AV1_COMMON *const cm) {
3323   unsigned char is_inter = cm->cur_frame && (cm->cur_frame->frame_type != KEY_FRAME)
3324      && (cm->current_frame.frame_type != INTRA_ONLY_FRAME);
3325   return is_inter;
3326 }
3327 
av1_get_ref_frame_spec_buf(const AV1_COMMON * const cm,const MV_REFERENCE_FRAME ref_frame)3328 PIC_BUFFER_CONFIG *av1_get_ref_frame_spec_buf(
3329     const AV1_COMMON *const cm, const MV_REFERENCE_FRAME ref_frame) {
3330   RefCntBuffer *buf = get_ref_frame_buf(cm, ref_frame);
3331   if (buf) {
3332     buf->buf.order_hint = buf->order_hint;
3333     return &(buf->buf);
3334   }
3335   return NULL;
3336 }
3337 
av1_get_ref_scale_factors(AV1_COMMON * const cm,const MV_REFERENCE_FRAME ref_frame)3338 struct scale_factors *av1_get_ref_scale_factors(
3339   AV1_COMMON *const cm, const MV_REFERENCE_FRAME ref_frame)
3340 {
3341   return get_ref_scale_factors(cm, ref_frame);
3342 }
3343 
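/* Latch next_ref_frame_map into next_used_ref_frame_map for the slots that
 * will actually be refreshed. Slots covered by refresh_frame_flags are
 * always copied; the remaining slots are copied unless this is a
 * show_existing_frame without a decoder-state reset. Skipped entirely once
 * the camera frame header is ready (large scale tile path).
 */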
av1_set_next_ref_frame_map(AV1Decoder * pbi)3344 void av1_set_next_ref_frame_map(AV1Decoder *pbi) {
3345   int ref_index = 0;
3346   int mask;
3347   AV1_COMMON *const cm = &pbi->common;
3348   int check_on_show_existing_frame;
3349   av1_print2(AV1_DEBUG_BUFMGR_DETAIL, "%s, %d, mask 0x%x, show_existing_frame %d, reset_decoder_state %d\n",
3350     __func__, pbi->camera_frame_header_ready,
3351     cm->current_frame.refresh_frame_flags,
3352     cm->show_existing_frame,
3353     pbi->reset_decoder_state
3354     );
3355   if (!pbi->camera_frame_header_ready) {
3356     for (mask = cm->current_frame.refresh_frame_flags; mask; mask >>= 1) {
3357       cm->next_used_ref_frame_map[ref_index] = cm->next_ref_frame_map[ref_index];
3358       ++ref_index;
3359     }
3360 
3361     check_on_show_existing_frame =
3362         !cm->show_existing_frame || pbi->reset_decoder_state;
3363     for (; ref_index < REF_FRAMES && check_on_show_existing_frame;
3364          ++ref_index) {
3365       cm->next_used_ref_frame_map[ref_index] = cm->next_ref_frame_map[ref_index];
3366     }
3367   }
3368 }
3369 
av1_get_next_used_ref_info(const AV1_COMMON * const cm,int i)3370 unsigned int av1_get_next_used_ref_info(
3371     const AV1_COMMON *const cm, int i) {
3372   /*
3373   i = 0~1: order_hint map (4 entries packed per 32-bit word)
3374   i = 2~9: size of map[i-2]; i = 10~11: buffer index map
3375   */
3376   unsigned int info = 0;
3377   int j;
3378   if (i < 2) {
3379     /*next_used_ref_frame_map has 8 items*/
3380     for (j = 0; j < 4; j++) {
3381       RefCntBuffer *buf =
3382         cm->next_used_ref_frame_map[(i * 4) + j];
3383       if (buf)
3384         info |= ((buf->buf.order_hint & 0xff)
3385           << (j * 8));
3386     }
3387   } else if (i < 10) {
3388     RefCntBuffer *buf =
3389       cm->next_used_ref_frame_map[i-2];
3390     if (buf)
3391       info = (buf->buf.y_crop_width << 16) | (buf->buf.y_crop_height & 0xffff);
3392   } else {
3393     for (j = 0; j < 4; j++) {
3394       RefCntBuffer *buf =
3395         cm->next_used_ref_frame_map[((i - 10) * 4) + j];
3396       if (buf)
3397         info |= ((buf->buf.index & 0xff)
3398           << (j * 8));
3399     }
3400   }
3401   return info;
3402 }
3403 
av1_get_primary_ref_frame_buf(const AV1_COMMON * const cm)3404 RefCntBuffer *av1_get_primary_ref_frame_buf(
3405   const AV1_COMMON *const cm)
3406 {
3407   return get_primary_ref_frame_buf(cm);
3408 }
3409