/*
 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#include <assert.h>
#include <limits.h>
#include <stdio.h>

#include "config/av1_rtcd.h"
#include "config/aom_dsp_rtcd.h"
#include "config/aom_scale_rtcd.h"

#include "aom_dsp/aom_dsp_common.h"
#include "aom_mem/aom_mem.h"
#include "aom_ports/system_state.h"
#include "aom_ports/aom_once.h"
#include "aom_ports/aom_timer.h"
#include "aom_scale/aom_scale.h"
#include "aom_util/aom_thread.h"

#include "av1/common/alloccommon.h"
#include "av1/common/av1_loopfilter.h"
#include "av1/common/onyxc_int.h"
#include "av1/common/quant_common.h"
#include "av1/common/reconinter.h"
#include "av1/common/reconintra.h"

#include "av1/decoder/decodeframe.h"
#include "av1/decoder/decoder.h"
#include "av1/decoder/detokenize.h"
#include "av1/decoder/obu.h"

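// One-time initialization shared by all decoder instances: fills in the
// run-time CPU dispatch tables (rtcd) and the intra-prediction and wedge-mask
// lookup tables. Run through aom_once() from av1_decoder_create().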
static void initialize_dec(void) {
  av1_rtcd();
  aom_dsp_rtcd();
  aom_scale_rtcd();
  av1_init_intra_predictors();
  av1_init_wedge_masks();
}

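// Points the mode-info pointers at the allocated base arrays and clears the
// visible mode-info grid for the current frame size.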
static void dec_setup_mi(AV1_COMMON *cm) {
  cm->mi = cm->mip;
  cm->mi_grid_visible = cm->mi_grid_base;
  memset(cm->mi_grid_base, 0,
         cm->mi_stride * cm->mi_rows * sizeof(*cm->mi_grid_base));
}

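// Allocates the decoder-side mode-info array and pointer grid for mi_size
// units. Returns 0 on success and 1 on allocation failure.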
static int av1_dec_alloc_mi(AV1_COMMON *cm, int mi_size) {
  cm->mip = aom_calloc(mi_size, sizeof(*cm->mip));
  if (!cm->mip) return 1;
  cm->mi_alloc_size = mi_size;
  cm->mi_grid_base =
      (MB_MODE_INFO **)aom_calloc(mi_size, sizeof(MB_MODE_INFO *));
  if (!cm->mi_grid_base) return 1;
  return 0;
}

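// Frees the mode-info buffers allocated by av1_dec_alloc_mi() and resets the
// recorded allocation size.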
static void dec_free_mi(AV1_COMMON *cm) {
  aom_free(cm->mip);
  cm->mip = NULL;
  aom_free(cm->mi_grid_base);
  cm->mi_grid_base = NULL;
  cm->mi_alloc_size = 0;
}

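// Allocates and zero-initializes an AV1Decoder instance. Any allocation
// failure inside the setjmp() region longjmps back here, where the partially
// constructed decoder is torn down and NULL is returned.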
AV1Decoder *av1_decoder_create(BufferPool *const pool) {
  AV1Decoder *volatile const pbi = aom_memalign(32, sizeof(*pbi));
  if (!pbi) return NULL;
  av1_zero(*pbi);

  AV1_COMMON *volatile const cm = &pbi->common;

  // The jmp_buf is valid only for the duration of the function that calls
  // setjmp(). Therefore, this function must reset the 'setjmp' field to 0
  // before it returns.
  if (setjmp(cm->error.jmp)) {
    cm->error.setjmp = 0;
    av1_decoder_remove(pbi);
    return NULL;
  }

  cm->error.setjmp = 1;

  CHECK_MEM_ERROR(cm, cm->fc,
                  (FRAME_CONTEXT *)aom_memalign(32, sizeof(*cm->fc)));
  CHECK_MEM_ERROR(
      cm, cm->default_frame_context,
      (FRAME_CONTEXT *)aom_memalign(32, sizeof(*cm->default_frame_context)));
  memset(cm->fc, 0, sizeof(*cm->fc));
  memset(cm->default_frame_context, 0, sizeof(*cm->default_frame_context));

  pbi->need_resync = 1;
  aom_once(initialize_dec);

  // Initialize the references to not point to any frame buffers.
  for (int i = 0; i < REF_FRAMES; i++) {
    cm->ref_frame_map[i] = NULL;
    cm->next_ref_frame_map[i] = NULL;
  }

  cm->current_frame.frame_number = 0;
  pbi->decoding_first_frame = 1;
  pbi->common.buffer_pool = pool;

  cm->seq_params.bit_depth = AOM_BITS_8;

  cm->alloc_mi = av1_dec_alloc_mi;
  cm->free_mi = dec_free_mi;
  cm->setup_mi = dec_setup_mi;

  av1_loop_filter_init(cm);

  av1_qm_init(cm);
  av1_loop_restoration_precal();
#if CONFIG_ACCOUNTING
  pbi->acct_enabled = 1;
  aom_accounting_init(&pbi->accounting);
#endif

  cm->error.setjmp = 0;

  aom_get_worker_interface()->init(&pbi->lf_worker);
  pbi->lf_worker.thread_name = "aom lf worker";

  return pbi;
}

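// Releases the tile-decoding job queue and, under CONFIG_MULTITHREAD, the
// mutex protecting it.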
void av1_dealloc_dec_jobs(struct AV1DecTileMTData *tile_mt_info) {
  if (tile_mt_info != NULL) {
#if CONFIG_MULTITHREAD
    if (tile_mt_info->job_mutex != NULL) {
      pthread_mutex_destroy(tile_mt_info->job_mutex);
      aom_free(tile_mt_info->job_mutex);
    }
#endif
    aom_free(tile_mt_info->job_queue);
    // Clear the structure as the source of this call may be a resize, in
    // which case this call will be followed by an _alloc() which may fail.
    av1_zero(*tile_mt_info);
  }
}

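// Frees the decoder's coefficient-block buffer and resets its recorded size.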
void av1_dec_free_cb_buf(AV1Decoder *pbi) {
  aom_free(pbi->cb_buffer_base);
  pbi->cb_buffer_base = NULL;
  pbi->cb_buffer_alloc_size = 0;
}

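// Tears down an AV1Decoder: stops and frees all worker threads, releases
// per-tile and multithreading state, and finally frees the decoder itself.
// Safe to call on a partially constructed decoder, or with pbi == NULL.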
void av1_decoder_remove(AV1Decoder *pbi) {
  int i;

  if (!pbi) return;

  // Free the tile list output buffer.
  aom_free_frame_buffer(&pbi->tile_list_outbuf);

  aom_get_worker_interface()->end(&pbi->lf_worker);
  aom_free(pbi->lf_worker.data1);

  if (pbi->thread_data) {
    for (int worker_idx = 0; worker_idx < pbi->max_threads - 1; worker_idx++) {
      DecWorkerData *const thread_data = pbi->thread_data + worker_idx;
      av1_free_mc_tmp_buf(thread_data->td);
      aom_free(thread_data->td);
    }
    aom_free(pbi->thread_data);
  }

  for (i = 0; i < pbi->num_workers; ++i) {
    AVxWorker *const worker = &pbi->tile_workers[i];
    aom_get_worker_interface()->end(worker);
  }
#if CONFIG_MULTITHREAD
  if (pbi->row_mt_mutex_ != NULL) {
    pthread_mutex_destroy(pbi->row_mt_mutex_);
    aom_free(pbi->row_mt_mutex_);
  }
  if (pbi->row_mt_cond_ != NULL) {
    pthread_cond_destroy(pbi->row_mt_cond_);
    aom_free(pbi->row_mt_cond_);
  }
#endif
  for (i = 0; i < pbi->allocated_tiles; i++) {
    TileDataDec *const tile_data = pbi->tile_data + i;
    av1_dec_row_mt_dealloc(&tile_data->dec_row_mt_sync);
  }
  aom_free(pbi->tile_data);
  aom_free(pbi->tile_workers);

  if (pbi->num_workers > 0) {
    av1_loop_filter_dealloc(&pbi->lf_row_sync);
    av1_loop_restoration_dealloc(&pbi->lr_row_sync, pbi->num_workers);
    av1_dealloc_dec_jobs(&pbi->tile_mt_info);
  }

  av1_dec_free_cb_buf(pbi);
#if CONFIG_ACCOUNTING
  aom_accounting_clear(&pbi->accounting);
#endif
  av1_free_mc_tmp_buf(&pbi->td);

  aom_free(pbi);
}

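// For an intra block, calls 'visit' for each of the (at most two) palette
// planes that actually carry a palette, skipping chroma when this block
// position has no chroma reference.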
void av1_visit_palette(AV1Decoder *const pbi, MACROBLOCKD *const xd, int mi_row,
                       int mi_col, aom_reader *r, BLOCK_SIZE bsize,
                       palette_visitor_fn_t visit) {
  if (!is_inter_block(xd->mi[0])) {
    for (int plane = 0; plane < AOMMIN(2, av1_num_planes(&pbi->common));
         ++plane) {
      const struct macroblockd_plane *const pd = &xd->plane[plane];
      if (is_chroma_reference(mi_row, mi_col, bsize, pd->subsampling_x,
                              pd->subsampling_y)) {
        if (xd->mi[0]->palette_mode_info.palette_size[plane])
          visit(xd, plane, r);
      } else {
        assert(xd->mi[0]->palette_mode_info.palette_size[plane] == 0);
      }
    }
  }
}

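// Returns 1 if the two buffers have identical luma and chroma dimensions.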
static int equal_dimensions(const YV12_BUFFER_CONFIG *a,
                            const YV12_BUFFER_CONFIG *b) {
  return a->y_height == b->y_height && a->y_width == b->y_width &&
         a->uv_height == b->uv_height && a->uv_width == b->uv_width;
}

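// Copies the reference frame at 'idx' into the caller-provided buffer 'sd'.
// Returns an error if the reference does not exist or its dimensions differ
// from those of 'sd'.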
aom_codec_err_t av1_copy_reference_dec(AV1Decoder *pbi, int idx,
                                       YV12_BUFFER_CONFIG *sd) {
  AV1_COMMON *cm = &pbi->common;
  const int num_planes = av1_num_planes(cm);

  const YV12_BUFFER_CONFIG *const cfg = get_ref_frame(cm, idx);
  if (cfg == NULL) {
    aom_internal_error(&cm->error, AOM_CODEC_ERROR, "No reference frame");
    return AOM_CODEC_ERROR;
  }
  if (!equal_dimensions(cfg, sd))
    aom_internal_error(&cm->error, AOM_CODEC_ERROR,
                       "Incorrect buffer dimensions");
  else
    aom_yv12_copy_frame(cfg, sd, num_planes);

  return cm->error.error_code;
}

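// Returns 1 if the two buffers match in dimensions, strides, border size,
// and high-bit-depth flag.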
static int equal_dimensions_and_border(const YV12_BUFFER_CONFIG *a,
                                       const YV12_BUFFER_CONFIG *b) {
  return a->y_height == b->y_height && a->y_width == b->y_width &&
         a->uv_height == b->uv_height && a->uv_width == b->uv_width &&
         a->y_stride == b->y_stride && a->uv_stride == b->uv_stride &&
         a->border == b->border &&
         (a->flags & YV12_FLAG_HIGHBITDEPTH) ==
             (b->flags & YV12_FLAG_HIGHBITDEPTH);
}

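// Replaces the reference frame at 'idx' with the contents of 'sd'. When
// use_external_ref is 0 the frame data is copied; otherwise the reference
// buffer's plane pointers are redirected to 'sd' and the originals are saved
// so they can be restored later.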
aom_codec_err_t av1_set_reference_dec(AV1_COMMON *cm, int idx,
                                      int use_external_ref,
                                      YV12_BUFFER_CONFIG *sd) {
  const int num_planes = av1_num_planes(cm);
  YV12_BUFFER_CONFIG *ref_buf = NULL;

  // Get the destination reference buffer.
  ref_buf = get_ref_frame(cm, idx);

  if (ref_buf == NULL) {
    aom_internal_error(&cm->error, AOM_CODEC_ERROR, "No reference frame");
    return AOM_CODEC_ERROR;
  }

  if (!use_external_ref) {
    if (!equal_dimensions(ref_buf, sd)) {
      aom_internal_error(&cm->error, AOM_CODEC_ERROR,
                         "Incorrect buffer dimensions");
    } else {
      // Overwrite the reference frame buffer.
      aom_yv12_copy_frame(sd, ref_buf, num_planes);
    }
  } else {
    if (!equal_dimensions_and_border(ref_buf, sd)) {
      aom_internal_error(&cm->error, AOM_CODEC_ERROR,
                         "Incorrect buffer dimensions");
    } else {
      // Overwrite the reference frame buffer pointers.
      // Once we no longer need the external reference buffer, these pointers
      // are restored.
      ref_buf->store_buf_adr[0] = ref_buf->y_buffer;
      ref_buf->store_buf_adr[1] = ref_buf->u_buffer;
      ref_buf->store_buf_adr[2] = ref_buf->v_buffer;
      ref_buf->y_buffer = sd->y_buffer;
      ref_buf->u_buffer = sd->u_buffer;
      ref_buf->v_buffer = sd->v_buffer;
      ref_buf->use_external_reference_buffers = 1;
    }
  }

  return cm->error.error_code;
}

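// Copies the newly decoded frame 'new_frame' into the caller-provided buffer
// 'sd'; returns an error unless the two buffers match exactly in dimensions,
// strides, and border.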
aom_codec_err_t av1_copy_new_frame_dec(AV1_COMMON *cm,
                                       YV12_BUFFER_CONFIG *new_frame,
                                       YV12_BUFFER_CONFIG *sd) {
  const int num_planes = av1_num_planes(cm);

  if (!equal_dimensions_and_border(new_frame, sd))
    aom_internal_error(&cm->error, AOM_CODEC_ERROR,
                       "Incorrect buffer dimensions");
  else
    aom_yv12_copy_frame(new_frame, sd, num_planes);

  return cm->error.error_code;
}

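// Error-path cleanup: marks the current frame as corrupted and releases the
// frame-buffer references this decode call still holds, both the pending
// entries in cm->next_ref_frame_map (when pbi->hold_ref_buf is set) and
// cm->cur_frame itself.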
static void release_frame_buffers(AV1Decoder *pbi) {
  AV1_COMMON *const cm = &pbi->common;
  BufferPool *const pool = cm->buffer_pool;

  cm->cur_frame->buf.corrupted = 1;
  lock_buffer_pool(pool);
  // Release all the reference buffers in cm->next_ref_frame_map if the worker
  // thread is holding them.
  if (pbi->hold_ref_buf) {
    for (int ref_index = 0; ref_index < REF_FRAMES; ++ref_index) {
      decrease_ref_count(cm->next_ref_frame_map[ref_index], pool);
      cm->next_ref_frame_map[ref_index] = NULL;
    }
    pbi->hold_ref_buf = 0;
  }
  // Release current frame.
  decrease_ref_count(cm->cur_frame, pool);
  unlock_buffer_pool(pool);
  cm->cur_frame = NULL;
}

// If any buffer updating is signaled it should be done here.
// Consumes a reference to cm->cur_frame.
//
// This function returns void. It reports failure by setting
// cm->error.error_code.
static void swap_frame_buffers(AV1Decoder *pbi, int frame_decoded) {
  int ref_index = 0, mask;
  AV1_COMMON *const cm = &pbi->common;
  BufferPool *const pool = cm->buffer_pool;

  if (frame_decoded) {
    lock_buffer_pool(pool);

    // In ext-tile decoding, the camera frame header is only decoded once. So,
    // we don't release the references here.
    if (!pbi->camera_frame_header_ready) {
      // If we are not holding reference buffers in cm->next_ref_frame_map,
      // assert that the following two for loops are no-ops.
      assert(IMPLIES(!pbi->hold_ref_buf,
                     cm->current_frame.refresh_frame_flags == 0));
      assert(IMPLIES(!pbi->hold_ref_buf,
                     cm->show_existing_frame && !pbi->reset_decoder_state));

      // The following two for loops need to release the reference stored in
      // cm->ref_frame_map[ref_index] before transferring the reference stored
      // in cm->next_ref_frame_map[ref_index] to cm->ref_frame_map[ref_index].
      for (mask = cm->current_frame.refresh_frame_flags; mask; mask >>= 1) {
        decrease_ref_count(cm->ref_frame_map[ref_index], pool);
        cm->ref_frame_map[ref_index] = cm->next_ref_frame_map[ref_index];
        cm->next_ref_frame_map[ref_index] = NULL;
        ++ref_index;
      }

      const int check_on_show_existing_frame =
          !cm->show_existing_frame || pbi->reset_decoder_state;
      for (; ref_index < REF_FRAMES && check_on_show_existing_frame;
           ++ref_index) {
        decrease_ref_count(cm->ref_frame_map[ref_index], pool);
        cm->ref_frame_map[ref_index] = cm->next_ref_frame_map[ref_index];
        cm->next_ref_frame_map[ref_index] = NULL;
      }
    }

    if (cm->show_existing_frame || cm->show_frame) {
      if (pbi->output_all_layers) {
        // Append this frame to the output queue.
        if (pbi->num_output_frames >= MAX_NUM_SPATIAL_LAYERS) {
          // We can't store the new frame anywhere, so drop it and return an
          // error.
          cm->cur_frame->buf.corrupted = 1;
          decrease_ref_count(cm->cur_frame, pool);
          cm->error.error_code = AOM_CODEC_UNSUP_BITSTREAM;
        } else {
          pbi->output_frames[pbi->num_output_frames] = cm->cur_frame;
          pbi->num_output_frames++;
        }
      } else {
        // Replace any existing output frame.
        assert(pbi->num_output_frames == 0 || pbi->num_output_frames == 1);
        if (pbi->num_output_frames > 0) {
          decrease_ref_count(pbi->output_frames[0], pool);
        }
        pbi->output_frames[0] = cm->cur_frame;
        pbi->num_output_frames = 1;
      }
    } else {
      decrease_ref_count(cm->cur_frame, pool);
    }

    unlock_buffer_pool(pool);
  } else {
    // The code here assumes we are not holding reference buffers in
    // cm->next_ref_frame_map. If this assertion fails, we are leaking the
    // frame buffer references in cm->next_ref_frame_map.
    assert(IMPLIES(!pbi->camera_frame_header_ready, !pbi->hold_ref_buf));
    // Nothing was decoded, so just drop this frame buffer.
    lock_buffer_pool(pool);
    decrease_ref_count(cm->cur_frame, pool);
    unlock_buffer_pool(pool);
  }
  cm->cur_frame = NULL;

  if (!pbi->camera_frame_header_ready) {
    pbi->hold_ref_buf = 0;

    // Invalidate these references until the next frame starts.
    for (ref_index = 0; ref_index < INTER_REFS_PER_FRAME; ref_index++) {
      cm->remapped_ref_idx[ref_index] = INVALID_IDX;
    }
  }
}

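// Decodes the compressed data at *psource (a series of OBUs, 'size' bytes
// long), updating *psource to point past the consumed data. Returns 0 on
// success and a nonzero value on failure; the specific error is recorded in
// cm->error.error_code.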
int av1_receive_compressed_data(AV1Decoder *pbi, size_t size,
                                const uint8_t **psource) {
  AV1_COMMON *volatile const cm = &pbi->common;
  const uint8_t *source = *psource;
  cm->error.error_code = AOM_CODEC_OK;
  cm->error.has_detail = 0;

  if (size == 0) {
    // This is used to signal that we are missing frames.
    // We do not know if the missing frame(s) was supposed to update
    // any of the reference buffers, but we act conservatively and
    // mark only the last buffer as corrupted.
    //
    // TODO(jkoleszar): Error concealment is undefined and non-normative
    // at this point, but if it becomes so, [0] may not always be the correct
    // thing to do here.
    RefCntBuffer *ref_buf = get_ref_frame_buf(cm, LAST_FRAME);
    if (ref_buf != NULL) ref_buf->buf.corrupted = 1;
  }

  if (assign_cur_frame_new_fb(cm) == NULL) {
    cm->error.error_code = AOM_CODEC_MEM_ERROR;
    return 1;
  }

  if (!pbi->camera_frame_header_ready) pbi->hold_ref_buf = 0;

  // The jmp_buf is valid only for the duration of the function that calls
  // setjmp(). Therefore, this function must reset the 'setjmp' field to 0
  // before it returns.
  if (setjmp(cm->error.jmp)) {
    const AVxWorkerInterface *const winterface = aom_get_worker_interface();
    int i;

    cm->error.setjmp = 0;

    // Synchronize all threads immediately as a subsequent decode call may
    // cause a resize invalidating some allocations.
    winterface->sync(&pbi->lf_worker);
    for (i = 0; i < pbi->num_workers; ++i) {
      winterface->sync(&pbi->tile_workers[i]);
    }

    release_frame_buffers(pbi);
    aom_clear_system_state();
    return -1;
  }

  cm->error.setjmp = 1;

  int frame_decoded =
      aom_decode_frame_from_obus(pbi, source, source + size, psource);

  if (frame_decoded < 0) {
    assert(cm->error.error_code != AOM_CODEC_OK);
    release_frame_buffers(pbi);
    cm->error.setjmp = 0;
    return 1;
  }

#if TXCOEFF_TIMER
  cm->cum_txcoeff_timer += cm->txcoeff_timer;
  fprintf(stderr,
          "txb coeff block number: %d, frame time: %ld, cum time %ld in us\n",
          cm->txb_count, cm->txcoeff_timer, cm->cum_txcoeff_timer);
  cm->txcoeff_timer = 0;
  cm->txb_count = 0;
#endif

  // Note: At this point, this function holds a reference to cm->cur_frame
  // in the buffer pool. This reference is consumed by swap_frame_buffers().
  swap_frame_buffers(pbi, frame_decoded);

  if (frame_decoded) {
    pbi->decoding_first_frame = 0;
  }

  if (cm->error.error_code != AOM_CODEC_OK) {
    cm->error.setjmp = 0;
    return 1;
  }

  aom_clear_system_state();

  if (!cm->show_existing_frame) {
    if (cm->seg.enabled) {
      if (cm->prev_frame && (cm->mi_rows == cm->prev_frame->mi_rows) &&
          (cm->mi_cols == cm->prev_frame->mi_cols)) {
        cm->last_frame_seg_map = cm->prev_frame->seg_map;
      } else {
        cm->last_frame_seg_map = NULL;
      }
    }
  }

  // Update progress in frame parallel decode.
  cm->error.setjmp = 0;

  return 0;
}

// Get the frame at a particular index in the output queue.
int av1_get_raw_frame(AV1Decoder *pbi, size_t index, YV12_BUFFER_CONFIG **sd,
                      aom_film_grain_t **grain_params) {
  if (index >= pbi->num_output_frames) return -1;
  *sd = &pbi->output_frames[index]->buf;
  *grain_params = &pbi->output_frames[index]->film_grain_params;
  aom_clear_system_state();
  return 0;
}

// Get the highest-spatial-layer output.
// TODO(david.barker): What should this do?
int av1_get_frame_to_show(AV1Decoder *pbi, YV12_BUFFER_CONFIG *frame) {
  if (pbi->num_output_frames == 0) return -1;

  *frame = pbi->output_frames[pbi->num_output_frames - 1]->buf;
  return 0;
}