/*
 * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "vpx_config.h"
#include "vp8_rtcd.h"
#include "./vpx_scale_rtcd.h"
#include "onyxd_int.h"
#include "vp8/common/header.h"
#include "vp8/common/reconintra4x4.h"
#include "vp8/common/reconinter.h"
#include "detokenize.h"
#include "vp8/common/common.h"
#include "vp8/common/invtrans.h"
#include "vp8/common/alloccommon.h"
#include "vp8/common/entropymode.h"
#include "vp8/common/quant_common.h"
#include "vpx_scale/vpx_scale.h"
#include "vp8/common/reconintra.h"
#include "vp8/common/setupintrarecon.h"

#include "decodemv.h"
#include "vp8/common/extend.h"
#if CONFIG_ERROR_CONCEALMENT
#include "error_concealment.h"
#endif
#include "vpx_mem/vpx_mem.h"
#include "vp8/common/threading.h"
#include "decoderthreading.h"
#include "dboolhuff.h"
#include "vpx_dsp/vpx_dsp_common.h"

#include <assert.h>
#include <stdio.h>

void vp8cx_init_de_quantizer(VP8D_COMP *pbi) {
  int Q;
  VP8_COMMON *const pc = &pbi->common;

  for (Q = 0; Q < QINDEX_RANGE; ++Q) {
    pc->Y1dequant[Q][0] = (short)vp8_dc_quant(Q, pc->y1dc_delta_q);
    pc->Y2dequant[Q][0] = (short)vp8_dc2quant(Q, pc->y2dc_delta_q);
    pc->UVdequant[Q][0] = (short)vp8_dc_uv_quant(Q, pc->uvdc_delta_q);

    pc->Y1dequant[Q][1] = (short)vp8_ac_yquant(Q);
    pc->Y2dequant[Q][1] = (short)vp8_ac2quant(Q, pc->y2ac_delta_q);
    pc->UVdequant[Q][1] = (short)vp8_ac_uv_quant(Q, pc->uvac_delta_q);
  }
}

void vp8_mb_init_dequantizer(VP8D_COMP *pbi, MACROBLOCKD *xd) {
  int i;
  int QIndex;
  MB_MODE_INFO *mbmi = &xd->mode_info_context->mbmi;
  VP8_COMMON *const pc = &pbi->common;

  /* Decide whether to use the default or alternate baseline Q value. */
  if (xd->segmentation_enabled) {
    /* Abs Value */
    if (xd->mb_segement_abs_delta == SEGMENT_ABSDATA) {
      QIndex = xd->segment_feature_data[MB_LVL_ALT_Q][mbmi->segment_id];

      /* Delta Value */
    } else {
      QIndex = pc->base_qindex +
               xd->segment_feature_data[MB_LVL_ALT_Q][mbmi->segment_id];
    }

    QIndex = (QIndex >= 0) ? ((QIndex <= MAXQ) ? QIndex : MAXQ)
                           : 0; /* Clamp to valid range */
  } else {
    QIndex = pc->base_qindex;
  }

  /* Set up the macroblock dequant constants */
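  /* The Y1 DC entry is forced to 1 so that DC terms recovered from the
   * second order (Y2/WHT) transform pass through the per-block IDCT
   * unscaled; they have already been dequantized at that stage.
   */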
  xd->dequant_y1_dc[0] = 1;
  xd->dequant_y1[0] = pc->Y1dequant[QIndex][0];
  xd->dequant_y2[0] = pc->Y2dequant[QIndex][0];
  xd->dequant_uv[0] = pc->UVdequant[QIndex][0];

  for (i = 1; i < 16; ++i) {
    xd->dequant_y1_dc[i] = xd->dequant_y1[i] = pc->Y1dequant[QIndex][1];
    xd->dequant_y2[i] = pc->Y2dequant[QIndex][1];
    xd->dequant_uv[i] = pc->UVdequant[QIndex][1];
  }
}

static void decode_macroblock(VP8D_COMP *pbi, MACROBLOCKD *xd,
                              unsigned int mb_idx) {
  MB_PREDICTION_MODE mode;
  int i;
#if CONFIG_ERROR_CONCEALMENT
  int corruption_detected = 0;
#else
  (void)mb_idx;
#endif

  if (xd->mode_info_context->mbmi.mb_skip_coeff) {
    vp8_reset_mb_tokens_context(xd);
  } else if (!vp8dx_bool_error(xd->current_bc)) {
    int eobtotal;
    eobtotal = vp8_decode_mb_tokens(pbi, xd);

    /* Special case: Force the loopfilter to skip when eobtotal is zero */
    xd->mode_info_context->mbmi.mb_skip_coeff = (eobtotal == 0);
  }

  mode = xd->mode_info_context->mbmi.mode;

  if (xd->segmentation_enabled) vp8_mb_init_dequantizer(pbi, xd);

#if CONFIG_ERROR_CONCEALMENT

  if (pbi->ec_active) {
    int throw_residual;
    /* When we have independent partitions we can apply residual even
     * though other partitions within the frame are corrupt.
     */
    throw_residual =
        (!pbi->independent_partitions && pbi->frame_corrupt_residual);
    throw_residual = (throw_residual || vp8dx_bool_error(xd->current_bc));

    if ((mb_idx >= pbi->mvs_corrupt_from_mb || throw_residual)) {
      /* MB with corrupt residuals or corrupt mode/motion vectors.
       * Better to use the predictor as reconstruction.
       */
      pbi->frame_corrupt_residual = 1;
      memset(xd->qcoeff, 0, sizeof(xd->qcoeff));

      corruption_detected = 1;

      /* force idct to be skipped for B_PRED and use the
       * prediction only for reconstruction
       */
      memset(xd->eobs, 0, 25);
    }
  }
#endif

  /* do prediction */
  if (xd->mode_info_context->mbmi.ref_frame == INTRA_FRAME) {
    vp8_build_intra_predictors_mbuv_s(
        xd, xd->recon_above[1], xd->recon_above[2], xd->recon_left[1],
        xd->recon_left[2], xd->recon_left_stride[1], xd->dst.u_buffer,
        xd->dst.v_buffer, xd->dst.uv_stride);

    if (mode != B_PRED) {
      vp8_build_intra_predictors_mby_s(
          xd, xd->recon_above[0], xd->recon_left[0], xd->recon_left_stride[0],
          xd->dst.y_buffer, xd->dst.y_stride);
    } else {
      short *DQC = xd->dequant_y1;
      int dst_stride = xd->dst.y_stride;

      /* clear out residual eob info */
      if (xd->mode_info_context->mbmi.mb_skip_coeff) memset(xd->eobs, 0, 25);

      intra_prediction_down_copy(xd, xd->recon_above[0] + 16);

      for (i = 0; i < 16; ++i) {
        BLOCKD *b = &xd->block[i];
        unsigned char *dst = xd->dst.y_buffer + b->offset;
        B_PREDICTION_MODE b_mode = xd->mode_info_context->bmi[i].as_mode;
        unsigned char *Above = dst - dst_stride;
        unsigned char *yleft = dst - 1;
        int left_stride = dst_stride;
        unsigned char top_left = Above[-1];

        vp8_intra4x4_predict(Above, yleft, left_stride, b_mode, dst,
                             dst_stride, top_left);

        if (xd->eobs[i]) {
          if (xd->eobs[i] > 1) {
            vp8_dequant_idct_add(b->qcoeff, DQC, dst, dst_stride);
          } else {
            vp8_dc_only_idct_add(b->qcoeff[0] * DQC[0], dst, dst_stride, dst,
                                 dst_stride);
            memset(b->qcoeff, 0, 2 * sizeof(b->qcoeff[0]));
          }
        }
      }
    }
  } else {
    vp8_build_inter_predictors_mb(xd);
  }

#if CONFIG_ERROR_CONCEALMENT
  if (corruption_detected) {
    return;
  }
#endif

  if (!xd->mode_info_context->mbmi.mb_skip_coeff) {
    /* dequantization and idct */
    if (mode != B_PRED) {
      short *DQC = xd->dequant_y1;

      if (mode != SPLITMV) {
        BLOCKD *b = &xd->block[24];

        /* do 2nd order transform on the dc block */
        if (xd->eobs[24] > 1) {
          vp8_dequantize_b(b, xd->dequant_y2);

          vp8_short_inv_walsh4x4(&b->dqcoeff[0], xd->qcoeff);
          memset(b->qcoeff, 0, 16 * sizeof(b->qcoeff[0]));
        } else {
          b->dqcoeff[0] = b->qcoeff[0] * xd->dequant_y2[0];
          vp8_short_inv_walsh4x4_1(&b->dqcoeff[0], xd->qcoeff);
          memset(b->qcoeff, 0, 2 * sizeof(b->qcoeff[0]));
        }

        /* override the dc dequant constant in order to preserve the
         * dc components
         */
        DQC = xd->dequant_y1_dc;
      }

      vp8_dequant_idct_add_y_block(xd->qcoeff, DQC, xd->dst.y_buffer,
                                   xd->dst.y_stride, xd->eobs);
    }

    vp8_dequant_idct_add_uv_block(xd->qcoeff + 16 * 16, xd->dequant_uv,
                                  xd->dst.u_buffer, xd->dst.v_buffer,
                                  xd->dst.uv_stride, xd->eobs + 16);
  }
}

static int get_delta_q(vp8_reader *bc, int prev, int *q_update) {
  int ret_val = 0;

  if (vp8_read_bit(bc)) {
    ret_val = vp8_read_literal(bc, 4);

    if (vp8_read_bit(bc)) ret_val = -ret_val;
  }

  /* Trigger a quantizer update if the delta-q value has changed */
  if (ret_val != prev) *q_update = 1;

  return ret_val;
}

#ifdef PACKET_TESTING
#include <stdio.h>
FILE *vpxlog = 0;
#endif

static void yv12_extend_frame_top_c(YV12_BUFFER_CONFIG *ybf) {
  int i;
  unsigned char *src_ptr1;
  unsigned char *dest_ptr1;

  unsigned int Border;
  int plane_stride;

  /***********/
  /* Y Plane */
  /***********/
  Border = ybf->border;
  plane_stride = ybf->y_stride;
  src_ptr1 = ybf->y_buffer - Border;
  dest_ptr1 = src_ptr1 - (Border * plane_stride);

  for (i = 0; i < (int)Border; ++i) {
    memcpy(dest_ptr1, src_ptr1, plane_stride);
    dest_ptr1 += plane_stride;
  }

  /***********/
  /* U Plane */
  /***********/
  plane_stride = ybf->uv_stride;
  Border /= 2;
  src_ptr1 = ybf->u_buffer - Border;
  dest_ptr1 = src_ptr1 - (Border * plane_stride);

  for (i = 0; i < (int)(Border); ++i) {
    memcpy(dest_ptr1, src_ptr1, plane_stride);
    dest_ptr1 += plane_stride;
  }

  /***********/
  /* V Plane */
  /***********/

  src_ptr1 = ybf->v_buffer - Border;
  dest_ptr1 = src_ptr1 - (Border * plane_stride);

  for (i = 0; i < (int)(Border); ++i) {
    memcpy(dest_ptr1, src_ptr1, plane_stride);
    dest_ptr1 += plane_stride;
  }
}

static void yv12_extend_frame_bottom_c(YV12_BUFFER_CONFIG *ybf) {
  int i;
  unsigned char *src_ptr1, *src_ptr2;
  unsigned char *dest_ptr2;

  unsigned int Border;
  int plane_stride;
  int plane_height;

  /***********/
  /* Y Plane */
  /***********/
  Border = ybf->border;
  plane_stride = ybf->y_stride;
  plane_height = ybf->y_height;

  src_ptr1 = ybf->y_buffer - Border;
  src_ptr2 = src_ptr1 + (plane_height * plane_stride) - plane_stride;
  dest_ptr2 = src_ptr2 + plane_stride;

  for (i = 0; i < (int)Border; ++i) {
    memcpy(dest_ptr2, src_ptr2, plane_stride);
    dest_ptr2 += plane_stride;
  }

  /***********/
  /* U Plane */
  /***********/
  plane_stride = ybf->uv_stride;
  plane_height = ybf->uv_height;
  Border /= 2;

  src_ptr1 = ybf->u_buffer - Border;
  src_ptr2 = src_ptr1 + (plane_height * plane_stride) - plane_stride;
  dest_ptr2 = src_ptr2 + plane_stride;

  for (i = 0; i < (int)(Border); ++i) {
    memcpy(dest_ptr2, src_ptr2, plane_stride);
    dest_ptr2 += plane_stride;
  }

  /***********/
  /* V Plane */
  /***********/

  src_ptr1 = ybf->v_buffer - Border;
  src_ptr2 = src_ptr1 + (plane_height * plane_stride) - plane_stride;
  dest_ptr2 = src_ptr2 + plane_stride;

  for (i = 0; i < (int)(Border); ++i) {
    memcpy(dest_ptr2, src_ptr2, plane_stride);
    dest_ptr2 += plane_stride;
  }
}

static void yv12_extend_frame_left_right_c(YV12_BUFFER_CONFIG *ybf,
                                           unsigned char *y_src,
                                           unsigned char *u_src,
                                           unsigned char *v_src) {
  int i;
  unsigned char *src_ptr1, *src_ptr2;
  unsigned char *dest_ptr1, *dest_ptr2;

  unsigned int Border;
  int plane_stride;
  int plane_height;
  int plane_width;

  /***********/
  /* Y Plane */
  /***********/
  Border = ybf->border;
  plane_stride = ybf->y_stride;
  plane_height = 16;
  plane_width = ybf->y_width;

  /* copy the left and right most columns out */
  src_ptr1 = y_src;
  src_ptr2 = src_ptr1 + plane_width - 1;
  dest_ptr1 = src_ptr1 - Border;
  dest_ptr2 = src_ptr2 + 1;

  for (i = 0; i < plane_height; ++i) {
    memset(dest_ptr1, src_ptr1[0], Border);
    memset(dest_ptr2, src_ptr2[0], Border);
    src_ptr1 += plane_stride;
    src_ptr2 += plane_stride;
    dest_ptr1 += plane_stride;
    dest_ptr2 += plane_stride;
  }

  /***********/
  /* U Plane */
  /***********/
  plane_stride = ybf->uv_stride;
  plane_height = 8;
  plane_width = ybf->uv_width;
  Border /= 2;

  /* copy the left and right most columns out */
  src_ptr1 = u_src;
  src_ptr2 = src_ptr1 + plane_width - 1;
  dest_ptr1 = src_ptr1 - Border;
  dest_ptr2 = src_ptr2 + 1;

  for (i = 0; i < plane_height; ++i) {
    memset(dest_ptr1, src_ptr1[0], Border);
    memset(dest_ptr2, src_ptr2[0], Border);
    src_ptr1 += plane_stride;
    src_ptr2 += plane_stride;
    dest_ptr1 += plane_stride;
    dest_ptr2 += plane_stride;
  }

  /***********/
  /* V Plane */
  /***********/

  /* copy the left and right most columns out */
  src_ptr1 = v_src;
  src_ptr2 = src_ptr1 + plane_width - 1;
  dest_ptr1 = src_ptr1 - Border;
  dest_ptr2 = src_ptr2 + 1;

  for (i = 0; i < plane_height; ++i) {
    memset(dest_ptr1, src_ptr1[0], Border);
    memset(dest_ptr2, src_ptr2[0], Border);
    src_ptr1 += plane_stride;
    src_ptr2 += plane_stride;
    dest_ptr1 += plane_stride;
    dest_ptr2 += plane_stride;
  }
}

static void decode_mb_rows(VP8D_COMP *pbi) {
  VP8_COMMON *const pc = &pbi->common;
  MACROBLOCKD *const xd = &pbi->mb;

  MODE_INFO *lf_mic = xd->mode_info_context;

  int ibc = 0;
  int num_part = 1 << pc->multi_token_partition;

  int recon_yoffset, recon_uvoffset;
  int mb_row, mb_col;
  int mb_idx = 0;

  YV12_BUFFER_CONFIG *yv12_fb_new = pbi->dec_fb_ref[INTRA_FRAME];

  int recon_y_stride = yv12_fb_new->y_stride;
  int recon_uv_stride = yv12_fb_new->uv_stride;

  unsigned char *ref_buffer[MAX_REF_FRAMES][3];
  unsigned char *dst_buffer[3];
  unsigned char *lf_dst[3];
  unsigned char *eb_dst[3];
  int i;
  int ref_fb_corrupted[MAX_REF_FRAMES];

  ref_fb_corrupted[INTRA_FRAME] = 0;

  for (i = 1; i < MAX_REF_FRAMES; ++i) {
    YV12_BUFFER_CONFIG *this_fb = pbi->dec_fb_ref[i];

    ref_buffer[i][0] = this_fb->y_buffer;
    ref_buffer[i][1] = this_fb->u_buffer;
    ref_buffer[i][2] = this_fb->v_buffer;

    ref_fb_corrupted[i] = this_fb->corrupted;
  }

  /* Set up the buffer pointers */
  eb_dst[0] = lf_dst[0] = dst_buffer[0] = yv12_fb_new->y_buffer;
  eb_dst[1] = lf_dst[1] = dst_buffer[1] = yv12_fb_new->u_buffer;
  eb_dst[2] = lf_dst[2] = dst_buffer[2] = yv12_fb_new->v_buffer;

  xd->up_available = 0;

  /* Initialize the loop filter for this frame. */
  if (pc->filter_level) vp8_loop_filter_frame_init(pc, xd, pc->filter_level);

  vp8_setup_intra_recon_top_line(yv12_fb_new);

  /* Decode the individual macro block */
  for (mb_row = 0; mb_row < pc->mb_rows; ++mb_row) {
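    /* With multiple token partitions, MB rows cycle through the bool
     * decoders in round-robin order.
     */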
    if (num_part > 1) {
      xd->current_bc = &pbi->mbc[ibc];
      ibc++;

      if (ibc == num_part) ibc = 0;
    }

    recon_yoffset = mb_row * recon_y_stride * 16;
    recon_uvoffset = mb_row * recon_uv_stride * 8;

    /* reset contexts */
    xd->above_context = pc->above_context;
    memset(xd->left_context, 0, sizeof(ENTROPY_CONTEXT_PLANES));

    xd->left_available = 0;

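    /* Distance of MB from the top and bottom frame edges, in 1/8th pel units */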
    xd->mb_to_top_edge = -((mb_row * 16) << 3);
    xd->mb_to_bottom_edge = ((pc->mb_rows - 1 - mb_row) * 16) << 3;

    xd->recon_above[0] = dst_buffer[0] + recon_yoffset;
    xd->recon_above[1] = dst_buffer[1] + recon_uvoffset;
    xd->recon_above[2] = dst_buffer[2] + recon_uvoffset;

    xd->recon_left[0] = xd->recon_above[0] - 1;
    xd->recon_left[1] = xd->recon_above[1] - 1;
    xd->recon_left[2] = xd->recon_above[2] - 1;

    xd->recon_above[0] -= xd->dst.y_stride;
    xd->recon_above[1] -= xd->dst.uv_stride;
    xd->recon_above[2] -= xd->dst.uv_stride;

    /* TODO: move to outside row loop */
    xd->recon_left_stride[0] = xd->dst.y_stride;
    xd->recon_left_stride[1] = xd->dst.uv_stride;

    setup_intra_recon_left(xd->recon_left[0], xd->recon_left[1],
                           xd->recon_left[2], xd->dst.y_stride,
                           xd->dst.uv_stride);

    for (mb_col = 0; mb_col < pc->mb_cols; ++mb_col) {
      /* Distance of Mb to the various image edges.
       * These are specified to 8th pel as they are always compared to values
       * that are in 1/8th pel units
       */
      xd->mb_to_left_edge = -((mb_col * 16) << 3);
      xd->mb_to_right_edge = ((pc->mb_cols - 1 - mb_col) * 16) << 3;

#if CONFIG_ERROR_CONCEALMENT
      {
        int corrupt_residual =
            (!pbi->independent_partitions && pbi->frame_corrupt_residual) ||
            vp8dx_bool_error(xd->current_bc);
        if (pbi->ec_active &&
            xd->mode_info_context->mbmi.ref_frame == INTRA_FRAME &&
            corrupt_residual) {
          /* We have an intra block with corrupt coefficients, better to
           * conceal with an inter block. Interpolate MVs from neighboring
           * MBs.
           *
           * Note that for the first mb with corrupt residual in a frame,
           * we might not discover that before decoding the residual. That
           * happens after this check, and therefore no inter concealment
           * will be done.
           */
          vp8_interpolate_motion(xd, mb_row, mb_col, pc->mb_rows, pc->mb_cols);
        }
      }
#endif

      xd->dst.y_buffer = dst_buffer[0] + recon_yoffset;
      xd->dst.u_buffer = dst_buffer[1] + recon_uvoffset;
      xd->dst.v_buffer = dst_buffer[2] + recon_uvoffset;

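      /* Select the reference frame buffers used for inter prediction of
       * this MB. */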
      if (xd->mode_info_context->mbmi.ref_frame >= LAST_FRAME) {
        const MV_REFERENCE_FRAME ref = xd->mode_info_context->mbmi.ref_frame;
        xd->pre.y_buffer = ref_buffer[ref][0] + recon_yoffset;
        xd->pre.u_buffer = ref_buffer[ref][1] + recon_uvoffset;
        xd->pre.v_buffer = ref_buffer[ref][2] + recon_uvoffset;
      } else {
        // ref_frame is INTRA_FRAME, pre buffer should not be used.
        xd->pre.y_buffer = 0;
        xd->pre.u_buffer = 0;
        xd->pre.v_buffer = 0;
      }

      /* propagate errors from reference frames */
      xd->corrupted |= ref_fb_corrupted[xd->mode_info_context->mbmi.ref_frame];

      decode_macroblock(pbi, xd, mb_idx);

      mb_idx++;
      xd->left_available = 1;

      /* check if the boolean decoder has suffered an error */
      xd->corrupted |= vp8dx_bool_error(xd->current_bc);

      xd->recon_above[0] += 16;
      xd->recon_above[1] += 8;
      xd->recon_above[2] += 8;
      xd->recon_left[0] += 16;
      xd->recon_left[1] += 8;
      xd->recon_left[2] += 8;

      recon_yoffset += 16;
      recon_uvoffset += 8;

      ++xd->mode_info_context; /* next mb */

      xd->above_context++;
    }

    /* adjust to the next row of mbs */
    vp8_extend_mb_row(yv12_fb_new, xd->dst.y_buffer + 16, xd->dst.u_buffer + 8,
                      xd->dst.v_buffer + 8);

    ++xd->mode_info_context; /* skip prediction column */
    xd->up_available = 1;

    if (pc->filter_level) {
      if (mb_row > 0) {
        if (pc->filter_type == NORMAL_LOOPFILTER) {
          vp8_loop_filter_row_normal(pc, lf_mic, mb_row - 1, recon_y_stride,
                                     recon_uv_stride, lf_dst[0], lf_dst[1],
                                     lf_dst[2]);
        } else {
          vp8_loop_filter_row_simple(pc, lf_mic, mb_row - 1, recon_y_stride,
                                     recon_uv_stride, lf_dst[0], lf_dst[1],
                                     lf_dst[2]);
        }
        if (mb_row > 1) {
          yv12_extend_frame_left_right_c(yv12_fb_new, eb_dst[0], eb_dst[1],
                                         eb_dst[2]);

          eb_dst[0] += recon_y_stride * 16;
          eb_dst[1] += recon_uv_stride * 8;
          eb_dst[2] += recon_uv_stride * 8;
        }

        lf_dst[0] += recon_y_stride * 16;
        lf_dst[1] += recon_uv_stride * 8;
        lf_dst[2] += recon_uv_stride * 8;
        lf_mic += pc->mb_cols;
        lf_mic++; /* Skip border mb */
      }
    } else {
      if (mb_row > 0) {
        /**/
        yv12_extend_frame_left_right_c(yv12_fb_new, eb_dst[0], eb_dst[1],
                                       eb_dst[2]);
        eb_dst[0] += recon_y_stride * 16;
        eb_dst[1] += recon_uv_stride * 8;
        eb_dst[2] += recon_uv_stride * 8;
      }
    }
  }

  if (pc->filter_level) {
    if (pc->filter_type == NORMAL_LOOPFILTER) {
      vp8_loop_filter_row_normal(pc, lf_mic, mb_row - 1, recon_y_stride,
                                 recon_uv_stride, lf_dst[0], lf_dst[1],
                                 lf_dst[2]);
    } else {
      vp8_loop_filter_row_simple(pc, lf_mic, mb_row - 1, recon_y_stride,
                                 recon_uv_stride, lf_dst[0], lf_dst[1],
                                 lf_dst[2]);
    }

    yv12_extend_frame_left_right_c(yv12_fb_new, eb_dst[0], eb_dst[1],
                                   eb_dst[2]);
    eb_dst[0] += recon_y_stride * 16;
    eb_dst[1] += recon_uv_stride * 8;
    eb_dst[2] += recon_uv_stride * 8;
  }
  yv12_extend_frame_left_right_c(yv12_fb_new, eb_dst[0], eb_dst[1], eb_dst[2]);
  yv12_extend_frame_top_c(yv12_fb_new);
  yv12_extend_frame_bottom_c(yv12_fb_new);
}

static unsigned int read_partition_size(VP8D_COMP *pbi,
                                        const unsigned char *cx_size) {
  unsigned char temp[3];
  if (pbi->decrypt_cb) {
    pbi->decrypt_cb(pbi->decrypt_state, cx_size, temp, 3);
    cx_size = temp;
  }
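  /* Partition sizes are stored as 3-byte little-endian values. */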
  return cx_size[0] + (cx_size[1] << 8) + (cx_size[2] << 16);
}

static int read_is_valid(const unsigned char *start, size_t len,
                         const unsigned char *end) {
  return (start + len > start && start + len <= end);
}

static unsigned int read_available_partition_size(
    VP8D_COMP *pbi, const unsigned char *token_part_sizes,
    const unsigned char *fragment_start,
    const unsigned char *first_fragment_end, const unsigned char *fragment_end,
    int i, int num_part) {
  VP8_COMMON *pc = &pbi->common;
  const unsigned char *partition_size_ptr = token_part_sizes + i * 3;
  unsigned int partition_size = 0;
  ptrdiff_t bytes_left = fragment_end - fragment_start;
  /* Calculate the length of this partition. The last partition
   * size is implicit. If the partition size can't be read, then
   * either use the remaining data in the buffer (for EC mode)
   * or throw an error.
   */
  if (i < num_part - 1) {
    if (read_is_valid(partition_size_ptr, 3, first_fragment_end)) {
      partition_size = read_partition_size(pbi, partition_size_ptr);
    } else if (pbi->ec_active) {
      partition_size = (unsigned int)bytes_left;
    } else {
      vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                         "Truncated partition size data");
    }
  } else {
    partition_size = (unsigned int)bytes_left;
  }

  /* Validate the calculated partition length. If the buffer
   * described by the partition can't be fully read, then restrict
   * it to the portion that can be (for EC mode) or throw an error.
   */
  if (!read_is_valid(fragment_start, partition_size, fragment_end)) {
    if (pbi->ec_active) {
      partition_size = (unsigned int)bytes_left;
    } else {
      vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                         "Truncated packet or corrupt partition "
                         "%d length",
                         i + 1);
    }
  }
  return partition_size;
}

static void setup_token_decoder(VP8D_COMP *pbi,
                                const unsigned char *token_part_sizes) {
  vp8_reader *bool_decoder = &pbi->mbc[0];
  unsigned int partition_idx;
  unsigned int fragment_idx;
  unsigned int num_token_partitions;
  const unsigned char *first_fragment_end =
      pbi->fragments.ptrs[0] + pbi->fragments.sizes[0];

  TOKEN_PARTITION multi_token_partition =
      (TOKEN_PARTITION)vp8_read_literal(&pbi->mbc[8], 2);
  if (!vp8dx_bool_error(&pbi->mbc[8])) {
    pbi->common.multi_token_partition = multi_token_partition;
  }
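  /* multi_token_partition is a 2-bit value, giving 1, 2, 4 or 8 token
   * partitions. */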
  num_token_partitions = 1 << pbi->common.multi_token_partition;

  /* Check for partitions within the fragments and unpack the fragments
   * so that each fragment pointer points to its corresponding partition. */
  for (fragment_idx = 0; fragment_idx < pbi->fragments.count; ++fragment_idx) {
    unsigned int fragment_size = pbi->fragments.sizes[fragment_idx];
    const unsigned char *fragment_end =
        pbi->fragments.ptrs[fragment_idx] + fragment_size;
    /* Special case for handling the first partition since we have already
     * read its size. */
    if (fragment_idx == 0) {
      /* Size of first partition + token partition sizes element */
      ptrdiff_t ext_first_part_size = token_part_sizes -
                                      pbi->fragments.ptrs[0] +
                                      3 * (num_token_partitions - 1);
      fragment_size -= (unsigned int)ext_first_part_size;
      if (fragment_size > 0) {
        pbi->fragments.sizes[0] = (unsigned int)ext_first_part_size;
        /* The fragment contains an additional partition. Move to
         * next. */
        fragment_idx++;
        pbi->fragments.ptrs[fragment_idx] =
            pbi->fragments.ptrs[0] + pbi->fragments.sizes[0];
      }
    }
    /* Split the chunk into partitions read from the bitstream */
    while (fragment_size > 0) {
      ptrdiff_t partition_size = read_available_partition_size(
          pbi, token_part_sizes, pbi->fragments.ptrs[fragment_idx],
          first_fragment_end, fragment_end, fragment_idx - 1,
          num_token_partitions);
      pbi->fragments.sizes[fragment_idx] = (unsigned int)partition_size;
      fragment_size -= (unsigned int)partition_size;
      assert(fragment_idx <= num_token_partitions);
      if (fragment_size > 0) {
        /* The fragment contains an additional partition.
         * Move to next. */
        fragment_idx++;
        pbi->fragments.ptrs[fragment_idx] =
            pbi->fragments.ptrs[fragment_idx - 1] + partition_size;
      }
    }
  }

  pbi->fragments.count = num_token_partitions + 1;

  for (partition_idx = 1; partition_idx < pbi->fragments.count;
       ++partition_idx) {
    if (vp8dx_start_decode(bool_decoder, pbi->fragments.ptrs[partition_idx],
                           pbi->fragments.sizes[partition_idx], pbi->decrypt_cb,
                           pbi->decrypt_state)) {
      vpx_internal_error(&pbi->common.error, VPX_CODEC_MEM_ERROR,
                         "Failed to allocate bool decoder %d", partition_idx);
    }

    bool_decoder++;
  }

#if CONFIG_MULTITHREAD
  /* Clamp number of decoder threads */
  if (pbi->decoding_thread_count > num_token_partitions - 1) {
    pbi->decoding_thread_count = num_token_partitions - 1;
  }
  if ((int)pbi->decoding_thread_count > pbi->common.mb_rows - 1) {
    assert(pbi->common.mb_rows > 0);
    pbi->decoding_thread_count = pbi->common.mb_rows - 1;
  }
#endif
}

static void init_frame(VP8D_COMP *pbi) {
  VP8_COMMON *const pc = &pbi->common;
  MACROBLOCKD *const xd = &pbi->mb;

  if (pc->frame_type == KEY_FRAME) {
    /* Various keyframe initializations */
    memcpy(pc->fc.mvc, vp8_default_mv_context, sizeof(vp8_default_mv_context));

    vp8_init_mbmode_probs(pc);

    vp8_default_coef_probs(pc);

    /* reset the segment feature data to 0 with delta coding (Default state). */
    memset(xd->segment_feature_data, 0, sizeof(xd->segment_feature_data));
    xd->mb_segement_abs_delta = SEGMENT_DELTADATA;

    /* reset the mode ref deltas for loop filter */
    memset(xd->ref_lf_deltas, 0, sizeof(xd->ref_lf_deltas));
    memset(xd->mode_lf_deltas, 0, sizeof(xd->mode_lf_deltas));

    /* All buffers are implicitly updated on key frames. */
    pc->refresh_golden_frame = 1;
    pc->refresh_alt_ref_frame = 1;
    pc->copy_buffer_to_gf = 0;
    pc->copy_buffer_to_arf = 0;

    /* Note that Golden and Altref modes cannot be used on a key frame so
     * ref_frame_sign_bias[] is undefined and meaningless
     */
    pc->ref_frame_sign_bias[GOLDEN_FRAME] = 0;
    pc->ref_frame_sign_bias[ALTREF_FRAME] = 0;
  } else {
    /* To enable choice of different interpolation filters */
    if (!pc->use_bilinear_mc_filter) {
      xd->subpixel_predict = vp8_sixtap_predict4x4;
      xd->subpixel_predict8x4 = vp8_sixtap_predict8x4;
      xd->subpixel_predict8x8 = vp8_sixtap_predict8x8;
      xd->subpixel_predict16x16 = vp8_sixtap_predict16x16;
    } else {
      xd->subpixel_predict = vp8_bilinear_predict4x4;
      xd->subpixel_predict8x4 = vp8_bilinear_predict8x4;
      xd->subpixel_predict8x8 = vp8_bilinear_predict8x8;
      xd->subpixel_predict16x16 = vp8_bilinear_predict16x16;
    }

    if (pbi->decoded_key_frame && pbi->ec_enabled && !pbi->ec_active) {
      pbi->ec_active = 1;
    }
  }

  xd->left_context = &pc->left_context;
  xd->mode_info_context = pc->mi;
  xd->frame_type = pc->frame_type;
  xd->mode_info_context->mbmi.mode = DC_PRED;
  xd->mode_info_stride = pc->mode_info_stride;
  xd->corrupted = 0; /* init without corruption */

  xd->fullpixel_mask = 0xffffffff;
  if (pc->full_pixel) xd->fullpixel_mask = 0xfffffff8;
}

int vp8_decode_frame(VP8D_COMP *pbi) {
  vp8_reader *const bc = &pbi->mbc[8];
  VP8_COMMON *const pc = &pbi->common;
  MACROBLOCKD *const xd = &pbi->mb;
  const unsigned char *data = pbi->fragments.ptrs[0];
  const unsigned int data_sz = pbi->fragments.sizes[0];
  const unsigned char *data_end = data + data_sz;
  ptrdiff_t first_partition_length_in_bytes;

  int i, j, k, l;
  const int *const mb_feature_data_bits = vp8_mb_feature_data_bits;
  int corrupt_tokens = 0;
  int prev_independent_partitions = pbi->independent_partitions;

  YV12_BUFFER_CONFIG *yv12_fb_new = pbi->dec_fb_ref[INTRA_FRAME];

  /* start with no corruption of current frame */
  xd->corrupted = 0;
  yv12_fb_new->corrupted = 0;

  if (data_end - data < 3) {
    if (!pbi->ec_active) {
      vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                         "Truncated packet");
    }

    /* Declare the missing frame as an inter frame since it will
       be handled as an inter frame when we have estimated its
       motion vectors. */
    pc->frame_type = INTER_FRAME;
    pc->version = 0;
    pc->show_frame = 1;
    first_partition_length_in_bytes = 0;
  } else {
    unsigned char clear_buffer[10];
    const unsigned char *clear = data;
    if (pbi->decrypt_cb) {
      int n = (int)VPXMIN(sizeof(clear_buffer), data_sz);
      pbi->decrypt_cb(pbi->decrypt_state, data, clear_buffer, n);
      clear = clear_buffer;
    }

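    /* The 3-byte frame tag: bit 0 is the frame type, bits 1-3 the codec
     * version, bit 4 the show_frame flag, and bits 5-23 the length of the
     * first partition. */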
    pc->frame_type = (FRAME_TYPE)(clear[0] & 1);
    pc->version = (clear[0] >> 1) & 7;
    pc->show_frame = (clear[0] >> 4) & 1;
    first_partition_length_in_bytes =
        (clear[0] | (clear[1] << 8) | (clear[2] << 16)) >> 5;

    if (!pbi->ec_active && (data + first_partition_length_in_bytes > data_end ||
                            data + first_partition_length_in_bytes < data)) {
      vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                         "Truncated packet or corrupt partition 0 length");
    }

    data += 3;
    clear += 3;

    vp8_setup_version(pc);

    if (pc->frame_type == KEY_FRAME) {
      /* vet via sync code */
      /* When error concealment is enabled we should only check the sync
       * code if we have enough bits available
       */
      if (data + 3 < data_end) {
        if (clear[0] != 0x9d || clear[1] != 0x01 || clear[2] != 0x2a) {
          vpx_internal_error(&pc->error, VPX_CODEC_UNSUP_BITSTREAM,
                             "Invalid frame sync code");
        }
      }

      /* If error concealment is enabled we should only parse the new size
       * if we have enough data. Otherwise we will end up with the wrong
       * size.
       */
      if (data + 6 < data_end) {
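        /* Width and height are 14-bit values; the top two bits of the second
         * byte of each carry the scaling factor. */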
        pc->Width = (clear[3] | (clear[4] << 8)) & 0x3fff;
        pc->horiz_scale = clear[4] >> 6;
        pc->Height = (clear[5] | (clear[6] << 8)) & 0x3fff;
        pc->vert_scale = clear[6] >> 6;
        data += 7;
      } else if (!pbi->ec_active) {
        vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                           "Truncated key frame header");
      } else {
        /* Error concealment is active, clear the frame. */
        data = data_end;
      }
    } else {
      memcpy(&xd->pre, yv12_fb_new, sizeof(YV12_BUFFER_CONFIG));
      memcpy(&xd->dst, yv12_fb_new, sizeof(YV12_BUFFER_CONFIG));
    }
  }
  if ((!pbi->decoded_key_frame && pc->frame_type != KEY_FRAME)) {
    return -1;
  }

  init_frame(pbi);

  if (vp8dx_start_decode(bc, data, (unsigned int)(data_end - data),
                         pbi->decrypt_cb, pbi->decrypt_state)) {
    vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR,
                       "Failed to allocate bool decoder 0");
  }
  if (pc->frame_type == KEY_FRAME) {
    (void)vp8_read_bit(bc);  // colorspace
    pc->clamp_type = (CLAMP_TYPE)vp8_read_bit(bc);
  }

  /* Is segmentation enabled */
  xd->segmentation_enabled = (unsigned char)vp8_read_bit(bc);

  if (xd->segmentation_enabled) {
    /* Signal whether or not the segmentation map is being explicitly updated
     * this frame. */
    xd->update_mb_segmentation_map = (unsigned char)vp8_read_bit(bc);
    xd->update_mb_segmentation_data = (unsigned char)vp8_read_bit(bc);

    if (xd->update_mb_segmentation_data) {
      xd->mb_segement_abs_delta = (unsigned char)vp8_read_bit(bc);

      memset(xd->segment_feature_data, 0, sizeof(xd->segment_feature_data));

      /* For each segmentation feature (Quant and loop filter level) */
      for (i = 0; i < MB_LVL_MAX; ++i) {
        for (j = 0; j < MAX_MB_SEGMENTS; ++j) {
          /* Frame level data */
          if (vp8_read_bit(bc)) {
            xd->segment_feature_data[i][j] =
                (signed char)vp8_read_literal(bc, mb_feature_data_bits[i]);

            if (vp8_read_bit(bc)) {
              xd->segment_feature_data[i][j] = -xd->segment_feature_data[i][j];
            }
          } else {
            xd->segment_feature_data[i][j] = 0;
          }
        }
      }
    }

    if (xd->update_mb_segmentation_map) {
      /* Which macro block level features are enabled */
      memset(xd->mb_segment_tree_probs, 255, sizeof(xd->mb_segment_tree_probs));

      /* Read the probs used to decode the segment id for each macro block. */
      for (i = 0; i < MB_FEATURE_TREE_PROBS; ++i) {
        /* If not explicitly set, the value defaults to 255 via the memset
         * above */
        if (vp8_read_bit(bc)) {
          xd->mb_segment_tree_probs[i] = (vp8_prob)vp8_read_literal(bc, 8);
        }
      }
    }
  } else {
    /* No segmentation updates on this frame */
    xd->update_mb_segmentation_map = 0;
    xd->update_mb_segmentation_data = 0;
  }

  /* Read the loop filter level and type */
  pc->filter_type = (LOOPFILTERTYPE)vp8_read_bit(bc);
  pc->filter_level = vp8_read_literal(bc, 6);
  pc->sharpness_level = vp8_read_literal(bc, 3);

  /* Read in loop filter deltas applied at the MB level based on mode or ref
   * frame. */
  xd->mode_ref_lf_delta_update = 0;
  xd->mode_ref_lf_delta_enabled = (unsigned char)vp8_read_bit(bc);

  if (xd->mode_ref_lf_delta_enabled) {
    /* Do the deltas need to be updated */
    xd->mode_ref_lf_delta_update = (unsigned char)vp8_read_bit(bc);

    if (xd->mode_ref_lf_delta_update) {
      /* Send update */
      for (i = 0; i < MAX_REF_LF_DELTAS; ++i) {
        if (vp8_read_bit(bc)) {
          /*sign = vp8_read_bit( bc );*/
          xd->ref_lf_deltas[i] = (signed char)vp8_read_literal(bc, 6);

          if (vp8_read_bit(bc)) { /* Apply sign */
            xd->ref_lf_deltas[i] = xd->ref_lf_deltas[i] * -1;
          }
        }
      }

      /* Send update */
      for (i = 0; i < MAX_MODE_LF_DELTAS; ++i) {
        if (vp8_read_bit(bc)) {
          /*sign = vp8_read_bit( bc );*/
          xd->mode_lf_deltas[i] = (signed char)vp8_read_literal(bc, 6);

          if (vp8_read_bit(bc)) { /* Apply sign */
            xd->mode_lf_deltas[i] = xd->mode_lf_deltas[i] * -1;
          }
        }
      }
    }
  }

  setup_token_decoder(pbi, data + first_partition_length_in_bytes);

  xd->current_bc = &pbi->mbc[0];

  /* Read the default quantizers. */
  {
    int Q, q_update;

    Q = vp8_read_literal(bc, 7); /* AC 1st order Q = default */
    pc->base_qindex = Q;
    q_update = 0;
    pc->y1dc_delta_q = get_delta_q(bc, pc->y1dc_delta_q, &q_update);
    pc->y2dc_delta_q = get_delta_q(bc, pc->y2dc_delta_q, &q_update);
    pc->y2ac_delta_q = get_delta_q(bc, pc->y2ac_delta_q, &q_update);
    pc->uvdc_delta_q = get_delta_q(bc, pc->uvdc_delta_q, &q_update);
    pc->uvac_delta_q = get_delta_q(bc, pc->uvac_delta_q, &q_update);

    if (q_update) vp8cx_init_de_quantizer(pbi);

    /* MB level dequantizer setup */
    vp8_mb_init_dequantizer(pbi, &pbi->mb);
  }

  /* Determine if the golden frame or ARF buffer should be updated and how.
   * For all non key frames the GF and ARF refresh flags and sign bias
   * flags must be set explicitly.
   */
  if (pc->frame_type != KEY_FRAME) {
    /* Should the GF or ARF be updated from the current frame */
    pc->refresh_golden_frame = vp8_read_bit(bc);
#if CONFIG_ERROR_CONCEALMENT
    /* Assume we shouldn't refresh golden if the bit is missing */
    xd->corrupted |= vp8dx_bool_error(bc);
    if (pbi->ec_active && xd->corrupted) pc->refresh_golden_frame = 0;
#endif

    pc->refresh_alt_ref_frame = vp8_read_bit(bc);
#if CONFIG_ERROR_CONCEALMENT
    /* Assume we shouldn't refresh altref if the bit is missing */
    xd->corrupted |= vp8dx_bool_error(bc);
    if (pbi->ec_active && xd->corrupted) pc->refresh_alt_ref_frame = 0;
#endif

    /* Buffer to buffer copy flags. */
    pc->copy_buffer_to_gf = 0;

    if (!pc->refresh_golden_frame) {
      pc->copy_buffer_to_gf = vp8_read_literal(bc, 2);
    }

#if CONFIG_ERROR_CONCEALMENT
    /* Assume we shouldn't copy to the golden if the bit is missing */
    xd->corrupted |= vp8dx_bool_error(bc);
    if (pbi->ec_active && xd->corrupted) pc->copy_buffer_to_gf = 0;
#endif

    pc->copy_buffer_to_arf = 0;

    if (!pc->refresh_alt_ref_frame) {
      pc->copy_buffer_to_arf = vp8_read_literal(bc, 2);
    }

#if CONFIG_ERROR_CONCEALMENT
    /* Assume we shouldn't copy to the alt-ref if the bit is missing */
    xd->corrupted |= vp8dx_bool_error(bc);
    if (pbi->ec_active && xd->corrupted) pc->copy_buffer_to_arf = 0;
#endif

    pc->ref_frame_sign_bias[GOLDEN_FRAME] = vp8_read_bit(bc);
    pc->ref_frame_sign_bias[ALTREF_FRAME] = vp8_read_bit(bc);
  }

  pc->refresh_entropy_probs = vp8_read_bit(bc);
#if CONFIG_ERROR_CONCEALMENT
  /* Assume we shouldn't refresh the probabilities if the bit is
   * missing */
  xd->corrupted |= vp8dx_bool_error(bc);
  if (pbi->ec_active && xd->corrupted) pc->refresh_entropy_probs = 0;
#endif
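  /* If the updated probabilities are not to be carried forward, keep a copy
   * so the previous context can be restored once this frame is decoded. */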
  if (pc->refresh_entropy_probs == 0) {
    memcpy(&pc->lfc, &pc->fc, sizeof(pc->fc));
  }

  pc->refresh_last_frame = pc->frame_type == KEY_FRAME || vp8_read_bit(bc);

#if CONFIG_ERROR_CONCEALMENT
  /* Assume we should refresh the last frame if the bit is missing */
  xd->corrupted |= vp8dx_bool_error(bc);
  if (pbi->ec_active && xd->corrupted) pc->refresh_last_frame = 1;
#endif

  if (0) {
    FILE *z = fopen("decodestats.stt", "a");
    fprintf(z, "%6d F:%d,G:%d,A:%d,L:%d,Q:%d\n", pc->current_video_frame,
            pc->frame_type, pc->refresh_golden_frame, pc->refresh_alt_ref_frame,
            pc->refresh_last_frame, pc->base_qindex);
    fclose(z);
  }

  {
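    /* Partitions are treated as independent only if the coefficient
     * probabilities do not vary with the previous-coefficient context. */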
    pbi->independent_partitions = 1;

    /* read coef probability tree */
    for (i = 0; i < BLOCK_TYPES; ++i) {
      for (j = 0; j < COEF_BANDS; ++j) {
        for (k = 0; k < PREV_COEF_CONTEXTS; ++k) {
          for (l = 0; l < ENTROPY_NODES; ++l) {
            vp8_prob *const p = pc->fc.coef_probs[i][j][k] + l;

            if (vp8_read(bc, vp8_coef_update_probs[i][j][k][l])) {
              *p = (vp8_prob)vp8_read_literal(bc, 8);
            }
            if (k > 0 && *p != pc->fc.coef_probs[i][j][k - 1][l]) {
              pbi->independent_partitions = 0;
            }
          }
        }
      }
    }
  }

  /* clear out the coeff buffer */
  memset(xd->qcoeff, 0, sizeof(xd->qcoeff));

  vp8_decode_mode_mvs(pbi);

#if CONFIG_ERROR_CONCEALMENT
  if (pbi->ec_active &&
      pbi->mvs_corrupt_from_mb < (unsigned int)pc->mb_cols * pc->mb_rows) {
    /* Motion vectors are missing in this frame. We will try to estimate
     * them and then continue decoding the frame as usual */
    vp8_estimate_missing_mvs(pbi);
  }
#endif

  memset(pc->above_context, 0, sizeof(ENTROPY_CONTEXT_PLANES) * pc->mb_cols);
  pbi->frame_corrupt_residual = 0;

#if CONFIG_MULTITHREAD
  if (pbi->b_multithreaded_rd && pc->multi_token_partition != ONE_PARTITION) {
    unsigned int thread;
    vp8mt_decode_mb_rows(pbi, xd);
    vp8_yv12_extend_frame_borders(yv12_fb_new);
    for (thread = 0; thread < pbi->decoding_thread_count; ++thread) {
      corrupt_tokens |= pbi->mb_row_di[thread].mbd.corrupted;
    }
  } else
#endif
  {
    decode_mb_rows(pbi);
    corrupt_tokens |= xd->corrupted;
  }

  /* Collect information about decoder corruption. */
  /* 1. Check first boolean decoder for errors. */
  yv12_fb_new->corrupted = vp8dx_bool_error(bc);
  /* 2. Check the macroblock information */
  yv12_fb_new->corrupted |= corrupt_tokens;

  if (!pbi->decoded_key_frame) {
    if (pc->frame_type == KEY_FRAME && !yv12_fb_new->corrupted) {
      pbi->decoded_key_frame = 1;
    } else {
      vpx_internal_error(&pbi->common.error, VPX_CODEC_CORRUPT_FRAME,
                         "A stream must start with a complete key frame");
    }
  }

  /* vpx_log("Decoder: Frame Decoded, Size Roughly:%d bytes
   * \n",bc->pos+pbi->bc2.pos); */

  if (pc->refresh_entropy_probs == 0) {
    memcpy(&pc->fc, &pc->lfc, sizeof(pc->fc));
    pbi->independent_partitions = prev_independent_partitions;
  }

#ifdef PACKET_TESTING
  {
    FILE *f = fopen("decompressor.VP8", "ab");
    unsigned int size = pbi->bc2.pos + pbi->bc.pos + 8;
    fwrite((void *)&size, 4, 1, f);
    fwrite((void *)pbi->Source, size, 1, f);
    fclose(f);
  }
#endif

  return 0;
}