1 /*
2 INTEL CONFIDENTIAL
3 Copyright 2009 Intel Corporation All Rights Reserved.
4 The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
5
6 No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
7 */
8
9
10 #include <glib.h>
11 #include <dlfcn.h>
12
13 #include <string.h>
14 #include "vbp_loader.h"
15 #include "vbp_utils.h"
16 #include "vbp_mp42_parser.h"
17 #include "../codecs/mp4/parser/viddec_mp4_parse.h"
18
19 #define MIX_VBP_COMP "mixvbp"
20
21 /*
22  * Some DivX AVI files contain 2 frames in one gstbuffer.
23 */
24 #define MAX_NUM_PICTURES_MP42 8
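/*
 * query_data->picture_data is allocated with MAX_NUM_PICTURES_MP42 entries in
 * vbp_allocate_query_data_mp42(); query_data->number_pictures counts how many of
 * them get filled for the current sample buffer.
 */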
25
26 uint32 vbp_get_sc_pos_mp42(uint8 *buf, uint32 length,
27 uint32* sc_phase, uint32 *sc_end_pos, uint8 *is_normal_sc);
28
29 void vbp_on_vop_mp42(vbp_context *pcontext, int list_index);
30 void vbp_on_vop_svh_mp42(vbp_context *pcontext, int list_index);
31 void vbp_dump_query_data(vbp_context *pcontext, int list_index);
32
33 uint32 vbp_process_slices_mp42(vbp_context *pcontext, int list_index);
34 uint32 vbp_process_slices_svh_mp42(vbp_context *pcontext, int list_index);
35
36 /* This is copied from the DHG MP42 parser */
37 static inline mp4_Status_t
38 vbp_sprite_trajectory_mp42(void *parent, mp4_VideoObjectLayer_t *vidObjLay,
39 mp4_VideoObjectPlane_t *vidObjPlane);
40
41 /* This is copied from the DHG MP42 parser */
42 static inline int32_t vbp_sprite_dmv_length_mp42(void * parent,
43 int32_t *dmv_length);
44
45 /**
46  * Resolve the MP42 parser entry points from the loaded parser library via dlsym().
47  */
48 uint32 vbp_init_parser_entries_mp42(vbp_context *pcontext)
49 {
50 if (NULL == pcontext->parser_ops)
51 {
52 /* absolutely impossible, just sanity check */
53 return VBP_PARM;
54 }
55 pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_mp4_init");
56 if (pcontext->parser_ops->init == NULL)
57 {
58 ETRACE ("Failed to set entry point." );
59 return VBP_LOAD;
60 }
61
62 pcontext->parser_ops->parse_sc = dlsym(pcontext->fd_parser, "viddec_parse_sc_mp4");
63 if (pcontext->parser_ops->parse_sc == NULL)
64 {
65 ETRACE ("Failed to set entry point." );
66 return VBP_LOAD;
67 }
68
69 pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_mp4_parse");
70 if (pcontext->parser_ops->parse_syntax == NULL)
71 {
72 ETRACE ("Failed to set entry point." );
73 return VBP_LOAD;
74 }
75
76 pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_mp4_get_context_size");
77 if (pcontext->parser_ops->get_cxt_size == NULL)
78 {
79 ETRACE ("Failed to set entry point." );
80 return VBP_LOAD;
81 }
82
83 pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_mp4_wkld_done");
84 if (pcontext->parser_ops->is_wkld_done == NULL)
85 {
86 ETRACE ("Failed to set entry point." );
87 return VBP_LOAD;
88 }
89
90 return VBP_OK;
91 }
92
93
94 /*
95 * For the codec_data passed by gstreamer
96 */
97 uint32 vbp_parse_init_data_mp42(vbp_context *pcontext)
98 {
99 VTRACE ("begin\n");
100 vbp_parse_start_code_mp42(pcontext);
101 VTRACE ("end\n");
102
103 return VBP_OK;
104 }
105
106 uint32 vbp_process_parsing_result_mp42(vbp_context *pcontext, int list_index)
107 {
108 vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
109 viddec_mp4_parser_t *parser =
110 (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
111
112 uint8 is_svh = 0;
113 uint32 current_sc = parser->current_sc;
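    /*
     * cur_sc_prefix is set from is_normal_sc in vbp_parse_start_code_mp42(): non-zero
     * means a full 00 00 01 start code prefix was found, zero means a short video
     * header (H.263-style) start code, so is_svh is simply its inverse.
     */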
114 is_svh = parser->cur_sc_prefix ? false : true;
115
116 VTRACE ("begin\n");
117
118 VTRACE ("current_sc = 0x%x profile_and_level_indication = 0x%x\n",
119 parser->current_sc, parser->info.profile_and_level_indication);
120
121 if (!is_svh)
122 {
123 /* remove prefix from current_sc */
124 current_sc &= 0x0FF;
125 switch (current_sc)
126 {
127 case MP4_SC_VISUAL_OBJECT_SEQUENCE:
128 VTRACE ("MP4_SC_VISUAL_OBJECT_SEQUENCE\n");
129
130 query_data->codec_data.profile_and_level_indication
131 = parser->info.profile_and_level_indication;
132
133 break;
134 case MP4_SC_VIDEO_OBJECT_PLANE:
135 VTRACE ("MP4_SC_VIDEO_OBJECT_PLANE\n");
136 vbp_on_vop_mp42(pcontext, list_index);
137 break;
138 default: {
139 if ((current_sc >= MP4_SC_VIDEO_OBJECT_LAYER_MIN) && (current_sc
140 <= MP4_SC_VIDEO_OBJECT_LAYER_MAX)) {
141 query_data->codec_data.profile_and_level_indication
142 = parser->info.profile_and_level_indication;
143 } else if (current_sc <= MP4_SC_VIDEO_OBJECT_MAX) {
144 if (parser->sc_seen == MP4_SC_SEEN_SVH) {
145 VTRACE ("parser->sc_seen == MP4_SC_SEEN_SVH\n");
146 vbp_on_vop_svh_mp42(pcontext, list_index);
147 }
148 }
149 }
150 break;
151 }
152
153 } else {
154 if (parser->sc_seen == MP4_SC_SEEN_SVH) {
155 VTRACE ("parser->sc_seen == MP4_SC_SEEN_SVH\n");
156 vbp_on_vop_svh_mp42(pcontext, list_index);
157 }
158 }
159
160 VTRACE ("End\n");
161
162 return VBP_OK;
163 }
164
165 /*
166  * This function fills viddec_pm_cxt_t with the start codes found in the sample buffer.
167  * The code may be made more efficient later.
168  */
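/*
 * Rough sketch of the resulting list layout (illustrative): for a cubby buffer
 * holding [VOL header][VOP 0][VOP 1], the loop below produces
 *
 *   list.data[0] = { stpos = start of VOL start code,   edpos = start of VOP 0 start code }
 *   list.data[1] = { stpos = start of VOP 0 start code, edpos = start of VOP 1 start code }
 *   list.data[2] = { stpos = start of VOP 1 start code, edpos = parse_cubby.size }
 *
 * so each item covers one start-code-delimited unit. If no start code is found at
 * all, the whole buffer becomes a single item.
 */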
169
170 uint32 vbp_parse_start_code_mp42(vbp_context *pcontext)
171 {
172 viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
173 /*viddec_parser_ops_t *func = pcontext->parser_ops; */
174 uint8 *buf = NULL;
175 uint32 size = 0;
176 uint32 sc_phase = 0;
177 uint32 sc_end_pos = -1;
178
179 uint32 bytes_parsed = 0;
180
181 viddec_mp4_parser_t *pinfo = NULL;
182
183 vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
184 /* reset query data for the new sample buffer */
185 query_data->number_pictures = 0;
186
187 /* emulation prevention byte is always present */
188 cxt->getbits.is_emul_reqd = 1;
189
190 cxt->list.num_items = 0;
191 cxt->list.data[0].stpos = 0;
192 cxt->list.data[0].edpos = cxt->parse_cubby.size;
193
194 buf = cxt->parse_cubby.buf;
195 size = cxt->parse_cubby.size;
196
197 pinfo = (viddec_mp4_parser_t *) &(cxt->codec_data[0]);
198
199 uint8 is_normal_sc = 0;
200
201 uint32 found_sc = 0;
202
203 VTRACE ("begin cxt->parse_cubby.size= %d\n", size);
204
205 while (1) {
206
207 sc_phase = 0;
208
209 found_sc = vbp_get_sc_pos_mp42(buf + bytes_parsed, size
210 - bytes_parsed, &sc_phase, &sc_end_pos, &is_normal_sc);
211
212 if (found_sc) {
213
214 VTRACE ("sc_end_pos = %d\n", sc_end_pos);
215
216 cxt->list.data[cxt->list.num_items].stpos = bytes_parsed
217 + sc_end_pos - 3;
218 if (cxt->list.num_items != 0) {
219 cxt->list.data[cxt->list.num_items - 1].edpos = bytes_parsed
220 + sc_end_pos - 3;
221 }
222 bytes_parsed += sc_end_pos;
223
224 cxt->list.num_items++;
225 pinfo->cur_sc_prefix = is_normal_sc;
226
227 } else {
228
229 if (cxt->list.num_items != 0) {
230 cxt->list.data[cxt->list.num_items - 1].edpos
231 = cxt->parse_cubby.size;
232 break;
233 } else {
234
235                 VTRACE ("No start code found in the cubby buffer! The size of the cubby is %d\n",
236 size);
237
238 cxt->list.num_items = 1;
239 cxt->list.data[0].stpos = 0;
240 cxt->list.data[0].edpos = cxt->parse_cubby.size;
241 break;
242 }
243 }
244 }
245
246 return VBP_OK;
247 }
248
249 uint32 vbp_populate_query_data_mp42(vbp_context *pcontext)
250 {
251 #if 0
252 vbp_dump_query_data(pcontext);
253 #endif
254 return VBP_OK;
255 }
256
257 void vbp_fill_codec_data(vbp_context *pcontext, int list_index)
258 {
259
260 /* fill vbp_codec_data_mp42 data */
261 viddec_mp4_parser_t *parser =
262 (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
263 vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
264 query_data->codec_data.profile_and_level_indication
265 = parser->info.profile_and_level_indication;
266 }
267
268 void vbp_fill_slice_data(vbp_context *pcontext, int list_index)
269 {
270
271 viddec_mp4_parser_t *parser =
272 (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
273
274 if (!parser->info.VisualObject.VideoObject.short_video_header) {
275 vbp_process_slices_mp42(pcontext, list_index);
276 } else {
277 vbp_process_slices_svh_mp42(pcontext, list_index);
278 }
279 }
280
281 void vbp_fill_picture_param(vbp_context *pcontext, int list_index)
282 {
283
284 viddec_mp4_parser_t *parser =
285 (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
286 vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
287
288 vbp_picture_data_mp42 *picture_data = NULL;
289 VAPictureParameterBufferMPEG4 *picture_param = NULL;
290
291 picture_data = &(query_data->picture_data[query_data->number_pictures]);
292
293 picture_param = &(picture_data->picture_param);
294
295 uint8 idx = 0;
296
297 picture_data->vop_coded
298 = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coded;
299 VTRACE ("vop_coded = %d\n", picture_data->vop_coded);
300
301 /*
302 * fill picture_param
303 */
304
305 /* NOTE: for short video header, the parser saves vop_width and vop_height
306 * to VOL->video_object_layer_width and VOL->video_object_layer_height
307 */
308 picture_param->vop_width
309 = parser->info.VisualObject.VideoObject.video_object_layer_width;
310 picture_param->vop_height
311 = parser->info.VisualObject.VideoObject.video_object_layer_height;
312
313 picture_param->forward_reference_picture = VA_INVALID_SURFACE;
314 picture_param->backward_reference_picture = VA_INVALID_SURFACE;
315
316 /*
317 * VAPictureParameterBufferMPEG4::vol_fields
318 */
319 picture_param->vol_fields.bits.short_video_header
320 = parser->info.VisualObject.VideoObject.short_video_header;
321 picture_param->vol_fields.bits.chroma_format
322 = parser->info.VisualObject.VideoObject.VOLControlParameters.chroma_format;
323
324     /* TODO: find out why the test suite always sets this value to 0 */
325 // picture_param->vol_fields.bits.chroma_format = 0;
326
327 picture_param->vol_fields.bits.interlaced
328 = parser->info.VisualObject.VideoObject.interlaced;
329 picture_param->vol_fields.bits.obmc_disable
330 = parser->info.VisualObject.VideoObject.obmc_disable;
331 picture_param->vol_fields.bits.sprite_enable
332 = parser->info.VisualObject.VideoObject.sprite_enable;
333 picture_param->vol_fields.bits.sprite_warping_accuracy
334 = parser->info.VisualObject.VideoObject.sprite_info.sprite_warping_accuracy;
335 picture_param->vol_fields.bits.quant_type
336 = parser->info.VisualObject.VideoObject.quant_type;
337 picture_param->vol_fields.bits.quarter_sample
338 = parser->info.VisualObject.VideoObject.quarter_sample;
339 picture_param->vol_fields.bits.data_partitioned
340 = parser->info.VisualObject.VideoObject.data_partitioned;
341 picture_param->vol_fields.bits.reversible_vlc
342 = parser->info.VisualObject.VideoObject.reversible_vlc;
343 picture_param->vol_fields.bits.resync_marker_disable
344 = parser->info.VisualObject.VideoObject.resync_marker_disable;
345
346 picture_param->no_of_sprite_warping_points
347 = parser->info.VisualObject.VideoObject.sprite_info.no_of_sprite_warping_points;
348
349 for (idx = 0; idx < 3; idx++) {
350 picture_param->sprite_trajectory_du[idx]
351 = parser->info.VisualObject.VideoObject.VideoObjectPlane.warping_mv_code_du[idx];
352 picture_param->sprite_trajectory_dv[idx]
353 = parser->info.VisualObject.VideoObject.VideoObjectPlane.warping_mv_code_dv[idx];
354 }
355
356 picture_param->quant_precision
357 = parser->info.VisualObject.VideoObject.quant_precision;
358
359 /*
360 * VAPictureParameterBufferMPEG4::vop_fields
361 */
362
363 if (!parser->info.VisualObject.VideoObject.short_video_header) {
364 picture_param->vop_fields.bits.vop_coding_type
365 = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coding_type;
366 } else {
367 picture_param->vop_fields.bits.vop_coding_type
368 = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.picture_coding_type;
369 }
370
371 /* TODO:
372 * fill picture_param->vop_fields.bits.backward_reference_vop_coding_type
373 * This shall be done in mixvideoformat_mp42. See M42 spec 7.6.7
374 */
375
376 if (picture_param->vop_fields.bits.vop_coding_type != MP4_VOP_TYPE_B) {
377 picture_param->vop_fields.bits.backward_reference_vop_coding_type
378 = picture_param->vop_fields.bits.vop_coding_type;
379 }
380
381 picture_param->vop_fields.bits.vop_rounding_type
382 = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_rounding_type;
383 picture_param->vop_fields.bits.intra_dc_vlc_thr
384 = parser->info.VisualObject.VideoObject.VideoObjectPlane.intra_dc_vlc_thr;
385 picture_param->vop_fields.bits.top_field_first
386 = parser->info.VisualObject.VideoObject.VideoObjectPlane.top_field_first;
387 picture_param->vop_fields.bits.alternate_vertical_scan_flag
388 = parser->info.VisualObject.VideoObject.VideoObjectPlane.alternate_vertical_scan_flag;
389
390 picture_param->vop_fcode_forward
391 = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_fcode_forward;
392 picture_param->vop_fcode_backward
393 = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_fcode_backward;
394 picture_param->vop_time_increment_resolution
395 = parser->info.VisualObject.VideoObject.vop_time_increment_resolution;
396
397 /* short header related */
398 picture_param->num_gobs_in_vop
399 = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.num_gobs_in_vop;
400 picture_param->num_macroblocks_in_gob
401 = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.num_macroblocks_in_gob;
402
403 /* for direct mode prediction */
404 picture_param->TRB = parser->info.VisualObject.VideoObject.TRB;
405 picture_param->TRD = parser->info.VisualObject.VideoObject.TRD;
406
407 #if 0
408 printf(
409 "parser->info.VisualObject.VideoObject.reduced_resolution_vop_enable = %d\n",
410 parser->info.VisualObject.VideoObject.reduced_resolution_vop_enable);
411
412 printf("parser->info.VisualObject.VideoObject.data_partitioned = %d\n",
413 parser->info.VisualObject.VideoObject.data_partitioned);
414
415 printf(
416 "####parser->info.VisualObject.VideoObject.resync_marker_disable = %d####\n",
417 parser->info.VisualObject.VideoObject.resync_marker_disable);
418 #endif
419 }
420
421 void vbp_fill_iq_matrix_buffer(vbp_context *pcontext, int list_index)
422 {
423
424 viddec_mp4_parser_t *parser =
425 (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
426 vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
427
428 mp4_VOLQuant_mat_t *quant_mat_info =
429 &(parser->info.VisualObject.VideoObject.quant_mat_info);
430
431 vbp_picture_data_mp42 *picture_data = NULL;
432 VAIQMatrixBufferMPEG4 *iq_matrix = NULL;
433
434 picture_data = &(query_data->picture_data[query_data->number_pictures]);
435 iq_matrix = &(picture_data->iq_matrix_buffer);
436
437 iq_matrix->load_intra_quant_mat = quant_mat_info->load_intra_quant_mat;
438 iq_matrix->load_non_intra_quant_mat
439 = quant_mat_info->load_nonintra_quant_mat;
440 memcpy(iq_matrix->intra_quant_mat, quant_mat_info->intra_quant_mat, 64);
441 memcpy(iq_matrix->non_intra_quant_mat, quant_mat_info->nonintra_quant_mat,
442 64);
443 }
444
445 void vbp_on_vop_mp42(vbp_context *pcontext, int list_index)
446 {
447 vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
448
449 vbp_fill_codec_data(pcontext, list_index);
450
451 vbp_fill_picture_param(pcontext, list_index);
452 vbp_fill_iq_matrix_buffer(pcontext, list_index);
453 vbp_fill_slice_data(pcontext, list_index);
454
455 query_data->number_pictures++;
456 }
457
458 void vbp_on_vop_svh_mp42(vbp_context *pcontext, int list_index)
459 {
460 vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
461
462 vbp_fill_codec_data(pcontext, list_index);
463
464 vbp_fill_picture_param(pcontext, list_index);
465 vbp_fill_iq_matrix_buffer(pcontext, list_index);
466 vbp_fill_slice_data(pcontext, list_index);
467
468 query_data->number_pictures++;
469 }
470
471 uint32 vbp_get_sc_pos_mp42(
472 uint8 *buf,
473 uint32 length,
474 uint32* sc_phase,
475 uint32 *sc_end_pos,
476 uint8 *is_normal_sc)
477 {
478 uint8 *ptr = buf;
479 uint32 size;
480 uint32 data_left = 0, phase = 0, ret = 0;
481 size = 0;
482
483 data_left = length;
484 phase = *sc_phase;
485 *sc_end_pos = -1;
486
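    /*
     * phase counts how many bytes of the 00 00 01 prefix have been matched so far
     * (0-2: leading zero bytes, clamped at 2; 3: full prefix matched; 4: start code
     * confirmed). It is read from and written back through *sc_phase, so a caller
     * could in principle resume a scan across buffers; the caller in this file
     * resets it to 0 before each scan.
     */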
487 /* parse until there is more data and start code not found */
488 while ((data_left > 0) && (phase < 3)) {
489         /* Check if we are word (4-byte) aligned and phase == 0; if that's the case
490            we can scan a word at a time instead of a byte. */
491 if (((((uint32) ptr) & 0x3) == 0) && (phase == 0)) {
492 while (data_left > 3) {
493 uint32 data;
494 char mask1 = 0, mask2 = 0;
495
496 data = *((uint32 *) ptr);
497 #ifndef MFDBIGENDIAN
498 data = SWAP_WORD(data);
499 #endif
500 mask1 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK0));
501 mask2 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK1));
502                 /* If the second and fourth bytes are not zeros then we cannot have a
503                    start code here, as we need two consecutive zero bytes for a start code pattern. */
504 if (mask1 && mask2) {/* Success so skip 4 bytes and start over */
505 ptr += 4;
506 size += 4;
507 data_left -= 4;
508 continue;
509 } else {
510 break;
511 }
512 }
513 }
514
515         /* At this point either the data is not on a word boundary, or phase > 0, or we are on a
516            word boundary but we detected two zero bytes in the word, so we look one byte at a time. */
517 if (data_left > 0) {
518 if (*ptr == FIRST_STARTCODE_BYTE) {/* Phase can be 3 only if third start code byte is found */
519 phase++;
520 ptr++;
521 size++;
522 data_left--;
523 if (phase > 2) {
524 phase = 2;
525
526 if ((((uint32) ptr) & 0x3) == 0) {
527 while (data_left > 3) {
528 if (*((uint32 *) ptr) != 0) {
529 break;
530 }
531 ptr += 4;
532 size += 4;
533 data_left -= 4;
534 }
535 }
536 }
537 } else {
538 uint8 normal_sc = 0, short_sc = 0;
539 if (phase == 2) {
540 normal_sc = (*ptr == THIRD_STARTCODE_BYTE);
541 short_sc = (SHORT_THIRD_STARTCODE_BYTE == (*ptr & 0xFC));
542
543 VTRACE ("short_sc = %d\n", short_sc);
544
545 *is_normal_sc = normal_sc;
546 }
547
548 if (!(normal_sc | short_sc)) {
549 phase = 0;
550 } else {/* Match for start code so update context with byte position */
551 *sc_end_pos = size;
552 phase = 3;
553
554 if (normal_sc) {
555 } else {
556                     /* For a short start code, the start code is contained in one nibble, so just return at this point. */
557 phase += 1;
558 ret = 1;
559 break;
560 }
561 }
562 ptr++;
563 size++;
564 data_left--;
565 }
566 }
567 }
568 if ((data_left > 0) && (phase == 3)) {
569 (*sc_end_pos)++;
570 phase++;
571 ret = 1;
572 }
573 *sc_phase = phase;
574     /* ret is 1 (start code found) only if phase reached 4; otherwise 0 is returned. */
575 return ret;
576 }
577
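/*
 * Bit length of the macroblock_number field in a video packet header: the smallest
 * length such that (1 << length) >= numOfMbs, with a minimum of 1. For example, a
 * QCIF VOP with 11 x 9 = 99 macroblocks needs 7 bits.
 */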
578 uint32 vbp_macroblock_number_length_mp42(uint32 numOfMbs)
579 {
580 uint32 length = 0;
581 numOfMbs--;
582 do {
583 numOfMbs >>= 1;
584 length++;
585 } while (numOfMbs);
586 return length;
587 }
588
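/*
 * Parses the video_packet_header that follows a resync marker: macroblock_number,
 * quant_scale and, when the header extension code is set, the repeated VOP header
 * fields. Only rectangular VOLs are handled; other shapes and newpred streams
 * return MP4_STATUS_NOTSUPPORT.
 */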
589 mp4_Status_t vbp_video_packet_header_mp42(
590 void *parent,
591 viddec_mp4_parser_t *parser_cxt,
592 uint16_t *quant_scale,
593 uint32 *macroblock_number)
594 {
595
596 mp4_Status_t ret = MP4_STATUS_OK;
597 mp4_Info_t *pInfo = &(parser_cxt->info);
598 mp4_VideoObjectLayer_t *vidObjLay = &(pInfo->VisualObject.VideoObject);
599 mp4_VideoObjectPlane_t *vidObjPlane =
600 &(pInfo->VisualObject.VideoObject.VideoObjectPlane);
601
602 uint32 code = 0;
603 int32_t getbits = 0;
604
605 uint16_t _quant_scale = 0;
606 uint32 _macroblock_number = 0;
607 uint32 header_extension_codes = 0;
608 uint8 vop_coding_type = vidObjPlane->vop_coding_type;
609
610 do {
611 if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR) {
612 ret = MP4_STATUS_NOTSUPPORT;
613 break;
614 }
615
616 /* get macroblock_number */
617 {
618 uint16_t mbs_x = (vidObjLay->video_object_layer_width + 15) >> 4;
619 uint16_t mbs_y = (vidObjLay->video_object_layer_height + 15) >> 4;
620 uint32 length = vbp_macroblock_number_length_mp42(mbs_x
621 * mbs_y);
622
623 getbits = viddec_pm_get_bits(parent, &code, length);
624 BREAK_GETBITS_FAIL(getbits, ret);
625
626             _macroblock_number = code;
627 }
628
629 /* quant_scale */
630 if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) {
631 getbits = viddec_pm_get_bits(parent, &code,
632 vidObjLay->quant_precision);
633 BREAK_GETBITS_FAIL(getbits, ret);
634 _quant_scale = code;
635 }
636
637 /* header_extension_codes */
638 if (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR) {
639 getbits = viddec_pm_get_bits(parent, &code, 1);
640 BREAK_GETBITS_FAIL(getbits, ret);
641 header_extension_codes = code;
642 }
643
644 if (header_extension_codes) {
645 do {
646 getbits = viddec_pm_get_bits(parent, &code, 1);
647 BREAK_GETBITS_FAIL(getbits, ret);
648 } while (code);
649
650 /* marker_bit */
651 getbits = viddec_pm_get_bits(parent, &code, 1);
652 BREAK_GETBITS_FAIL(getbits, ret);
653
654 /* vop_time_increment */
655 {
656 uint32 numbits = 0;
657 numbits = vidObjLay->vop_time_increment_resolution_bits;
658 if (numbits == 0) {
659 numbits = 1;
660 }
661 getbits = viddec_pm_get_bits(parent, &code, numbits);
662 BREAK_GETBITS_FAIL(getbits, ret);
663 }
664 /* marker_bit */
665 getbits = viddec_pm_get_bits(parent, &code, 1);
666 BREAK_GETBITS_FAIL(getbits, ret);
667
668 /* vop_coding_type */
669 getbits = viddec_pm_get_bits(parent, &code, 2);
670 BREAK_GETBITS_FAIL(getbits, ret);
671
672 vop_coding_type = code & 0x3;
673
674             /* Fixed Klocwork issue: the code below was unreachable, because
675              * video_object_layer_shape has already been checked above, so it
676              * is commented out.
677              */
678 /* if (vidObjLay->video_object_layer_shape
679 != MP4_SHAPE_TYPE_RECTANGULAR) {
680 ret = MP4_STATUS_NOTSUPPORT;
681 break;
682 }
683 */
684 if (vidObjLay->video_object_layer_shape
685 != MP4_SHAPE_TYPE_BINARYONLY) {
686 /* intra_dc_vlc_thr */
687 getbits = viddec_pm_get_bits(parent, &code, 3);
688 BREAK_GETBITS_FAIL(getbits, ret);
689 if ((vidObjLay->sprite_enable == MP4_SPRITE_GMC)
690 && (vop_coding_type == MP4_VOP_TYPE_S)
691 && (vidObjLay->sprite_info.no_of_sprite_warping_points
692 > 0)) {
693 if (vbp_sprite_trajectory_mp42(parent, vidObjLay,
694 vidObjPlane) != MP4_STATUS_OK) {
695 break;
696 }
697 }
698
699 if (vidObjLay->reduced_resolution_vop_enable
700 && (vidObjLay->video_object_layer_shape
701 == MP4_SHAPE_TYPE_RECTANGULAR)
702 && ((vop_coding_type == MP4_VOP_TYPE_I)
703 || (vop_coding_type == MP4_VOP_TYPE_P))) {
704 /* vop_reduced_resolution */
705 getbits = viddec_pm_get_bits(parent, &code, 1);
706 BREAK_GETBITS_FAIL(getbits, ret);
707 }
708
709 if (vop_coding_type == MP4_VOP_TYPE_I) {
710 /* vop_fcode_forward */
711 getbits = viddec_pm_get_bits(parent, &code, 3);
712 BREAK_GETBITS_FAIL(getbits, ret);
713 }
714
715 if (vop_coding_type == MP4_VOP_TYPE_B) {
716 /* vop_fcode_backward */
717 getbits = viddec_pm_get_bits(parent, &code, 3);
718 BREAK_GETBITS_FAIL(getbits, ret);
719 }
720 }
721 }
722
723 if (vidObjLay->newpred_enable) {
724             /* newpred mode is not supported in HW; it is unclear whether libva supports it. */
725 ret = MP4_STATUS_NOTSUPPORT;
726 break;
727 }
728
729 *quant_scale = _quant_scale;
730 *macroblock_number = _macroblock_number;
731 } while (0);
732 return ret;
733 }
734
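/*
 * Length in bits of the resync marker expected by the scan in
 * vbp_process_slices_mp42(): 17 for I-VOPs, 16 + vop_fcode_forward for P/S-VOPs,
 * and 16 + max(vop_fcode_forward, vop_fcode_backward) for B-VOPs.
 */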
735 uint32 vbp_resync_marker_Length_mp42(viddec_mp4_parser_t *parser_cxt)
736 {
737
738 mp4_Info_t *pInfo = &(parser_cxt->info);
739 mp4_VideoObjectPlane_t *vidObjPlane =
740 &(pInfo->VisualObject.VideoObject.VideoObjectPlane);
741
742 uint32 resync_marker_length = 0;
743 if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_I) {
744 resync_marker_length = 17;
745 } else if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_B) {
746 uint8 fcode_max = vidObjPlane->vop_fcode_forward;
747 if (fcode_max < vidObjPlane->vop_fcode_backward) {
748 fcode_max = vidObjPlane->vop_fcode_backward;
749 }
750 resync_marker_length = 16 + fcode_max;
751 } else {
752 resync_marker_length = 16 + vidObjPlane->vop_fcode_forward;
753 }
754 return resync_marker_length;
755 }
756
757 uint32 vbp_process_slices_svh_mp42(vbp_context *pcontext, int list_index)
758 {
759 uint32 ret = MP4_STATUS_OK;
760
761 vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
762 viddec_pm_cxt_t *parent = pcontext->parser_cxt;
763 viddec_mp4_parser_t *parser_cxt =
764 (viddec_mp4_parser_t *) &(parent->codec_data[0]);
765
766 VTRACE ("begin\n");
767
768 vbp_picture_data_mp42 *picture_data =
769 &(query_data->picture_data[query_data->number_pictures]);
770 vbp_slice_data_mp42 *slice_data = &(picture_data->slice_data[0]);
771 VASliceParameterBufferMPEG4* slice_param = &(slice_data->slice_param);
772
773 picture_data->number_slices = 1;
774
775 uint8 is_emul = 0;
776 uint32 bit_offset = 0;
777 uint32 byte_offset = 0;
778
779 /* The offsets are relative to parent->parse_cubby.buf */
780 viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
781
782 slice_data->buffer_addr = parent->parse_cubby.buf;
783
784 slice_data->slice_offset = byte_offset
785 + parent->list.data[list_index].stpos;
786 slice_data->slice_size = parent->list.data[list_index].edpos
787 - parent->list.data[list_index].stpos - byte_offset;
788
789 slice_param->slice_data_size = slice_data->slice_size;
790 slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
791 slice_param->slice_data_offset = 0;
792 slice_param->macroblock_offset = bit_offset;
793 slice_param->macroblock_number = 0;
794 slice_param->quant_scale
795 = parser_cxt->info.VisualObject.VideoObject.VideoObjectPlaneH263.vop_quant;
796
797 VTRACE ("end\n");
798
799 return ret;
800 }
801
802 mp4_Status_t vbp_process_slices_mp42(vbp_context *pcontext, int list_index)
803 {
804
805 vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
806 viddec_pm_cxt_t *parent = pcontext->parser_cxt;
807 viddec_mp4_parser_t *parser_cxt =
808 (viddec_mp4_parser_t *) &(parent->codec_data[0]);
809
810 vbp_picture_data_mp42 *picture_data = NULL;
811 vbp_slice_data_mp42 *slice_data = NULL;
812 VASliceParameterBufferMPEG4* slice_param = NULL;
813
814 uint32 ret = MP4_STATUS_OK;
815
816 uint8 is_emul = 0;
817 uint32 bit_offset = 0;
818 uint32 byte_offset = 0;
819
820 uint32 code = 0;
821 int32_t getbits = 0;
822 uint32 resync_marker_length = 0;
823
824 uint32 slice_index = 0;
825
826 #ifdef VBP_TRACE
827 uint32 list_size_at_index = parent->list.data[list_index].edpos
828 - parent->list.data[list_index].stpos;
829 #endif
830
831 VTRACE ("list_index = %d list_size_at_index = %d\n", list_index,
832 list_size_at_index);
833
834 VTRACE ("list_index = %d edpos = %d stpos = %d\n", list_index,
835 parent->list.data[list_index].edpos,
836 parent->list.data[list_index].stpos);
837
838 /* The offsets are relative to parent->parse_cubby.buf */
839 viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
840
841 #if 0
842 if (is_emul) {
843 g_print("*** emul != 0\n");
844 /*byte_offset += 1;*/
845 }
846 #endif
847
848 picture_data = &(query_data->picture_data[query_data->number_pictures]);
849 slice_data = &(picture_data->slice_data[slice_index]);
850 slice_param = &(slice_data->slice_param);
851
852 slice_data->buffer_addr = parent->parse_cubby.buf;
853
854 slice_data->slice_offset = byte_offset
855 + parent->list.data[list_index].stpos;
856 slice_data->slice_size = parent->list.data[list_index].edpos
857 - parent->list.data[list_index].stpos - byte_offset;
858
859 slice_param->slice_data_size = slice_data->slice_size;
860 slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
861 slice_param->slice_data_offset = 0;
862 slice_param->macroblock_offset = bit_offset;
863 slice_param->macroblock_number = 0;
864 slice_param->quant_scale
865 = parser_cxt->info.VisualObject.VideoObject.VideoObjectPlane.vop_quant;
866
867 slice_index++;
868 picture_data->number_slices = slice_index;
869
870 /*
871 * scan for resync_marker
872 */
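    /*
     * Rough flow of the scan below: byte-align the bitstream, then repeatedly peek
     * resync_marker_length bits; any value other than 1 means no marker at this
     * position, so skip 8 bits and retry. When a marker is found, the previous
     * slice is truncated at the current byte offset, the video_packet_header is
     * parsed, and a new slice entry starts at the position right after it.
     */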
873
874 if (!parser_cxt->info.VisualObject.VideoObject.resync_marker_disable) {
875
876 viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
877 if (bit_offset) {
878 getbits = viddec_pm_get_bits(parent, &code, 8 - bit_offset);
879 if (getbits == -1) {
880 ret = MP4_STATUS_PARSE_ERROR;
881 return ret;
882 }
883 }
884
885 /*
886 * get resync_marker_length
887 */
888 resync_marker_length = vbp_resync_marker_Length_mp42(parser_cxt);
889
890 while (1) {
891
892 uint16_t quant_scale = 0;
893 uint32 macroblock_number = 0;
894
895 getbits = viddec_pm_peek_bits(parent, &code, resync_marker_length);
896 BREAK_GETBITS_FAIL(getbits, ret);
897
898 if (code != 1) {
899 getbits = viddec_pm_get_bits(parent, &code, 8);
900 BREAK_GETBITS_FAIL(getbits, ret);
901 continue;
902 }
903
904 /*
905 * We found resync_marker
906 */
907
908 viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
909
910 slice_data->slice_size -= (parent->list.data[list_index].edpos
911 - parent->list.data[list_index].stpos - byte_offset);
912 slice_param->slice_data_size = slice_data->slice_size;
913
914 slice_data = &(picture_data->slice_data[slice_index]);
915 slice_param = &(slice_data->slice_param);
916
917 /*
918 * parse video_packet_header
919 */
920 getbits = viddec_pm_get_bits(parent, &code, resync_marker_length);
921 BREAK_GETBITS_FAIL(getbits, ret);
922
923 vbp_video_packet_header_mp42(parent, parser_cxt,
924                     &quant_scale, &macroblock_number);
925
926 viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
927
928 slice_data->buffer_addr = parent->parse_cubby.buf;
929
930 slice_data->slice_offset = byte_offset
931 + parent->list.data[list_index].stpos;
932 slice_data->slice_size = parent->list.data[list_index].edpos
933 - parent->list.data[list_index].stpos - byte_offset;
934
935 slice_param->slice_data_size = slice_data->slice_size;
936 slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
937 slice_param->slice_data_offset = 0;
938 slice_param->macroblock_offset = bit_offset;
939 slice_param->macroblock_number = macroblock_number;
940 slice_param->quant_scale = quant_scale;
941
942 slice_index++;
943
944 if (slice_index >= MAX_NUM_SLICES) {
945 ret = MP4_STATUS_PARSE_ERROR;
946 break;
947 }
948
949 picture_data->number_slices = slice_index;
950 }
951 }
952 return ret;
953 }
954
955 /* This is copied from the DHG MP42 parser */
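/*
 * Decodes the VLC-coded dmv_length for one sprite trajectory component: a short
 * 2-3 bit prefix covers the small lengths, and the all-ones prefix (111) is
 * extended by a unary run of ones for lengths of 6 and above. The caller then
 * reads dmv_length bits to get the signed displacement code.
 */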
956 static inline int32_t vbp_sprite_dmv_length_mp42(
957 void * parent,
958 int32_t *dmv_length)
959 {
960 uint32 code, skip;
961 int32_t getbits = 0;
962 mp4_Status_t ret = MP4_STATUS_PARSE_ERROR;
963 *dmv_length = 0;
964 skip = 3;
965 do {
966 getbits = viddec_pm_peek_bits(parent, &code, skip);
967 BREAK_GETBITS_FAIL(getbits, ret);
968
969 if (code == 7) {
970 viddec_pm_skip_bits(parent, skip);
971 getbits = viddec_pm_peek_bits(parent, &code, 9);
972 BREAK_GETBITS_FAIL(getbits, ret);
973
974 skip = 1;
975 while ((code & 256) != 0) {/* count number of 1 bits */
976 code <<= 1;
977 skip++;
978 }
979 *dmv_length = 5 + skip;
980 } else {
981 skip = (code <= 1) ? 2 : 3;
982 *dmv_length = code - 1;
983 }
984 viddec_pm_skip_bits(parent, skip);
985 ret = MP4_STATUS_OK;
986
987 } while (0);
988 return ret;
989 }
990
991 /* This is copied from the DHG MP42 parser */
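/*
 * Reads the sprite trajectory used here for GMC S-VOPs: for each warping point, a
 * (dmv_length, dmv_code) pair followed by a marker bit for du, then the same for
 * dv. Codes with the top bit clear are mapped back to negative values, and the
 * results land in vidObjPlane->warping_mv_code_du/dv.
 */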
992 static inline mp4_Status_t vbp_sprite_trajectory_mp42(
993 void *parent,
994 mp4_VideoObjectLayer_t *vidObjLay,
995 mp4_VideoObjectPlane_t *vidObjPlane)
996 {
997 uint32 code, i;
998 int32_t dmv_length = 0, dmv_code = 0, getbits = 0;
999 mp4_Status_t ret = MP4_STATUS_OK;
1000 for (i = 0; i
1001 < (uint32) vidObjLay->sprite_info.no_of_sprite_warping_points; i++) {
1002 ret = vbp_sprite_dmv_length_mp42(parent, &dmv_length);
1003 if (ret != MP4_STATUS_OK) {
1004 break;
1005 }
1006 if (dmv_length <= 0) {
1007 dmv_code = 0;
1008 } else {
1009 getbits = viddec_pm_get_bits(parent, &code, (uint32) dmv_length);
1010 BREAK_GETBITS_FAIL(getbits, ret);
1011 dmv_code = (int32_t) code;
1012 if ((dmv_code & (1 << (dmv_length - 1))) == 0) {
1013 dmv_code -= (1 << dmv_length) - 1;
1014 }
1015 }
1016 getbits = viddec_pm_get_bits(parent, &code, 1);
1017 BREAK_GETBITS_FAIL(getbits, ret);
1018 if (code != 1) {
1019 ret = MP4_STATUS_PARSE_ERROR;
1020 break;
1021 }
1022 vidObjPlane->warping_mv_code_du[i] = dmv_code;
1023 /* TODO: create another inline function to avoid code duplication */
1024 ret = vbp_sprite_dmv_length_mp42(parent, &dmv_length);
1025 if (ret != MP4_STATUS_OK) {
1026 break;
1027 }
1028 if (dmv_length <= 0) {
1029 dmv_code = 0;
1030 } else {
1031 getbits = viddec_pm_get_bits(parent, &code, (uint32) dmv_length);
1032 BREAK_GETBITS_FAIL(getbits, ret);
1033 dmv_code = (int32_t) code;
1034 if ((dmv_code & (1 << (dmv_length - 1))) == 0) {
1035 dmv_code -= (1 << dmv_length) - 1;
1036 }
1037 }
1038 getbits = viddec_pm_get_bits(parent, &code, 1);
1039 BREAK_GETBITS_FAIL(getbits, ret);
1040 if (code != 1) {
1041 ret = MP4_STATUS_PARSE_ERROR;
1042 break;
1043 }
1044 vidObjPlane->warping_mv_code_dv[i] = dmv_code;
1045
1046 }
1047 return ret;
1048 }
1049
1050 /*
1051 * free memory of vbp_data_mp42 structure and its members
1052 */
1053 uint32 vbp_free_query_data_mp42(vbp_context *pcontext)
1054 {
1055
1056 vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
1057 gint idx = 0;
1058
1059 if (query_data) {
1060 if (query_data->picture_data) {
1061 for (idx = 0; idx < MAX_NUM_PICTURES_MP42; idx++) {
1062 g_free(query_data->picture_data[idx].slice_data);
1063 }
1064 g_free(query_data->picture_data);
1065 }
1066
1067 g_free(query_data);
1068 }
1069
1070 pcontext->query_data = NULL;
1071 return VBP_OK;
1072 }
1073
1074 /*
1075 * Allocate memory for vbp_data_mp42 structure and all its members.
1076 */
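/*
 * Resulting layout: one vbp_data_mp42 whose picture_data array has
 * MAX_NUM_PICTURES_MP42 entries, each owning a slice_data array of MAX_NUM_SLICES
 * entries; vbp_free_query_data_mp42() releases them in reverse order.
 *
 * Illustrative consumer sketch (assumed usage, not part of this file; submit() is
 * hypothetical):
 *
 *   vbp_data_mp42 *data = (vbp_data_mp42 *) pcontext->query_data;
 *   uint32 i, j;
 *   for (i = 0; i < data->number_pictures; i++) {
 *       vbp_picture_data_mp42 *pic = &data->picture_data[i];
 *       for (j = 0; j < pic->number_slices; j++) {
 *           submit(&pic->picture_param, &pic->iq_matrix_buffer, &pic->slice_data[j]);
 *       }
 *   }
 */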
1077 uint32 vbp_allocate_query_data_mp42(vbp_context *pcontext)
1078 {
1079
1080 gint idx = 0;
1081 vbp_data_mp42 *query_data;
1082 pcontext->query_data = NULL;
1083
1084 query_data = g_try_new0(vbp_data_mp42, 1);
1085 if (query_data == NULL) {
1086 goto cleanup;
1087 }
1088
1089 query_data->picture_data = g_try_new0(vbp_picture_data_mp42,
1090 MAX_NUM_PICTURES_MP42);
1091 if (NULL == query_data->picture_data) {
1092 goto cleanup;
1093 }
1094
1095 for (idx = 0; idx < MAX_NUM_PICTURES_MP42; idx++) {
1096 query_data->picture_data[idx].number_slices = 0;
1097 query_data->picture_data[idx].slice_data = g_try_new0(
1098 vbp_slice_data_mp42, MAX_NUM_SLICES);
1099
1100 if (query_data->picture_data[idx].slice_data == NULL) {
1101 goto cleanup;
1102 }
1103 }
1104
1105 pcontext->query_data = (void *) query_data;
1106 return VBP_OK;
1107
1108 cleanup:
1109
1110 if (query_data) {
1111 if (query_data->picture_data) {
1112 for (idx = 0; idx < MAX_NUM_PICTURES_MP42; idx++) {
1113 g_free(query_data->picture_data[idx].slice_data);
1114 }
1115 g_free(query_data->picture_data);
1116 }
1117
1118 g_free(query_data);
1119 }
1120
1121 return VBP_MEM;
1122 }
1123
1124 void vbp_dump_query_data(vbp_context *pcontext, int list_index)
1125 {
1126 vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
1127
1128 vbp_picture_data_mp42 *picture_data = NULL;
1129 VAPictureParameterBufferMPEG4 *picture_param = NULL;
1130 vbp_slice_data_mp42 *slice_data = NULL;
1131
1132 uint32 idx = 0, jdx = 0;
1133
1134 for (idx = 0; idx < query_data->number_pictures; idx++) {
1135
1136 picture_data = &(query_data->picture_data[idx]);
1137 picture_param = &(picture_data->picture_param);
1138 slice_data = &(picture_data->slice_data[0]);
1139
1140 g_print("======================= dump_begin ======================\n\n");
1141 g_print("======================= codec_data ======================\n");
1142
1143 /* codec_data */
1144 g_print("codec_data.profile_and_level_indication = 0x%x\n",
1145 query_data->codec_data.profile_and_level_indication);
1146
1147 g_print("==================== picture_param =======================\n");
1148
1149 /* picture_param */
1150 g_print("picture_param->vop_width = %d\n", picture_param->vop_width);
1151 g_print("picture_param->vop_height = %d\n", picture_param->vop_height);
1152
1153 g_print("picture_param->vol_fields.bits.short_video_header = %d\n",
1154 picture_param->vol_fields.bits.short_video_header);
1155 g_print("picture_param->vol_fields.bits.chroma_format = %d\n",
1156 picture_param->vol_fields.bits.chroma_format);
1157 g_print("picture_param->vol_fields.bits.interlaced = %d\n",
1158 picture_param->vol_fields.bits.interlaced);
1159 g_print("picture_param->vol_fields.bits.obmc_disable = %d\n",
1160 picture_param->vol_fields.bits.obmc_disable);
1161 g_print("picture_param->vol_fields.bits.sprite_enable = %d\n",
1162 picture_param->vol_fields.bits.sprite_enable);
1163 g_print(
1164 "picture_param->vol_fields.bits.sprite_warping_accuracy = %d\n",
1165 picture_param->vol_fields.bits.sprite_warping_accuracy);
1166 g_print("picture_param->vol_fields.bits.quant_type = %d\n",
1167 picture_param->vol_fields.bits.quant_type);
1168 g_print("picture_param->vol_fields.bits.quarter_sample = %d\n",
1169 picture_param->vol_fields.bits.quarter_sample);
1170 g_print("picture_param->vol_fields.bits.data_partitioned = %d\n",
1171 picture_param->vol_fields.bits.data_partitioned);
1172 g_print("picture_param->vol_fields.bits.reversible_vlc = %d\n",
1173 picture_param->vol_fields.bits.reversible_vlc);
1174
1175 g_print("picture_param->no_of_sprite_warping_points = %d\n",
1176 picture_param->no_of_sprite_warping_points);
1177 g_print("picture_param->quant_precision = %d\n",
1178 picture_param->quant_precision);
1179 g_print("picture_param->sprite_trajectory_du = %d, %d, %d\n",
1180 picture_param->sprite_trajectory_du[0],
1181 picture_param->sprite_trajectory_du[1],
1182 picture_param->sprite_trajectory_du[2]);
1183 g_print("picture_param->sprite_trajectory_dv = %d, %d, %d\n",
1184 picture_param->sprite_trajectory_dv[0],
1185 picture_param->sprite_trajectory_dv[1],
1186 picture_param->sprite_trajectory_dv[2]);
1187
1188 g_print("picture_param->vop_fields.bits.vop_coding_type = %d\n",
1189 picture_param->vop_fields.bits.vop_coding_type);
1190 g_print(
1191 "picture_param->vop_fields.bits.backward_reference_vop_coding_type = %d\n",
1192 picture_param->vop_fields.bits.backward_reference_vop_coding_type);
1193 g_print("picture_param->vop_fields.bits.vop_rounding_type = %d\n",
1194 picture_param->vop_fields.bits.vop_rounding_type);
1195 g_print("picture_param->vop_fields.bits.intra_dc_vlc_thr = %d\n",
1196 picture_param->vop_fields.bits.intra_dc_vlc_thr);
1197 g_print("picture_param->vop_fields.bits.top_field_first = %d\n",
1198 picture_param->vop_fields.bits.top_field_first);
1199 g_print(
1200 "picture_param->vop_fields.bits.alternate_vertical_scan_flag = %d\n",
1201 picture_param->vop_fields.bits.alternate_vertical_scan_flag);
1202
1203 g_print("picture_param->vop_fcode_forward = %d\n",
1204 picture_param->vop_fcode_forward);
1205 g_print("picture_param->vop_fcode_backward = %d\n",
1206 picture_param->vop_fcode_backward);
1207 g_print("picture_param->num_gobs_in_vop = %d\n",
1208 picture_param->num_gobs_in_vop);
1209 g_print("picture_param->num_macroblocks_in_gob = %d\n",
1210 picture_param->num_macroblocks_in_gob);
1211 g_print("picture_param->TRB = %d\n", picture_param->TRB);
1212 g_print("picture_param->TRD = %d\n", picture_param->TRD);
1213
1214 g_print("==================== slice_data ==========================\n");
1215
1216 g_print("slice_data.buffer_addr = 0x%x\n",
1217 (unsigned int) slice_data->buffer_addr);
1218 g_print("slice_data.slice_offset = 0x%x\n", slice_data->slice_offset);
1219 g_print("slice_data.slice_size = 0x%x\n", slice_data->slice_size);
1220
1221 g_print("slice_data.slice_param.macroblock_number = %d\n",
1222 slice_data->slice_param.macroblock_number);
1223 g_print("slice_data.slice_param.macroblock_offset = 0x%x\n",
1224 slice_data->slice_param.macroblock_offset);
1225 g_print("slice_data.slice_param.quant_scale = %d\n",
1226 slice_data->slice_param.quant_scale);
1227 g_print("slice_data.slice_param.slice_data_flag = %d\n",
1228 slice_data->slice_param.slice_data_flag);
1229 g_print("slice_data.slice_param.slice_data_offset = %d\n",
1230 slice_data->slice_param.slice_data_offset);
1231 g_print("slice_data.slice_param.slice_data_size = %d\n",
1232 slice_data->slice_param.slice_data_size);
1233
1234 g_print("================= iq_matrix_buffer ======================\n");
1235 g_print("iq_matrix_buffer.load_intra_quant_mat = %d\n",
1236 picture_data->iq_matrix_buffer.load_intra_quant_mat);
1237 g_print("iq_matrix_buffer.load_non_intra_quant_mat = %d\n",
1238 picture_data->iq_matrix_buffer.load_non_intra_quant_mat);
1239
1240 g_print("------- iq_matrix_buffer.intra_quant_mat ----------\n");
1241 for (jdx = 0; jdx < 64; jdx++) {
1242
1243 g_print("%02x ",
1244 picture_data->iq_matrix_buffer.intra_quant_mat[jdx]);
1245
1246 if ((jdx + 1) % 8 == 0) {
1247 g_print("\n");
1248 }
1249 }
1250
1251 g_print("----- iq_matrix_buffer.non_intra_quant_mat --------\n");
1252 for (jdx = 0; jdx < 64; jdx++) {
1253
1254 g_print("%02x ",
1255 picture_data->iq_matrix_buffer.non_intra_quant_mat[jdx]);
1256
1257 if ((jdx + 1) % 8 == 0) {
1258 g_print("\n");
1259 }
1260 }
1261
1262 g_print("-------- slice buffer begin ------------\n");
1263
1264 for (jdx = 0; jdx < 64; jdx++) {
1265 g_print("%02x ", *(slice_data->buffer_addr
1266 + slice_data->slice_offset + jdx));
1267 if ((jdx + 1) % 8 == 0) {
1268 g_print("\n");
1269 }
1270 }
1271         g_print("-------- slice buffer end --------------\n");
1272
1273 g_print("\n\n============== dump_end ==========================\n\n");
1274
1275 }
1276 }
1277
1278