1 /*
2  * Copyright (c) 2012 Intel Corporation. All Rights Reserved.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the
6  * "Software"), to deal in the Software without restriction, including
7  * without limitation the rights to use, copy, modify, merge, publish,
8  * distribute, sub license, and/or sell copies of the Software, and to
9  * permit persons to whom the Software is furnished to do so, subject to
10  * the following conditions:
11  *
12  * The above copyright notice and this permission notice (including the
13  * next paragraph) shall be included in all copies or substantial portions
14  * of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17  * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19  * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20  * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21  * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22  * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
23  */
24 /*
25  * Simple AVC encoder based on libVA.
26  *
27  * Usage:
28  * ./avcenc <width> <height> <input file> <output file> [qp]
29  */
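
/*
 * An illustrative invocation (resolution and file names are placeholders):
 *
 *     ./avcenc 1280 720 input_1280x720.yuv output.264 28
 *
 * The input is expected to be raw planar YUV 4:2:0 frames of the given size.
 */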
30 
31 #include <stdbool.h>
32 #include <stdio.h>
33 #include <string.h>
34 #include <stdlib.h>
35 #include <getopt.h>
36 #include <unistd.h>
37 
38 #include <sys/time.h>
39 #include <sys/types.h>
40 #include <sys/stat.h>
41 #include <fcntl.h>
42 #include <assert.h>
43 #include <time.h>
44 
45 #include <pthread.h>
46 
47 #include <va/va.h>
48 #include <va/va_enc_h264.h>
49 #include "va_display.h"
50 
51 #define NAL_REF_IDC_NONE        0
52 #define NAL_REF_IDC_LOW         1
53 #define NAL_REF_IDC_MEDIUM      2
54 #define NAL_REF_IDC_HIGH        3
55 
56 #define NAL_NON_IDR             1
57 #define NAL_IDR                 5
58 #define NAL_SPS                 7
59 #define NAL_PPS                 8
60 #define NAL_SEI                 6
61 #define NAL_DELIMITER           9
62 
63 
64 #define SLICE_TYPE_P            0
65 #define SLICE_TYPE_B            1
66 #define SLICE_TYPE_I            2
67 
68 #define FRAME_IDR 7
69 
70 #define ENTROPY_MODE_CAVLC      0
71 #define ENTROPY_MODE_CABAC      1
72 
73 #define PROFILE_IDC_BASELINE    66
74 #define PROFILE_IDC_MAIN        77
75 #define PROFILE_IDC_HIGH        100
76 
77 #define CHECK_VASTATUS(va_status,func)                                  \
78     if (va_status != VA_STATUS_SUCCESS) {                               \
79         fprintf(stderr, "%s:%s (%d) failed, exit\n", __func__, func, __LINE__); \
80         exit(1);                                                        \
81     }
82 
83 #define CHECK_CONDITION(cond)                                                \
84     if(!(cond))                                                              \
85     {                                                                        \
86         fprintf(stderr, "Unexpected condition: %s:%d\n", __func__, __LINE__); \
87         exit(1);                                                             \
88     }
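
/*
 * Typical use of the helper macros above, as seen throughout this file:
 *
 *     va_status = vaInitialize(va_dpy, &major_ver, &minor_ver);
 *     CHECK_VASTATUS(va_status, "vaInitialize");
 */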
89 
90 static VADisplay va_dpy;
91 
92 static int picture_width, picture_width_in_mbs;
93 static int picture_height, picture_height_in_mbs;
94 static int frame_size;
95 static unsigned char *newImageBuffer = 0;
96 
97 static int qp_value = 26;
98 
99 static int intra_period = 30;
100 static int frame_bit_rate = -1;
101 static int frame_rate = 30;
102 static int ip_period = 1;
103 static int roi_test_enable = 0;
104 static int aud_nal_enable = 1;
105 
106 static VAEntrypoint select_entrypoint = VAEntrypointEncSlice;
107 
108 #define MAX_SLICES      32
109 
110 
111 static  unsigned int MaxFrameNum = (1 << 12);
112 static  unsigned int Log2MaxFrameNum = 12;
113 static  unsigned int Log2MaxPicOrderCntLsb = 8;
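
/*
 * These derived limits presumably feed the SPS fields filled elsewhere in the
 * file: with Log2MaxFrameNum = 12, log2_max_frame_num_minus4 would be
 * 12 - 4 = 8, frame_num wraps at MaxFrameNum = 1 << 12 = 4096, and
 * pic_order_cnt_lsb wraps at 1 << Log2MaxPicOrderCntLsb = 256.
 */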
114 
115 static const struct option longopts[] = {
116     {"qp", required_argument, 0, 1},
117     {"fb", required_argument, 0, 2},
118     {"mode", required_argument, 0, 3},
119     {"low-power", no_argument, 0, 4},
120     {"roi-test", no_argument, 0, 5},
121     {"frames", required_argument, 0, 6},
122     { NULL, 0, NULL, 0}
123 };
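
/*
 * The long options above return the numeric codes 1..6, presumably consumed
 * by a getopt_long() loop later in the file (not shown here).  A hedged
 * example mixing positional and long options (file names are placeholders):
 *
 *     ./avcenc 1280 720 in.yuv out.264 --qp 28 --roi-test
 */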
124 
125 static int
126 build_packed_pic_buffer(unsigned char **header_buffer);
127 
128 static int
129 build_packed_seq_buffer(unsigned char **header_buffer);
130 
131 static int
132 build_nal_delimiter(unsigned char **header_buffer);
133 
134 static int
135 build_packed_sei_pic_timing(unsigned int cpb_removal_length,
136                             unsigned int dpb_output_length,
137                             unsigned char **sei_buffer);
138 
139 static int
140 build_packed_idr_sei_buffer_timing(unsigned int init_cpb_removal_delay_length,
141                                    unsigned int cpb_removal_length,
142                                    unsigned int dpb_output_length,
143                                    unsigned char **sei_buffer);
144 
145 struct upload_thread_param {
146     FILE *yuv_fp;
147     VASurfaceID surface_id;
148 };
149 
150 static void
151 upload_yuv_to_surface(FILE *yuv_fp, VASurfaceID surface_id);
152 
153 static struct {
154     VAProfile profile;
155     int constraint_set_flag;
156     VAEncSequenceParameterBufferH264 seq_param;
157     VAEncPictureParameterBufferH264 pic_param;
158     VAEncSliceParameterBufferH264 slice_param[MAX_SLICES];
159     VAContextID context_id;
160     VAConfigID config_id;
161     VABufferID seq_param_buf_id;                /* Sequence level parameter */
162     VABufferID pic_param_buf_id;                /* Picture level parameter */
163     VABufferID slice_param_buf_id[MAX_SLICES];  /* Slice level parameters, multiple slices */
164     VABufferID codedbuf_buf_id;                 /* Output buffer, compressed data */
165     VABufferID packed_seq_header_param_buf_id;
166     VABufferID packed_seq_buf_id;
167     VABufferID packed_pic_header_param_buf_id;
168     VABufferID packed_pic_buf_id;
169     VABufferID packed_sei_header_param_buf_id;   /* the SEI buffer */
170     VABufferID packed_sei_buf_id;
171     VABufferID misc_parameter_hrd_buf_id;
172     VABufferID misc_parameter_roi_buf_id;
173     VABufferID packed_aud_header_param_buf_id;
174     VABufferID packed_aud_buf_id;
175 
176     int num_slices;
177     int codedbuf_i_size;
178     int codedbuf_pb_size;
179     int current_input_surface;
180     int rate_control_method;
181     struct upload_thread_param upload_thread_param;
182     pthread_t upload_thread_id;
183     int upload_thread_value;
184     int i_initial_cpb_removal_delay;
185     int i_initial_cpb_removal_delay_offset;
186     int i_initial_cpb_removal_delay_length;
187     int i_cpb_removal_delay;
188     int i_cpb_removal_delay_length;
189     int i_dpb_output_delay_length;
190     int time_offset_length;
191 
192     unsigned long long idr_frame_num;
193     unsigned long long prev_idr_cpb_removal;
194     unsigned long long current_idr_cpb_removal;
195     unsigned long long current_cpb_removal;
196     /* This is relative to the current_cpb_removal */
197     unsigned int current_dpb_removal_delta;
198 } avcenc_context;
199 
200 static  VAPictureH264 ReferenceFrames[16], RefPicList0[32], RefPicList1[32];
201 
202 static void create_encode_pipe()
203 {
204     VAEntrypoint entrypoints[5];
205     int num_entrypoints, slice_entrypoint;
206     VAConfigAttrib attrib[3];
207     int major_ver, minor_ver;
208     VAStatus va_status;
209 
210     va_dpy = va_open_display();
211     va_status = vaInitialize(va_dpy, &major_ver, &minor_ver);
212     CHECK_VASTATUS(va_status, "vaInitialize");
213 
214     vaQueryConfigEntrypoints(va_dpy, avcenc_context.profile, entrypoints,
215                              &num_entrypoints);
216 
217     for (slice_entrypoint = 0; slice_entrypoint < num_entrypoints; slice_entrypoint++) {
218         if (entrypoints[slice_entrypoint] == select_entrypoint)
219             break;
220     }
221 
222     if (slice_entrypoint == num_entrypoints) {
223         /* couldn't find the requested entry point */
224         assert(0);
225     }
226 
227     /* find out the format for the render target, and rate control mode */
228     attrib[0].type = VAConfigAttribRTFormat;
229     attrib[1].type = VAConfigAttribRateControl;
230 
231     /* This is to query whether the ROI is supported */
232     attrib[2].type = VAConfigAttribEncROI;
233     vaGetConfigAttributes(va_dpy, avcenc_context.profile, select_entrypoint,
234                           &attrib[0], 3);
235 
236     if ((attrib[0].value & VA_RT_FORMAT_YUV420) == 0) {
237         /* couldn't find the desired YUV420 RT format */
238         assert(0);
239     }
240 
241     if ((attrib[1].value & avcenc_context.rate_control_method) == 0) {
242         /* Can't find matched RC mode */
243         printf("Can't find the desired RC mode, exit\n");
244         assert(0);
245     }
246 
247     if (roi_test_enable) {
248         if (attrib[2].value != VA_ATTRIB_NOT_SUPPORTED) {
249             VAConfigAttribValEncROI *roi_config = (VAConfigAttribValEncROI *) & (attrib[2].value);
250             if (roi_config->bits.num_roi_regions == 0 ||
251                 roi_config->bits.roi_rc_qp_delta_support == 0) {
252                 roi_test_enable = 0;
253                 printf("WARNING: ROI or ROI delta QP is not supported!\n");
254             }
255         } else {
256             roi_test_enable = 0;
257             printf("WARNING: VAConfigAttribValEncROI is not supported!\n");
258         }
259     }
260 
261     attrib[0].value = VA_RT_FORMAT_YUV420; /* set to desired RT format */
262     attrib[1].value = avcenc_context.rate_control_method; /* set to desired RC mode */
263 
264     if (roi_test_enable) {
265         va_status = vaCreateConfig(va_dpy, avcenc_context.profile, select_entrypoint,
266                                    &attrib[0], 3, &avcenc_context.config_id);
267     } else {
268         va_status = vaCreateConfig(va_dpy, avcenc_context.profile, select_entrypoint,
269                                    &attrib[0], 2, &avcenc_context.config_id);
270     }
271     CHECK_VASTATUS(va_status, "vaCreateConfig");
272 
273     /* Create a context for this encode pipe */
274     va_status = vaCreateContext(va_dpy, avcenc_context.config_id,
275                                 picture_width, picture_height,
276                                 VA_PROGRESSIVE,
277                                 0, 0,
278                                 &avcenc_context.context_id);
279     CHECK_VASTATUS(va_status, "vaCreateContext");
280 }
281 
282 static void destory_encode_pipe()
283 {
284     vaDestroyContext(va_dpy, avcenc_context.context_id);
285     vaDestroyConfig(va_dpy, avcenc_context.config_id);
286     vaTerminate(va_dpy);
287     va_close_display(va_dpy);
288 }
289 
290 /***************************************************
291  *
292  *  Encode pipe resource definitions
293  *
294  ***************************************************/
295 #define SID_INPUT_PICTURE_0                     0
296 #define SID_INPUT_PICTURE_1                     1
297 #define SID_REFERENCE_PICTURE_L0                2
298 #define SID_REFERENCE_PICTURE_L1                3
299 #define SID_RECON_PICTURE                       4
300 #define SID_NUMBER                              (SID_RECON_PICTURE + 1)
301 
302 #define SURFACE_NUM 16 /* 16 surfaces for reference */
303 
304 static  VASurfaceID surface_ids[SID_NUMBER];
305 static  VASurfaceID ref_surface[SURFACE_NUM];
306 static  int use_slot[SURFACE_NUM];
307 
308 static  unsigned long long current_frame_display = 0;
309 static  unsigned long long current_IDR_display = 0;
310 
311 static  VAPictureH264 CurrentCurrPic;
312 
313 #define current_slot (current_frame_display % SURFACE_NUM)
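
/*
 * Surface bookkeeping: surface_ids[] holds the SID_* working surfaces (two
 * ping-ponged input pictures plus reference/reconstruction slots), while
 * ref_surface[] is a ring of SURFACE_NUM reconstructed-frame surfaces indexed
 * by current_slot.  For example, with SURFACE_NUM = 16, display frame 18 maps
 * to slot 18 % 16 = 2.
 */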
314 
315 static int frame_number;
316 static unsigned long long enc_frame_number;
317 static int current_frame_type;
318 static int current_frame_num;
319 static unsigned int current_poc;
320 
321 static  unsigned int num_ref_frames = 2;
322 static  unsigned int numShortTerm = 0;
323 /***************************************************/
324 
325 static int get_free_slot()
326 {
327     int i, index = -1;
328 
329     for (i = 0; i < SURFACE_NUM; i++) {
330         if (use_slot[i] == 0) {
331             index = i;
332             break;
333         }
334     }
335     if (index < 0) {
336         printf("WARNING: No free slot to store the reconstructed frame \n");
337         index = SURFACE_NUM - 1;
338     }
339     return index;
340 }
341 
342 static void *
343 upload_thread_function(void *data)
344 {
345     struct upload_thread_param *param = data;
346 
347     upload_yuv_to_surface(param->yuv_fp, param->surface_id);
348 
349     return NULL;
350 }
351 
352 static void alloc_encode_resource(FILE *yuv_fp)
353 {
354     VAStatus va_status;
355 
356     // Create surface
357     va_status = vaCreateSurfaces(
358                     va_dpy,
359                     VA_RT_FORMAT_YUV420, picture_width, picture_height,
360                     surface_ids, SID_NUMBER,
361                     NULL, 0
362                 );
363 
364     CHECK_VASTATUS(va_status, "vaCreateSurfaces");
365 
366     // Create surface
367     va_status = vaCreateSurfaces(
368                     va_dpy,
369                     VA_RT_FORMAT_YUV420, picture_width, picture_height,
370                     ref_surface, SURFACE_NUM,
371                     NULL, 0
372                 );
373 
374     CHECK_VASTATUS(va_status, "vaCreateSurfaces");
375 
376 
377     newImageBuffer = (unsigned char *)malloc(frame_size);
378 
379     /* first, upload YUV data to SID_INPUT_PICTURE_1 */
380     avcenc_context.upload_thread_param.yuv_fp = yuv_fp;
381     avcenc_context.upload_thread_param.surface_id = surface_ids[SID_INPUT_PICTURE_1];
382 
383     avcenc_context.upload_thread_value = pthread_create(&avcenc_context.upload_thread_id,
384                                          NULL,
385                                          upload_thread_function,
386                                          (void*)&avcenc_context.upload_thread_param);
387 }
388 
389 static void release_encode_resource()
390 {
391     pthread_join(avcenc_context.upload_thread_id, NULL);
392     free(newImageBuffer);
393 
394     // Release all the surfaces resource
395     vaDestroySurfaces(va_dpy, surface_ids, SID_NUMBER);
396     // Release all the reference surfaces
397     vaDestroySurfaces(va_dpy, ref_surface, SURFACE_NUM);
398 }
399 
400 static void avcenc_update_sei_param(int is_idr)
401 {
402     VAEncPackedHeaderParameterBuffer packed_header_param_buffer;
403     unsigned int length_in_bits;
404     unsigned char *packed_sei_buffer = NULL;
405     VAStatus va_status;
406 
407     if (is_idr)
408         length_in_bits = build_packed_idr_sei_buffer_timing(
409                              avcenc_context.i_initial_cpb_removal_delay_length,
410                              avcenc_context.i_cpb_removal_delay_length,
411                              avcenc_context.i_dpb_output_delay_length,
412                              &packed_sei_buffer);
413     else
414         length_in_bits = build_packed_sei_pic_timing(
415                              avcenc_context.i_cpb_removal_delay_length,
416                              avcenc_context.i_dpb_output_delay_length,
417                              &packed_sei_buffer);
418 
419     packed_header_param_buffer.type = VAEncPackedHeaderRawData;
420     packed_header_param_buffer.bit_length = length_in_bits;
421     packed_header_param_buffer.has_emulation_bytes = 0;
422 
423     va_status = vaCreateBuffer(va_dpy,
424                                avcenc_context.context_id,
425                                VAEncPackedHeaderParameterBufferType,
426                                sizeof(packed_header_param_buffer), 1, &packed_header_param_buffer,
427                                &avcenc_context.packed_sei_header_param_buf_id);
428     CHECK_VASTATUS(va_status, "vaCreateBuffer");
429 
430     va_status = vaCreateBuffer(va_dpy,
431                                avcenc_context.context_id,
432                                VAEncPackedHeaderDataBufferType,
433                                (length_in_bits + 7) / 8, 1, packed_sei_buffer,
434                                &avcenc_context.packed_sei_buf_id);
435     CHECK_VASTATUS(va_status, "vaCreateBuffer");
436     free(packed_sei_buffer);
437     return;
438 }
439 
440 #define partition(ref, field, key, ascending)   \
441     while (i <= j) {                            \
442         if (ascending) {                        \
443             while (ref[i].field < key)          \
444                 i++;                            \
445             while (ref[j].field > key)          \
446                 j--;                            \
447         } else {                                \
448             while (ref[i].field > key)          \
449                 i++;                            \
450             while (ref[j].field < key)          \
451                 j--;                            \
452         }                                       \
453         if (i <= j) {                           \
454             tmp = ref[i];                       \
455             ref[i] = ref[j];                    \
456             ref[j] = tmp;                       \
457             i++;                                \
458             j--;                                \
459         }                                       \
460     }                                           \
461 
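/*
 * partition() above is one Hoare-style quicksort partition pass over the
 * i..j range set up by the caller, comparing the named member (frame_idx or
 * TopFieldOrderCnt) against key.  It intentionally leaves i, j and tmp to be
 * declared by the caller, which is why sort_one()/sort_two() below define
 * them as locals.
 */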
462 static void sort_one(VAPictureH264 ref[], int left, int right,
463                      int ascending, int frame_idx)
464 {
465     int i = left, j = right;
466     unsigned int key;
467     VAPictureH264 tmp;
468 
469     if (frame_idx) {
470         key = ref[(left + right) / 2].frame_idx;
471         partition(ref, frame_idx, key, ascending);
472     } else {
473         key = ref[(left + right) / 2].TopFieldOrderCnt;
474         partition(ref, TopFieldOrderCnt, (signed int)key, ascending);
475     }
476 
477     /* recursion */
478     if (left < j)
479         sort_one(ref, left, j, ascending, frame_idx);
480 
481     if (i < right)
482         sort_one(ref, i, right, ascending, frame_idx);
483 }
484 
485 static void sort_two(VAPictureH264 ref[], int left, int right, unsigned int key, unsigned int frame_idx,
486                      int partition_ascending, int list0_ascending, int list1_ascending)
487 {
488     int i = left, j = right;
489     VAPictureH264 tmp;
490 
491     if (frame_idx) {
492         partition(ref, frame_idx, key, partition_ascending);
493     } else {
494         partition(ref, TopFieldOrderCnt, (signed int)key, partition_ascending);
495     }
496 
497     sort_one(ref, left, i - 1, list0_ascending, frame_idx);
498     sort_one(ref, j + 1, right, list1_ascending, frame_idx);
499 }
500 
501 static int update_RefPicList()
502 {
503 
504     if (current_frame_type == SLICE_TYPE_P) {
505         memcpy(RefPicList0, ReferenceFrames, numShortTerm * sizeof(VAPictureH264));
506         sort_one(RefPicList0, 0, numShortTerm - 1, 0, 1);
507     }
508 
509     if (current_frame_type == SLICE_TYPE_B) {
510         memcpy(RefPicList0, ReferenceFrames, numShortTerm * sizeof(VAPictureH264));
511         sort_two(RefPicList0, 0, numShortTerm - 1, current_poc, 0,
512                  1, 0, 1);
513 
514         memcpy(RefPicList1, ReferenceFrames, numShortTerm * sizeof(VAPictureH264));
515         sort_two(RefPicList1, 0, numShortTerm - 1, current_poc, 0,
516                  0, 1, 0);
517     }
518 
519     return 0;
520 }
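
/*
 * Net effect of the sorts above (matching H.264's default reference list
 * order): for P slices RefPicList0 is the short-term set sorted by descending
 * frame_idx; for B slices RefPicList0 lists the references with POC below the
 * current picture first (descending), then those above it (ascending), while
 * RefPicList1 uses the opposite arrangement.
 */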
521 
522 static void avcenc_update_picture_parameter(int slice_type, int is_idr)
523 {
524     VAEncPictureParameterBufferH264 *pic_param;
525     VAStatus va_status;
526     int recon_index;
527 
528     recon_index = get_free_slot();
529     // Picture level
530     pic_param = &avcenc_context.pic_param;
531 
532     pic_param->CurrPic.picture_id = ref_surface[recon_index];
533     pic_param->CurrPic.frame_idx = current_frame_num;
534     pic_param->CurrPic.flags = 0;
535 
536     pic_param->CurrPic.TopFieldOrderCnt = current_poc;
537     pic_param->CurrPic.BottomFieldOrderCnt = pic_param->CurrPic.TopFieldOrderCnt;
538 
539     assert(avcenc_context.codedbuf_buf_id != VA_INVALID_ID);
540     pic_param->coded_buf = avcenc_context.codedbuf_buf_id;
541     pic_param->frame_num = current_frame_num;
542     pic_param->pic_fields.bits.idr_pic_flag = !!is_idr;
543     pic_param->pic_fields.bits.reference_pic_flag = (slice_type != SLICE_TYPE_B);
544     CurrentCurrPic = pic_param->CurrPic;
545 
546     if (slice_type == SLICE_TYPE_P || slice_type == SLICE_TYPE_B)
547         memset(pic_param->ReferenceFrames, 0xff, 16 * sizeof(VAPictureH264)); /* invalid all */
548 
549     if ((slice_type == SLICE_TYPE_P) || (slice_type == SLICE_TYPE_B)) {
550         pic_param->ReferenceFrames[0] = RefPicList0[0];
551     }
552     if (slice_type == SLICE_TYPE_B) {
553         pic_param->ReferenceFrames[1] = RefPicList1[0];
554     }
555 
556     va_status = vaCreateBuffer(va_dpy,
557                                avcenc_context.context_id,
558                                VAEncPictureParameterBufferType,
559                                sizeof(*pic_param), 1, pic_param,
560                                &avcenc_context.pic_param_buf_id);
561     CHECK_VASTATUS(va_status, "vaCreateBuffer");
562 
563 }
564 
565 #ifndef VA_FOURCC_I420
566 #define VA_FOURCC_I420          0x30323449
567 #endif
568 
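/*
 * upload_yuv_to_surface() below reads one planar I420 frame (Y plane, then U,
 * then V) from the input file and copies it into the derived VAImage:
 * interleaving U/V when the target surface is NV12, or doing straight plane
 * copies for YV12/I420 surfaces.
 */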
569 static void upload_yuv_to_surface(FILE *yuv_fp, VASurfaceID surface_id)
570 {
571     VAImage surface_image;
572     VAStatus va_status;
573     void *surface_p = NULL;
574     unsigned char *y_src, *u_src, *v_src;
575     unsigned char *y_dst, *u_dst, *v_dst;
576     int y_size = picture_width * picture_height;
577     int u_size = (picture_width >> 1) * (picture_height >> 1);
578     int row, col;
579     size_t n_items;
580 
581     do {
582         n_items = fread(newImageBuffer, frame_size, 1, yuv_fp);
583     } while (n_items != 1);
584 
585     va_status = vaDeriveImage(va_dpy, surface_id, &surface_image);
586     CHECK_VASTATUS(va_status, "vaDeriveImage");
587 
588     va_status = vaMapBuffer(va_dpy, surface_image.buf, &surface_p);
589     assert(VA_STATUS_SUCCESS == va_status);
590 
591     y_src = newImageBuffer;
592     u_src = newImageBuffer + y_size; /* UV offset for NV12 */
593     v_src = newImageBuffer + y_size + u_size;
594 
595     y_dst = (unsigned char *)surface_p + surface_image.offsets[0];
596     u_dst = (unsigned char *)surface_p +
597             surface_image.offsets[1]; /* UV offset for NV12 */
598     v_dst = (unsigned char *)surface_p + surface_image.offsets[2];
599 
600     /* Y plane */
601     for (row = 0; row < surface_image.height; row++) {
602         memcpy(y_dst, y_src, surface_image.width);
603         y_dst += surface_image.pitches[0];
604         y_src += picture_width;
605     }
606 
607     if (surface_image.format.fourcc == VA_FOURCC_NV12) { /* UV plane */
608         for (row = 0; row < surface_image.height / 2; row++) {
609             for (col = 0; col < surface_image.width / 2; col++) {
610                 u_dst[col * 2] = u_src[col];
611                 u_dst[col * 2 + 1] = v_src[col];
612             }
613 
614             u_dst += surface_image.pitches[1];
615             u_src += (picture_width / 2);
616             v_src += (picture_width / 2);
617         }
618     } else if (surface_image.format.fourcc == VA_FOURCC_YV12 ||
619                surface_image.format.fourcc == VA_FOURCC_I420) {
620         const int U = surface_image.format.fourcc == VA_FOURCC_I420 ? 1 : 2;
621         const int V = surface_image.format.fourcc == VA_FOURCC_I420 ? 2 : 1;
622 
623         u_dst = (unsigned char *)surface_p + surface_image.offsets[U];
624         v_dst = (unsigned char *)surface_p + surface_image.offsets[V];
625 
626         for (row = 0; row < surface_image.height / 2; row++) {
627             memcpy(u_dst, u_src, surface_image.width / 2);
628             memcpy(v_dst, v_src, surface_image.width / 2);
629             u_dst += surface_image.pitches[U];
630             v_dst += surface_image.pitches[V];
631             u_src += (picture_width / 2);
632             v_src += (picture_width / 2);
633         }
634     }
635 
636     vaUnmapBuffer(va_dpy, surface_image.buf);
637     vaDestroyImage(va_dpy, surface_image.image_id);
638 }
639 
640 static void avcenc_update_slice_parameter(int slice_type)
641 {
642     VAEncSliceParameterBufferH264 *slice_param;
643     VAStatus va_status;
644     int i;
645 
646     // Slice level
647     i = 0;
648     slice_param = &avcenc_context.slice_param[i];
649     slice_param->macroblock_address = 0;
650     slice_param->num_macroblocks = picture_height_in_mbs * picture_width_in_mbs;
651     slice_param->pic_parameter_set_id = 0;
652     slice_param->slice_type = slice_type;
653     slice_param->direct_spatial_mv_pred_flag = 1;
654     slice_param->num_ref_idx_l0_active_minus1 = 0;      /* FIXME: ??? */
655     slice_param->num_ref_idx_l1_active_minus1 = 0;
656     slice_param->cabac_init_idc = 0;
657     slice_param->slice_qp_delta = 0;
658     slice_param->disable_deblocking_filter_idc = 0;
659     slice_param->slice_alpha_c0_offset_div2 = 2;
660     slice_param->slice_beta_offset_div2 = 2;
661     slice_param->idr_pic_id = 0;
662 
663     /* FIXME: fill other fields */
664     if ((slice_type == SLICE_TYPE_P) || (slice_type == SLICE_TYPE_B)) {
665         memset(slice_param->RefPicList0, 0xFF, 32 * sizeof(VAPictureH264));
666         slice_param->RefPicList0[0] = RefPicList0[0];
667     }
668 
669     if (slice_type == SLICE_TYPE_B) {
670         memset(slice_param->RefPicList1, 0xFF, 32 * sizeof(VAPictureH264));
671         slice_param->RefPicList1[0] = RefPicList1[0];
672     }
673 
674     va_status = vaCreateBuffer(va_dpy,
675                                avcenc_context.context_id,
676                                VAEncSliceParameterBufferType,
677                                sizeof(*slice_param), 1, slice_param,
678                                &avcenc_context.slice_param_buf_id[i]);
679     CHECK_VASTATUS(va_status, "vaCreateBuffer");
680     i++;
681 
682 #if 0
683     slice_param = &avcenc_context.slice_param[i];
684     slice_param->macroblock_address = picture_height_in_mbs * picture_width_in_mbs / 2;
685     slice_param->num_macroblocks = picture_height_in_mbs * picture_width_in_mbs / 2;
686     slice_param->pic_parameter_set_id = 0;
687     slice_param->slice_type = slice_type;
688     slice_param->direct_spatial_mv_pred_flag = 0;
689     slice_param->num_ref_idx_l0_active_minus1 = 0;      /* FIXME: ??? */
690     slice_param->num_ref_idx_l1_active_minus1 = 0;
691     slice_param->cabac_init_idc = 0;
692     slice_param->slice_qp_delta = 0;
693     slice_param->disable_deblocking_filter_idc = 0;
694     slice_param->slice_alpha_c0_offset_div2 = 2;
695     slice_param->slice_beta_offset_div2 = 2;
696     slice_param->idr_pic_id = 0;
697 
698     /* FIXME: fill other fields */
699 
700     va_status = vaCreateBuffer(va_dpy,
701                                avcenc_context.context_id,
702                                VAEncSliceParameterBufferType,
703                                sizeof(*slice_param), 1, slice_param,
704                                &avcenc_context.slice_param_buf_id[i]);
705     CHECK_VASTATUS(va_status, "vaCreateBuffer");
706     i++;
707 #endif
708 
709     avcenc_context.num_slices = i;
710 }
711 
712 static int update_ReferenceFrames(void)
713 {
714     int i;
715     /* B-frame is not used for reference */
716     if (current_frame_type == SLICE_TYPE_B)
717         return 0;
718 
719     CurrentCurrPic.flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
720     numShortTerm++;
721     if (numShortTerm > num_ref_frames)
722         numShortTerm = num_ref_frames;
723     for (i = numShortTerm - 1; i > 0; i--)
724         ReferenceFrames[i] = ReferenceFrames[i - 1];
725     ReferenceFrames[0] = CurrentCurrPic;
726 
727     if (current_frame_type != SLICE_TYPE_B)
728         current_frame_num++;
729     if (current_frame_num > MaxFrameNum)
730         current_frame_num = 0;
731 
732     /* Update use_slot[]: a slot is marked as in use only while its
733      * surface appears in the reference frame list.
734      */
735     for (i = 0; i < SURFACE_NUM; i++) {
736         int j;
737         bool found;
738 
739         found = false;
740         for (j = 0; j < numShortTerm; j++) {
741             if (ref_surface[i] == ReferenceFrames[j].picture_id) {
742                 found = true;
743                 break;
744             }
745         }
746         if (found)
747             use_slot[i] = 1;
748         else
749             use_slot[i] = 0;
750     }
751 
752     return 0;
753 }
754 
755 static int begin_picture(FILE *yuv_fp, int frame_num, int display_num, int slice_type, int is_idr)
756 {
757     VAStatus va_status;
758 
759     if (avcenc_context.upload_thread_value != 0) {
760         fprintf(stderr, "FATAL error!!!\n");
761         exit(1);
762     }
763 
764     pthread_join(avcenc_context.upload_thread_id, NULL);
765 
766     avcenc_context.upload_thread_value = -1;
767 
768     if (avcenc_context.current_input_surface == SID_INPUT_PICTURE_0)
769         avcenc_context.current_input_surface = SID_INPUT_PICTURE_1;
770     else
771         avcenc_context.current_input_surface = SID_INPUT_PICTURE_0;
772 
773     if (aud_nal_enable) {
774         VAEncPackedHeaderParameterBuffer packed_header_param_buffer;
775         unsigned int length_in_bits;
776         unsigned char *packed_buffer = NULL;
777 
778         length_in_bits = build_nal_delimiter(&packed_buffer);
779         packed_header_param_buffer.type = VAEncPackedHeaderRawData;
780         packed_header_param_buffer.bit_length = length_in_bits;
781         packed_header_param_buffer.has_emulation_bytes = 1;
782         va_status = vaCreateBuffer(va_dpy,
783                                    avcenc_context.context_id,
784                                    VAEncPackedHeaderParameterBufferType,
785                                    sizeof(packed_header_param_buffer), 1, &packed_header_param_buffer,
786                                    &avcenc_context.packed_aud_header_param_buf_id);
787         CHECK_VASTATUS(va_status, "vaCreateBuffer");
788 
789         va_status = vaCreateBuffer(va_dpy,
790                                    avcenc_context.context_id,
791                                    VAEncPackedHeaderDataBufferType,
792                                    (length_in_bits + 7) / 8, 1, packed_buffer,
793                                    &avcenc_context.packed_aud_buf_id);
794         CHECK_VASTATUS(va_status, "vaCreateBuffer");
795 
796         free(packed_buffer);
797     }
798 
799     if (is_idr) {
800         VAEncPackedHeaderParameterBuffer packed_header_param_buffer;
801         unsigned int length_in_bits;
802         unsigned char *packed_seq_buffer = NULL, *packed_pic_buffer = NULL;
803 
804         assert(slice_type == SLICE_TYPE_I);
805         length_in_bits = build_packed_seq_buffer(&packed_seq_buffer);
806         packed_header_param_buffer.type = VAEncPackedHeaderSequence;
807         packed_header_param_buffer.bit_length = length_in_bits;
808         packed_header_param_buffer.has_emulation_bytes = 0;
809         va_status = vaCreateBuffer(va_dpy,
810                                    avcenc_context.context_id,
811                                    VAEncPackedHeaderParameterBufferType,
812                                    sizeof(packed_header_param_buffer), 1, &packed_header_param_buffer,
813                                    &avcenc_context.packed_seq_header_param_buf_id);
814         CHECK_VASTATUS(va_status, "vaCreateBuffer");
815 
816         va_status = vaCreateBuffer(va_dpy,
817                                    avcenc_context.context_id,
818                                    VAEncPackedHeaderDataBufferType,
819                                    (length_in_bits + 7) / 8, 1, packed_seq_buffer,
820                                    &avcenc_context.packed_seq_buf_id);
821         CHECK_VASTATUS(va_status, "vaCreateBuffer");
822 
823         length_in_bits = build_packed_pic_buffer(&packed_pic_buffer);
824         packed_header_param_buffer.type = VAEncPackedHeaderPicture;
825         packed_header_param_buffer.bit_length = length_in_bits;
826         packed_header_param_buffer.has_emulation_bytes = 0;
827 
828         va_status = vaCreateBuffer(va_dpy,
829                                    avcenc_context.context_id,
830                                    VAEncPackedHeaderParameterBufferType,
831                                    sizeof(packed_header_param_buffer), 1, &packed_header_param_buffer,
832                                    &avcenc_context.packed_pic_header_param_buf_id);
833         CHECK_VASTATUS(va_status, "vaCreateBuffer");
834 
835         va_status = vaCreateBuffer(va_dpy,
836                                    avcenc_context.context_id,
837                                    VAEncPackedHeaderDataBufferType,
838                                    (length_in_bits + 7) / 8, 1, packed_pic_buffer,
839                                    &avcenc_context.packed_pic_buf_id);
840         CHECK_VASTATUS(va_status, "vaCreateBuffer");
841 
842         free(packed_seq_buffer);
843         free(packed_pic_buffer);
844     }
845 
846     /* sequence parameter set */
847     VAEncSequenceParameterBufferH264 *seq_param = &avcenc_context.seq_param;
848     va_status = vaCreateBuffer(va_dpy,
849                                avcenc_context.context_id,
850                                VAEncSequenceParameterBufferType,
851                                sizeof(*seq_param), 1, seq_param,
852                                &avcenc_context.seq_param_buf_id);
853     CHECK_VASTATUS(va_status, "vaCreateBuffer");
854 
855 
856     /* hrd parameter */
857     VAEncMiscParameterBuffer *misc_param;
858     VAEncMiscParameterHRD *misc_hrd_param;
859     va_status = vaCreateBuffer(va_dpy,
860                    avcenc_context.context_id,
861                    VAEncMiscParameterBufferType,
862                    sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterRateControl),
863                    1,
864                    NULL,
865                    &avcenc_context.misc_parameter_hrd_buf_id);
866     CHECK_VASTATUS(va_status, "vaCreateBuffer");
867 
868     vaMapBuffer(va_dpy,
869                 avcenc_context.misc_parameter_hrd_buf_id,
870                 (void **)&misc_param);
871     misc_param->type = VAEncMiscParameterTypeHRD;
872     misc_hrd_param = (VAEncMiscParameterHRD *)misc_param->data;
873 
874     if (frame_bit_rate > 0) {
875         misc_hrd_param->initial_buffer_fullness = frame_bit_rate * 1000 * 4;
876         misc_hrd_param->buffer_size = frame_bit_rate * 1000 * 8;
877     } else {
878         misc_hrd_param->initial_buffer_fullness = 0;
879         misc_hrd_param->buffer_size = 0;
880     }
881 
882     vaUnmapBuffer(va_dpy, avcenc_context.misc_parameter_hrd_buf_id);
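
    /*
     * Note: frame_bit_rate is in kbps, so the CPB above is sized at
     * frame_bit_rate * 1000 * 8 bits with an initial fullness of half the
     * buffer; this is intended to line up with the bit_rate/cpb_size values
     * that sps_rbsp() writes into the SPS HRD parameters.
     */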
883 
884     /* ROI parameter: hard-coded test with a single region (0,0,120,120) and qp_delta=4 */
885     if (roi_test_enable) {
886         VAEncMiscParameterBufferROI *misc_roi_param;
887 
888         int roi_num = 1;
889         va_status = vaCreateBuffer(va_dpy,
890                        avcenc_context.context_id,
891                        VAEncMiscParameterBufferType,
892                        sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterBufferROI) + roi_num * sizeof(VAEncROI),
893                        1,
894                        NULL,
895                        &avcenc_context.misc_parameter_roi_buf_id);
896         CHECK_VASTATUS(va_status, "vaCreateBuffer");
897         vaMapBuffer(va_dpy,
898                     avcenc_context.misc_parameter_roi_buf_id,
899                     (void **)&misc_param);
900         misc_param->type = VAEncMiscParameterTypeROI;
901         misc_roi_param = (VAEncMiscParameterBufferROI *)misc_param->data;
902         {
903             misc_roi_param->roi_flags.bits.roi_value_is_qp_delta = 1;
904             /*
905              * Max/min delta_qp is only used in CBR mode; it is ignored under CQP mode.
906              * max_delta_qp is the allowed upper bound of the QP delta (qp + X).
907              * min_delta_qp is the allowed lower bound of the QP delta (qp - X).
908              * Both should therefore be positive; otherwise the driver falls back
909              * to its default bound settings.
910              */
911             misc_roi_param->max_delta_qp = 3;
912             misc_roi_param->min_delta_qp = 3;
913             /* One example ROI region configuration;
914              * adjust it as needed.
915              */
916             VAEncROI *region_roi = (VAEncROI *)((char *)misc_param + sizeof(VAEncMiscParameterBuffer) +
917                                                 sizeof(VAEncMiscParameterBufferROI));
918 
919             /*
920              * Under CQP mode, roi_value specifies the qp_delta added to the frame QP.
921              * Under CBR mode, roi_value specifies the importance level (positive means
922              * more important, negative means less important).
923              */
924             region_roi->roi_value = 4;
925             region_roi->roi_rectangle.x = 0;
926             region_roi->roi_rectangle.y = 0;
927             region_roi->roi_rectangle.width = (120 < picture_width / 4) ? 120 : picture_width / 4;
928             region_roi->roi_rectangle.height = (120 < picture_height / 4) ? 120 : picture_height / 4;
929 
930             misc_roi_param->roi = region_roi;
931             misc_roi_param->num_roi = 1;
932         }
933 
934         vaUnmapBuffer(va_dpy, avcenc_context.misc_parameter_roi_buf_id);
935     }
936     return 0;
937 }
938 
939 int avcenc_render_picture()
940 {
941     VAStatus va_status;
942     VABufferID va_buffers[20];
943     unsigned int num_va_buffers = 0;
944     int i;
945 
946     if (avcenc_context.packed_aud_header_param_buf_id != VA_INVALID_ID)
947         va_buffers[num_va_buffers++] =  avcenc_context.packed_aud_header_param_buf_id;
948 
949     if (avcenc_context.packed_aud_buf_id != VA_INVALID_ID)
950         va_buffers[num_va_buffers++] =  avcenc_context.packed_aud_buf_id;
951 
952     va_buffers[num_va_buffers++] = avcenc_context.seq_param_buf_id;
953     va_buffers[num_va_buffers++] = avcenc_context.pic_param_buf_id;
954 
955     if (avcenc_context.packed_seq_header_param_buf_id != VA_INVALID_ID)
956         va_buffers[num_va_buffers++] = avcenc_context.packed_seq_header_param_buf_id;
957 
958     if (avcenc_context.packed_seq_buf_id != VA_INVALID_ID)
959         va_buffers[num_va_buffers++] = avcenc_context.packed_seq_buf_id;
960 
961     if (avcenc_context.packed_pic_header_param_buf_id != VA_INVALID_ID)
962         va_buffers[num_va_buffers++] = avcenc_context.packed_pic_header_param_buf_id;
963 
964     if (avcenc_context.packed_pic_buf_id != VA_INVALID_ID)
965         va_buffers[num_va_buffers++] = avcenc_context.packed_pic_buf_id;
966 
967     if (avcenc_context.packed_sei_header_param_buf_id != VA_INVALID_ID)
968         va_buffers[num_va_buffers++] = avcenc_context.packed_sei_header_param_buf_id;
969 
970     if (avcenc_context.packed_sei_buf_id != VA_INVALID_ID)
971         va_buffers[num_va_buffers++] = avcenc_context.packed_sei_buf_id;
972 
973     if (avcenc_context.misc_parameter_hrd_buf_id != VA_INVALID_ID)
974         va_buffers[num_va_buffers++] =  avcenc_context.misc_parameter_hrd_buf_id;
975 
976     if (avcenc_context.misc_parameter_roi_buf_id != VA_INVALID_ID)
977         va_buffers[num_va_buffers++] =  avcenc_context.misc_parameter_roi_buf_id;
978 
979 
980     va_status = vaBeginPicture(va_dpy,
981                                avcenc_context.context_id,
982                                surface_ids[avcenc_context.current_input_surface]);
983     CHECK_VASTATUS(va_status, "vaBeginPicture");
984 
985     va_status = vaRenderPicture(va_dpy,
986                                 avcenc_context.context_id,
987                                 va_buffers,
988                                 num_va_buffers);
989     CHECK_VASTATUS(va_status, "vaRenderPicture");
990 
991     for (i = 0; i < avcenc_context.num_slices; i++) {
992         va_status = vaRenderPicture(va_dpy,
993                                     avcenc_context.context_id,
994                                     &avcenc_context.slice_param_buf_id[i],
995                                     1);
996         CHECK_VASTATUS(va_status, "vaRenderPicture");
997     }
998 
999     va_status = vaEndPicture(va_dpy, avcenc_context.context_id);
1000     CHECK_VASTATUS(va_status, "vaEndPicture");
1001 
1002     return 0;
1003 }
1004 
1005 static int avcenc_destroy_buffers(VABufferID *va_buffers, unsigned int num_va_buffers)
1006 {
1007     VAStatus va_status;
1008     unsigned int i;
1009 
1010     for (i = 0; i < num_va_buffers; i++) {
1011         if (va_buffers[i] != VA_INVALID_ID) {
1012             va_status = vaDestroyBuffer(va_dpy, va_buffers[i]);
1013             CHECK_VASTATUS(va_status, "vaDestroyBuffer");
1014             va_buffers[i] = VA_INVALID_ID;
1015         }
1016     }
1017 
1018     return 0;
1019 }
1020 
1021 static void end_picture(void)
1022 {
1023 
1024     update_ReferenceFrames();
1025     avcenc_destroy_buffers(&avcenc_context.seq_param_buf_id, 1);
1026     avcenc_destroy_buffers(&avcenc_context.pic_param_buf_id, 1);
1027     avcenc_destroy_buffers(&avcenc_context.packed_seq_header_param_buf_id, 1);
1028     avcenc_destroy_buffers(&avcenc_context.packed_seq_buf_id, 1);
1029     avcenc_destroy_buffers(&avcenc_context.packed_pic_header_param_buf_id, 1);
1030     avcenc_destroy_buffers(&avcenc_context.packed_pic_buf_id, 1);
1031     avcenc_destroy_buffers(&avcenc_context.packed_sei_header_param_buf_id, 1);
1032     avcenc_destroy_buffers(&avcenc_context.packed_sei_buf_id, 1);
1033     avcenc_destroy_buffers(&avcenc_context.slice_param_buf_id[0], avcenc_context.num_slices);
1034     avcenc_destroy_buffers(&avcenc_context.codedbuf_buf_id, 1);
1035     avcenc_destroy_buffers(&avcenc_context.misc_parameter_hrd_buf_id, 1);
1036     avcenc_destroy_buffers(&avcenc_context.misc_parameter_roi_buf_id, 1);
1037     avcenc_destroy_buffers(&avcenc_context.packed_aud_header_param_buf_id, 1);
1038     avcenc_destroy_buffers(&avcenc_context.packed_aud_buf_id, 1);
1039 
1040     memset(avcenc_context.slice_param, 0, sizeof(avcenc_context.slice_param));
1041     avcenc_context.num_slices = 0;
1042 }
1043 
1044 #define BITSTREAM_ALLOCATE_STEPPING     4096
1045 
1046 struct __bitstream {
1047     unsigned int *buffer;
1048     int bit_offset;
1049     int max_size_in_dword;
1050 };
1051 
1052 typedef struct __bitstream bitstream;
1053 
1054 #if 0
1055 static int
1056 get_coded_bitsteam_length(unsigned char *buffer, int buffer_length)
1057 {
1058     int i;
1059 
1060     for (i = 0; i < buffer_length - 3; i++) {
1061         if (!buffer[i] &&
1062             !buffer[i + 1] &&
1063             !buffer[i + 2] &&
1064             !buffer[i + 3])
1065             break;
1066     }
1067 
1068     return i;
1069 }
1070 #endif
1071 
1072 static unsigned int
1073 va_swap32(unsigned int val)
1074 {
1075     unsigned char *pval = (unsigned char *)&val;
1076 
1077     return ((pval[0] << 24)     |
1078             (pval[1] << 16)     |
1079             (pval[2] << 8)      |
1080             (pval[3] << 0));
1081 }
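
/*
 * va_swap32() is an endianness fix-up for flushing whole 32-bit words into
 * the byte-oriented H.264 bitstream: on a little-endian host,
 * va_swap32(0x0A0B0C0D) returns 0x0D0C0B0A, so when the result is stored its
 * bytes land in memory as 0A 0B 0C 0D, i.e. most-significant byte first.
 */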
1082 
1083 static void
1084 bitstream_start(bitstream *bs)
1085 {
1086     bs->max_size_in_dword = BITSTREAM_ALLOCATE_STEPPING;
1087     bs->buffer = calloc(bs->max_size_in_dword * sizeof(int), 1);
1088     assert(bs->buffer);
1089     bs->bit_offset = 0;
1090 }
1091 
1092 static void
1093 bitstream_end(bitstream *bs)
1094 {
1095     int pos = (bs->bit_offset >> 5);
1096     int bit_offset = (bs->bit_offset & 0x1f);
1097     int bit_left = 32 - bit_offset;
1098 
1099     if (bit_offset) {
1100         bs->buffer[pos] = va_swap32((bs->buffer[pos] << bit_left));
1101     }
1102 }
1103 
1104 static void
1105 bitstream_put_ui(bitstream *bs, unsigned int val, int size_in_bits)
1106 {
1107     int pos = (bs->bit_offset >> 5);
1108     int bit_offset = (bs->bit_offset & 0x1f);
1109     int bit_left = 32 - bit_offset;
1110 
1111     if (!size_in_bits)
1112         return;
1113 
1114     bs->bit_offset += size_in_bits;
1115 
1116     if (bit_left > size_in_bits) {
1117         bs->buffer[pos] = (bs->buffer[pos] << size_in_bits | val);
1118     } else {
1119         size_in_bits -= bit_left;
1120         bs->buffer[pos] = (bs->buffer[pos] << bit_left) | (val >> size_in_bits);
1121         bs->buffer[pos] = va_swap32(bs->buffer[pos]);
1122 
1123         if (pos + 1 == bs->max_size_in_dword) {
1124             bs->max_size_in_dword += BITSTREAM_ALLOCATE_STEPPING;
1125             bs->buffer = realloc(bs->buffer, bs->max_size_in_dword * sizeof(unsigned int));
1126             assert(bs->buffer);
1127         }
1128 
1129         bs->buffer[pos + 1] = val;
1130     }
1131 }
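
/*
 * bitstream_put_ui() accumulates bits MSB-first in the low end of the current
 * 32-bit word; only once a word fills up is it byte-swapped into stream order
 * via va_swap32(), and the partially filled tail word is finalized later by
 * bitstream_end().
 */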
1132 
1133 static void
1134 bitstream_put_ue(bitstream *bs, unsigned int val)
1135 {
1136     int size_in_bits = 0;
1137     int tmp_val = ++val;
1138 
1139     while (tmp_val) {
1140         tmp_val >>= 1;
1141         size_in_bits++;
1142     }
1143 
1144     bitstream_put_ui(bs, 0, size_in_bits - 1); // leading zero
1145     bitstream_put_ui(bs, val, size_in_bits);
1146 }
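
/*
 * Worked ue(v) example: for val = 3, val + 1 = 4 = 0b100 needs 3 bits, so two
 * leading zero bits are written followed by "100", giving the 5-bit
 * Exp-Golomb codeword 00100.
 */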
1147 
1148 static void
1149 bitstream_put_se(bitstream *bs, int val)
1150 {
1151     unsigned int new_val;
1152 
1153     if (val <= 0)
1154         new_val = -2 * val;
1155     else
1156         new_val = 2 * val - 1;
1157 
1158     bitstream_put_ue(bs, new_val);
1159 }
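
/*
 * se(v) maps the signed value onto the unsigned Exp-Golomb index before
 * calling bitstream_put_ue(): 0 -> 0, 1 -> 1, -1 -> 2, 2 -> 3, -2 -> 4, ...
 */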
1160 
1161 static void
1162 bitstream_byte_aligning(bitstream *bs, int bit)
1163 {
1164     int bit_offset = (bs->bit_offset & 0x7);
1165     int bit_left = 8 - bit_offset;
1166     int new_val;
1167 
1168     if (!bit_offset)
1169         return;
1170 
1171     assert(bit == 0 || bit == 1);
1172 
1173     if (bit)
1174         new_val = (1 << bit_left) - 1;
1175     else
1176         new_val = 0;
1177 
1178     bitstream_put_ui(bs, new_val, bit_left);
1179 }
1180 
1181 static void
1182 rbsp_trailing_bits(bitstream *bs)
1183 {
1184     bitstream_put_ui(bs, 1, 1);
1185     bitstream_byte_aligning(bs, 0);
1186 }
1187 
1188 static void nal_start_code_prefix(bitstream *bs)
1189 {
1190     bitstream_put_ui(bs, 0x00000001, 32);
1191 }
1192 
1193 static void nal_header(bitstream *bs, int nal_ref_idc, int nal_unit_type)
1194 {
1195     bitstream_put_ui(bs, 0, 1);                /* forbidden_zero_bit: 0 */
1196     bitstream_put_ui(bs, nal_ref_idc, 2);
1197     bitstream_put_ui(bs, nal_unit_type, 5);
1198 }
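
/*
 * The three fields above form the single NAL header byte:
 * forbidden_zero_bit(1) | nal_ref_idc(2) | nal_unit_type(5).  For example, an
 * IDR slice with NAL_REF_IDC_HIGH encodes as (0 << 7) | (3 << 5) | 5 = 0x65.
 */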
1199 
1200 static void sps_rbsp(bitstream *bs)
1201 {
1202     VAEncSequenceParameterBufferH264 *seq_param = &avcenc_context.seq_param;
1203     int profile_idc = PROFILE_IDC_BASELINE;
1204 
1205     if (avcenc_context.profile == VAProfileH264High)
1206         profile_idc = PROFILE_IDC_HIGH;
1207     else if (avcenc_context.profile == VAProfileH264Main)
1208         profile_idc = PROFILE_IDC_MAIN;
1209 
1210     bitstream_put_ui(bs, profile_idc, 8);               /* profile_idc */
1211     bitstream_put_ui(bs, !!(avcenc_context.constraint_set_flag & 1), 1);                         /* constraint_set0_flag */
1212     bitstream_put_ui(bs, !!(avcenc_context.constraint_set_flag & 2), 1);                         /* constraint_set1_flag */
1213     bitstream_put_ui(bs, !!(avcenc_context.constraint_set_flag & 4), 1);                         /* constraint_set2_flag */
1214     bitstream_put_ui(bs, !!(avcenc_context.constraint_set_flag & 8), 1);                         /* constraint_set3_flag */
1215     bitstream_put_ui(bs, 0, 4);                         /* reserved_zero_4bits */
1216     bitstream_put_ui(bs, seq_param->level_idc, 8);      /* level_idc */
1217     bitstream_put_ue(bs, seq_param->seq_parameter_set_id);      /* seq_parameter_set_id */
1218 
1219     if (profile_idc == PROFILE_IDC_HIGH) {
1220         bitstream_put_ue(bs, 1);        /* chroma_format_idc = 1, 4:2:0 */
1221         bitstream_put_ue(bs, 0);        /* bit_depth_luma_minus8 */
1222         bitstream_put_ue(bs, 0);        /* bit_depth_chroma_minus8 */
1223         bitstream_put_ui(bs, 0, 1);     /* qpprime_y_zero_transform_bypass_flag */
1224         bitstream_put_ui(bs, 0, 1);     /* seq_scaling_matrix_present_flag */
1225     }
1226 
1227     bitstream_put_ue(bs, seq_param->seq_fields.bits.log2_max_frame_num_minus4); /* log2_max_frame_num_minus4 */
1228     bitstream_put_ue(bs, seq_param->seq_fields.bits.pic_order_cnt_type);        /* pic_order_cnt_type */
1229 
1230     if (seq_param->seq_fields.bits.pic_order_cnt_type == 0)
1231         bitstream_put_ue(bs, seq_param->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4);     /* log2_max_pic_order_cnt_lsb_minus4 */
1232     else {
1233         assert(0);
1234     }
1235 
1236     bitstream_put_ue(bs, seq_param->max_num_ref_frames);        /* num_ref_frames */
1237     bitstream_put_ui(bs, 0, 1);                                 /* gaps_in_frame_num_value_allowed_flag */
1238 
1239     bitstream_put_ue(bs, seq_param->picture_width_in_mbs - 1);  /* pic_width_in_mbs_minus1 */
1240     bitstream_put_ue(bs, seq_param->picture_height_in_mbs - 1); /* pic_height_in_map_units_minus1 */
1241     bitstream_put_ui(bs, seq_param->seq_fields.bits.frame_mbs_only_flag, 1);    /* frame_mbs_only_flag */
1242 
1243     if (!seq_param->seq_fields.bits.frame_mbs_only_flag) {
1244         assert(0);
1245     }
1246 
1247     bitstream_put_ui(bs, seq_param->seq_fields.bits.direct_8x8_inference_flag, 1);      /* direct_8x8_inference_flag */
1248     bitstream_put_ui(bs, seq_param->frame_cropping_flag, 1);            /* frame_cropping_flag */
1249 
1250     if (seq_param->frame_cropping_flag) {
1251         bitstream_put_ue(bs, seq_param->frame_crop_left_offset);        /* frame_crop_left_offset */
1252         bitstream_put_ue(bs, seq_param->frame_crop_right_offset);       /* frame_crop_right_offset */
1253         bitstream_put_ue(bs, seq_param->frame_crop_top_offset);         /* frame_crop_top_offset */
1254         bitstream_put_ue(bs, seq_param->frame_crop_bottom_offset);      /* frame_crop_bottom_offset */
1255     }
1256 
1257     if (frame_bit_rate < 0) {
1258         bitstream_put_ui(bs, 0, 1); /* vui_parameters_present_flag */
1259     } else {
1260         bitstream_put_ui(bs, 1, 1); /* vui_parameters_present_flag */
1261         bitstream_put_ui(bs, 0, 1); /* aspect_ratio_info_present_flag */
1262         bitstream_put_ui(bs, 0, 1); /* overscan_info_present_flag */
1263         bitstream_put_ui(bs, 0, 1); /* video_signal_type_present_flag */
1264         bitstream_put_ui(bs, 0, 1); /* chroma_loc_info_present_flag */
1265         bitstream_put_ui(bs, 1, 1); /* timing_info_present_flag */
1266         {
1267             bitstream_put_ui(bs, 1, 32);                /* num_units_in_tick */
1268             bitstream_put_ui(bs, frame_rate * 2, 32);   /* time_scale: two ticks per frame */
1269             bitstream_put_ui(bs, 1, 1);                 /* fixed_frame_rate_flag */
1270         }
1271         bitstream_put_ui(bs, 1, 1); /* nal_hrd_parameters_present_flag */
1272         {
1273             // hrd_parameters
1274             bitstream_put_ue(bs, 0);    /* cpb_cnt_minus1 */
1275             bitstream_put_ui(bs, 0, 4); /* bit_rate_scale */
1276             bitstream_put_ui(bs, 2, 4); /* cpb_size_scale */
1277 
1278             /* the frame_bit_rate is in kbps */
1279             bitstream_put_ue(bs, (((frame_bit_rate * 1000) >> 6) - 1)); /* bit_rate_value_minus1[0] */
1280             bitstream_put_ue(bs, ((frame_bit_rate * 8000) >> 6) - 1); /* cpb_size_value_minus1[0] */
1281             bitstream_put_ui(bs, 1, 1);  /* cbr_flag[0] */
1282 
1283             /* initial_cpb_removal_delay_length_minus1 */
1284             bitstream_put_ui(bs,
1285                              (avcenc_context.i_initial_cpb_removal_delay_length - 1), 5);
1286             /* cpb_removal_delay_length_minus1 */
1287             bitstream_put_ui(bs,
1288                              (avcenc_context.i_cpb_removal_delay_length - 1), 5);
1289             /* dpb_output_delay_length_minus1 */
1290             bitstream_put_ui(bs,
1291                              (avcenc_context.i_dpb_output_delay_length - 1), 5);
1292             /* time_offset_length  */
1293             bitstream_put_ui(bs,
1294                              (avcenc_context.time_offset_length - 1), 5);
1295         }
1296         bitstream_put_ui(bs, 0, 1);   /* vcl_hrd_parameters_present_flag */
1297         bitstream_put_ui(bs, 0, 1);   /* low_delay_hrd_flag */
1298 
1299         bitstream_put_ui(bs, 0, 1); /* pic_struct_present_flag */
1300         bitstream_put_ui(bs, 0, 1); /* bitstream_restriction_flag */
1301     }
1302 
1303     rbsp_trailing_bits(bs);     /* rbsp_trailing_bits */
1304 }
1305 
1306 #if 0
1307 static void build_nal_sps(FILE *avc_fp)
1308 {
1309     bitstream bs;
1310 
1311     bitstream_start(&bs);
1312     nal_start_code_prefix(&bs);
1313     nal_header(&bs, NAL_REF_IDC_HIGH, NAL_SPS);
1314     sps_rbsp(&bs);
1315     bitstream_end(&bs, avc_fp);
1316 }
1317 #endif
1318 
1319 static void pps_rbsp(bitstream *bs)
1320 {
1321     VAEncPictureParameterBufferH264 *pic_param = &avcenc_context.pic_param;
1322 
1323     bitstream_put_ue(bs, pic_param->pic_parameter_set_id);      /* pic_parameter_set_id */
1324     bitstream_put_ue(bs, pic_param->seq_parameter_set_id);      /* seq_parameter_set_id */
1325 
1326     bitstream_put_ui(bs, pic_param->pic_fields.bits.entropy_coding_mode_flag, 1);  /* entropy_coding_mode_flag */
1327 
1328     bitstream_put_ui(bs, 0, 1);                         /* pic_order_present_flag: 0 */
1329 
1330     bitstream_put_ue(bs, 0);                            /* num_slice_groups_minus1 */
1331 
1332     bitstream_put_ue(bs, pic_param->num_ref_idx_l0_active_minus1);      /* num_ref_idx_l0_active_minus1 */
1333     bitstream_put_ue(bs, pic_param->num_ref_idx_l1_active_minus1);      /* num_ref_idx_l1_active_minus1 */
1334 
1335     bitstream_put_ui(bs, pic_param->pic_fields.bits.weighted_pred_flag, 1);     /* weighted_pred_flag: 0 */
1336     bitstream_put_ui(bs, pic_param->pic_fields.bits.weighted_bipred_idc, 2);    /* weighted_bipred_idc: 0 */
1337 
1338     bitstream_put_se(bs, pic_param->pic_init_qp - 26);  /* pic_init_qp_minus26 */
1339     bitstream_put_se(bs, 0);                            /* pic_init_qs_minus26 */
1340     bitstream_put_se(bs, 0);                            /* chroma_qp_index_offset */
1341 
1342     bitstream_put_ui(bs, pic_param->pic_fields.bits.deblocking_filter_control_present_flag, 1); /* deblocking_filter_control_present_flag */
1343     bitstream_put_ui(bs, 0, 1);                         /* constrained_intra_pred_flag */
1344     bitstream_put_ui(bs, 0, 1);                         /* redundant_pic_cnt_present_flag */
1345 
1346     /* more_rbsp_data */
1347     bitstream_put_ui(bs, pic_param->pic_fields.bits.transform_8x8_mode_flag, 1);    /*transform_8x8_mode_flag */
1348     bitstream_put_ui(bs, 0, 1);                         /* pic_scaling_matrix_present_flag */
1349     bitstream_put_se(bs, pic_param->second_chroma_qp_index_offset);     /*second_chroma_qp_index_offset */
1350 
1351     rbsp_trailing_bits(bs);
1352 }
1353 
1354 #if 0
1355 static void build_nal_pps(FILE *avc_fp)
1356 {
1357     bitstream bs;
1358 
1359     bitstream_start(&bs);
1360     nal_start_code_prefix(&bs);
1361     nal_header(&bs, NAL_REF_IDC_HIGH, NAL_PPS);
1362     pps_rbsp(&bs);
1363     bitstream_end(&bs, avc_fp);
1364 }
1365 
1366 static void
1367 build_header(FILE *avc_fp)
1368 {
1369     build_nal_sps(avc_fp);
1370     build_nal_pps(avc_fp);
1371 }
1372 #endif
1373 
1374 static void nal_delimiter(bitstream *bs, int slice_type)
1375 {
1376     if (slice_type == SLICE_TYPE_I || slice_type == FRAME_IDR)
1377         bitstream_put_ui(bs, 0, 3);     /* primary_pic_type: I slices only */
1378     else if (slice_type == SLICE_TYPE_P)
1379         bitstream_put_ui(bs, 1, 3);     /* primary_pic_type: I and P slices */
1380     else if (slice_type == SLICE_TYPE_B)
1381         bitstream_put_ui(bs, 2, 3);     /* primary_pic_type: I, P and B slices */
1382     else
1383         assert(0);
1384     bitstream_put_ui(bs, 1, 1);         /* rbsp stop bit */
1385     bitstream_put_ui(bs, 0, 4);         /* alignment to the byte boundary */
1386 }
1387 
1388 static int build_nal_delimiter(unsigned char **header_buffer)
1389 {
1390     bitstream bs;
1391 
1392     bitstream_start(&bs);
1393     nal_start_code_prefix(&bs);
1394     nal_header(&bs, NAL_REF_IDC_NONE, NAL_DELIMITER);
1395     nal_delimiter(&bs, current_frame_type);
1396     bitstream_end(&bs);
1397     *header_buffer = (unsigned char *)bs.buffer;
1398     return bs.bit_offset;
1399 }
1400 
1401 
1402 static int
1403 build_packed_pic_buffer(unsigned char **header_buffer)
1404 {
1405     bitstream bs;
1406 
1407     bitstream_start(&bs);
1408     nal_start_code_prefix(&bs);
1409     nal_header(&bs, NAL_REF_IDC_HIGH, NAL_PPS);
1410     pps_rbsp(&bs);
1411     bitstream_end(&bs);
1412 
1413     *header_buffer = (unsigned char *)bs.buffer;
1414     return bs.bit_offset;
1415 }
1416 
1417 static int
1418 build_packed_seq_buffer(unsigned char **header_buffer)
1419 {
1420     bitstream bs;
1421 
1422     bitstream_start(&bs);
1423     nal_start_code_prefix(&bs);
1424     nal_header(&bs, NAL_REF_IDC_HIGH, NAL_SPS);
1425     sps_rbsp(&bs);
1426     bitstream_end(&bs);
1427 
1428     *header_buffer = (unsigned char *)bs.buffer;
1429     return bs.bit_offset;
1430 }
1431 
1432 static int
1433 build_packed_idr_sei_buffer_timing(unsigned int init_cpb_removal_delay_length,
1434                                    unsigned int cpb_removal_length,
1435                                    unsigned int dpb_output_length,
1436                                    unsigned char **sei_buffer)
1437 {
1438     unsigned char *byte_buf;
1439     int bp_byte_size, i, pic_byte_size;
1440     unsigned int cpb_removal_delay;
1441 
1442     bitstream nal_bs;
1443     bitstream sei_bp_bs, sei_pic_bs;
1444 
1445     bitstream_start(&sei_bp_bs);
1446     bitstream_put_ue(&sei_bp_bs, 0);       /*seq_parameter_set_id*/
1447     /* SEI buffer period info */
1448     /* NALHrdBpPresentFlag == 1 */
1449     bitstream_put_ui(&sei_bp_bs, avcenc_context.i_initial_cpb_removal_delay,
1450                      init_cpb_removal_delay_length);
1451     bitstream_put_ui(&sei_bp_bs, avcenc_context.i_initial_cpb_removal_delay_offset,
1452                      init_cpb_removal_delay_length);
1453     if (sei_bp_bs.bit_offset & 0x7) {
1454         bitstream_put_ui(&sei_bp_bs, 1, 1);
1455     }
1456     bitstream_end(&sei_bp_bs);
1457     bp_byte_size = (sei_bp_bs.bit_offset + 7) / 8;
1458 
1459     /* SEI pic timing info */
1460     bitstream_start(&sei_pic_bs);
1461     /* The CPB and DPB delay info is controlled by CpbDpbDelaysPresentFlag,
1462      * which is derived as 1 if either of the following conditions is true:
1463      * nal_hrd_parameters_present_flag is present in the bitstream and equal to 1, or
1464      * vcl_hrd_parameters_present_flag is present in the bitstream and equal to 1.
1465      */
1466     cpb_removal_delay = (avcenc_context.current_cpb_removal - avcenc_context.prev_idr_cpb_removal);
1467     bitstream_put_ui(&sei_pic_bs, cpb_removal_delay, cpb_removal_length);
1468     bitstream_put_ui(&sei_pic_bs, avcenc_context.current_dpb_removal_delta,
1469                      dpb_output_length);
1470     if (sei_pic_bs.bit_offset & 0x7) {
1471         bitstream_put_ui(&sei_pic_bs, 1, 1);
1472     }
1473     /* The pic_structure_present_flag determines whether the pic_structure
1474      * info is written into the SEI pic timing info.
1475      * Currently it is set to zero.
1476      */
1477     bitstream_end(&sei_pic_bs);
1478     pic_byte_size = (sei_pic_bs.bit_offset + 7) / 8;
1479 
1480     bitstream_start(&nal_bs);
1481     nal_start_code_prefix(&nal_bs);
1482     nal_header(&nal_bs, NAL_REF_IDC_NONE, NAL_SEI);
1483 
1484     /* Write the SEI buffer period data */
1485     bitstream_put_ui(&nal_bs, 0, 8);               /* SEI payload type: buffering_period (0) */
1486     bitstream_put_ui(&nal_bs, bp_byte_size, 8);    /* SEI payload size in bytes */
1487 
1488     byte_buf = (unsigned char *)sei_bp_bs.buffer;
1489     for (i = 0; i < bp_byte_size; i++) {
1490         bitstream_put_ui(&nal_bs, byte_buf[i], 8);
1491     }
1492     free(byte_buf);
1493     /* write the SEI pic timing data */
1494     bitstream_put_ui(&nal_bs, 0x01, 8);            /* SEI payload type: pic_timing (1) */
1495     bitstream_put_ui(&nal_bs, pic_byte_size, 8);   /* SEI payload size in bytes */
1496 
1497     byte_buf = (unsigned char *)sei_pic_bs.buffer;
1498     for (i = 0; i < pic_byte_size; i++) {
1499         bitstream_put_ui(&nal_bs, byte_buf[i], 8);
1500     }
1501     free(byte_buf);
1502 
1503     rbsp_trailing_bits(&nal_bs);
1504     bitstream_end(&nal_bs);
1505 
1506     *sei_buffer = (unsigned char *)nal_bs.buffer;
1507 
1508     return nal_bs.bit_offset;
1509 }
1510 
1511 static int
1512 build_packed_sei_pic_timing(unsigned int cpb_removal_length,
1513                             unsigned int dpb_output_length,
1514                             unsigned char **sei_buffer)
1515 {
1516     unsigned char *byte_buf;
1517     int i, pic_byte_size;
1518     unsigned int cpb_removal_delay;
1519 
1520     bitstream nal_bs;
1521     bitstream sei_pic_bs;
1522 
1523     bitstream_start(&sei_pic_bs);
1524     /* The CPB and DPB delay info is controlled by CpbDpbDelaysPresentFlag,
1525      * which is derived as 1 if either of the following conditions is true:
1526      * nal_hrd_parameters_present_flag is present in the bitstream and equal to 1, or
1527      * vcl_hrd_parameters_present_flag is present in the bitstream and equal to 1.
1528      */
1529     cpb_removal_delay = (avcenc_context.current_cpb_removal - avcenc_context.current_idr_cpb_removal);
1530     bitstream_put_ui(&sei_pic_bs, cpb_removal_delay, cpb_removal_length);
1531     bitstream_put_ui(&sei_pic_bs, avcenc_context.current_dpb_removal_delta,
1532                      dpb_output_length);
1533     if (sei_pic_bs.bit_offset & 0x7) {
1534         bitstream_put_ui(&sei_pic_bs, 1, 1);
1535     }
1536 
1537     /* The pic_structure_present_flag determines whether the pic_structure
1538      * info is written into the SEI pic timing info.
1539      * Currently it is set to zero.
1540      */
1541     bitstream_end(&sei_pic_bs);
1542     pic_byte_size = (sei_pic_bs.bit_offset + 7) / 8;
1543 
1544     bitstream_start(&nal_bs);
1545     nal_start_code_prefix(&nal_bs);
1546     nal_header(&nal_bs, NAL_REF_IDC_NONE, NAL_SEI);
1547 
1548     /* write the SEI Pic timing data */
1549     bitstream_put_ui(&nal_bs, 0x01, 8);
1550     bitstream_put_ui(&nal_bs, pic_byte_size, 8);
1551 
1552     byte_buf = (unsigned char *)sei_pic_bs.buffer;
1553     for (i = 0; i < pic_byte_size; i++) {
1554         bitstream_put_ui(&nal_bs, byte_buf[i], 8);
1555     }
1556     free(byte_buf);
1557 
1558     rbsp_trailing_bits(&nal_bs);
1559     bitstream_end(&nal_bs);
1560 
1561     *sei_buffer = (unsigned char *)nal_bs.buffer;
1562 
1563     return nal_bs.bit_offset;
1564 }
1565 
1566 #if 0
1567 static void
1568 slice_header(bitstream *bs, int frame_num, int display_frame, int slice_type, int nal_ref_idc, int is_idr)
1569 {
1570     VAEncSequenceParameterBufferH264 *seq_param = &avcenc_context.seq_param;
1571     VAEncPictureParameterBufferH264 *pic_param = &avcenc_context.pic_param;
1572     int is_cabac = (pic_param->pic_fields.bits.entropy_coding_mode_flag == ENTROPY_MODE_CABAC);
1573 
1574     bitstream_put_ue(bs, 0);                   /* first_mb_in_slice: 0 */
1575     bitstream_put_ue(bs, slice_type);          /* slice_type */
1576     bitstream_put_ue(bs, 0);                   /* pic_parameter_set_id: 0 */
1577     bitstream_put_ui(bs, frame_num & 0x0F, seq_param->seq_fields.bits.log2_max_frame_num_minus4 + 4);    /* frame_num */
1578 
1579     /* frame_mbs_only_flag == 1 */
1580     if (!seq_param->seq_fields.bits.frame_mbs_only_flag) {
1581         /* FIXME: */
1582         assert(0);
1583     }
1584 
1585     if (is_idr)
1586         bitstream_put_ue(bs, 0);        /* idr_pic_id: 0 */
1587 
1588     if (seq_param->seq_fields.bits.pic_order_cnt_type == 0) {
1589         bitstream_put_ui(bs, (display_frame * 2) & 0x3F, seq_param->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 + 4);
1590         /* only support frame */
1591     } else {
1592         /* FIXME: */
1593         assert(0);
1594     }
1595 
1596     /* redundant_pic_cnt_present_flag == 0 */
1597 
1598     /* slice type */
1599     if (slice_type == SLICE_TYPE_P) {
1600         bitstream_put_ui(bs, 0, 1);            /* num_ref_idx_active_override_flag: 0 */
1601         /* ref_pic_list_reordering */
1602         bitstream_put_ui(bs, 0, 1);            /* ref_pic_list_reordering_flag_l0: 0 */
1603     } else if (slice_type == SLICE_TYPE_B) {
1604         bitstream_put_ui(bs, 1, 1);            /* direct_spatial_mv_pred: 1 */
1605         bitstream_put_ui(bs, 0, 1);            /* num_ref_idx_active_override_flag: 0 */
1606         /* ref_pic_list_reordering */
1607         bitstream_put_ui(bs, 0, 1);            /* ref_pic_list_reordering_flag_l0: 0 */
1608         bitstream_put_ui(bs, 0, 1);            /* ref_pic_list_reordering_flag_l1: 0 */
1609     }
1610 
1611     /* weighted_pred_flag == 0 */
1612 
1613     /* dec_ref_pic_marking */
1614     if (nal_ref_idc != 0) {
1615         if (is_idr) {
1616             bitstream_put_ui(bs, 0, 1);            /* no_output_of_prior_pics_flag: 0 */
1617             bitstream_put_ui(bs, 0, 1);            /* long_term_reference_flag: 0 */
1618         } else {
1619             bitstream_put_ui(bs, 0, 1);            /* adaptive_ref_pic_marking_mode_flag: 0 */
1620         }
1621     }
1622 
1623     if (is_cabac && (slice_type != SLICE_TYPE_I))
1624         bitstream_put_ue(bs, 0);               /* cabac_init_idc: 0 */
1625 
1626     bitstream_put_se(bs, 0);                   /* slice_qp_delta: 0 */
1627 
1628     if (pic_param->pic_fields.bits.deblocking_filter_control_present_flag == 1) {
1629         bitstream_put_ue(bs, 0);               /* disable_deblocking_filter_idc: 0 */
1630         bitstream_put_se(bs, 2);               /* slice_alpha_c0_offset_div2: 2 */
1631         bitstream_put_se(bs, 2);               /* slice_beta_offset_div2: 2 */
1632     }
1633 }
1634 
1635 static void
1636 slice_data(bitstream *bs)
1637 {
1638     VACodedBufferSegment *coded_buffer_segment;
1639     unsigned char *coded_mem;
1640     int i, slice_data_length;
1641     VAStatus va_status;
1642     VASurfaceStatus surface_status;
1643 
1644     va_status = vaSyncSurface(va_dpy, surface_ids[avcenc_context.current_input_surface]);
1645     CHECK_VASTATUS(va_status, "vaSyncSurface");
1646 
1647     surface_status = 0;
1648     va_status = vaQuerySurfaceStatus(va_dpy, surface_ids[avcenc_context.current_input_surface], &surface_status);
1649     CHECK_VASTATUS(va_status, "vaQuerySurfaceStatus");
1650 
1651     va_status = vaMapBuffer(va_dpy, avcenc_context.codedbuf_buf_id, (void **)(&coded_buffer_segment));
1652     CHECK_VASTATUS(va_status, "vaMapBuffer");
1653     coded_mem = coded_buffer_segment->buf;
1654 
1655     slice_data_length = get_coded_bitsteam_length(coded_mem, codedbuf_size);
1656 
1657     for (i = 0; i < slice_data_length; i++) {
1658         bitstream_put_ui(bs, *coded_mem, 8);
1659         coded_mem++;
1660     }
1661 
1662     vaUnmapBuffer(va_dpy, avcenc_context.codedbuf_buf_id);
1663 }
1664 
1665 static void
1666 build_nal_slice(FILE *avc_fp, int frame_num, int display_frame, int slice_type, int is_idr)
1667 {
1668     bitstream bs;
1669 
1670     bitstream_start(&bs);
1671     slice_data(&bs);
1672     bitstream_end(&bs, avc_fp);
1673 }
1674 
1675 #endif
1676 
1677 static int
1678 store_coded_buffer(FILE *avc_fp, int slice_type)
1679 {
1680     VACodedBufferSegment *coded_buffer_segment;
1681     unsigned char *coded_mem;
1682     int slice_data_length;
1683     VAStatus va_status;
1684     VASurfaceStatus surface_status;
1685     size_t w_items;
1686 
1687     va_status = vaSyncSurface(va_dpy, surface_ids[avcenc_context.current_input_surface]);
1688     CHECK_VASTATUS(va_status, "vaSyncSurface");
1689 
1690     surface_status = 0;
1691     va_status = vaQuerySurfaceStatus(va_dpy, surface_ids[avcenc_context.current_input_surface], &surface_status);
1692     CHECK_VASTATUS(va_status, "vaQuerySurfaceStatus");
1693 
1694     va_status = vaMapBuffer(va_dpy, avcenc_context.codedbuf_buf_id, (void **)(&coded_buffer_segment));
1695     CHECK_VASTATUS(va_status, "vaMapBuffer");
1696     coded_mem = coded_buffer_segment->buf;
1697 
1698     if (coded_buffer_segment->status & VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK) {
1699         if (slice_type == SLICE_TYPE_I)
1700             avcenc_context.codedbuf_i_size *= 2;
1701         else
1702             avcenc_context.codedbuf_pb_size *= 2;
1703 
1704         vaUnmapBuffer(va_dpy, avcenc_context.codedbuf_buf_id);
1705         return -1;
1706     }
1707 
1708     slice_data_length = coded_buffer_segment->size;
1709 
1710     do {
1711         w_items = fwrite(coded_mem, slice_data_length, 1, avc_fp);
1712     } while (w_items != 1);
1713 
1714     vaUnmapBuffer(va_dpy, avcenc_context.codedbuf_buf_id);
1715 
1716     return 0;
1717 }
1718 
1719 /*
1720  * Adapted from h264encode.c, with some simplifications.
1721  * For example: when one frame is encoded as an I-frame in a scenario with
1722  * P/B frames, it is regarded as an IDR frame (key frame) and a new GOP is
1723  * started. If the video clip is encoded as all I-frames, the first frame
1724  * is regarded as IDR and the remaining frames are regarded as I-frames.
1725  *
1726  */
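/*
 * For illustration (hypothetical parameter values, not the program defaults):
 * with gop_size = 4 and ip_period = 2, encoding orders 0..3 map to the frame
 * types IDR, P, B, P with display orders 0, 2, 1, 3. For instance:
 *
 *     unsigned long long display;
 *     int type;
 *     encoding2display_order(2, 4, 2, &display, &type);
 *     // type == SLICE_TYPE_B, display == 1
 */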
1727 
1728 static void encoding2display_order(
1729     unsigned long long encoding_order, int gop_size,
1730     int ip_period,
1731     unsigned long long *displaying_order,
1732     int *frame_type)
1733 {
1734     int encoding_order_gop = 0;
1735 
1736     /* When ip_period is 0, all are I/IDR frames */
1737     if (ip_period == 0) { /* all are I/IDR frames */
1738         if (encoding_order == 0)
1739             *frame_type = FRAME_IDR;
1740         else
1741             *frame_type = SLICE_TYPE_I;
1742 
1743         *displaying_order = encoding_order;
1744         return;
1745     }
1746 
1747     /* new sequence like
1748      * IDR PPPPP IDRPPPPP
1749      * IDR (PBB)(PBB)(PBB)(PBB) IDR (PBB)(PBB)(PBB)(PBB)
1750      */
1751     encoding_order_gop = encoding_order % gop_size;
1752 
1753     if (encoding_order_gop == 0) { /* the first frame */
1754         *frame_type = FRAME_IDR;
1755         *displaying_order = encoding_order;
1756     } else {
1757         int gop_delta;
1758 
1759         gop_delta = 1;
1760 
1761         if ((ip_period != 1) && ((gop_size - 1) % ip_period)) {
1762             int ipb_size;
1763             ipb_size = (gop_size - 1) / ip_period * ip_period + 1;
1764             if (encoding_order_gop >= ipb_size) {
1765                 gop_delta = ipb_size;
1766                 ip_period = gop_size - ipb_size;
1767             }
1768         }
1769 
1770         if (((encoding_order_gop - gop_delta) % ip_period) == 0) { /* P frames */
1771             *frame_type = SLICE_TYPE_P;
1772             *displaying_order = encoding_order + ip_period - 1;
1773         } else {
1774             *frame_type = SLICE_TYPE_B;
1775             *displaying_order = encoding_order - 1;
1776         }
1777     }
1778 }
1779 
1780 
1781 static void
1782 encode_picture(FILE *yuv_fp, FILE *avc_fp,
1783                int frame_num, int display_num,
1784                int is_idr,
1785                int slice_type, int next_is_bpic,
1786                int next_display_num)
1787 {
1788     VAStatus va_status;
1789     int ret = 0, codedbuf_size;
1790 
1791     begin_picture(yuv_fp, frame_num, display_num, slice_type, is_idr);
1792 
1793     //if (next_display_num < frame_number) {
1794     if (1) {
1795         int index;
1796 
1797         /* prepare for next frame */
1798         if (avcenc_context.current_input_surface == SID_INPUT_PICTURE_0)
1799             index = SID_INPUT_PICTURE_1;
1800         else
1801             index = SID_INPUT_PICTURE_0;
1802         if (next_display_num >= frame_number)
1803             next_display_num = frame_number - 1;
1804         ret = fseeko(yuv_fp, (off_t)frame_size * next_display_num, SEEK_SET);
1805         CHECK_CONDITION(ret == 0);
1806 
1807         avcenc_context.upload_thread_param.yuv_fp = yuv_fp;
1808         avcenc_context.upload_thread_param.surface_id = surface_ids[index];
1809 
1810         avcenc_context.upload_thread_value = pthread_create(&avcenc_context.upload_thread_id,
1811                                              NULL,
1812                                              upload_thread_function,
1813                                              (void*)&avcenc_context.upload_thread_param);
1814     }
1815 
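    /*
     * Encode-and-store retry loop: if store_coded_buffer() detects a coded
     * buffer overflow (VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK), it doubles
     * codedbuf_i_size / codedbuf_pb_size and returns -1, so the frame is
     * re-encoded here with a larger coded buffer until it fits.
     */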
1816     do {
1817         avcenc_destroy_buffers(&avcenc_context.codedbuf_buf_id, 1);
1818         avcenc_destroy_buffers(&avcenc_context.pic_param_buf_id, 1);
1819 
1820 
1821         if (SLICE_TYPE_I == slice_type) {
1822             codedbuf_size = avcenc_context.codedbuf_i_size;
1823         } else {
1824             codedbuf_size = avcenc_context.codedbuf_pb_size;
1825         }
1826 
1827         /* coded buffer */
1828         va_status = vaCreateBuffer(va_dpy,
1829                                    avcenc_context.context_id,
1830                                    VAEncCodedBufferType,
1831                                    codedbuf_size, 1, NULL,
1832                                    &avcenc_context.codedbuf_buf_id);
1833         CHECK_VASTATUS(va_status, "vaCreateBuffer");
1834 
1835         /* Update the RefPicList */
1836         update_RefPicList();
1837 
1838         /* picture parameter set */
1839         avcenc_update_picture_parameter(slice_type, is_idr);
1840 
1841         /* slice parameter */
1842         avcenc_update_slice_parameter(slice_type);
1843 
1844         if (avcenc_context.rate_control_method == VA_RC_CBR)
1845             avcenc_update_sei_param(is_idr);
1846 
1847         avcenc_render_picture();
1848 
1849         ret = store_coded_buffer(avc_fp, slice_type);
1850     } while (ret);
1851 
1852     end_picture();
1853 }
1854 
1855 static void show_help()
1856 {
1857     printf("Usage: avnenc <width> <height> <input_yuvfile> <output_avcfile> [--qp=qpvalue|--fb=framebitrate] [--mode=0(I frames only)/1(I and P frames)/2(I, P and B frames)] [--low-power] [--roi-test] [--frames=0(ignore when < 0)/N(number)] \n");
1858 }
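/*
 * Example invocation (hypothetical file names): encode a 1280x720 YUV420 clip
 * with I, P and B frames under CBR at 4000 kbps:
 *
 *     ./avcenc 1280 720 input.yuv output.264 --fb=4000 --mode=2
 */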
1859 
1860 static void avcenc_context_seq_param_init(VAEncSequenceParameterBufferH264 *seq_param,
1861         int width, int height)
1862 
1863 {
1864     int width_in_mbs = (width + 15) / 16;
1865     int height_in_mbs = (height + 15) / 16;
1866     int frame_cropping_flag = 0;
1867     int frame_crop_bottom_offset = 0;
1868 
1869     seq_param->seq_parameter_set_id = 0;
1870     seq_param->level_idc = 41;
1871     seq_param->intra_period = intra_period;
1872     seq_param->intra_idr_period = seq_param->intra_period;
1873     seq_param->ip_period = ip_period;
1874     seq_param->max_num_ref_frames = 4;
1875     seq_param->picture_width_in_mbs = width_in_mbs;
1876     seq_param->picture_height_in_mbs = height_in_mbs;
1877     seq_param->seq_fields.bits.frame_mbs_only_flag = 1;
1878     seq_param->seq_fields.bits.chroma_format_idc = 1;
1879 
1880 
1881     if (frame_bit_rate > 0)
1882         seq_param->bits_per_second = 1000 * frame_bit_rate; /* use kbps as input */
1883     else
1884         seq_param->bits_per_second = 0;
1885 
1886     seq_param->time_scale = frame_rate * 2;
1887     seq_param->num_units_in_tick = 1;           /* Tc = num_units_in_tick / time_scale */
1888 
1889     if (height_in_mbs * 16 - height) {
1890         frame_cropping_flag = 1;
1891         frame_crop_bottom_offset =
1892             (height_in_mbs * 16 - height) / (2 * (!seq_param->seq_fields.bits.frame_mbs_only_flag + 1));
1893     }
1894 
1895     seq_param->frame_cropping_flag = frame_cropping_flag;
1896     seq_param->frame_crop_left_offset = 0;
1897     seq_param->frame_crop_right_offset = 0;
1898     seq_param->frame_crop_top_offset = 0;
1899     seq_param->frame_crop_bottom_offset = frame_crop_bottom_offset;
1900 
1901     seq_param->seq_fields.bits.pic_order_cnt_type = 0;
1902     seq_param->seq_fields.bits.direct_8x8_inference_flag = 0;
1903 
1904     seq_param->seq_fields.bits.log2_max_frame_num_minus4 = Log2MaxFrameNum - 4;
1905     seq_param->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = Log2MaxPicOrderCntLsb - 4;
1906 
1907     if (frame_bit_rate > 0)
1908         seq_param->vui_parameters_present_flag = 1; //HRD info located in vui
1909     else
1910         seq_param->vui_parameters_present_flag = 0;
1911 }
1912 
1913 static void avcenc_context_pic_param_init(VAEncPictureParameterBufferH264 *pic_param)
1914 {
1915     pic_param->seq_parameter_set_id = 0;
1916     pic_param->pic_parameter_set_id = 0;
1917 
1918     pic_param->last_picture = 0;
1919     pic_param->frame_num = 0;
1920 
1921     pic_param->pic_init_qp = (qp_value >= 0 ?  qp_value : 26);
1922     pic_param->num_ref_idx_l0_active_minus1 = 0;
1923     pic_param->num_ref_idx_l1_active_minus1 = 0;
1924 
1925     pic_param->pic_fields.bits.idr_pic_flag = 0;
1926     pic_param->pic_fields.bits.reference_pic_flag = 0;
1927     pic_param->pic_fields.bits.entropy_coding_mode_flag = ENTROPY_MODE_CABAC;
1928     pic_param->pic_fields.bits.weighted_pred_flag = 0;
1929     pic_param->pic_fields.bits.weighted_bipred_idc = 0;
1930 
1931     if (avcenc_context.constraint_set_flag & 0x7)
1932         pic_param->pic_fields.bits.transform_8x8_mode_flag = 0;
1933     else
1934         pic_param->pic_fields.bits.transform_8x8_mode_flag = 1;
1935 
1936     pic_param->pic_fields.bits.deblocking_filter_control_present_flag = 1;
1937 
1938     memset(pic_param->ReferenceFrames, 0xff, 16 * sizeof(VAPictureH264)); /* invalidate all entries */
1939 }
1940 
1941 static void avcenc_context_sei_init()
1942 {
1943     /* these values correspond to the bps defined in the SPS */
1944     avcenc_context.i_initial_cpb_removal_delay = 2 * 90000;
1945     avcenc_context.i_initial_cpb_removal_delay_offset = 2 * 90000;
1946 
1947     avcenc_context.i_cpb_removal_delay = 2;
1948     avcenc_context.i_initial_cpb_removal_delay_length = 24;
1949     avcenc_context.i_cpb_removal_delay_length = 24;
1950     avcenc_context.i_dpb_output_delay_length = 24;
1951     avcenc_context.time_offset_length = 24;
1952 
1953     avcenc_context.prev_idr_cpb_removal = avcenc_context.i_initial_cpb_removal_delay / 90000;
1954     avcenc_context.current_idr_cpb_removal = avcenc_context.prev_idr_cpb_removal;
1955     avcenc_context.current_cpb_removal = 0;
1956     avcenc_context.idr_frame_num = 0;
1957 }
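/*
 * Note on units: initial_cpb_removal_delay is expressed in 90 kHz clock ticks,
 * so 2 * 90000 corresponds to a two-second delay. cpb_removal_delay values are
 * counted in clock ticks Tc = num_units_in_tick / time_scale; with
 * time_scale = 2 * frame_rate and num_units_in_tick = 1, one frame lasts two
 * ticks, which matches the removal times in main() advancing by
 * frame_interval * 2.
 */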
1958 
1959 static void avcenc_context_init(int width, int height)
1960 {
1961     int i;
1962     memset(&avcenc_context, 0, sizeof(avcenc_context));
1963     avcenc_context.profile = VAProfileH264Main;
1964 
1965     memset(&use_slot, 0, sizeof(use_slot));
1966     switch (avcenc_context.profile) {
1967     case VAProfileH264ConstrainedBaseline:
1968         avcenc_context.constraint_set_flag |= (1 << 0); /* Annex A.2.1 */
1969         avcenc_context.constraint_set_flag |= (1 << 1); /* Annex A.2.2 */
1970         break;
1971 
1972     case VAProfileH264Main:
1973         avcenc_context.constraint_set_flag |= (1 << 1); /* Annex A.2.2 */
1974         break;
1975 
1976     case VAProfileH264High:
1977         avcenc_context.constraint_set_flag |= (1 << 3); /* Annex A.2.4 */
1978         break;
1979 
1980     default:
1981         break;
1982     }
1983 
1984     avcenc_context.seq_param_buf_id = VA_INVALID_ID;
1985     avcenc_context.pic_param_buf_id = VA_INVALID_ID;
1986     avcenc_context.packed_seq_header_param_buf_id = VA_INVALID_ID;
1987     avcenc_context.packed_seq_buf_id = VA_INVALID_ID;
1988     avcenc_context.packed_pic_header_param_buf_id = VA_INVALID_ID;
1989     avcenc_context.packed_pic_buf_id = VA_INVALID_ID;
1990     avcenc_context.codedbuf_buf_id = VA_INVALID_ID;
1991     avcenc_context.misc_parameter_hrd_buf_id = VA_INVALID_ID;
1992     avcenc_context.codedbuf_i_size = width * height;
1993     avcenc_context.codedbuf_pb_size = width * height;
1994     avcenc_context.current_input_surface = SID_INPUT_PICTURE_0;
1995     avcenc_context.upload_thread_value = -1;
1996     avcenc_context.packed_sei_header_param_buf_id = VA_INVALID_ID;
1997     avcenc_context.packed_sei_buf_id = VA_INVALID_ID;
1998     avcenc_context.misc_parameter_roi_buf_id = VA_INVALID_ID;
1999     avcenc_context.packed_aud_header_param_buf_id = VA_INVALID_ID;
2000     avcenc_context.packed_aud_buf_id = VA_INVALID_ID;
2001 
2002     if (qp_value == -1)
2003         avcenc_context.rate_control_method = VA_RC_CBR;
2004     else if (qp_value == -2)
2005         avcenc_context.rate_control_method = VA_RC_VBR;
2006     else {
2007         assert(qp_value >= 0 && qp_value <= 51);
2008         avcenc_context.rate_control_method = VA_RC_CQP;
2009     }
2010 
2011     for (i = 0; i < MAX_SLICES; i++) {
2012         avcenc_context.slice_param_buf_id[i] = VA_INVALID_ID;
2013     }
2014 
2015     avcenc_context_seq_param_init(&avcenc_context.seq_param, width, height);
2016     avcenc_context_pic_param_init(&avcenc_context.pic_param);
2017     if (avcenc_context.rate_control_method == VA_RC_CBR)
2018         avcenc_context_sei_init();
2019 }
2020 
2021 int main(int argc, char *argv[])
2022 {
2023     int f;
2024     FILE *yuv_fp;
2025     FILE *avc_fp;
2026     off_t file_size;
2027     int mode_value;
2028     struct timeval tpstart, tpend;
2029     float  timeuse;
2030     int frame_num_value = 0;
2031 
2032     va_init_display_args(&argc, argv);
2033 
2034     if (argc < 5) {
2035         show_help();
2036         return -1;
2037     }
2038 
2039     picture_width = atoi(argv[1]);
2040     picture_height = atoi(argv[2]);
2041     picture_width_in_mbs = (picture_width + 15) / 16;
2042     picture_height_in_mbs = (picture_height + 15) / 16;
2043 
2044     if (argc > 5) {
2045         int o;
2046 
2047         optind = 5;
2048 
2049         while ((o = getopt_long_only(argc, argv, "", longopts, NULL)) != -1) {
2050             switch (o) {
2051             case 1:     // qp
2052                 frame_bit_rate = -1;
2053                 qp_value = atoi(optarg);
2054 
2055                 if (qp_value > 51)
2056                     qp_value = 51;
2057 
2058                 if (qp_value < 0)
2059                     qp_value = 0;
2060 
2061                 break;
2062 
2063             case 2:     // fb
2064                 qp_value = -1;
2065                 frame_bit_rate = atoi(optarg);
2066 
2067                 if (frame_bit_rate <= 0) {
2068                     show_help();
2069 
2070                     return -1;
2071                 }
2072 
2073                 break;
2074 
2075             case 3:     // mode
2076                 mode_value = atoi(optarg);
2077 
2078                 if (mode_value == 0)
2079                     ip_period = 0;
2080                 else if (mode_value == 1)
2081                     ip_period = 1;
2082                 else if (mode_value == 2)
2083                     /* Hack until a parameter for the number of B frames is added */
2084                     ip_period = 2;
2085                 else {
2086                     printf("mode_value = %d\n", mode_value);
2087                     show_help();
2088                     return -1;
2089                 }
2090 
2091                 break;
2092 
2093             case 4:     // low-power mode
2094                 select_entrypoint = VAEntrypointEncSliceLP;
2095                 break;
2096 
2097             case 5:     // roi-test enable/disable
2098                 roi_test_enable = 1;
2099                 break;
2100 
2101             case 6:     // Frames number
2102                 frame_num_value = atoi(optarg);
2103                 break;
2104             default:
2105                 show_help();
2106                 return -1;
2107             }
2108         }
2109     } else
2110         qp_value = 28;                          //default const QP mode
2111 
2112     yuv_fp = fopen(argv[3], "rb");
2113     if (yuv_fp == NULL) {
2114         printf("Can't open input YUV file\n");
2115         return -1;
2116     }
2117     fseeko(yuv_fp, (off_t)0, SEEK_END);
2118     file_size = ftello(yuv_fp);
2119     frame_size = picture_width * picture_height + ((picture_width * picture_height) >> 1) ;
2120 
2121     if (frame_size == 0) {
2122         fclose(yuv_fp);
2123         printf("Frame size is not correct\n");
2124         return -1;
2125     }
2126     if ((file_size < frame_size) || (file_size % frame_size)) {
2127         fclose(yuv_fp);
2128         printf("The YUV file's size is not correct\n");
2129         return -1;
2130     }
2131     frame_number = file_size / frame_size;
2132     fseeko(yuv_fp, (off_t)0, SEEK_SET);
2133 
2134     avc_fp = fopen(argv[4], "wb");
2135     if (avc_fp == NULL) {
2136         fclose(yuv_fp);
2137         printf("Can't open output avc file\n");
2138         return -1;
2139     }
2140     gettimeofday(&tpstart, NULL);
2141     avcenc_context_init(picture_width, picture_height);
2142     create_encode_pipe();
2143     alloc_encode_resource(yuv_fp);
2144 
2145     enc_frame_number = 0;
2146     if (frame_num_value <= 0)
2147         frame_num_value = frame_number;
2148 
2149     for (f = 0; f < frame_num_value; f++) {            //picture level loop
2150         unsigned long long next_frame_display;
2151         int next_frame_type;
2152 
2153         enc_frame_number = f;
2154 
2155         encoding2display_order(enc_frame_number, intra_period, ip_period,
2156                                &current_frame_display, &current_frame_type);
2157 
2158         encoding2display_order(enc_frame_number + 1, intra_period, ip_period,
2159                                &next_frame_display, &next_frame_type);
2160 
2161         if (current_frame_type == FRAME_IDR) {
2162             numShortTerm = 0;
2163             current_frame_num = 0;
2164             memset(&use_slot, 0, sizeof(use_slot));
2165             current_IDR_display = current_frame_display;
2166             if (avcenc_context.rate_control_method == VA_RC_CBR) {
2167                 unsigned long long frame_interval;
2168 
2169                 frame_interval = enc_frame_number - avcenc_context.idr_frame_num;
2170 
2171                 /* Based on the H264 spec the removal time of the IDR access
2172                  * unit is derived as the following:
2173                  * the removal time of previous IDR unit + Tc * cpb_removal_delay(n)
2174                  */
2175                 avcenc_context.current_cpb_removal = avcenc_context.prev_idr_cpb_removal +
2176                                                      frame_interval * 2;
2177                 avcenc_context.idr_frame_num = enc_frame_number;
2178                 avcenc_context.current_idr_cpb_removal = avcenc_context.current_cpb_removal;
2179                 if (ip_period)
2180                     avcenc_context.current_dpb_removal_delta = (ip_period + 1) * 2;
2181                 else
2182                     avcenc_context.current_dpb_removal_delta = 2;
2183             }
2184         } else {
2185             if (avcenc_context.rate_control_method == VA_RC_CBR) {
2186                 unsigned long long frame_interval;
2187 
2188                 frame_interval = enc_frame_number - avcenc_context.idr_frame_num;
2189 
2190                 /* Based on the H264 spec the removal time of the non-IDR access
2191                  * unit is derived as the following:
2192                  * the removal time of current IDR unit + Tc * cpb_removal_delay(n)
2193                  */
2194                 avcenc_context.current_cpb_removal = avcenc_context.current_idr_cpb_removal +
2195                                                      frame_interval * 2;
2196                 if (current_frame_type == SLICE_TYPE_I ||
2197                     current_frame_type == SLICE_TYPE_P) {
2198                     if (ip_period)
2199                         avcenc_context.current_dpb_removal_delta = (ip_period + 1) * 2;
2200                     else
2201                         avcenc_context.current_dpb_removal_delta = 2;
2202                 } else
2203                     avcenc_context.current_dpb_removal_delta = 2;
2204             }
2205         }
2206 
2207         /* simple POC calculation: 2 * (display order - display order of the last IDR); frame coding only */
2208         current_poc = (current_frame_display - current_IDR_display) * 2;
2209 
2210         encode_picture(yuv_fp, avc_fp, frame_number, current_frame_display,
2211                        (current_frame_type == FRAME_IDR) ? 1 : 0,
2212                        (current_frame_type == FRAME_IDR) ? SLICE_TYPE_I : current_frame_type,
2213                        (next_frame_type == SLICE_TYPE_B) ? 1 : 0,
2214                        next_frame_display);
2215         if ((current_frame_type == FRAME_IDR) &&
2216             (avcenc_context.rate_control_method == VA_RC_CBR)) {
2217             /* after one IDR frame is written, it needs to update the
2218              * prev_idr_cpb_removal for next IDR
2219              */
2220             avcenc_context.prev_idr_cpb_removal = avcenc_context.current_idr_cpb_removal;
2221         }
2222         printf("\r %d/%d ...", f, frame_number);
2223         fflush(stdout);
2224     }
2225 
2226     gettimeofday(&tpend, NULL);
2227     timeuse = 1000000 * (tpend.tv_sec - tpstart.tv_sec) + tpend.tv_usec - tpstart.tv_usec;
2228     timeuse /= 1000000;
2229     printf("\ndone!\n");
2230     printf("encode %d frames in %f secondes, FPS is %.1f\n", frame_number, timeuse, frame_number / timeuse);
2231     release_encode_resource();
2232     destory_encode_pipe();
2233 
2234     fclose(yuv_fp);
2235     fclose(avc_fp);
2236 
2237     return 0;
2238 }
2239