/**************************************************************************
 *
 * Copyright 2018 Advanced Micro Devices, Inc.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

#include "util/u_handle_table.h"
#include "util/u_video.h"
#include "va_private.h"

#include "util/vl_rbsp.h"

VAStatus
vlVaHandleVAEncPictureParameterBufferTypeH264(vlVaDriver *drv, vlVaContext *context, vlVaBuffer *buf)
{
   VAEncPictureParameterBufferH264 *h264;
   vlVaBuffer *coded_buf;
   vlVaSurface *surf;
   unsigned i, j;

   h264 = buf->data;
   if (h264->pic_fields.bits.idr_pic_flag == 1)
      context->desc.h264enc.frame_num = 0;
   context->desc.h264enc.not_referenced = !h264->pic_fields.bits.reference_pic_flag;
   context->desc.h264enc.pic_order_cnt = h264->CurrPic.TopFieldOrderCnt;
   context->desc.h264enc.is_ltr = h264->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE;
   if (context->desc.h264enc.is_ltr)
      context->desc.h264enc.ltr_index = h264->CurrPic.frame_idx;
   if (context->desc.h264enc.gop_cnt == 0)
      context->desc.h264enc.i_remain = context->gop_coeff;
   else if (context->desc.h264enc.frame_num == 1)
      context->desc.h264enc.i_remain--;

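   /* A DPB slot is released only after it has gone unreferenced for two
    * consecutive pictures: the first miss arms dpb->evict, the second one
    * drops the surface association (the pipe buffer itself is kept in the
    * slot for later reuse). */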
   /* Evict unused surfaces */
   for (i = 0; i < context->desc.h264enc.dpb_size; i++) {
      struct pipe_h264_enc_dpb_entry *dpb = &context->desc.h264enc.dpb[i];
      if (!dpb->id || dpb->id == h264->CurrPic.picture_id)
         continue;
      for (j = 0; j < ARRAY_SIZE(h264->ReferenceFrames); j++) {
         if (h264->ReferenceFrames[j].picture_id == dpb->id) {
            dpb->evict = false;
            break;
         }
      }
      if (j == ARRAY_SIZE(h264->ReferenceFrames)) {
         if (dpb->evict) {
            surf = handle_table_get(drv->htab, dpb->id);
            assert(surf);
            surf->is_dpb = false;
            surf->buffer = NULL;
            /* Keep the buffer for reuse later */
            dpb->id = 0;
         }
         dpb->evict = !dpb->evict;
      }
   }

   surf = handle_table_get(drv->htab, h264->CurrPic.picture_id);
   if (!surf)
      return VA_STATUS_ERROR_INVALID_PARAMETER;

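   /* Find a DPB slot for the current picture: reuse the slot already tracking
    * this picture_id, otherwise take the first free slot, growing dpb_size
    * when a new slot is appended at the end. */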
   for (i = 0; i < ARRAY_SIZE(context->desc.h264enc.dpb); i++) {
      if (context->desc.h264enc.dpb[i].id == h264->CurrPic.picture_id) {
         assert(surf->is_dpb);
         break;
      }
      if (!surf->is_dpb && !context->desc.h264enc.dpb[i].id) {
         surf->is_dpb = true;
         if (surf->buffer) {
            surf->buffer->destroy(surf->buffer);
            surf->buffer = NULL;
         }
         if (context->decoder->create_dpb_buffer) {
            struct pipe_video_buffer *buffer = context->desc.h264enc.dpb[i].buffer;
            if (!buffer) {
               /* Find unused buffer */
               for (j = 0; j < context->desc.h264enc.dpb_size; j++) {
                  struct pipe_h264_enc_dpb_entry *dpb = &context->desc.h264enc.dpb[j];
                  if (!dpb->id && dpb->buffer) {
                     buffer = dpb->buffer;
                     dpb->buffer = NULL;
                     break;
                  }
               }
            }
            if (!buffer)
               buffer = context->decoder->create_dpb_buffer(context->decoder, &context->desc.base, &surf->templat);
            surf->buffer = buffer;
         }
         vlVaSetSurfaceContext(drv, surf, context);
         if (i == context->desc.h264enc.dpb_size)
            context->desc.h264enc.dpb_size++;
         break;
      }
   }
   if (i == ARRAY_SIZE(context->desc.h264enc.dpb))
      return VA_STATUS_ERROR_INVALID_PARAMETER;
   context->desc.h264enc.dpb_curr_pic = i;
   context->desc.h264enc.dpb[i].id = h264->CurrPic.picture_id;
   context->desc.h264enc.dpb[i].frame_idx = h264->CurrPic.frame_idx;
   context->desc.h264enc.dpb[i].pic_order_cnt = h264->CurrPic.TopFieldOrderCnt;
   context->desc.h264enc.dpb[i].is_ltr = h264->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE;
   context->desc.h264enc.dpb[i].buffer = surf->buffer;
   context->desc.h264enc.dpb[i].evict = false;

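   /* Remaining P pictures in the current GOP window: GOP size minus pictures
    * already counted minus the intra pictures still expected. */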
   context->desc.h264enc.p_remain = context->desc.h264enc.gop_size - context->desc.h264enc.gop_cnt - context->desc.h264enc.i_remain;

   coded_buf = handle_table_get(drv->htab, h264->coded_buf);
   if (!coded_buf)
      return VA_STATUS_ERROR_INVALID_BUFFER;

   if (!coded_buf->derived_surface.resource)
      coded_buf->derived_surface.resource = pipe_buffer_create(drv->pipe->screen, PIPE_BIND_VERTEX_BUFFER,
                                                               PIPE_USAGE_STAGING, coded_buf->size);
   context->coded_buf = coded_buf;

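   /* Remember the frame_num (or LTR index) of this picture so later slice
    * parameters can translate reference picture_ids back into frame indices.
    * Keys are offset by one so a picture_id of 0 never maps to a NULL key. */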
   if (context->desc.h264enc.is_ltr)
      _mesa_hash_table_insert(context->desc.h264enc.frame_idx,
                              UINT_TO_PTR(h264->CurrPic.picture_id + 1),
                              UINT_TO_PTR(context->desc.h264enc.ltr_index));
   else
      _mesa_hash_table_insert(context->desc.h264enc.frame_idx,
                              UINT_TO_PTR(h264->CurrPic.picture_id + 1),
                              UINT_TO_PTR(context->desc.h264enc.frame_num));

   if (h264->pic_fields.bits.idr_pic_flag == 1)
      context->desc.h264enc.picture_type = PIPE_H2645_ENC_PICTURE_TYPE_IDR;
   else
      context->desc.h264enc.picture_type = PIPE_H2645_ENC_PICTURE_TYPE_P;

   /* Initialize slice descriptors for this picture */
   context->desc.h264enc.num_slice_descriptors = 0;
   memset(&context->desc.h264enc.slices_descriptors, 0, sizeof(context->desc.h264enc.slices_descriptors));

   context->desc.h264enc.init_qp = h264->pic_init_qp;
   context->desc.h264enc.gop_cnt++;
   if (context->desc.h264enc.gop_cnt == context->desc.h264enc.gop_size)
      context->desc.h264enc.gop_cnt = 0;

   context->desc.h264enc.pic_ctrl.enc_cabac_enable = h264->pic_fields.bits.entropy_coding_mode_flag;
   context->desc.h264enc.num_ref_idx_l0_active_minus1 = h264->num_ref_idx_l0_active_minus1;
   context->desc.h264enc.num_ref_idx_l1_active_minus1 = h264->num_ref_idx_l1_active_minus1;
   context->desc.h264enc.pic_ctrl.deblocking_filter_control_present_flag
      = h264->pic_fields.bits.deblocking_filter_control_present_flag;
   context->desc.h264enc.pic_ctrl.redundant_pic_cnt_present_flag
      = h264->pic_fields.bits.redundant_pic_cnt_present_flag;
   context->desc.h264enc.pic_ctrl.chroma_qp_index_offset = h264->chroma_qp_index_offset;
   context->desc.h264enc.pic_ctrl.second_chroma_qp_index_offset
      = h264->second_chroma_qp_index_offset;
   context->desc.h264enc.pic_ctrl.constrained_intra_pred_flag =
      h264->pic_fields.bits.constrained_intra_pred_flag;
   context->desc.h264enc.pic_ctrl.transform_8x8_mode_flag =
      h264->pic_fields.bits.transform_8x8_mode_flag;

   return VA_STATUS_SUCCESS;
}

static uint8_t
vlVaDpbIndex(vlVaContext *context, VASurfaceID id)
{
   for (uint8_t i = 0; i < context->desc.h264enc.dpb_size; i++) {
      if (context->desc.h264enc.dpb[i].id == id)
         return i;
   }
   return PIPE_H2645_LIST_REF_INVALID_ENTRY;
}

VAStatus
vlVaHandleVAEncSliceParameterBufferTypeH264(vlVaDriver *drv, vlVaContext *context, vlVaBuffer *buf)
{
   VAEncSliceParameterBufferH264 *h264;
   unsigned slice_qp;

   h264 = buf->data;

   /* Handle the slice control parameters */
   struct h264_slice_descriptor slice_descriptor;
   memset(&slice_descriptor, 0, sizeof(slice_descriptor));
   slice_descriptor.macroblock_address = h264->macroblock_address;
   slice_descriptor.num_macroblocks = h264->num_macroblocks;
   slice_descriptor.slice_type = h264->slice_type;
   assert(slice_descriptor.slice_type <= PIPE_H264_SLICE_TYPE_I);

   if (context->desc.h264enc.num_slice_descriptors < ARRAY_SIZE(context->desc.h264enc.slices_descriptors))
      context->desc.h264enc.slices_descriptors[context->desc.h264enc.num_slice_descriptors++] = slice_descriptor;
   else
      return VA_STATUS_ERROR_NOT_ENOUGH_BUFFER;

   /* Only use parameters for first slice */
   if (h264->macroblock_address)
      return VA_STATUS_SUCCESS;

   memset(&context->desc.h264enc.ref_idx_l0_list, VA_INVALID_ID, sizeof(context->desc.h264enc.ref_idx_l0_list));
   memset(&context->desc.h264enc.ref_idx_l1_list, VA_INVALID_ID, sizeof(context->desc.h264enc.ref_idx_l1_list));
   memset(&context->desc.h264enc.ref_list0, PIPE_H2645_LIST_REF_INVALID_ENTRY, sizeof(context->desc.h264enc.ref_list0));
   memset(&context->desc.h264enc.ref_list1, PIPE_H2645_LIST_REF_INVALID_ENTRY, sizeof(context->desc.h264enc.ref_list1));

   if (h264->num_ref_idx_active_override_flag) {
      context->desc.h264enc.num_ref_idx_l0_active_minus1 = h264->num_ref_idx_l0_active_minus1;
      context->desc.h264enc.num_ref_idx_l1_active_minus1 = h264->num_ref_idx_l1_active_minus1;
   }

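   /* Translate the VA reference picture lists into DPB slot indices
    * (ref_list0/1) and into the frame_num/LTR indices recorded earlier
    * (ref_idx_l0/l1_list). */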
   if (h264->slice_type != PIPE_H264_SLICE_TYPE_I && h264->slice_type != PIPE_H264_SLICE_TYPE_SI) {
      for (int i = 0; i < 32; i++) {
         if (h264->RefPicList0[i].picture_id != VA_INVALID_ID) {
            context->desc.h264enc.ref_list0[i] = vlVaDpbIndex(context, h264->RefPicList0[i].picture_id);
            if (context->desc.h264enc.ref_list0[i] == PIPE_H2645_LIST_REF_INVALID_ENTRY)
               return VA_STATUS_ERROR_INVALID_PARAMETER;

            context->desc.h264enc.ref_idx_l0_list[i] = PTR_TO_UINT(util_hash_table_get(context->desc.h264enc.frame_idx,
                                                                   UINT_TO_PTR(h264->RefPicList0[i].picture_id + 1)));
            context->desc.h264enc.l0_is_long_term[i] = h264->RefPicList0[i].flags & VA_PICTURE_H264_LONG_TERM_REFERENCE;
         }
         if (h264->RefPicList1[i].picture_id != VA_INVALID_ID && h264->slice_type == PIPE_H264_SLICE_TYPE_B) {
            context->desc.h264enc.ref_list1[i] = vlVaDpbIndex(context, h264->RefPicList1[i].picture_id);
            if (context->desc.h264enc.ref_list1[i] == PIPE_H2645_LIST_REF_INVALID_ENTRY)
               return VA_STATUS_ERROR_INVALID_PARAMETER;

            context->desc.h264enc.ref_idx_l1_list[i] = PTR_TO_UINT(util_hash_table_get(context->desc.h264enc.frame_idx,
                                                                   UINT_TO_PTR(h264->RefPicList1[i].picture_id + 1)));
            context->desc.h264enc.l1_is_long_term[i] = h264->RefPicList1[i].flags & VA_PICTURE_H264_LONG_TERM_REFERENCE;
         }
      }
   }

   slice_qp = context->desc.h264enc.init_qp + h264->slice_qp_delta;

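   /* VAEncSliceParameterBufferH264 slice_type: 0/5 = P, 1/6 = B, 2/7 = I
    * (the +5 variants signal that all slices of the picture share the type). */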
   if ((h264->slice_type == 1) || (h264->slice_type == 6)) {
      context->desc.h264enc.picture_type = PIPE_H2645_ENC_PICTURE_TYPE_B;
      context->desc.h264enc.quant_b_frames = slice_qp;
   } else if ((h264->slice_type == 0) || (h264->slice_type == 5)) {
      context->desc.h264enc.picture_type = PIPE_H2645_ENC_PICTURE_TYPE_P;
      context->desc.h264enc.quant_p_frames = slice_qp;
   } else if ((h264->slice_type == 2) || (h264->slice_type == 7)) {
      if (context->desc.h264enc.picture_type == PIPE_H2645_ENC_PICTURE_TYPE_IDR)
         context->desc.h264enc.idr_pic_id++;
      else
         context->desc.h264enc.picture_type = PIPE_H2645_ENC_PICTURE_TYPE_I;
      context->desc.h264enc.quant_i_frames = slice_qp;
   } else {
      context->desc.h264enc.picture_type = PIPE_H2645_ENC_PICTURE_TYPE_SKIP;
   }

   context->desc.h264enc.dpb[context->desc.h264enc.dpb_curr_pic].picture_type = context->desc.h264enc.picture_type;

   context->desc.h264enc.pic_ctrl.enc_cabac_init_idc = h264->cabac_init_idc;
   context->desc.h264enc.dbk.disable_deblocking_filter_idc = h264->disable_deblocking_filter_idc;
   context->desc.h264enc.dbk.alpha_c0_offset_div2 = h264->slice_alpha_c0_offset_div2;
   context->desc.h264enc.dbk.beta_offset_div2 = h264->slice_beta_offset_div2;

   return VA_STATUS_SUCCESS;
}

VAStatus
vlVaHandleVAEncSequenceParameterBufferTypeH264(vlVaDriver *drv, vlVaContext *context, vlVaBuffer *buf)
{
   VAEncSequenceParameterBufferH264 *h264 = buf->data;
   uint32_t num_units_in_tick = 0, time_scale = 0;

   context->desc.h264enc.ip_period = h264->ip_period;
   context->desc.h264enc.intra_idr_period =
      h264->intra_idr_period != 0 ? h264->intra_idr_period : PIPE_DEFAULT_INTRA_IDR_PERIOD;
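   /* gop_coeff scales the internal GOP bookkeeping window to roughly 1024
    * pictures, rounded up to an even value and capped at VL_VA_ENC_GOP_COEFF. */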
   context->gop_coeff = ((1024 + context->desc.h264enc.intra_idr_period - 1) /
                        context->desc.h264enc.intra_idr_period + 1) / 2 * 2;
   if (context->gop_coeff > VL_VA_ENC_GOP_COEFF)
      context->gop_coeff = VL_VA_ENC_GOP_COEFF;
   context->desc.h264enc.gop_size = context->desc.h264enc.intra_idr_period * context->gop_coeff;
   context->desc.h264enc.seq.pic_order_cnt_type = h264->seq_fields.bits.pic_order_cnt_type;
   context->desc.h264enc.seq.log2_max_frame_num_minus4 = h264->seq_fields.bits.log2_max_frame_num_minus4;
   context->desc.h264enc.seq.log2_max_pic_order_cnt_lsb_minus4 = h264->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4;
   context->desc.h264enc.seq.vui_parameters_present_flag = h264->vui_parameters_present_flag;
   if (h264->vui_parameters_present_flag) {
      context->desc.h264enc.seq.vui_flags.aspect_ratio_info_present_flag =
         h264->vui_fields.bits.aspect_ratio_info_present_flag;
      context->desc.h264enc.seq.aspect_ratio_idc = h264->aspect_ratio_idc;
      context->desc.h264enc.seq.sar_width = h264->sar_width;
      context->desc.h264enc.seq.sar_height = h264->sar_height;
      context->desc.h264enc.seq.vui_flags.timing_info_present_flag =
         h264->vui_fields.bits.timing_info_present_flag;
      num_units_in_tick = h264->num_units_in_tick;
      time_scale = h264->time_scale;
      context->desc.h264enc.seq.vui_flags.fixed_frame_rate_flag =
         h264->vui_fields.bits.fixed_frame_rate_flag;
      context->desc.h264enc.seq.vui_flags.low_delay_hrd_flag =
         h264->vui_fields.bits.low_delay_hrd_flag;
      context->desc.h264enc.seq.vui_flags.bitstream_restriction_flag =
         h264->vui_fields.bits.bitstream_restriction_flag;
      context->desc.h264enc.seq.vui_flags.motion_vectors_over_pic_boundaries_flag =
         h264->vui_fields.bits.motion_vectors_over_pic_boundaries_flag;
      context->desc.h264enc.seq.log2_max_mv_length_vertical =
         h264->vui_fields.bits.log2_max_mv_length_vertical;
      context->desc.h264enc.seq.log2_max_mv_length_horizontal =
         h264->vui_fields.bits.log2_max_mv_length_horizontal;
   } else {
      context->desc.h264enc.seq.vui_flags.timing_info_present_flag = 0;
      context->desc.h264enc.seq.vui_flags.fixed_frame_rate_flag = 0;
      context->desc.h264enc.seq.vui_flags.low_delay_hrd_flag = 0;
      context->desc.h264enc.seq.vui_flags.bitstream_restriction_flag = 0;
      context->desc.h264enc.seq.vui_flags.motion_vectors_over_pic_boundaries_flag = 0;
      context->desc.h264enc.seq.log2_max_mv_length_vertical = 0;
      context->desc.h264enc.seq.log2_max_mv_length_horizontal = 0;
   }

   if (!context->desc.h264enc.seq.vui_flags.timing_info_present_flag) {
      /* if not present, set default value */
      num_units_in_tick = PIPE_DEFAULT_FRAME_RATE_DEN;
      time_scale = PIPE_DEFAULT_FRAME_RATE_NUM * 2;
   }

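   /* VUI timing is expressed in field-rate ticks, so the frame rate used for
    * rate control is time_scale / (2 * num_units_in_tick). */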
   context->desc.h264enc.seq.num_units_in_tick = num_units_in_tick;
   context->desc.h264enc.seq.time_scale = time_scale;
   context->desc.h264enc.rate_ctrl[0].frame_rate_num = time_scale / 2;
   context->desc.h264enc.rate_ctrl[0].frame_rate_den = num_units_in_tick;

   if (h264->frame_cropping_flag) {
      context->desc.h264enc.seq.enc_frame_cropping_flag = h264->frame_cropping_flag;
      context->desc.h264enc.seq.enc_frame_crop_left_offset = h264->frame_crop_left_offset;
      context->desc.h264enc.seq.enc_frame_crop_right_offset = h264->frame_crop_right_offset;
      context->desc.h264enc.seq.enc_frame_crop_top_offset = h264->frame_crop_top_offset;
      context->desc.h264enc.seq.enc_frame_crop_bottom_offset = h264->frame_crop_bottom_offset;
   }

   return VA_STATUS_SUCCESS;
}

VAStatus
vlVaHandleVAEncMiscParameterTypeRateControlH264(vlVaContext *context, VAEncMiscParameterBuffer *misc)
{
   unsigned temporal_id;
   VAEncMiscParameterRateControl *rc = (VAEncMiscParameterRateControl *)misc->data;

   temporal_id = context->desc.h264enc.rate_ctrl[0].rate_ctrl_method !=
      PIPE_H2645_ENC_RATE_CONTROL_METHOD_DISABLE ?
      rc->rc_flags.bits.temporal_id :
      0;

   if (context->desc.h264enc.rate_ctrl[0].rate_ctrl_method ==
       PIPE_H2645_ENC_RATE_CONTROL_METHOD_CONSTANT)
      context->desc.h264enc.rate_ctrl[temporal_id].target_bitrate =
         rc->bits_per_second;
   else
      context->desc.h264enc.rate_ctrl[temporal_id].target_bitrate =
         rc->bits_per_second * (rc->target_percentage / 100.0);

   if (context->desc.h264enc.seq.num_temporal_layers > 0 &&
       temporal_id >= context->desc.h264enc.seq.num_temporal_layers)
      return VA_STATUS_ERROR_INVALID_PARAMETER;

   context->desc.h264enc.rate_ctrl[temporal_id].fill_data_enable = !(rc->rc_flags.bits.disable_bit_stuffing);
   /* context->desc.h264enc.rate_ctrl[temporal_id].skip_frame_enable = !(rc->rc_flags.bits.disable_frame_skip); */
   context->desc.h264enc.rate_ctrl[temporal_id].skip_frame_enable = 0;
   context->desc.h264enc.rate_ctrl[temporal_id].peak_bitrate = rc->bits_per_second;

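   /* Default VBV sizing heuristic: one second worth of target bitrate for
    * CBR modes, with a smaller cap applied to low-bitrate non-CBR streams. */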
   if ((context->desc.h264enc.rate_ctrl[0].rate_ctrl_method == PIPE_H2645_ENC_RATE_CONTROL_METHOD_CONSTANT) ||
       (context->desc.h264enc.rate_ctrl[0].rate_ctrl_method == PIPE_H2645_ENC_RATE_CONTROL_METHOD_CONSTANT_SKIP))
      context->desc.h264enc.rate_ctrl[temporal_id].vbv_buffer_size =
         context->desc.h264enc.rate_ctrl[temporal_id].target_bitrate;
   else if (context->desc.h264enc.rate_ctrl[temporal_id].target_bitrate < 2000000)
      context->desc.h264enc.rate_ctrl[temporal_id].vbv_buffer_size =
         MIN2((context->desc.h264enc.rate_ctrl[0].target_bitrate * 2.75), 2000000);
   else
      context->desc.h264enc.rate_ctrl[temporal_id].vbv_buffer_size =
         context->desc.h264enc.rate_ctrl[temporal_id].target_bitrate;

   context->desc.h264enc.rate_ctrl[temporal_id].max_qp = rc->max_qp;
   context->desc.h264enc.rate_ctrl[temporal_id].min_qp = rc->min_qp;
   /* Distinguishes from the default params set for these values in other
      functions and app specific params passed down */
   context->desc.h264enc.rate_ctrl[temporal_id].app_requested_qp_range = ((rc->max_qp > 0) || (rc->min_qp > 0));

   if (context->desc.h264enc.rate_ctrl[0].rate_ctrl_method ==
       PIPE_H2645_ENC_RATE_CONTROL_METHOD_QUALITY_VARIABLE)
      context->desc.h264enc.rate_ctrl[temporal_id].vbr_quality_factor =
         rc->quality_factor;

   return VA_STATUS_SUCCESS;
}

VAStatus
vlVaHandleVAEncMiscParameterTypeFrameRateH264(vlVaContext *context, VAEncMiscParameterBuffer *misc)
{
   unsigned temporal_id;
   VAEncMiscParameterFrameRate *fr = (VAEncMiscParameterFrameRate *)misc->data;

   temporal_id = context->desc.h264enc.rate_ctrl[0].rate_ctrl_method !=
      PIPE_H2645_ENC_RATE_CONTROL_METHOD_DISABLE ?
      fr->framerate_flags.bits.temporal_id :
      0;

   if (context->desc.h264enc.seq.num_temporal_layers > 0 &&
       temporal_id >= context->desc.h264enc.seq.num_temporal_layers)
      return VA_STATUS_ERROR_INVALID_PARAMETER;

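   /* VAEncMiscParameterFrameRate packs the frame rate as a fraction:
    * numerator in the low 16 bits, denominator (if any) in the high 16 bits. */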
   if (fr->framerate & 0xffff0000) {
      context->desc.h264enc.rate_ctrl[temporal_id].frame_rate_num = fr->framerate & 0xffff;
      context->desc.h264enc.rate_ctrl[temporal_id].frame_rate_den = fr->framerate >> 16 & 0xffff;
   } else {
      context->desc.h264enc.rate_ctrl[temporal_id].frame_rate_num = fr->framerate;
      context->desc.h264enc.rate_ctrl[temporal_id].frame_rate_den = 1;
   }

   return VA_STATUS_SUCCESS;
}

static void parseEncSliceParamsH264(vlVaContext *context,
                                    struct vl_rbsp *rbsp,
                                    unsigned nal_ref_idc,
                                    unsigned nal_unit_type)
{
   struct pipe_h264_enc_seq_param *seq = &context->desc.h264enc.seq;
   struct pipe_h264_enc_pic_control *pic = &context->desc.h264enc.pic_ctrl;
   struct pipe_h264_enc_slice_param *slice = &context->desc.h264enc.slice;
   unsigned modification_of_pic_nums_idc, memory_management_control_operation;

   /* Only parse first slice */
   if (vl_rbsp_ue(rbsp) != 0) /* first_mb_in_slice */
      return;

   pic->nal_ref_idc = nal_ref_idc;
   pic->nal_unit_type = nal_unit_type;

   slice->slice_type = vl_rbsp_ue(rbsp) % 5;
   vl_rbsp_ue(rbsp); /* pic_parameter_set_id */
   slice->frame_num = vl_rbsp_u(rbsp, seq->log2_max_frame_num_minus4 + 4);

   if (context->desc.h264enc.picture_type == PIPE_H2645_ENC_PICTURE_TYPE_IDR)
      slice->idr_pic_id = vl_rbsp_ue(rbsp);

   if (seq->pic_order_cnt_type == 0)
      slice->pic_order_cnt_lsb = vl_rbsp_u(rbsp, seq->log2_max_pic_order_cnt_lsb_minus4 + 4);

   if (pic->redundant_pic_cnt_present_flag)
      slice->redundant_pic_cnt = vl_rbsp_ue(rbsp);

   if (slice->slice_type == PIPE_H264_SLICE_TYPE_B)
      slice->direct_spatial_mv_pred_flag = vl_rbsp_u(rbsp, 1);

   if (slice->slice_type == PIPE_H264_SLICE_TYPE_P ||
       slice->slice_type == PIPE_H264_SLICE_TYPE_SP ||
       slice->slice_type == PIPE_H264_SLICE_TYPE_B) {
      slice->num_ref_idx_active_override_flag = vl_rbsp_u(rbsp, 1);
      if (slice->num_ref_idx_active_override_flag) {
         slice->num_ref_idx_l0_active_minus1 = vl_rbsp_ue(rbsp);
         if (slice->slice_type == PIPE_H264_SLICE_TYPE_B)
            slice->num_ref_idx_l1_active_minus1 = vl_rbsp_ue(rbsp);
      }
   }

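   /* The loops below mirror the slice-header ref_pic_list_modification() and
    * dec_ref_pic_marking() syntax, so the reordering and marking operations
    * can be replayed by the encoder backend. */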
   if (slice->slice_type != PIPE_H264_SLICE_TYPE_I &&
       slice->slice_type != PIPE_H264_SLICE_TYPE_SI) {
      slice->ref_pic_list_modification_flag_l0 = vl_rbsp_u(rbsp, 1);
      if (slice->ref_pic_list_modification_flag_l0) {
         slice->num_ref_list0_mod_operations = 0;
         while (true) {
            modification_of_pic_nums_idc = vl_rbsp_ue(rbsp);
            if (modification_of_pic_nums_idc == 3)
               break;
            struct pipe_h264_ref_list_mod_entry *op =
               &slice->ref_list0_mod_operations[slice->num_ref_list0_mod_operations++];
            op->modification_of_pic_nums_idc = modification_of_pic_nums_idc;
            if (op->modification_of_pic_nums_idc == 0 ||
                op->modification_of_pic_nums_idc == 1)
               op->abs_diff_pic_num_minus1 = vl_rbsp_ue(rbsp);
            else if (op->modification_of_pic_nums_idc == 2)
               op->long_term_pic_num = vl_rbsp_ue(rbsp);
         }
      }
   }

   if (slice->slice_type == PIPE_H264_SLICE_TYPE_B) {
      slice->ref_pic_list_modification_flag_l1 = vl_rbsp_u(rbsp, 1);
      if (slice->ref_pic_list_modification_flag_l1) {
         slice->num_ref_list1_mod_operations = 0;
         while (true) {
            modification_of_pic_nums_idc = vl_rbsp_ue(rbsp);
            if (modification_of_pic_nums_idc == 3)
               break;
            struct pipe_h264_ref_list_mod_entry *op =
               &slice->ref_list1_mod_operations[slice->num_ref_list1_mod_operations++];
            op->modification_of_pic_nums_idc = modification_of_pic_nums_idc;
            if (op->modification_of_pic_nums_idc == 0 ||
                op->modification_of_pic_nums_idc == 1)
               op->abs_diff_pic_num_minus1 = vl_rbsp_ue(rbsp);
            else if (op->modification_of_pic_nums_idc == 2)
               op->long_term_pic_num = vl_rbsp_ue(rbsp);
         }
      }
   }

   if (nal_ref_idc != 0) {
      if (nal_unit_type == PIPE_H264_NAL_IDR_SLICE) {
         slice->no_output_of_prior_pics_flag = vl_rbsp_u(rbsp, 1);
         slice->long_term_reference_flag = vl_rbsp_u(rbsp, 1);
      } else {
         slice->adaptive_ref_pic_marking_mode_flag = vl_rbsp_u(rbsp, 1);
         if (slice->adaptive_ref_pic_marking_mode_flag) {
            slice->num_ref_pic_marking_operations = 0;
            while (true) {
               memory_management_control_operation = vl_rbsp_ue(rbsp);
               if (memory_management_control_operation == 0)
                  break;
               struct pipe_h264_ref_pic_marking_entry *op =
                  &slice->ref_pic_marking_operations[slice->num_ref_pic_marking_operations++];
               op->memory_management_control_operation = memory_management_control_operation;
               if (memory_management_control_operation == 1 ||
                   memory_management_control_operation == 3)
                  op->difference_of_pic_nums_minus1 = vl_rbsp_ue(rbsp);
               if (memory_management_control_operation == 2)
                  op->long_term_pic_num = vl_rbsp_ue(rbsp);
               if (memory_management_control_operation == 3 ||
                   memory_management_control_operation == 6)
                  op->long_term_frame_idx = vl_rbsp_ue(rbsp);
               if (memory_management_control_operation == 4)
                  op->max_long_term_frame_idx_plus1 = vl_rbsp_ue(rbsp);
            }
         }
      }
   }

   if (pic->entropy_coding_mode_flag &&
       slice->slice_type != PIPE_H264_SLICE_TYPE_I &&
       slice->slice_type != PIPE_H264_SLICE_TYPE_SI)
      slice->cabac_init_idc = vl_rbsp_ue(rbsp);

   slice->slice_qp_delta = vl_rbsp_se(rbsp);

   if (slice->slice_type == PIPE_H264_SLICE_TYPE_SP ||
       slice->slice_type == PIPE_H264_SLICE_TYPE_SI) {
      if (slice->slice_type == PIPE_H264_SLICE_TYPE_SP)
         vl_rbsp_u(rbsp, 1); /* sp_for_switch_flag */
      vl_rbsp_se(rbsp); /* slice_qs_delta */
   }

   if (pic->deblocking_filter_control_present_flag) {
      slice->disable_deblocking_filter_idc = vl_rbsp_ue(rbsp);
      if (slice->disable_deblocking_filter_idc != 1) {
         slice->slice_alpha_c0_offset_div2 = vl_rbsp_se(rbsp);
         slice->slice_beta_offset_div2 = vl_rbsp_se(rbsp);
      }
   }
}

static void parseEncHrdParamsH264(struct vl_rbsp *rbsp, pipe_h264_enc_hrd_params *hrd_params)
{
   unsigned i;

   hrd_params->cpb_cnt_minus1 = vl_rbsp_ue(rbsp);
   hrd_params->bit_rate_scale = vl_rbsp_u(rbsp, 4);
   hrd_params->cpb_size_scale = vl_rbsp_u(rbsp, 4);
   for (i = 0; i <= hrd_params->cpb_cnt_minus1; ++i) {
      hrd_params->bit_rate_value_minus1[i] = vl_rbsp_ue(rbsp);
      hrd_params->cpb_size_value_minus1[i] = vl_rbsp_ue(rbsp);
      hrd_params->cbr_flag[i] = vl_rbsp_u(rbsp, 1);
   }
   hrd_params->initial_cpb_removal_delay_length_minus1 = vl_rbsp_u(rbsp, 5);
   hrd_params->cpb_removal_delay_length_minus1 = vl_rbsp_u(rbsp, 5);
   hrd_params->dpb_output_delay_length_minus1 = vl_rbsp_u(rbsp, 5);
   hrd_params->time_offset_length = vl_rbsp_u(rbsp, 5);
}

static void parseEncSpsParamsH264(vlVaContext *context, struct vl_rbsp *rbsp)
{
   unsigned i, profile_idc, num_ref_frames_in_pic_order_cnt_cycle;

   context->desc.h264enc.seq.profile_idc = vl_rbsp_u(rbsp, 8);
   context->desc.h264enc.seq.enc_constraint_set_flags = vl_rbsp_u(rbsp, 6);
   vl_rbsp_u(rbsp, 2); /* reserved_zero_2bits */
   context->desc.h264enc.seq.level_idc = vl_rbsp_u(rbsp, 8);

   vl_rbsp_ue(rbsp); /* seq_parameter_set_id */

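   /* Profiles whose SPS carries chroma format and bit-depth syntax
    * (High and related profiles). */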
   profile_idc = context->desc.h264enc.seq.profile_idc;
   if (profile_idc == 100 || profile_idc == 110 ||
       profile_idc == 122 || profile_idc == 244 || profile_idc == 44 ||
       profile_idc == 83 || profile_idc == 86 || profile_idc == 118 ||
       profile_idc == 128 || profile_idc == 138 || profile_idc == 139 ||
       profile_idc == 134 || profile_idc == 135) {

      if (vl_rbsp_ue(rbsp) == 3) /* chroma_format_idc */
         vl_rbsp_u(rbsp, 1); /* separate_colour_plane_flag */

      context->desc.h264enc.seq.bit_depth_luma_minus8 = vl_rbsp_ue(rbsp);
      context->desc.h264enc.seq.bit_depth_chroma_minus8 = vl_rbsp_ue(rbsp);
      vl_rbsp_u(rbsp, 1); /* qpprime_y_zero_transform_bypass_flag */

      if (vl_rbsp_u(rbsp, 1)) { /* seq_scaling_matrix_present_flag */
         debug_error("SPS scaling matrix not supported");
         return;
      }
   }

   context->desc.h264enc.seq.log2_max_frame_num_minus4 = vl_rbsp_ue(rbsp);
   context->desc.h264enc.seq.pic_order_cnt_type = vl_rbsp_ue(rbsp);

   if (context->desc.h264enc.seq.pic_order_cnt_type == 0)
      context->desc.h264enc.seq.log2_max_pic_order_cnt_lsb_minus4 = vl_rbsp_ue(rbsp);
   else if (context->desc.h264enc.seq.pic_order_cnt_type == 1) {
      vl_rbsp_u(rbsp, 1); /* delta_pic_order_always_zero_flag */
      vl_rbsp_se(rbsp); /* offset_for_non_ref_pic */
      vl_rbsp_se(rbsp); /* offset_for_top_to_bottom_field */
      num_ref_frames_in_pic_order_cnt_cycle = vl_rbsp_ue(rbsp);
      for (i = 0; i < num_ref_frames_in_pic_order_cnt_cycle; ++i)
         vl_rbsp_se(rbsp); /* offset_for_ref_frame[i] */
   }

   context->desc.h264enc.seq.max_num_ref_frames = vl_rbsp_ue(rbsp);
   context->desc.h264enc.seq.gaps_in_frame_num_value_allowed_flag = vl_rbsp_u(rbsp, 1);
   context->desc.h264enc.seq.pic_width_in_mbs_minus1 = vl_rbsp_ue(rbsp);
   context->desc.h264enc.seq.pic_height_in_map_units_minus1 = vl_rbsp_ue(rbsp);
   if (!vl_rbsp_u(rbsp, 1)) /* frame_mbs_only_flag */
      vl_rbsp_u(rbsp, 1); /* mb_adaptive_frame_field_flag */

   context->desc.h264enc.seq.direct_8x8_inference_flag = vl_rbsp_u(rbsp, 1);
   context->desc.h264enc.seq.enc_frame_cropping_flag = vl_rbsp_u(rbsp, 1);
   if (context->desc.h264enc.seq.enc_frame_cropping_flag) {
      context->desc.h264enc.seq.enc_frame_crop_left_offset = vl_rbsp_ue(rbsp);
      context->desc.h264enc.seq.enc_frame_crop_right_offset = vl_rbsp_ue(rbsp);
      context->desc.h264enc.seq.enc_frame_crop_top_offset = vl_rbsp_ue(rbsp);
      context->desc.h264enc.seq.enc_frame_crop_bottom_offset = vl_rbsp_ue(rbsp);
   }

   context->desc.h264enc.seq.vui_parameters_present_flag = vl_rbsp_u(rbsp, 1);
   if (context->desc.h264enc.seq.vui_parameters_present_flag) {
      context->desc.h264enc.seq.vui_flags.aspect_ratio_info_present_flag = vl_rbsp_u(rbsp, 1);
      if (context->desc.h264enc.seq.vui_flags.aspect_ratio_info_present_flag) {
         context->desc.h264enc.seq.aspect_ratio_idc = vl_rbsp_u(rbsp, 8);
         if (context->desc.h264enc.seq.aspect_ratio_idc == PIPE_H2645_EXTENDED_SAR) {
            context->desc.h264enc.seq.sar_width = vl_rbsp_u(rbsp, 16);
            context->desc.h264enc.seq.sar_height = vl_rbsp_u(rbsp, 16);
         }
      }

      context->desc.h264enc.seq.vui_flags.overscan_info_present_flag = vl_rbsp_u(rbsp, 1);
      if (context->desc.h264enc.seq.vui_flags.overscan_info_present_flag)
         context->desc.h264enc.seq.vui_flags.overscan_appropriate_flag = vl_rbsp_u(rbsp, 1);

      context->desc.h264enc.seq.vui_flags.video_signal_type_present_flag = vl_rbsp_u(rbsp, 1);
      if (context->desc.h264enc.seq.vui_flags.video_signal_type_present_flag) {
         context->desc.h264enc.seq.video_format = vl_rbsp_u(rbsp, 3);
         context->desc.h264enc.seq.video_full_range_flag = vl_rbsp_u(rbsp, 1);
         context->desc.h264enc.seq.vui_flags.colour_description_present_flag = vl_rbsp_u(rbsp, 1);
         if (context->desc.h264enc.seq.vui_flags.colour_description_present_flag) {
            context->desc.h264enc.seq.colour_primaries = vl_rbsp_u(rbsp, 8);
            context->desc.h264enc.seq.transfer_characteristics = vl_rbsp_u(rbsp, 8);
            context->desc.h264enc.seq.matrix_coefficients = vl_rbsp_u(rbsp, 8);
         }
      }

      context->desc.h264enc.seq.vui_flags.chroma_loc_info_present_flag = vl_rbsp_u(rbsp, 1);
      if (context->desc.h264enc.seq.vui_flags.chroma_loc_info_present_flag) {
         context->desc.h264enc.seq.chroma_sample_loc_type_top_field = vl_rbsp_ue(rbsp);
         context->desc.h264enc.seq.chroma_sample_loc_type_bottom_field = vl_rbsp_ue(rbsp);
      }

      context->desc.h264enc.seq.vui_flags.timing_info_present_flag = vl_rbsp_u(rbsp, 1);
      if (context->desc.h264enc.seq.vui_flags.timing_info_present_flag) {
         context->desc.h264enc.seq.num_units_in_tick = vl_rbsp_u(rbsp, 32);
         context->desc.h264enc.seq.time_scale = vl_rbsp_u(rbsp, 32);
         context->desc.h264enc.seq.vui_flags.fixed_frame_rate_flag = vl_rbsp_u(rbsp, 1);
      }

      context->desc.h264enc.seq.vui_flags.nal_hrd_parameters_present_flag = vl_rbsp_u(rbsp, 1);
      if (context->desc.h264enc.seq.vui_flags.nal_hrd_parameters_present_flag)
         parseEncHrdParamsH264(rbsp, &context->desc.h264enc.seq.nal_hrd_parameters);

      context->desc.h264enc.seq.vui_flags.vcl_hrd_parameters_present_flag = vl_rbsp_u(rbsp, 1);
      if (context->desc.h264enc.seq.vui_flags.vcl_hrd_parameters_present_flag)
         parseEncHrdParamsH264(rbsp, &context->desc.h264enc.seq.vcl_hrd_parameters);

      if (context->desc.h264enc.seq.vui_flags.nal_hrd_parameters_present_flag ||
          context->desc.h264enc.seq.vui_flags.vcl_hrd_parameters_present_flag)
         context->desc.h264enc.seq.vui_flags.low_delay_hrd_flag = vl_rbsp_u(rbsp, 1);

      context->desc.h264enc.seq.vui_flags.pic_struct_present_flag = vl_rbsp_u(rbsp, 1);

      context->desc.h264enc.seq.vui_flags.bitstream_restriction_flag = vl_rbsp_u(rbsp, 1);
      if (context->desc.h264enc.seq.vui_flags.bitstream_restriction_flag) {
         context->desc.h264enc.seq.vui_flags.motion_vectors_over_pic_boundaries_flag = vl_rbsp_u(rbsp, 1);
         context->desc.h264enc.seq.max_bytes_per_pic_denom = vl_rbsp_ue(rbsp);
         context->desc.h264enc.seq.max_bits_per_mb_denom = vl_rbsp_ue(rbsp);
         context->desc.h264enc.seq.log2_max_mv_length_horizontal = vl_rbsp_ue(rbsp);
         context->desc.h264enc.seq.log2_max_mv_length_vertical = vl_rbsp_ue(rbsp);
         context->desc.h264enc.seq.max_num_reorder_frames = vl_rbsp_ue(rbsp);
         context->desc.h264enc.seq.max_dec_frame_buffering = vl_rbsp_ue(rbsp);
      }
   }
}

static void slice_group_map(struct vl_rbsp *rbsp, unsigned num_slice_groups_minus1)
{
   unsigned slice_group_map_type = vl_rbsp_ue(rbsp);
   if (slice_group_map_type == 0) {
      for (unsigned i = 0; i <= num_slice_groups_minus1; i++)
         vl_rbsp_ue(rbsp); /* run_length_minus1[i] */
   } else if (slice_group_map_type == 2) {
      for (unsigned i = 0; i <= num_slice_groups_minus1; i++) {
         vl_rbsp_ue(rbsp); /* top_left[i] */
         vl_rbsp_ue(rbsp); /* bottom_right[i] */
      }
   } else if (slice_group_map_type == 3 ||
              slice_group_map_type == 4 ||
              slice_group_map_type == 5) {
      vl_rbsp_u(rbsp, 1); /* slice_group_change_direction_flag */
      vl_rbsp_ue(rbsp); /* slice_group_change_rate_minus1 */
   } else if (slice_group_map_type == 6) {
      unsigned pic_size_in_map_units_minus1 = vl_rbsp_ue(rbsp);
      for (unsigned i = 0; i <= pic_size_in_map_units_minus1; i++)
         vl_rbsp_u(rbsp, util_logbase2_ceil(num_slice_groups_minus1 + 1)); /* slice_group_id[i] */
   }
}

static void parseEncPpsParamsH264(vlVaContext *context, struct vl_rbsp *rbsp)
{
   struct pipe_h264_enc_pic_control *pic = &context->desc.h264enc.pic_ctrl;

   vl_rbsp_ue(rbsp); /* pic_parameter_set_id */
   vl_rbsp_ue(rbsp); /* seq_parameter_set_id */
   pic->entropy_coding_mode_flag = vl_rbsp_u(rbsp, 1);
   vl_rbsp_u(rbsp, 1); /* bottom_field_pic_order_in_frame_present_flag */
   unsigned num_slice_groups_minus1 = vl_rbsp_ue(rbsp);
   if (num_slice_groups_minus1 > 0)
      slice_group_map(rbsp, num_slice_groups_minus1);
   pic->num_ref_idx_l0_default_active_minus1 = vl_rbsp_ue(rbsp);
   pic->num_ref_idx_l1_default_active_minus1 = vl_rbsp_ue(rbsp);
   pic->weighted_pred_flag = vl_rbsp_u(rbsp, 1);
   pic->weighted_bipred_idc = vl_rbsp_u(rbsp, 2);
   pic->pic_init_qp_minus26 = vl_rbsp_se(rbsp);
   pic->pic_init_qs_minus26 = vl_rbsp_se(rbsp);
   pic->chroma_qp_index_offset = vl_rbsp_se(rbsp);
   pic->deblocking_filter_control_present_flag = vl_rbsp_u(rbsp, 1);
   pic->constrained_intra_pred_flag = vl_rbsp_u(rbsp, 1);
   pic->redundant_pic_cnt_present_flag = vl_rbsp_u(rbsp, 1);
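   /* Optional PPS extension fields are only present if more RBSP data follows */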
   if (vl_rbsp_more_data(rbsp)) {
      pic->transform_8x8_mode_flag = vl_rbsp_u(rbsp, 1);
      if (vl_rbsp_u(rbsp, 1)) { /* pic_scaling_matrix_present_flag */
         debug_error("PPS scaling matrix not supported");
         return;
      }
      pic->second_chroma_qp_index_offset = vl_rbsp_se(rbsp);
   } else {
      pic->transform_8x8_mode_flag = 0;
      pic->second_chroma_qp_index_offset = pic->chroma_qp_index_offset;
   }
}

static void parseEncPrefixH264(vlVaContext *context, struct vl_rbsp *rbsp)
{
   if (!vl_rbsp_u(rbsp, 1)) /* svc_extension_flag */
      return;

   vl_rbsp_u(rbsp, 1); /* idr_flag */
   vl_rbsp_u(rbsp, 6); /* priority_id */
   vl_rbsp_u(rbsp, 1); /* no_inter_layer_pred_flag */
   vl_rbsp_u(rbsp, 3); /* dependency_id */
   vl_rbsp_u(rbsp, 4); /* quality_id */
   context->desc.h264enc.pic_ctrl.temporal_id = vl_rbsp_u(rbsp, 3);
}

VAStatus
vlVaHandleVAEncPackedHeaderDataBufferTypeH264(vlVaContext *context, vlVaBuffer *buf)
{
   struct vl_vlc vlc = {0};
   uint8_t *data = buf->data;
   int nal_start = -1;
   unsigned nal_unit_type = 0, emulation_bytes_start = 0;
   bool is_slice = false;

   vl_vlc_init(&vlc, 1, (const void * const*)&data, &buf->size);

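   /* Walk the packed header data: locate each Annex-B start code, record the
    * raw NAL for later bitstream emission, and parse SPS/PPS/slice/prefix NALs
    * so their fields are mirrored into the encoder picture description. */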
   while (vl_vlc_bits_left(&vlc) > 0) {
      /* search the first 64 bytes for a startcode */
      for (int i = 0; i < 64 && vl_vlc_bits_left(&vlc) >= 24; ++i) {
         if (vl_vlc_peekbits(&vlc, 24) == 0x000001)
            break;
         vl_vlc_eatbits(&vlc, 8);
         vl_vlc_fillbits(&vlc);
      }

      unsigned start = vlc.data - data - vl_vlc_valid_bits(&vlc) / 8;
      emulation_bytes_start = 4; /* 3 bytes startcode + 1 byte header */
      /* handle 4 bytes startcode */
      if (start > 0 && data[start - 1] == 0x00) {
         start--;
         emulation_bytes_start++;
      }
      if (nal_start >= 0) {
         vlVaAddRawHeader(&context->desc.h264enc.raw_headers, nal_unit_type,
                          start - nal_start, data + nal_start, is_slice, 0);
      }
      nal_start = start;
      is_slice = false;

      vl_vlc_eatbits(&vlc, 24); /* eat the startcode */

      if (vl_vlc_valid_bits(&vlc) < 15)
         vl_vlc_fillbits(&vlc);

      vl_vlc_eatbits(&vlc, 1);
      unsigned nal_ref_idc = vl_vlc_get_uimsbf(&vlc, 2);
      nal_unit_type = vl_vlc_get_uimsbf(&vlc, 5);

      struct vl_rbsp rbsp;
      vl_rbsp_init(&rbsp, &vlc, ~0, context->packed_header_emulation_bytes);

      switch (nal_unit_type) {
      case PIPE_H264_NAL_SLICE:
      case PIPE_H264_NAL_IDR_SLICE:
         is_slice = true;
         parseEncSliceParamsH264(context, &rbsp, nal_ref_idc, nal_unit_type);
         break;
      case PIPE_H264_NAL_SPS:
         parseEncSpsParamsH264(context, &rbsp);
         break;
      case PIPE_H264_NAL_PPS:
         parseEncPpsParamsH264(context, &rbsp);
         break;
      case PIPE_H264_NAL_PREFIX:
         parseEncPrefixH264(context, &rbsp);
         break;
      default:
         break;
      }

      if (!context->packed_header_emulation_bytes)
         break;
   }

   if (nal_start >= 0) {
      vlVaAddRawHeader(&context->desc.h264enc.raw_headers, nal_unit_type,
                       buf->size - nal_start, data + nal_start, is_slice,
                       context->packed_header_emulation_bytes ? 0 : emulation_bytes_start);
   }

   return VA_STATUS_SUCCESS;
}

VAStatus
vlVaHandleVAEncMiscParameterTypeTemporalLayerH264(vlVaContext *context, VAEncMiscParameterBuffer *misc)
{
   VAEncMiscParameterTemporalLayerStructure *tl = (VAEncMiscParameterTemporalLayerStructure *)misc->data;

   context->desc.h264enc.seq.num_temporal_layers = tl->number_of_layers;

   return VA_STATUS_SUCCESS;
}

VAStatus
vlVaHandleVAEncMiscParameterTypeQualityLevelH264(vlVaContext *context, VAEncMiscParameterBuffer *misc)
{
   VAEncMiscParameterBufferQualityLevel *ql = (VAEncMiscParameterBufferQualityLevel *)misc->data;
   vlVaHandleVAEncMiscParameterTypeQualityLevel(&context->desc.h264enc.quality_modes,
                                                (vlVaQualityBits *)&ql->quality_level);

   return VA_STATUS_SUCCESS;
}

VAStatus
vlVaHandleVAEncMiscParameterTypeMaxFrameSizeH264(vlVaContext *context, VAEncMiscParameterBuffer *misc)
{
   VAEncMiscParameterBufferMaxFrameSize *ms = (VAEncMiscParameterBufferMaxFrameSize *)misc->data;
   context->desc.h264enc.rate_ctrl[0].max_au_size = ms->max_frame_size;
   return VA_STATUS_SUCCESS;
}

VAStatus
vlVaHandleVAEncMiscParameterTypeHRDH264(vlVaContext *context, VAEncMiscParameterBuffer *misc)
{
   VAEncMiscParameterHRD *ms = (VAEncMiscParameterHRD *)misc->data;

   if (ms->buffer_size == 0)
      return VA_STATUS_ERROR_INVALID_PARAMETER;

   /* Distinguishes from the default params set for these values in other
      functions and app specific params passed down via HRD buffer */
   context->desc.h264enc.rate_ctrl[0].app_requested_hrd_buffer = true;
   context->desc.h264enc.rate_ctrl[0].vbv_buffer_size = ms->buffer_size;
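   /* vbv_buf_lv is the initial buffer fullness expressed as a fraction of the
    * buffer size in 1/64 units. */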
   context->desc.h264enc.rate_ctrl[0].vbv_buf_lv = (ms->initial_buffer_fullness << 6) / ms->buffer_size;
   context->desc.h264enc.rate_ctrl[0].vbv_buf_initial_size = ms->initial_buffer_fullness;

   for (unsigned i = 1; i < context->desc.h264enc.seq.num_temporal_layers; i++) {
      context->desc.h264enc.rate_ctrl[i].vbv_buffer_size =
         (float)ms->buffer_size / context->desc.h264enc.rate_ctrl[0].peak_bitrate *
         context->desc.h264enc.rate_ctrl[i].peak_bitrate;
      context->desc.h264enc.rate_ctrl[i].vbv_buf_lv = context->desc.h264enc.rate_ctrl[0].vbv_buf_lv;
      context->desc.h264enc.rate_ctrl[i].vbv_buf_initial_size =
         (context->desc.h264enc.rate_ctrl[i].vbv_buffer_size * context->desc.h264enc.rate_ctrl[i].vbv_buf_lv) >> 6;
   }

   return VA_STATUS_SUCCESS;
}