/**************************************************************************
 *
 * Copyright 2010 Thomas Balling Sørensen & Orasanu Lucian.
 * Copyright 2014 Advanced Micro Devices, Inc.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

#include "pipe/p_video_codec.h"

#include "util/u_handle_table.h"
#include "util/u_video.h"

#include "vl/vl_vlc.h"
#include "vl/vl_winsys.h"

#include "va_private.h"

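/* Bind the render target surface to the context and reset per-picture state
 * before parameter buffers are submitted with vlVaRenderPicture(). */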
VAStatus
vlVaBeginPicture(VADriverContextP ctx, VAContextID context_id, VASurfaceID render_target)
{
   vlVaDriver *drv;
   vlVaContext *context;
   vlVaSurface *surf;

   if (!ctx)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   drv = VL_VA_DRIVER(ctx);
   if (!drv)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   mtx_lock(&drv->mutex);
   context = handle_table_get(drv->htab, context_id);
   if (!context) {
      mtx_unlock(&drv->mutex);
      return VA_STATUS_ERROR_INVALID_CONTEXT;
   }

   if (u_reduce_video_profile(context->templat.profile) == PIPE_VIDEO_FORMAT_MPEG12) {
      context->desc.mpeg12.intra_matrix = NULL;
      context->desc.mpeg12.non_intra_matrix = NULL;
   }

   surf = handle_table_get(drv->htab, render_target);
   mtx_unlock(&drv->mutex);
   if (!surf || !surf->buffer)
      return VA_STATUS_ERROR_INVALID_SURFACE;

   context->target_id = render_target;
   surf->ctx = context_id;
   context->target = surf->buffer;
   context->mjpeg.sampling_factor = 0;

   if (!context->decoder) {

      /* VPP */
      if (context->templat.profile == PIPE_VIDEO_PROFILE_UNKNOWN &&
          context->target->buffer_format != PIPE_FORMAT_B8G8R8A8_UNORM &&
          context->target->buffer_format != PIPE_FORMAT_R8G8B8A8_UNORM &&
          context->target->buffer_format != PIPE_FORMAT_B8G8R8X8_UNORM &&
          context->target->buffer_format != PIPE_FORMAT_R8G8B8X8_UNORM &&
          context->target->buffer_format != PIPE_FORMAT_NV12 &&
          context->target->buffer_format != PIPE_FORMAT_P016)
         return VA_STATUS_ERROR_UNIMPLEMENTED;

      return VA_STATUS_SUCCESS;
   }

   if (context->decoder->entrypoint != PIPE_VIDEO_ENTRYPOINT_ENCODE)
      context->needs_begin_frame = true;

   return VA_STATUS_SUCCESS;
}

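/* Look up the pipe_video_buffer backing a VA surface handle so it can be used
 * as a reference frame; *ref_frame is set to NULL if the handle is unknown. */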
void
vlVaGetReferenceFrame(vlVaDriver *drv, VASurfaceID surface_id,
                      struct pipe_video_buffer **ref_frame)
{
   vlVaSurface *surf = handle_table_get(drv->htab, surface_id);
   if (surf)
      *ref_frame = surf->buffer;
   else
      *ref_frame = NULL;
}

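/* Fill in fixed motion-estimation, picture-control and rate-control presets
 * for the H.264 encoder, and derive the per-picture bit budgets from the
 * target/peak bitrate and the frame rate. */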
static void
getEncParamPreset(vlVaContext *context)
{
   //motion estimation preset
   context->desc.h264enc.motion_est.motion_est_quarter_pixel = 0x00000001;
   context->desc.h264enc.motion_est.lsmvert = 0x00000002;
   context->desc.h264enc.motion_est.enc_disable_sub_mode = 0x00000078;
   context->desc.h264enc.motion_est.enc_en_ime_overw_dis_subm = 0x00000001;
   context->desc.h264enc.motion_est.enc_ime_overw_dis_subm_no = 0x00000001;
   context->desc.h264enc.motion_est.enc_ime2_search_range_x = 0x00000004;
   context->desc.h264enc.motion_est.enc_ime2_search_range_y = 0x00000004;

   //pic control preset
   context->desc.h264enc.pic_ctrl.enc_cabac_enable = 0x00000001;
   context->desc.h264enc.pic_ctrl.enc_constraint_set_flags = 0x00000040;

   //rate control
   context->desc.h264enc.rate_ctrl.vbv_buffer_size = 20000000;
   context->desc.h264enc.rate_ctrl.vbv_buf_lv = 48;
   context->desc.h264enc.rate_ctrl.fill_data_enable = 1;
   context->desc.h264enc.rate_ctrl.enforce_hrd = 1;
   context->desc.h264enc.enable_vui = false;
   if (context->desc.h264enc.rate_ctrl.frame_rate_num == 0 ||
       context->desc.h264enc.rate_ctrl.frame_rate_den == 0) {
      context->desc.h264enc.rate_ctrl.frame_rate_num = 30;
      context->desc.h264enc.rate_ctrl.frame_rate_den = 1;
   }
   context->desc.h264enc.rate_ctrl.target_bits_picture =
      context->desc.h264enc.rate_ctrl.target_bitrate *
      ((float)context->desc.h264enc.rate_ctrl.frame_rate_den /
      context->desc.h264enc.rate_ctrl.frame_rate_num);
   context->desc.h264enc.rate_ctrl.peak_bits_picture_integer =
      context->desc.h264enc.rate_ctrl.peak_bitrate *
      ((float)context->desc.h264enc.rate_ctrl.frame_rate_den /
      context->desc.h264enc.rate_ctrl.frame_rate_num);

   context->desc.h264enc.rate_ctrl.peak_bits_picture_fraction = 0;
   context->desc.h264enc.ref_pic_mode = 0x00000201;
}

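/* Dispatch a VAPictureParameterBufferType buffer to the codec-specific
 * handler, then create the decoder on first use once max_references (and,
 * for H.264, the level) is known. */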
static VAStatus
handlePictureParameterBuffer(vlVaDriver *drv, vlVaContext *context, vlVaBuffer *buf)
{
   VAStatus vaStatus = VA_STATUS_SUCCESS;

   switch (u_reduce_video_profile(context->templat.profile)) {
   case PIPE_VIDEO_FORMAT_MPEG12:
      vlVaHandlePictureParameterBufferMPEG12(drv, context, buf);
      break;

   case PIPE_VIDEO_FORMAT_MPEG4_AVC:
      vlVaHandlePictureParameterBufferH264(drv, context, buf);
      break;

   case PIPE_VIDEO_FORMAT_VC1:
      vlVaHandlePictureParameterBufferVC1(drv, context, buf);
      break;

   case PIPE_VIDEO_FORMAT_MPEG4:
      vlVaHandlePictureParameterBufferMPEG4(drv, context, buf);
      break;

   case PIPE_VIDEO_FORMAT_HEVC:
      vlVaHandlePictureParameterBufferHEVC(drv, context, buf);
      break;

   case PIPE_VIDEO_FORMAT_JPEG:
      vlVaHandlePictureParameterBufferMJPEG(drv, context, buf);
      break;

   default:
      break;
   }

   /* Create the decoder once max_references is known. */
   if (!context->decoder) {
      enum pipe_video_format format =
         u_reduce_video_profile(context->templat.profile);

      if (!context->target)
         return VA_STATUS_ERROR_INVALID_CONTEXT;

      if (context->templat.max_references == 0 &&
          format != PIPE_VIDEO_FORMAT_JPEG)
         return VA_STATUS_ERROR_INVALID_BUFFER;

      if (format == PIPE_VIDEO_FORMAT_MPEG4_AVC)
         context->templat.level = u_get_h264_level(context->templat.width,
            context->templat.height, &context->templat.max_references);

      context->decoder = drv->pipe->create_video_codec(drv->pipe,
         &context->templat);

      if (!context->decoder)
         return VA_STATUS_ERROR_ALLOCATION_FAILED;

      context->needs_begin_frame = true;
   }

   return vaStatus;
}

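/* Forward a VAIQMatrixBufferType buffer to the codec-specific handler. */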
static void
handleIQMatrixBuffer(vlVaContext *context, vlVaBuffer *buf)
{
   switch (u_reduce_video_profile(context->templat.profile)) {
   case PIPE_VIDEO_FORMAT_MPEG12:
      vlVaHandleIQMatrixBufferMPEG12(context, buf);
      break;

   case PIPE_VIDEO_FORMAT_MPEG4_AVC:
      vlVaHandleIQMatrixBufferH264(context, buf);
      break;

   case PIPE_VIDEO_FORMAT_MPEG4:
      vlVaHandleIQMatrixBufferMPEG4(context, buf);
      break;

   case PIPE_VIDEO_FORMAT_HEVC:
      vlVaHandleIQMatrixBufferHEVC(context, buf);
      break;

   case PIPE_VIDEO_FORMAT_JPEG:
      vlVaHandleIQMatrixBufferMJPEG(context, buf);
      break;

   default:
      break;
   }
}

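/* Forward a VASliceParameterBufferType buffer to the codec-specific handler. */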
static void
handleSliceParameterBuffer(vlVaContext *context, vlVaBuffer *buf)
{
   switch (u_reduce_video_profile(context->templat.profile)) {
   case PIPE_VIDEO_FORMAT_MPEG12:
      vlVaHandleSliceParameterBufferMPEG12(context, buf);
      break;

   case PIPE_VIDEO_FORMAT_VC1:
      vlVaHandleSliceParameterBufferVC1(context, buf);
      break;

   case PIPE_VIDEO_FORMAT_MPEG4_AVC:
      vlVaHandleSliceParameterBufferH264(context, buf);
      break;

   case PIPE_VIDEO_FORMAT_MPEG4:
      vlVaHandleSliceParameterBufferMPEG4(context, buf);
      break;

   case PIPE_VIDEO_FORMAT_HEVC:
      vlVaHandleSliceParameterBufferHEVC(context, buf);
      break;

   case PIPE_VIDEO_FORMAT_JPEG:
      vlVaHandleSliceParameterBufferMJPEG(context, buf);
      break;

   default:
      break;
   }
}

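/* Return 1 if a startcode matching 'code' (of 'bits' bits) is found within
 * the first 64 bytes of the buffer, 0 otherwise. */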
static unsigned int
bufHasStartcode(vlVaBuffer *buf, unsigned int code, unsigned int bits)
{
   struct vl_vlc vlc = {0};
   int i;

   /* search the first 64 bytes for a startcode */
   vl_vlc_init(&vlc, 1, (const void * const*)&buf->data, &buf->size);
   for (i = 0; i < 64 && vl_vlc_bits_left(&vlc) >= bits; ++i) {
      if (vl_vlc_peekbits(&vlc, bits) == code)
         return 1;
      vl_vlc_eatbits(&vlc, 8);
      vl_vlc_fillbits(&vlc);
   }

   return 0;
}

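/* Submit slice data to the decoder. If the bitstream does not already carry
 * the startcode the codec expects, prepend one as an extra buffer so that
 * decode_bitstream() always sees a well-formed unit. */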
static void
handleVASliceDataBufferType(vlVaContext *context, vlVaBuffer *buf)
{
   enum pipe_video_format format;
   unsigned num_buffers = 0;
   void * const *buffers[2];
   unsigned sizes[2];
   static const uint8_t start_code_h264[] = { 0x00, 0x00, 0x01 };
   static const uint8_t start_code_h265[] = { 0x00, 0x00, 0x01 };
   static const uint8_t start_code_vc1[] = { 0x00, 0x00, 0x01, 0x0d };

   format = u_reduce_video_profile(context->templat.profile);
   switch (format) {
   case PIPE_VIDEO_FORMAT_MPEG4_AVC:
      if (bufHasStartcode(buf, 0x000001, 24))
         break;

      buffers[num_buffers] = (void *const)&start_code_h264;
      sizes[num_buffers++] = sizeof(start_code_h264);
      break;
   case PIPE_VIDEO_FORMAT_HEVC:
      if (bufHasStartcode(buf, 0x000001, 24))
         break;

      buffers[num_buffers] = (void *const)&start_code_h265;
      sizes[num_buffers++] = sizeof(start_code_h265);
      break;
   case PIPE_VIDEO_FORMAT_VC1:
      if (bufHasStartcode(buf, 0x0000010d, 32) ||
          bufHasStartcode(buf, 0x0000010c, 32) ||
          bufHasStartcode(buf, 0x0000010b, 32))
         break;

      if (context->decoder->profile == PIPE_VIDEO_PROFILE_VC1_ADVANCED) {
         buffers[num_buffers] = (void *const)&start_code_vc1;
         sizes[num_buffers++] = sizeof(start_code_vc1);
      }
      break;
   case PIPE_VIDEO_FORMAT_MPEG4:
      if (bufHasStartcode(buf, 0x000001, 24))
         break;

      vlVaDecoderFixMPEG4Startcode(context);
      buffers[num_buffers] = (void *)context->mpeg4.start_code;
      sizes[num_buffers++] = context->mpeg4.start_code_size;
      break;
   case PIPE_VIDEO_FORMAT_JPEG:
      break;
   default:
      break;
   }

   buffers[num_buffers] = buf->data;
   sizes[num_buffers] = buf->size;
   ++num_buffers;

   if (context->needs_begin_frame) {
      context->decoder->begin_frame(context->decoder, context->target,
                                    &context->desc.base);
      context->needs_begin_frame = false;
   }
   context->decoder->decode_bitstream(context->decoder, context->target, &context->desc.base,
                                      num_buffers, (const void * const*)buffers, sizes);
}

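/* Update the encoder rate-control state from a VAEncMiscParameterRateControl
 * buffer: the target bitrate is bits_per_second, scaled by target_percentage
 * unless constant-rate control is used, and the VBV buffer is sized from it. */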
static VAStatus
handleVAEncMiscParameterTypeRateControl(vlVaContext *context, VAEncMiscParameterBuffer *misc)
{
   VAEncMiscParameterRateControl *rc = (VAEncMiscParameterRateControl *)misc->data;
   if (context->desc.h264enc.rate_ctrl.rate_ctrl_method ==
       PIPE_H264_ENC_RATE_CONTROL_METHOD_CONSTANT)
      context->desc.h264enc.rate_ctrl.target_bitrate = rc->bits_per_second;
   else
      context->desc.h264enc.rate_ctrl.target_bitrate = rc->bits_per_second * (rc->target_percentage / 100.0);
   context->desc.h264enc.rate_ctrl.peak_bitrate = rc->bits_per_second;
   if (context->desc.h264enc.rate_ctrl.target_bitrate < 2000000)
      context->desc.h264enc.rate_ctrl.vbv_buffer_size = MIN2((context->desc.h264enc.rate_ctrl.target_bitrate * 2.75), 2000000);
   else
      context->desc.h264enc.rate_ctrl.vbv_buffer_size = context->desc.h264enc.rate_ctrl.target_bitrate;

   return VA_STATUS_SUCCESS;
}

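/* Update the encoder frame rate; when the upper 16 bits of the VA framerate
 * field are non-zero they hold the denominator and the lower 16 bits the
 * numerator, otherwise the whole field is the numerator over 1. */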
static VAStatus
handleVAEncMiscParameterTypeFrameRate(vlVaContext *context, VAEncMiscParameterBuffer *misc)
{
   VAEncMiscParameterFrameRate *fr = (VAEncMiscParameterFrameRate *)misc->data;
   if (fr->framerate & 0xffff0000) {
      context->desc.h264enc.rate_ctrl.frame_rate_num = fr->framerate & 0xffff;
      context->desc.h264enc.rate_ctrl.frame_rate_den = fr->framerate >> 16 & 0xffff;
   } else {
      context->desc.h264enc.rate_ctrl.frame_rate_num = fr->framerate;
      context->desc.h264enc.rate_ctrl.frame_rate_den = 1;
   }
   return VA_STATUS_SUCCESS;
}

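/* Handle an H.264 encode sequence parameter buffer: create the encoder on
 * first use and derive GOP size, frame rate and picture order count type
 * from the SPS fields. */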
static VAStatus
handleVAEncSequenceParameterBufferType(vlVaDriver *drv, vlVaContext *context, vlVaBuffer *buf)
{
   VAEncSequenceParameterBufferH264 *h264 = (VAEncSequenceParameterBufferH264 *)buf->data;
   if (!context->decoder) {
      context->templat.max_references = h264->max_num_ref_frames;
      context->templat.level = h264->level_idc;
      context->decoder = drv->pipe->create_video_codec(drv->pipe, &context->templat);
      if (!context->decoder)
         return VA_STATUS_ERROR_ALLOCATION_FAILED;
   }

   context->gop_coeff = ((1024 + h264->intra_idr_period - 1) / h264->intra_idr_period + 1) / 2 * 2;
   if (context->gop_coeff > VL_VA_ENC_GOP_COEFF)
      context->gop_coeff = VL_VA_ENC_GOP_COEFF;
   context->desc.h264enc.gop_size = h264->intra_idr_period * context->gop_coeff;
   context->desc.h264enc.rate_ctrl.frame_rate_num = h264->time_scale / 2;
   context->desc.h264enc.rate_ctrl.frame_rate_den = h264->num_units_in_tick;
   context->desc.h264enc.pic_order_cnt_type = h264->seq_fields.bits.pic_order_cnt_type;
   return VA_STATUS_SUCCESS;
}

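/* Dispatch a VAEncMiscParameterBufferType buffer according to its misc type;
 * unhandled types are silently ignored. */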
static VAStatus
handleVAEncMiscParameterBufferType(vlVaContext *context, vlVaBuffer *buf)
{
   VAStatus vaStatus = VA_STATUS_SUCCESS;
   VAEncMiscParameterBuffer *misc;
   misc = buf->data;

   switch (misc->type) {
   case VAEncMiscParameterTypeRateControl:
      vaStatus = handleVAEncMiscParameterTypeRateControl(context, misc);
      break;

   case VAEncMiscParameterTypeFrameRate:
      vaStatus = handleVAEncMiscParameterTypeFrameRate(context, misc);
      break;

   default:
      break;
   }

   return vaStatus;
}

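/* Handle an H.264 encode picture parameter buffer: track the frame and GOP
 * counters, pick IDR vs. P picture type, remember the coded buffer
 * (allocating its backing resource on first use) and record the frame index
 * for later reference-list lookups. */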
static VAStatus
handleVAEncPictureParameterBufferType(vlVaDriver *drv, vlVaContext *context, vlVaBuffer *buf)
{
   VAEncPictureParameterBufferH264 *h264;
   vlVaBuffer *coded_buf;

   h264 = buf->data;
   context->desc.h264enc.frame_num = h264->frame_num;
   context->desc.h264enc.not_referenced = false;
   context->desc.h264enc.pic_order_cnt = h264->CurrPic.TopFieldOrderCnt;
   if (context->desc.h264enc.gop_cnt == 0)
      context->desc.h264enc.i_remain = context->gop_coeff;
   else if (context->desc.h264enc.frame_num == 1)
      context->desc.h264enc.i_remain--;

   context->desc.h264enc.p_remain = context->desc.h264enc.gop_size - context->desc.h264enc.gop_cnt - context->desc.h264enc.i_remain;

   coded_buf = handle_table_get(drv->htab, h264->coded_buf);
   if (!coded_buf->derived_surface.resource)
      coded_buf->derived_surface.resource = pipe_buffer_create(drv->pipe->screen, PIPE_BIND_VERTEX_BUFFER,
                                            PIPE_USAGE_STREAM, coded_buf->size);
   context->coded_buf = coded_buf;

   util_hash_table_set(context->desc.h264enc.frame_idx,
                       UINT_TO_PTR(h264->CurrPic.picture_id),
                       UINT_TO_PTR(h264->frame_num));

   if (h264->pic_fields.bits.idr_pic_flag == 1)
      context->desc.h264enc.picture_type = PIPE_H264_ENC_PICTURE_TYPE_IDR;
   else
      context->desc.h264enc.picture_type = PIPE_H264_ENC_PICTURE_TYPE_P;

   context->desc.h264enc.quant_i_frames = h264->pic_init_qp;
   context->desc.h264enc.quant_b_frames = h264->pic_init_qp;
   context->desc.h264enc.quant_p_frames = h264->pic_init_qp;
   context->desc.h264enc.gop_cnt++;
   if (context->desc.h264enc.gop_cnt == context->desc.h264enc.gop_size)
      context->desc.h264enc.gop_cnt = 0;

   return VA_STATUS_SUCCESS;
}

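/* Handle an H.264 encode slice parameter buffer: resolve the first valid
 * entries of RefPicList0/1 to previously recorded frame numbers and refine
 * the picture type from the slice type (B/P/I, bumping idr_pic_id for IDR). */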
static VAStatus
handleVAEncSliceParameterBufferType(vlVaDriver *drv, vlVaContext *context, vlVaBuffer *buf)
{
   VAEncSliceParameterBufferH264 *h264;

   h264 = buf->data;
   context->desc.h264enc.ref_idx_l0 = VA_INVALID_ID;
   context->desc.h264enc.ref_idx_l1 = VA_INVALID_ID;

   for (int i = 0; i < 32; i++) {
      if (h264->RefPicList0[i].picture_id != VA_INVALID_ID) {
         if (context->desc.h264enc.ref_idx_l0 == VA_INVALID_ID)
            context->desc.h264enc.ref_idx_l0 = PTR_TO_UINT(util_hash_table_get(context->desc.h264enc.frame_idx,
                                               UINT_TO_PTR(h264->RefPicList0[i].picture_id)));
      }
      if (h264->RefPicList1[i].picture_id != VA_INVALID_ID && h264->slice_type == 1) {
         if (context->desc.h264enc.ref_idx_l1 == VA_INVALID_ID)
            context->desc.h264enc.ref_idx_l1 = PTR_TO_UINT(util_hash_table_get(context->desc.h264enc.frame_idx,
                                               UINT_TO_PTR(h264->RefPicList1[i].picture_id)));
      }
   }

   if (h264->slice_type == 1)
      context->desc.h264enc.picture_type = PIPE_H264_ENC_PICTURE_TYPE_B;
   else if (h264->slice_type == 0)
      context->desc.h264enc.picture_type = PIPE_H264_ENC_PICTURE_TYPE_P;
   else if (h264->slice_type == 2) {
      if (context->desc.h264enc.picture_type == PIPE_H264_ENC_PICTURE_TYPE_IDR)
         context->desc.h264enc.idr_pic_id++;
      else
         context->desc.h264enc.picture_type = PIPE_H264_ENC_PICTURE_TYPE_I;
   } else
      context->desc.h264enc.picture_type = PIPE_H264_ENC_PICTURE_TYPE_SKIP;

   return VA_STATUS_SUCCESS;
}

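/* Process the parameter and data buffers submitted for the current picture,
 * dispatching each one to the matching decode, encode or video-processing
 * handler while holding the driver mutex. */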
VAStatus
vlVaRenderPicture(VADriverContextP ctx, VAContextID context_id, VABufferID *buffers, int num_buffers)
{
   vlVaDriver *drv;
   vlVaContext *context;
   VAStatus vaStatus = VA_STATUS_SUCCESS;

   unsigned i;

   if (!ctx)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   drv = VL_VA_DRIVER(ctx);
   if (!drv)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   mtx_lock(&drv->mutex);
   context = handle_table_get(drv->htab, context_id);
   if (!context) {
      mtx_unlock(&drv->mutex);
      return VA_STATUS_ERROR_INVALID_CONTEXT;
   }

   for (i = 0; i < num_buffers; ++i) {
      vlVaBuffer *buf = handle_table_get(drv->htab, buffers[i]);
      if (!buf) {
         mtx_unlock(&drv->mutex);
         return VA_STATUS_ERROR_INVALID_BUFFER;
      }

      switch (buf->type) {
      case VAPictureParameterBufferType:
         vaStatus = handlePictureParameterBuffer(drv, context, buf);
         break;

      case VAIQMatrixBufferType:
         handleIQMatrixBuffer(context, buf);
         break;

      case VASliceParameterBufferType:
         handleSliceParameterBuffer(context, buf);
         break;

      case VASliceDataBufferType:
         handleVASliceDataBufferType(context, buf);
         break;
      case VAProcPipelineParameterBufferType:
         vaStatus = vlVaHandleVAProcPipelineParameterBufferType(drv, context, buf);
         break;

      case VAEncSequenceParameterBufferType:
         vaStatus = handleVAEncSequenceParameterBufferType(drv, context, buf);
         break;

      case VAEncMiscParameterBufferType:
         vaStatus = handleVAEncMiscParameterBufferType(context, buf);
         break;

      case VAEncPictureParameterBufferType:
         vaStatus = handleVAEncPictureParameterBufferType(drv, context, buf);
         break;

      case VAEncSliceParameterBufferType:
         vaStatus = handleVAEncSliceParameterBufferType(drv, context, buf);
         break;

      case VAHuffmanTableBufferType:
         vlVaHandleHuffmanTableBufferType(context, buf);
         break;

      default:
         break;
      }
   }
   mtx_unlock(&drv->mutex);

   return vaStatus;
}

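/* Finish the current picture: reallocate the render target if the hardware
 * needs a different interlacing or buffer format, run the encoder for encode
 * contexts, call end_frame(), and flush the codec for encoders according to
 * the GOP/IDR counters. */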
VAStatus
vlVaEndPicture(VADriverContextP ctx, VAContextID context_id)
{
   vlVaDriver *drv;
   vlVaContext *context;
   vlVaBuffer *coded_buf;
   vlVaSurface *surf;
   void *feedback;
   struct pipe_screen *screen;
   bool supported;
   bool realloc = false;
   enum pipe_format format;

   if (!ctx)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   drv = VL_VA_DRIVER(ctx);
   if (!drv)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   mtx_lock(&drv->mutex);
   context = handle_table_get(drv->htab, context_id);
   mtx_unlock(&drv->mutex);
   if (!context)
      return VA_STATUS_ERROR_INVALID_CONTEXT;

   if (!context->decoder) {
      if (context->templat.profile != PIPE_VIDEO_PROFILE_UNKNOWN)
         return VA_STATUS_ERROR_INVALID_CONTEXT;

      /* VPP */
      return VA_STATUS_SUCCESS;
   }

   mtx_lock(&drv->mutex);
   surf = handle_table_get(drv->htab, context->target_id);
   context->mpeg4.frame_num++;

   screen = context->decoder->context->screen;
   supported = screen->get_video_param(screen, context->decoder->profile,
                                       context->decoder->entrypoint,
                                       surf->buffer->interlaced ?
                                       PIPE_VIDEO_CAP_SUPPORTS_INTERLACED :
                                       PIPE_VIDEO_CAP_SUPPORTS_PROGRESSIVE);

   if (!supported) {
      surf->templat.interlaced = screen->get_video_param(screen,
                                       context->decoder->profile,
                                       context->decoder->entrypoint,
                                       PIPE_VIDEO_CAP_PREFERS_INTERLACED);
      realloc = true;
   }

   format = screen->get_video_param(screen, context->decoder->profile,
                                    PIPE_VIDEO_ENTRYPOINT_BITSTREAM,
                                    PIPE_VIDEO_CAP_PREFERED_FORMAT);

   if (surf->buffer->buffer_format != format &&
       surf->buffer->buffer_format == PIPE_FORMAT_NV12) {
      /* check originally as NV12 only */
      surf->templat.buffer_format = format;
      realloc = true;
   }

   if (u_reduce_video_profile(context->templat.profile) == PIPE_VIDEO_FORMAT_JPEG &&
       surf->buffer->buffer_format == PIPE_FORMAT_NV12) {
      if (context->mjpeg.sampling_factor == 0x211111 ||
          context->mjpeg.sampling_factor == 0x221212) {
         surf->templat.buffer_format = PIPE_FORMAT_YUYV;
         realloc = true;
      } else if (context->mjpeg.sampling_factor != 0x221111) {
         /* Not NV12 either */
         mtx_unlock(&drv->mutex);
         return VA_STATUS_ERROR_INVALID_SURFACE;
      }
   }

   if (realloc) {
      struct pipe_video_buffer *old_buf = surf->buffer;

      if (vlVaHandleSurfaceAllocate(drv, surf, &surf->templat) != VA_STATUS_SUCCESS) {
         mtx_unlock(&drv->mutex);
         return VA_STATUS_ERROR_ALLOCATION_FAILED;
      }

      if (context->decoder->entrypoint == PIPE_VIDEO_ENTRYPOINT_ENCODE) {
         if (old_buf->interlaced) {
            struct u_rect src_rect, dst_rect;

            dst_rect.x0 = src_rect.x0 = 0;
            dst_rect.y0 = src_rect.y0 = 0;
            dst_rect.x1 = src_rect.x1 = surf->templat.width;
            dst_rect.y1 = src_rect.y1 = surf->templat.height;
            vl_compositor_yuv_deint_full(&drv->cstate, &drv->compositor,
                                         old_buf, surf->buffer,
                                         &src_rect, &dst_rect, VL_COMPOSITOR_WEAVE);
         } else {
            /* Can't convert from progressive to interlaced yet */
            mtx_unlock(&drv->mutex);
            return VA_STATUS_ERROR_INVALID_SURFACE;
         }
      }

      old_buf->destroy(old_buf);
      context->target = surf->buffer;
   }

   if (context->decoder->entrypoint == PIPE_VIDEO_ENTRYPOINT_ENCODE) {
      coded_buf = context->coded_buf;
      getEncParamPreset(context);
      context->desc.h264enc.frame_num_cnt++;
      context->decoder->begin_frame(context->decoder, context->target, &context->desc.base);
      context->decoder->encode_bitstream(context->decoder, context->target,
                                         coded_buf->derived_surface.resource, &feedback);
      surf->feedback = feedback;
      surf->coded_buf = coded_buf;
   }

   context->decoder->end_frame(context->decoder, context->target, &context->desc.base);
   if (context->decoder->entrypoint == PIPE_VIDEO_ENTRYPOINT_ENCODE) {
      int idr_period = context->desc.h264enc.gop_size / context->gop_coeff;
      int p_remain_in_idr = idr_period - context->desc.h264enc.frame_num;
      surf->frame_num_cnt = context->desc.h264enc.frame_num_cnt;
      surf->force_flushed = false;
      if (context->first_single_submitted) {
         context->decoder->flush(context->decoder);
         context->first_single_submitted = false;
         surf->force_flushed = true;
      }
      if (p_remain_in_idr == 1) {
         if ((context->desc.h264enc.frame_num_cnt % 2) != 0) {
            context->decoder->flush(context->decoder);
            context->first_single_submitted = true;
         }
         else
            context->first_single_submitted = false;
         surf->force_flushed = true;
      }
   }
   mtx_unlock(&drv->mutex);
   return VA_STATUS_SUCCESS;
}