1 /*
2 * This file is part of FFmpeg.
3 *
4 * FFmpeg is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Lesser General Public
6 * License as published by the Free Software Foundation; either
7 * version 2.1 of the License, or (at your option) any later version.
8 *
9 * FFmpeg is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Lesser General Public License for more details.
13 *
14 * You should have received a copy of the GNU Lesser General Public
15 * License along with FFmpeg; if not, write to the Free Software
16 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 */
18
19 #include <stdint.h>
20 #include <string.h>
21
22 #include <mfx/mfxvideo.h>
23
24 #include "config.h"
25
26 #if HAVE_PTHREADS
27 #include <pthread.h>
28 #endif
29
30 #if CONFIG_VAAPI
31 #include "hwcontext_vaapi.h"
32 #endif
33 #if CONFIG_DXVA2
34 #include "hwcontext_dxva2.h"
35 #endif
36
37 #include "buffer.h"
38 #include "common.h"
39 #include "hwcontext.h"
40 #include "hwcontext_internal.h"
41 #include "hwcontext_qsv.h"
42 #include "mem.h"
43 #include "pixfmt.h"
44 #include "pixdesc.h"
45 #include "time.h"
46
47 #define QSV_VERSION_ATLEAST(MAJOR, MINOR) \
48 (MFX_VERSION_MAJOR > (MAJOR) || \
49 MFX_VERSION_MAJOR == (MAJOR) && MFX_VERSION_MINOR >= (MINOR))
50
51 typedef struct QSVDevicePriv {
52 AVBufferRef *child_device_ctx;
53 } QSVDevicePriv;
54
55 typedef struct QSVDeviceContext {
56 mfxHDL handle;
57 mfxHandleType handle_type;
58 mfxVersion ver;
59 mfxIMPL impl;
60
61 enum AVHWDeviceType child_device_type;
62 enum AVPixelFormat child_pix_fmt;
63 } QSVDeviceContext;
64
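/*
 * Per-frames-context state: the download/upload VPP sessions are created
 * lazily on the first transfer (guarded by session_lock/session_cond when
 * pthreads are available), child_frames_ref holds the VAAPI/DXVA2 frames
 * backing non-opaque surfaces, and mem_ids/surface_ptrs are the arrays
 * handed to libmfx for the frame allocator and opaque allocation paths.
 */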
65 typedef struct QSVFramesContext {
66 mfxSession session_download;
67 int session_download_init;
68 mfxSession session_upload;
69 int session_upload_init;
70 #if HAVE_PTHREADS
71 pthread_mutex_t session_lock;
72 pthread_cond_t session_cond;
73 #endif
74
75 AVBufferRef *child_frames_ref;
76 mfxFrameSurface1 *surfaces_internal;
77 int nb_surfaces_used;
78
79 // used in the frame allocator for non-opaque surfaces
80 mfxMemId *mem_ids;
81 // used in the opaque alloc request for opaque surfaces
82 mfxFrameSurface1 **surface_ptrs;
83
84 mfxExtOpaqueSurfaceAlloc opaque_alloc;
85 mfxExtBuffer *ext_buffers[1];
86 } QSVFramesContext;
87
88 static const struct {
89 mfxHandleType handle_type;
90 enum AVHWDeviceType device_type;
91 enum AVPixelFormat pix_fmt;
92 } supported_handle_types[] = {
93 #if CONFIG_VAAPI
94 { MFX_HANDLE_VA_DISPLAY, AV_HWDEVICE_TYPE_VAAPI, AV_PIX_FMT_VAAPI },
95 #endif
96 #if CONFIG_DXVA2
97 { MFX_HANDLE_D3D9_DEVICE_MANAGER, AV_HWDEVICE_TYPE_DXVA2, AV_PIX_FMT_DXVA2_VLD },
98 #endif
99 { 0 },
100 };
101
102 static const struct {
103 enum AVPixelFormat pix_fmt;
104 uint32_t fourcc;
105 } supported_pixel_formats[] = {
106 { AV_PIX_FMT_NV12, MFX_FOURCC_NV12 },
107 { AV_PIX_FMT_BGRA, MFX_FOURCC_RGB4 },
108 { AV_PIX_FMT_P010, MFX_FOURCC_P010 },
109 { AV_PIX_FMT_PAL8, MFX_FOURCC_P8 },
110 #if CONFIG_VAAPI
111 { AV_PIX_FMT_YUYV422,
112 MFX_FOURCC_YUY2 },
113 #if QSV_VERSION_ATLEAST(1, 27)
114 { AV_PIX_FMT_Y210,
115 MFX_FOURCC_Y210 },
116 #endif
117 #endif
118 };
119
120 static uint32_t qsv_fourcc_from_pix_fmt(enum AVPixelFormat pix_fmt)
121 {
122 int i;
123 for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++) {
124 if (supported_pixel_formats[i].pix_fmt == pix_fmt)
125 return supported_pixel_formats[i].fourcc;
126 }
127 return 0;
128 }
129
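/*
 * Probe the user-supplied MFX session: fetch the native device handle
 * (VADisplay or IDirect3DDeviceManager9) so a child device/frames context
 * can be created later, and record the implementation and API version used
 * for the internal upload/download sessions.
 */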
130 static int qsv_device_init(AVHWDeviceContext *ctx)
131 {
132 AVQSVDeviceContext *hwctx = ctx->hwctx;
133 QSVDeviceContext *s = ctx->internal->priv;
134
135 mfxStatus err;
136 int i;
137
138 for (i = 0; supported_handle_types[i].handle_type; i++) {
139 err = MFXVideoCORE_GetHandle(hwctx->session, supported_handle_types[i].handle_type,
140 &s->handle);
141 if (err == MFX_ERR_NONE) {
142 s->handle_type = supported_handle_types[i].handle_type;
143 s->child_device_type = supported_handle_types[i].device_type;
144 s->child_pix_fmt = supported_handle_types[i].pix_fmt;
145 break;
146 }
147 }
148 if (!s->handle) {
149 av_log(ctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
150 "from the session\n");
151 }
152
153 err = MFXQueryIMPL(hwctx->session, &s->impl);
154 if (err == MFX_ERR_NONE)
155 err = MFXQueryVersion(hwctx->session, &s->ver);
156 if (err != MFX_ERR_NONE) {
157 av_log(ctx, AV_LOG_ERROR, "Error querying the session attributes\n");
158 return AVERROR_UNKNOWN;
159 }
160
161 return 0;
162 }
163
164 static void qsv_frames_uninit(AVHWFramesContext *ctx)
165 {
166 QSVFramesContext *s = ctx->internal->priv;
167
168 if (s->session_download) {
169 MFXVideoVPP_Close(s->session_download);
170 MFXClose(s->session_download);
171 }
172 s->session_download = NULL;
173 s->session_download_init = 0;
174
175 if (s->session_upload) {
176 MFXVideoVPP_Close(s->session_upload);
177 MFXClose(s->session_upload);
178 }
179 s->session_upload = NULL;
180 s->session_upload_init = 0;
181
182 #if HAVE_PTHREADS
183 pthread_mutex_destroy(&s->session_lock);
184 pthread_cond_destroy(&s->session_cond);
185 #endif
186
187 av_freep(&s->mem_ids);
188 av_freep(&s->surface_ptrs);
189 av_freep(&s->surfaces_internal);
190 av_buffer_unref(&s->child_frames_ref);
191 }
192
193 static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
194 {
195 }
196
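/*
 * Internal pool allocator: hands out the preallocated mfxFrameSurface1
 * entries from surfaces_internal one by one. The buffers use a dummy free
 * callback because the surfaces are owned by the frames context and are
 * released in qsv_frames_uninit(), not per buffer.
 */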
197 static AVBufferRef *qsv_pool_alloc(void *opaque, buffer_size_t size)
198 {
199 AVHWFramesContext *ctx = (AVHWFramesContext*)opaque;
200 QSVFramesContext *s = ctx->internal->priv;
201 AVQSVFramesContext *hwctx = ctx->hwctx;
202
203 if (s->nb_surfaces_used < hwctx->nb_surfaces) {
204 s->nb_surfaces_used++;
205 return av_buffer_create((uint8_t*)(s->surfaces_internal + s->nb_surfaces_used - 1),
206 sizeof(*hwctx->surfaces), qsv_pool_release_dummy, NULL, 0);
207 }
208
209 return NULL;
210 }
211
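/*
 * For non-opaque surfaces, build a child VAAPI/DXVA2 device and frames
 * context on top of the handle retrieved in qsv_device_init(), then point
 * each internal mfxFrameSurface1's Data.MemId at the corresponding child
 * surface. The child pool is what actually owns the video memory.
 */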
212 static int qsv_init_child_ctx(AVHWFramesContext *ctx)
213 {
214 AVQSVFramesContext *hwctx = ctx->hwctx;
215 QSVFramesContext *s = ctx->internal->priv;
216 QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;
217
218 AVBufferRef *child_device_ref = NULL;
219 AVBufferRef *child_frames_ref = NULL;
220
221 AVHWDeviceContext *child_device_ctx;
222 AVHWFramesContext *child_frames_ctx;
223
224 int i, ret = 0;
225
226 if (!device_priv->handle) {
227 av_log(ctx, AV_LOG_ERROR,
228 "Cannot create a non-opaque internal surface pool without "
229 "a hardware handle\n");
230 return AVERROR(EINVAL);
231 }
232
233 child_device_ref = av_hwdevice_ctx_alloc(device_priv->child_device_type);
234 if (!child_device_ref)
235 return AVERROR(ENOMEM);
236 child_device_ctx = (AVHWDeviceContext*)child_device_ref->data;
237
238 #if CONFIG_VAAPI
239 if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
240 AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
241 child_device_hwctx->display = (VADisplay)device_priv->handle;
242 }
243 #endif
244 #if CONFIG_DXVA2
245 if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
246 AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
247 child_device_hwctx->devmgr = (IDirect3DDeviceManager9*)device_priv->handle;
248 }
249 #endif
250
251 ret = av_hwdevice_ctx_init(child_device_ref);
252 if (ret < 0) {
253 av_log(ctx, AV_LOG_ERROR, "Error initializing a child device context\n");
254 goto fail;
255 }
256
257 child_frames_ref = av_hwframe_ctx_alloc(child_device_ref);
258 if (!child_frames_ref) {
259 ret = AVERROR(ENOMEM);
260 goto fail;
261 }
262 child_frames_ctx = (AVHWFramesContext*)child_frames_ref->data;
263
264 child_frames_ctx->format = device_priv->child_pix_fmt;
265 child_frames_ctx->sw_format = ctx->sw_format;
266 child_frames_ctx->initial_pool_size = ctx->initial_pool_size;
267 child_frames_ctx->width = FFALIGN(ctx->width, 16);
268 child_frames_ctx->height = FFALIGN(ctx->height, 16);
269
270 #if CONFIG_DXVA2
271 if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
272 AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
273 if (hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)
274 child_frames_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
275 else
276 child_frames_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
277 }
278 #endif
279
280 ret = av_hwframe_ctx_init(child_frames_ref);
281 if (ret < 0) {
282 av_log(ctx, AV_LOG_ERROR, "Error initializing a child frames context\n");
283 goto fail;
284 }
285
286 #if CONFIG_VAAPI
287 if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
288 AVVAAPIFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
289 for (i = 0; i < ctx->initial_pool_size; i++)
290 s->surfaces_internal[i].Data.MemId = child_frames_hwctx->surface_ids + i;
291 hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
292 }
293 #endif
294 #if CONFIG_DXVA2
295 if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
296 AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
297 for (i = 0; i < ctx->initial_pool_size; i++)
298 s->surfaces_internal[i].Data.MemId = (mfxMemId)child_frames_hwctx->surfaces[i];
299 if (child_frames_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
300 hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
301 else
302 hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
303 }
304 #endif
305
306 s->child_frames_ref = child_frames_ref;
307 child_frames_ref = NULL;
308
309 fail:
310 av_buffer_unref(&child_device_ref);
311 av_buffer_unref(&child_frames_ref);
312 return ret;
313 }
314
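/*
 * Fill an mfxFrameInfo from the frames context software format: bit depth
 * and Shift come from the pixel format descriptor, the chroma format from
 * the subsampling factors, and Width/Height are 16-aligned with the real
 * dimensions stored in CropW/CropH.
 */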
315 static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf)
316 {
317 const AVPixFmtDescriptor *desc;
318 uint32_t fourcc;
319
320 desc = av_pix_fmt_desc_get(ctx->sw_format);
321 if (!desc)
322 return AVERROR(EINVAL);
323
324 fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
325 if (!fourcc)
326 return AVERROR(EINVAL);
327
328 surf->Info.BitDepthLuma = desc->comp[0].depth;
329 surf->Info.BitDepthChroma = desc->comp[0].depth;
330 surf->Info.Shift = desc->comp[0].depth > 8;
331
332 if (desc->log2_chroma_w && desc->log2_chroma_h)
333 surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
334 else if (desc->log2_chroma_w)
335 surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV422;
336 else
337 surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV444;
338
339 surf->Info.FourCC = fourcc;
340 surf->Info.Width = FFALIGN(ctx->width, 16);
341 surf->Info.CropW = ctx->width;
342 surf->Info.Height = FFALIGN(ctx->height, 16);
343 surf->Info.CropH = ctx->height;
344 surf->Info.FrameRateExtN = 25;
345 surf->Info.FrameRateExtD = 1;
346 surf->Info.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
347
348 return 0;
349 }
350
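/*
 * Create the internal, fixed-size surface pool: QSV cannot grow the pool
 * dynamically, so initial_pool_size must be set by the caller. Opaque
 * pools only need the surface array; non-opaque pools additionally get a
 * child frames context providing the underlying video memory.
 */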
351 static int qsv_init_pool(AVHWFramesContext *ctx, uint32_t fourcc)
352 {
353 QSVFramesContext *s = ctx->internal->priv;
354 AVQSVFramesContext *frames_hwctx = ctx->hwctx;
355
356 int i, ret = 0;
357
358 if (ctx->initial_pool_size <= 0) {
359 av_log(ctx, AV_LOG_ERROR, "QSV requires a fixed frame pool size\n");
360 return AVERROR(EINVAL);
361 }
362
363 s->surfaces_internal = av_mallocz_array(ctx->initial_pool_size,
364 sizeof(*s->surfaces_internal));
365 if (!s->surfaces_internal)
366 return AVERROR(ENOMEM);
367
368 for (i = 0; i < ctx->initial_pool_size; i++) {
369 ret = qsv_init_surface(ctx, &s->surfaces_internal[i]);
370 if (ret < 0)
371 return ret;
372 }
373
374 if (!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)) {
375 ret = qsv_init_child_ctx(ctx);
376 if (ret < 0)
377 return ret;
378 }
379
380 ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(mfxFrameSurface1),
381 ctx, qsv_pool_alloc, NULL);
382 if (!ctx->internal->pool_internal)
383 return AVERROR(ENOMEM);
384
385 frames_hwctx->surfaces = s->surfaces_internal;
386 frames_hwctx->nb_surfaces = ctx->initial_pool_size;
387
388 return 0;
389 }
390
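/*
 * Minimal mfxFrameAllocator used by the internal VPP sessions: Alloc()
 * validates the request against the existing surfaces and returns the
 * preregistered mem_ids, Free() is a no-op, Lock()/Unlock() are
 * unsupported (the surfaces live in video memory) and GetHDL() returns
 * the MemId itself as the native handle.
 */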
391 static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
392 mfxFrameAllocResponse *resp)
393 {
394 AVHWFramesContext *ctx = pthis;
395 QSVFramesContext *s = ctx->internal->priv;
396 AVQSVFramesContext *hwctx = ctx->hwctx;
397 mfxFrameInfo *i = &req->Info;
398 mfxFrameInfo *i1 = &hwctx->surfaces[0].Info;
399
400 if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
401 !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
402 !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
403 return MFX_ERR_UNSUPPORTED;
404 if (i->Width > i1->Width || i->Height > i1->Height ||
405 i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
406 av_log(ctx, AV_LOG_ERROR, "Mismatching surface properties in an "
407 "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
408 i->Width, i->Height, i->FourCC, i->ChromaFormat,
409 i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
410 return MFX_ERR_UNSUPPORTED;
411 }
412
413 resp->mids = s->mem_ids;
414 resp->NumFrameActual = hwctx->nb_surfaces;
415
416 return MFX_ERR_NONE;
417 }
418
419 static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
420 {
421 return MFX_ERR_NONE;
422 }
423
424 static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
425 {
426 return MFX_ERR_UNSUPPORTED;
427 }
428
429 static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
430 {
431 return MFX_ERR_UNSUPPORTED;
432 }
433
434 static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
435 {
436 *hdl = mid;
437 return MFX_ERR_NONE;
438 }
439
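/*
 * Open a helper MFX session with a single VPP component that copies
 * between system memory and the pool's video (or opaque) memory; this is
 * what implements surface upload and download. Failure here is not fatal,
 * it only disables the transfer functions for this frames context.
 */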
440 static int qsv_init_internal_session(AVHWFramesContext *ctx,
441 mfxSession *session, int upload)
442 {
443 QSVFramesContext *s = ctx->internal->priv;
444 AVQSVFramesContext *frames_hwctx = ctx->hwctx;
445 QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;
446 int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);
447
448 mfxFrameAllocator frame_allocator = {
449 .pthis = ctx,
450 .Alloc = frame_alloc,
451 .Lock = frame_lock,
452 .Unlock = frame_unlock,
453 .GetHDL = frame_get_hdl,
454 .Free = frame_free,
455 };
456
457 mfxVideoParam par;
458 mfxStatus err;
459
460 err = MFXInit(device_priv->impl, &device_priv->ver, session);
461 if (err != MFX_ERR_NONE) {
462 av_log(ctx, AV_LOG_ERROR, "Error initializing an internal session\n");
463 return AVERROR_UNKNOWN;
464 }
465
466 if (device_priv->handle) {
467 err = MFXVideoCORE_SetHandle(*session, device_priv->handle_type,
468 device_priv->handle);
469 if (err != MFX_ERR_NONE)
470 return AVERROR_UNKNOWN;
471 }
472
473 if (!opaque) {
474 err = MFXVideoCORE_SetFrameAllocator(*session, &frame_allocator);
475 if (err != MFX_ERR_NONE)
476 return AVERROR_UNKNOWN;
477 }
478
479 memset(&par, 0, sizeof(par));
480
481 if (opaque) {
482 par.ExtParam = s->ext_buffers;
483 par.NumExtParam = FF_ARRAY_ELEMS(s->ext_buffers);
484 par.IOPattern = upload ? MFX_IOPATTERN_OUT_OPAQUE_MEMORY :
485 MFX_IOPATTERN_IN_OPAQUE_MEMORY;
486 } else {
487 par.IOPattern = upload ? MFX_IOPATTERN_OUT_VIDEO_MEMORY :
488 MFX_IOPATTERN_IN_VIDEO_MEMORY;
489 }
490
491 par.IOPattern |= upload ? MFX_IOPATTERN_IN_SYSTEM_MEMORY :
492 MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
493 par.AsyncDepth = 1;
494
495 par.vpp.In = frames_hwctx->surfaces[0].Info;
496
497 /* Apparently VPP requires the frame rate to be set to some value, otherwise
498 * init will fail (probably for the framerate conversion filter). Since we
499 * are only doing data upload/download here, we just invent an arbitrary
500 * value */
501 par.vpp.In.FrameRateExtN = 25;
502 par.vpp.In.FrameRateExtD = 1;
503 par.vpp.Out = par.vpp.In;
504
505 err = MFXVideoVPP_Init(*session, &par);
506 if (err != MFX_ERR_NONE) {
507 av_log(ctx, AV_LOG_VERBOSE, "Error opening the internal VPP session. "
508 "Surface upload/download will not be possible\n");
509 MFXClose(*session);
510 *session = NULL;
511 }
512
513 return 0;
514 }
515
516 static int qsv_frames_init(AVHWFramesContext *ctx)
517 {
518 QSVFramesContext *s = ctx->internal->priv;
519 AVQSVFramesContext *frames_hwctx = ctx->hwctx;
520
521 int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);
522
523 uint32_t fourcc;
524 int i, ret;
525
526 fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
527 if (!fourcc) {
528 av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format\n");
529 return AVERROR(ENOSYS);
530 }
531
532 if (!ctx->pool) {
533 ret = qsv_init_pool(ctx, fourcc);
534 if (ret < 0) {
535 av_log(ctx, AV_LOG_ERROR, "Error creating an internal frame pool\n");
536 return ret;
537 }
538 }
539
540 if (opaque) {
541 s->surface_ptrs = av_mallocz_array(frames_hwctx->nb_surfaces,
542 sizeof(*s->surface_ptrs));
543 if (!s->surface_ptrs)
544 return AVERROR(ENOMEM);
545
546 for (i = 0; i < frames_hwctx->nb_surfaces; i++)
547 s->surface_ptrs[i] = frames_hwctx->surfaces + i;
548
549 s->opaque_alloc.In.Surfaces = s->surface_ptrs;
550 s->opaque_alloc.In.NumSurface = frames_hwctx->nb_surfaces;
551 s->opaque_alloc.In.Type = frames_hwctx->frame_type;
552
553 s->opaque_alloc.Out = s->opaque_alloc.In;
554
555 s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
556 s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);
557
558 s->ext_buffers[0] = (mfxExtBuffer*)&s->opaque_alloc;
559 } else {
560 s->mem_ids = av_mallocz_array(frames_hwctx->nb_surfaces, sizeof(*s->mem_ids));
561 if (!s->mem_ids)
562 return AVERROR(ENOMEM);
563
564 for (i = 0; i < frames_hwctx->nb_surfaces; i++)
565 s->mem_ids[i] = frames_hwctx->surfaces[i].Data.MemId;
566 }
567
568 s->session_download = NULL;
569 s->session_upload = NULL;
570
571 s->session_download_init = 0;
572 s->session_upload_init = 0;
573
574 #if HAVE_PTHREADS
575 pthread_mutex_init(&s->session_lock, NULL);
576 pthread_cond_init(&s->session_cond, NULL);
577 #endif
578
579 return 0;
580 }
581
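/*
 * Frame allocation callback: the returned AVFrame carries the
 * mfxFrameSurface1 pointer in data[3], which is the AV_PIX_FMT_QSV
 * convention used by the QSV decoders, encoders and filters.
 *
 * A caller would normally reach this through the public API; roughly
 * (illustrative sketch only, error handling omitted):
 *
 *     AVBufferRef *frames_ref = av_hwframe_ctx_alloc(device_ref);
 *     AVHWFramesContext *frames = (AVHWFramesContext*)frames_ref->data;
 *     frames->format            = AV_PIX_FMT_QSV;
 *     frames->sw_format         = AV_PIX_FMT_NV12;
 *     frames->width             = 1920;
 *     frames->height            = 1080;
 *     frames->initial_pool_size = 32;
 *     av_hwframe_ctx_init(frames_ref);
 *     av_hwframe_get_buffer(frames_ref, frame, 0);
 */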
582 static int qsv_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
583 {
584 frame->buf[0] = av_buffer_pool_get(ctx->pool);
585 if (!frame->buf[0])
586 return AVERROR(ENOMEM);
587
588 frame->data[3] = frame->buf[0]->data;
589 frame->format = AV_PIX_FMT_QSV;
590 frame->width = ctx->width;
591 frame->height = ctx->height;
592
593 return 0;
594 }
595
596 static int qsv_transfer_get_formats(AVHWFramesContext *ctx,
597 enum AVHWFrameTransferDirection dir,
598 enum AVPixelFormat **formats)
599 {
600 enum AVPixelFormat *fmts;
601
602 fmts = av_malloc_array(2, sizeof(*fmts));
603 if (!fmts)
604 return AVERROR(ENOMEM);
605
606 fmts[0] = ctx->sw_format;
607 fmts[1] = AV_PIX_FMT_NONE;
608
609 *formats = fmts;
610
611 return 0;
612 }
613
614 static int qsv_frames_derive_from(AVHWFramesContext *dst_ctx,
615 AVHWFramesContext *src_ctx, int flags)
616 {
617 AVQSVFramesContext *src_hwctx = src_ctx->hwctx;
618 int i;
619
620 switch (dst_ctx->device_ctx->type) {
621 #if CONFIG_VAAPI
622 case AV_HWDEVICE_TYPE_VAAPI:
623 {
624 AVVAAPIFramesContext *dst_hwctx = dst_ctx->hwctx;
625 dst_hwctx->surface_ids = av_mallocz_array(src_hwctx->nb_surfaces,
626 sizeof(*dst_hwctx->surface_ids));
627 if (!dst_hwctx->surface_ids)
628 return AVERROR(ENOMEM);
629 for (i = 0; i < src_hwctx->nb_surfaces; i++)
630 dst_hwctx->surface_ids[i] =
631 *(VASurfaceID*)src_hwctx->surfaces[i].Data.MemId;
632 dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
633 }
634 break;
635 #endif
636 #if CONFIG_DXVA2
637 case AV_HWDEVICE_TYPE_DXVA2:
638 {
639 AVDXVA2FramesContext *dst_hwctx = dst_ctx->hwctx;
640 dst_hwctx->surfaces = av_mallocz_array(src_hwctx->nb_surfaces,
641 sizeof(*dst_hwctx->surfaces));
642 if (!dst_hwctx->surfaces)
643 return AVERROR(ENOMEM);
644 for (i = 0; i < src_hwctx->nb_surfaces; i++)
645 dst_hwctx->surfaces[i] =
646 (IDirect3DSurface9*)src_hwctx->surfaces[i].Data.MemId;
647 dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
648 if (src_hwctx->frame_type == MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)
649 dst_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
650 else
651 dst_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
652 }
653 break;
654 #endif
655 default:
656 return AVERROR(ENOSYS);
657 }
658
659 return 0;
660 }
661
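/*
 * Map a QSV frame to its child representation. If the destination format
 * matches the child hardware format (VAAPI/DXVA2), only data[3] needs to
 * be rewritten to the underlying surface handle; otherwise a temporary
 * frame referencing the child frames context is used so the child backend
 * can perform the mapping to software memory.
 */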
662 static int qsv_map_from(AVHWFramesContext *ctx,
663 AVFrame *dst, const AVFrame *src, int flags)
664 {
665 QSVFramesContext *s = ctx->internal->priv;
666 mfxFrameSurface1 *surf = (mfxFrameSurface1*)src->data[3];
667 AVHWFramesContext *child_frames_ctx;
668 const AVPixFmtDescriptor *desc;
669 uint8_t *child_data;
670 AVFrame *dummy;
671 int ret = 0;
672
673 if (!s->child_frames_ref)
674 return AVERROR(ENOSYS);
675 child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
676
677 switch (child_frames_ctx->device_ctx->type) {
678 #if CONFIG_VAAPI
679 case AV_HWDEVICE_TYPE_VAAPI:
680 child_data = (uint8_t*)(intptr_t)*(VASurfaceID*)surf->Data.MemId;
681 break;
682 #endif
683 #if CONFIG_DXVA2
684 case AV_HWDEVICE_TYPE_DXVA2:
685 child_data = surf->Data.MemId;
686 break;
687 #endif
688 default:
689 return AVERROR(ENOSYS);
690 }
691
692 if (dst->format == child_frames_ctx->format) {
693 ret = ff_hwframe_map_create(s->child_frames_ref,
694 dst, src, NULL, NULL);
695 if (ret < 0)
696 return ret;
697
698 dst->width = src->width;
699 dst->height = src->height;
700 dst->data[3] = child_data;
701
702 return 0;
703 }
704
705 desc = av_pix_fmt_desc_get(dst->format);
706 if (desc && desc->flags & AV_PIX_FMT_FLAG_HWACCEL) {
707 // This only supports mapping to software.
708 return AVERROR(ENOSYS);
709 }
710
711 dummy = av_frame_alloc();
712 if (!dummy)
713 return AVERROR(ENOMEM);
714
715 dummy->buf[0] = av_buffer_ref(src->buf[0]);
716 dummy->hw_frames_ctx = av_buffer_ref(s->child_frames_ref);
717 if (!dummy->buf[0] || !dummy->hw_frames_ctx)
718 goto fail;
719
720 dummy->format = child_frames_ctx->format;
721 dummy->width = src->width;
722 dummy->height = src->height;
723 dummy->data[3] = child_data;
724
725 ret = av_hwframe_map(dst, dummy, flags);
726
727 fail:
728 av_frame_free(&dummy);
729
730 return ret;
731 }
732
733 static int qsv_transfer_data_child(AVHWFramesContext *ctx, AVFrame *dst,
734 const AVFrame *src)
735 {
736 QSVFramesContext *s = ctx->internal->priv;
737 AVHWFramesContext *child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
738 int download = !!src->hw_frames_ctx;
739 mfxFrameSurface1 *surf = (mfxFrameSurface1*)(download ? src->data[3] : dst->data[3]);
740
741 AVFrame *dummy;
742 int ret;
743
744 dummy = av_frame_alloc();
745 if (!dummy)
746 return AVERROR(ENOMEM);
747
748 dummy->format = child_frames_ctx->format;
749 dummy->width = src->width;
750 dummy->height = src->height;
751 dummy->buf[0] = download ? src->buf[0] : dst->buf[0];
752 dummy->data[3] = surf->Data.MemId;
753 dummy->hw_frames_ctx = s->child_frames_ref;
754
755 ret = download ? av_hwframe_transfer_data(dst, dummy, 0) :
756 av_hwframe_transfer_data(dummy, src, 0);
757
758 dummy->buf[0] = NULL;
759 dummy->data[3] = NULL;
760 dummy->hw_frames_ctx = NULL;
761
762 av_frame_free(&dummy);
763
764 return ret;
765 }
766
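/*
 * Point the mfxFrameData plane pointers at the AVFrame planes for the
 * supported software formats; Pitch is taken from the first plane's
 * linesize and the frame's pts is stored as the surface timestamp.
 */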
767 static int map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
768 {
769 switch (frame->format) {
770 case AV_PIX_FMT_NV12:
771 case AV_PIX_FMT_P010:
772 surface->Data.Y = frame->data[0];
773 surface->Data.UV = frame->data[1];
774 break;
775
776 case AV_PIX_FMT_YUV420P:
777 surface->Data.Y = frame->data[0];
778 surface->Data.U = frame->data[1];
779 surface->Data.V = frame->data[2];
780 break;
781
782 case AV_PIX_FMT_BGRA:
783 surface->Data.B = frame->data[0];
784 surface->Data.G = frame->data[0] + 1;
785 surface->Data.R = frame->data[0] + 2;
786 surface->Data.A = frame->data[0] + 3;
787 break;
788 #if CONFIG_VAAPI
789 case AV_PIX_FMT_YUYV422:
790 surface->Data.Y = frame->data[0];
791 surface->Data.U = frame->data[0] + 1;
792 surface->Data.V = frame->data[0] + 3;
793 break;
794
795 case AV_PIX_FMT_Y210:
796 surface->Data.Y16 = (mfxU16 *)frame->data[0];
797 surface->Data.U16 = (mfxU16 *)frame->data[0] + 1;
798 surface->Data.V16 = (mfxU16 *)frame->data[0] + 3;
799 break;
800 #endif
801 default:
802 return MFX_ERR_UNSUPPORTED;
803 }
804 surface->Data.Pitch = frame->linesize[0];
805 surface->Data.TimeStamp = frame->pts;
806
807 return 0;
808 }
809
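/*
 * Download a QSV surface to system memory. The download VPP session is
 * created on first use (with the session_lock/session_cond pair making the
 * initialization safe against concurrent transfers); if no session could
 * be created, the transfer falls back to the child frames context.
 */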
810 static int qsv_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,
811 const AVFrame *src)
812 {
813 QSVFramesContext *s = ctx->internal->priv;
814 mfxFrameSurface1 out = {{ 0 }};
815 mfxFrameSurface1 *in = (mfxFrameSurface1*)src->data[3];
816
817 mfxSyncPoint sync = NULL;
818 mfxStatus err;
819 int ret = 0;
820
821 while (!s->session_download_init && !s->session_download && !ret) {
822 #if HAVE_PTHREADS
823 if (pthread_mutex_trylock(&s->session_lock) == 0) {
824 #endif
825 if (!s->session_download_init) {
826 ret = qsv_init_internal_session(ctx, &s->session_download, 0);
827 if (s->session_download)
828 s->session_download_init = 1;
829 }
830 #if HAVE_PTHREADS
831 pthread_mutex_unlock(&s->session_lock);
832 pthread_cond_signal(&s->session_cond);
833 } else {
834 pthread_mutex_lock(&s->session_lock);
835 while (!s->session_download_init && !s->session_download) {
836 pthread_cond_wait(&s->session_cond, &s->session_lock);
837 }
838 pthread_mutex_unlock(&s->session_lock);
839 }
840 #endif
841 }
842
843 if (ret < 0)
844 return ret;
845
846 if (!s->session_download) {
847 if (s->child_frames_ref)
848 return qsv_transfer_data_child(ctx, dst, src);
849
850 av_log(ctx, AV_LOG_ERROR, "Surface download not possible\n");
851 return AVERROR(ENOSYS);
852 }
853
854 out.Info = in->Info;
855 map_frame_to_surface(dst, &out);
856
857 do {
858 err = MFXVideoVPP_RunFrameVPPAsync(s->session_download, in, &out, NULL, &sync);
859 if (err == MFX_WRN_DEVICE_BUSY)
860 av_usleep(1);
861 } while (err == MFX_WRN_DEVICE_BUSY);
862
863 if (err < 0 || !sync) {
864 av_log(ctx, AV_LOG_ERROR, "Error downloading the surface\n");
865 return AVERROR_UNKNOWN;
866 }
867
868 do {
869 err = MFXVideoCORE_SyncOperation(s->session_download, sync, 1000);
870 } while (err == MFX_WRN_IN_EXECUTION);
871 if (err < 0) {
872 av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation: %d\n", err);
873 return AVERROR_UNKNOWN;
874 }
875
876 return 0;
877 }
878
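/*
 * Upload a system-memory frame into a QSV surface. libmfx expects 16-pixel
 * alignment, so sources whose height or pitch is not aligned are first
 * copied into a padded temporary frame before being fed to the VPP session
 * (or to the child context fallback).
 */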
879 static int qsv_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
880 const AVFrame *src)
881 {
882 QSVFramesContext *s = ctx->internal->priv;
883 mfxFrameSurface1 in = {{ 0 }};
884 mfxFrameSurface1 *out = (mfxFrameSurface1*)dst->data[3];
885
886 mfxSyncPoint sync = NULL;
887 mfxStatus err;
888 int ret = 0;
889 /* make a copy if the input is not padded as libmfx requires */
890 AVFrame tmp_frame;
891 const AVFrame *src_frame;
892 int realigned = 0;
893
894
895 while (!s->session_upload_init && !s->session_upload && !ret) {
896 #if HAVE_PTHREADS
897 if (pthread_mutex_trylock(&s->session_lock) == 0) {
898 #endif
899 if (!s->session_upload_init) {
900 ret = qsv_init_internal_session(ctx, &s->session_upload, 1);
901 if (s->session_upload)
902 s->session_upload_init = 1;
903 }
904 #if HAVE_PTHREADS
905 pthread_mutex_unlock(&s->session_lock);
906 pthread_cond_signal(&s->session_cond);
907 } else {
908 pthread_mutex_lock(&s->session_lock);
909 while (!s->session_upload_init && !s->session_upload) {
910 pthread_cond_wait(&s->session_cond, &s->session_lock);
911 }
912 pthread_mutex_unlock(&s->session_lock);
913 }
914 #endif
915 }
916 if (ret < 0)
917 return ret;
918
919 if (src->height & 15 || src->linesize[0] & 15) {
920 realigned = 1;
921 memset(&tmp_frame, 0, sizeof(tmp_frame));
922 tmp_frame.format = src->format;
923 tmp_frame.width = FFALIGN(src->width, 16);
924 tmp_frame.height = FFALIGN(src->height, 16);
925 ret = av_frame_get_buffer(&tmp_frame, 0);
926 if (ret < 0)
927 return ret;
928
929 ret = av_frame_copy(&tmp_frame, src);
930 if (ret < 0) {
931 av_frame_unref(&tmp_frame);
932 return ret;
933 }
934 }
935
936 src_frame = realigned ? &tmp_frame : src;
937
938 if (!s->session_upload) {
939 if (s->child_frames_ref)
940 return qsv_transfer_data_child(ctx, dst, src_frame);
941
942 av_log(ctx, AV_LOG_ERROR, "Surface upload not possible\n");
943 return AVERROR(ENOSYS);
944 }
945
946 in.Info = out->Info;
947 map_frame_to_surface(src_frame, &in);
948
949 do {
950 err = MFXVideoVPP_RunFrameVPPAsync(s->session_upload, &in, out, NULL, &sync);
951 if (err == MFX_WRN_DEVICE_BUSY)
952 av_usleep(1);
953 } while (err == MFX_WRN_DEVICE_BUSY);
954
955 if (err < 0 || !sync) {
956 av_log(ctx, AV_LOG_ERROR, "Error uploading the surface\n");
957 return AVERROR_UNKNOWN;
958 }
959
960 do {
961 err = MFXVideoCORE_SyncOperation(s->session_upload, sync, 1000);
962 } while (err == MFX_WRN_IN_EXECUTION);
963 if (err < 0) {
964 av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation\n");
965 return AVERROR_UNKNOWN;
966 }
967
968 if (realigned)
969 av_frame_unref(&tmp_frame);
970
971 return 0;
972 }
973
974 static int qsv_frames_derive_to(AVHWFramesContext *dst_ctx,
975 AVHWFramesContext *src_ctx, int flags)
976 {
977 QSVFramesContext *s = dst_ctx->internal->priv;
978 AVQSVFramesContext *dst_hwctx = dst_ctx->hwctx;
979 int i;
980
981 switch (src_ctx->device_ctx->type) {
982 #if CONFIG_VAAPI
983 case AV_HWDEVICE_TYPE_VAAPI:
984 {
985 AVVAAPIFramesContext *src_hwctx = src_ctx->hwctx;
986 s->surfaces_internal = av_mallocz_array(src_hwctx->nb_surfaces,
987 sizeof(*s->surfaces_internal));
988 if (!s->surfaces_internal)
989 return AVERROR(ENOMEM);
990 for (i = 0; i < src_hwctx->nb_surfaces; i++) {
991 qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
992 s->surfaces_internal[i].Data.MemId = src_hwctx->surface_ids + i;
993 }
994 dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
995 dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
996 }
997 break;
998 #endif
999 #if CONFIG_DXVA2
1000 case AV_HWDEVICE_TYPE_DXVA2:
1001 {
1002 AVDXVA2FramesContext *src_hwctx = src_ctx->hwctx;
1003 s->surfaces_internal = av_mallocz_array(src_hwctx->nb_surfaces,
1004 sizeof(*s->surfaces_internal));
1005 if (!s->surfaces_internal)
1006 return AVERROR(ENOMEM);
1007 for (i = 0; i < src_hwctx->nb_surfaces; i++) {
1008 qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
1009 s->surfaces_internal[i].Data.MemId = (mfxMemId)src_hwctx->surfaces[i];
1010 }
1011 dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
1012 if (src_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
1013 dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
1014 else
1015 dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
1016 }
1017 break;
1018 #endif
1019 default:
1020 return AVERROR(ENOSYS);
1021 }
1022
1023 dst_hwctx->surfaces = s->surfaces_internal;
1024
1025 return 0;
1026 }
1027
1028 static int qsv_map_to(AVHWFramesContext *dst_ctx,
1029 AVFrame *dst, const AVFrame *src, int flags)
1030 {
1031 AVQSVFramesContext *hwctx = dst_ctx->hwctx;
1032 int i, err;
1033
1034 for (i = 0; i < hwctx->nb_surfaces; i++) {
1035 #if CONFIG_VAAPI
1036 if (*(VASurfaceID*)hwctx->surfaces[i].Data.MemId ==
1037 (VASurfaceID)(uintptr_t)src->data[3])
1038 break;
1039 #endif
1040 #if CONFIG_DXVA2
1041 if ((IDirect3DSurface9*)hwctx->surfaces[i].Data.MemId ==
1042 (IDirect3DSurface9*)(uintptr_t)src->data[3])
1043 break;
1044 #endif
1045 }
1046 if (i >= hwctx->nb_surfaces) {
1047 av_log(dst_ctx, AV_LOG_ERROR, "Trying to map from a surface which "
1048 "is not in the mapped frames context.\n");
1049 return AVERROR(EINVAL);
1050 }
1051
1052 err = ff_hwframe_map_create(dst->hw_frames_ctx,
1053 dst, src, NULL, NULL);
1054 if (err)
1055 return err;
1056
1057 dst->width = src->width;
1058 dst->height = src->height;
1059 dst->data[3] = (uint8_t*)&hwctx->surfaces[i];
1060
1061 return 0;
1062 }
1063
1064 static int qsv_frames_get_constraints(AVHWDeviceContext *ctx,
1065 const void *hwconfig,
1066 AVHWFramesConstraints *constraints)
1067 {
1068 int i;
1069
1070 constraints->valid_sw_formats = av_malloc_array(FF_ARRAY_ELEMS(supported_pixel_formats) + 1,
1071 sizeof(*constraints->valid_sw_formats));
1072 if (!constraints->valid_sw_formats)
1073 return AVERROR(ENOMEM);
1074
1075 for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++)
1076 constraints->valid_sw_formats[i] = supported_pixel_formats[i].pix_fmt;
1077 constraints->valid_sw_formats[FF_ARRAY_ELEMS(supported_pixel_formats)] = AV_PIX_FMT_NONE;
1078
1079 constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
1080 if (!constraints->valid_hw_formats)
1081 return AVERROR(ENOMEM);
1082
1083 constraints->valid_hw_formats[0] = AV_PIX_FMT_QSV;
1084 constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;
1085
1086 return 0;
1087 }
1088
1089 static void qsv_device_free(AVHWDeviceContext *ctx)
1090 {
1091 AVQSVDeviceContext *hwctx = ctx->hwctx;
1092 QSVDevicePriv *priv = ctx->user_opaque;
1093
1094 if (hwctx->session)
1095 MFXClose(hwctx->session);
1096
1097 av_buffer_unref(&priv->child_device_ctx);
1098 av_freep(&priv);
1099 }
1100
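/*
 * Translate the device string passed to av_hwdevice_ctx_create() into an
 * mfxIMPL value; strings not in the table are parsed with strtol(), so a
 * raw numeric implementation value is also accepted. The default is
 * MFX_IMPL_AUTO_ANY.
 */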
1101 static mfxIMPL choose_implementation(const char *device)
1102 {
1103 static const struct {
1104 const char *name;
1105 mfxIMPL impl;
1106 } impl_map[] = {
1107 { "auto", MFX_IMPL_AUTO },
1108 { "sw", MFX_IMPL_SOFTWARE },
1109 { "hw", MFX_IMPL_HARDWARE },
1110 { "auto_any", MFX_IMPL_AUTO_ANY },
1111 { "hw_any", MFX_IMPL_HARDWARE_ANY },
1112 { "hw2", MFX_IMPL_HARDWARE2 },
1113 { "hw3", MFX_IMPL_HARDWARE3 },
1114 { "hw4", MFX_IMPL_HARDWARE4 },
1115 };
1116
1117 mfxIMPL impl = MFX_IMPL_AUTO_ANY;
1118 int i;
1119
1120 if (device) {
1121 for (i = 0; i < FF_ARRAY_ELEMS(impl_map); i++)
1122 if (!strcmp(device, impl_map[i].name)) {
1123 impl = impl_map[i].impl;
1124 break;
1125 }
1126 if (i == FF_ARRAY_ELEMS(impl_map))
1127 impl = strtol(device, NULL, 0);
1128 }
1129
1130 return impl;
1131 }
1132
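/*
 * Create the actual MFX session on top of an existing VAAPI or DXVA2
 * device: an initial session (requesting API version 1.3) is used only to
 * query the version the implementation really supports, then the session
 * is reopened with that version and the child device handle is attached
 * with MFXVideoCORE_SetHandle().
 */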
1133 static int qsv_device_derive_from_child(AVHWDeviceContext *ctx,
1134 mfxIMPL implementation,
1135 AVHWDeviceContext *child_device_ctx,
1136 int flags)
1137 {
1138 AVQSVDeviceContext *hwctx = ctx->hwctx;
1139
1140 mfxVersion ver = { { 3, 1 } };
1141 mfxHDL handle;
1142 mfxHandleType handle_type;
1143 mfxStatus err;
1144 int ret;
1145
1146 switch (child_device_ctx->type) {
1147 #if CONFIG_VAAPI
1148 case AV_HWDEVICE_TYPE_VAAPI:
1149 {
1150 AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
1151 handle_type = MFX_HANDLE_VA_DISPLAY;
1152 handle = (mfxHDL)child_device_hwctx->display;
1153 }
1154 break;
1155 #endif
1156 #if CONFIG_DXVA2
1157 case AV_HWDEVICE_TYPE_DXVA2:
1158 {
1159 AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
1160 handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
1161 handle = (mfxHDL)child_device_hwctx->devmgr;
1162 }
1163 break;
1164 #endif
1165 default:
1166 ret = AVERROR(ENOSYS);
1167 goto fail;
1168 }
1169
1170 err = MFXInit(implementation, &ver, &hwctx->session);
1171 if (err != MFX_ERR_NONE) {
1172 av_log(ctx, AV_LOG_ERROR, "Error initializing an MFX session: "
1173 "%d.\n", err);
1174 ret = AVERROR_UNKNOWN;
1175 goto fail;
1176 }
1177
1178 err = MFXQueryVersion(hwctx->session, &ver);
1179 if (err != MFX_ERR_NONE) {
1180 av_log(ctx, AV_LOG_ERROR, "Error querying an MFX session: %d.\n", err);
1181 ret = AVERROR_UNKNOWN;
1182 goto fail;
1183 }
1184
1185 av_log(ctx, AV_LOG_VERBOSE,
1186 "Initialize MFX session: API version is %d.%d, implementation version is %d.%d\n",
1187 MFX_VERSION_MAJOR, MFX_VERSION_MINOR, ver.Major, ver.Minor);
1188
1189 MFXClose(hwctx->session);
1190
1191 err = MFXInit(implementation, &ver, &hwctx->session);
1192 if (err != MFX_ERR_NONE) {
1193 av_log(ctx, AV_LOG_ERROR,
1194 "Error initializing an MFX session: %d.\n", err);
1195 ret = AVERROR_UNKNOWN;
1196 goto fail;
1197 }
1198
1199 err = MFXVideoCORE_SetHandle(hwctx->session, handle_type, handle);
1200 if (err != MFX_ERR_NONE) {
1201 av_log(ctx, AV_LOG_ERROR, "Error setting child device handle: "
1202 "%d\n", err);
1203 ret = AVERROR_UNKNOWN;
1204 goto fail;
1205 }
1206
1207 return 0;
1208
1209 fail:
1210 if (hwctx->session)
1211 MFXClose(hwctx->session);
1212 return ret;
1213 }
1214
1215 static int qsv_device_derive(AVHWDeviceContext *ctx,
1216 AVHWDeviceContext *child_device_ctx,
1217 AVDictionary *opts, int flags)
1218 {
1219 return qsv_device_derive_from_child(ctx, MFX_IMPL_HARDWARE_ANY,
1220 child_device_ctx, flags);
1221 }
1222
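/*
 * Standalone device creation: a child VAAPI (preferred) or DXVA2 device is
 * created first and the QSV device is then derived from it. A typical
 * invocation through the public API would look roughly like this
 * (illustrative sketch only, error handling omitted):
 *
 *     AVBufferRef *qsv_device = NULL;
 *     int err = av_hwdevice_ctx_create(&qsv_device, AV_HWDEVICE_TYPE_QSV,
 *                                      "hw", NULL, 0);
 *
 * where "hw" is one of the implementation names understood by
 * choose_implementation() above.
 */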
1223 static int qsv_device_create(AVHWDeviceContext *ctx, const char *device,
1224 AVDictionary *opts, int flags)
1225 {
1226 QSVDevicePriv *priv;
1227 enum AVHWDeviceType child_device_type;
1228 AVHWDeviceContext *child_device;
1229 AVDictionary *child_device_opts;
1230 AVDictionaryEntry *e;
1231
1232 mfxIMPL impl;
1233 int ret;
1234
1235 priv = av_mallocz(sizeof(*priv));
1236 if (!priv)
1237 return AVERROR(ENOMEM);
1238
1239 ctx->user_opaque = priv;
1240 ctx->free = qsv_device_free;
1241
1242 e = av_dict_get(opts, "child_device", NULL, 0);
1243
1244 child_device_opts = NULL;
1245 if (CONFIG_VAAPI) {
1246 child_device_type = AV_HWDEVICE_TYPE_VAAPI;
1247 // libmfx does not actually implement VAAPI properly, rather it
1248 // depends on the specific behaviour of a matching iHD driver when
1249 // used on recent Intel hardware. Set options to the VAAPI device
1250 // creation so that we should pick a usable setup by default if
1251 // possible, even when multiple devices and drivers are available.
1252 av_dict_set(&child_device_opts, "kernel_driver", "i915", 0);
1253 av_dict_set(&child_device_opts, "driver", "iHD", 0);
1254 } else if (CONFIG_DXVA2)
1255 child_device_type = AV_HWDEVICE_TYPE_DXVA2;
1256 else {
1257 av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
1258 return AVERROR(ENOSYS);
1259 }
1260
1261 ret = av_hwdevice_ctx_create(&priv->child_device_ctx, child_device_type,
1262 e ? e->value : NULL, child_device_opts, 0);
1263
1264 av_dict_free(&child_device_opts);
1265 if (ret < 0)
1266 return ret;
1267
1268 child_device = (AVHWDeviceContext*)priv->child_device_ctx->data;
1269
1270 impl = choose_implementation(device);
1271
1272 return qsv_device_derive_from_child(ctx, impl, child_device, 0);
1273 }
1274
1275 const HWContextType ff_hwcontext_type_qsv = {
1276 .type = AV_HWDEVICE_TYPE_QSV,
1277 .name = "QSV",
1278
1279 .device_hwctx_size = sizeof(AVQSVDeviceContext),
1280 .device_priv_size = sizeof(QSVDeviceContext),
1281 .frames_hwctx_size = sizeof(AVQSVFramesContext),
1282 .frames_priv_size = sizeof(QSVFramesContext),
1283
1284 .device_create = qsv_device_create,
1285 .device_derive = qsv_device_derive,
1286 .device_init = qsv_device_init,
1287 .frames_get_constraints = qsv_frames_get_constraints,
1288 .frames_init = qsv_frames_init,
1289 .frames_uninit = qsv_frames_uninit,
1290 .frames_get_buffer = qsv_get_buffer,
1291 .transfer_get_formats = qsv_transfer_get_formats,
1292 .transfer_data_to = qsv_transfer_data_to,
1293 .transfer_data_from = qsv_transfer_data_from,
1294 .map_to = qsv_map_to,
1295 .map_from = qsv_map_from,
1296 .frames_derive_to = qsv_frames_derive_to,
1297 .frames_derive_from = qsv_frames_derive_from,
1298
1299 .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_QSV, AV_PIX_FMT_NONE },
1300 };
1301