/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Intel Quick Sync Video VPP base function
 */

#include "libavutil/common.h"
#include "libavutil/mathematics.h"
#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_qsv.h"
#include "libavutil/time.h"
#include "libavutil/pixdesc.h"

#include "internal.h"
#include "qsvvpp.h"
#include "video.h"

#define IS_VIDEO_MEMORY(mode)  (mode & (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET | \
                                        MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET))
#define IS_OPAQUE_MEMORY(mode) (mode & MFX_MEMTYPE_OPAQUE_FRAME)
#define IS_SYSTEM_MEMORY(mode) (mode & MFX_MEMTYPE_SYSTEM_MEMORY)

typedef struct QSVFrame {
    AVFrame          *frame;
    mfxFrameSurface1 *surface;
    mfxFrameSurface1  surface_internal;  /* for system memory */
    struct QSVFrame  *next;
} QSVFrame;

/* abstract struct for all QSV filters */
struct QSVVPPContext {
    mfxSession          session;
    int (*filter_frame)(AVFilterLink *outlink, AVFrame *frame); /* callback */
    enum AVPixelFormat  out_sw_format;   /* real output format */
    mfxVideoParam       vpp_param;
    mfxFrameInfo       *frame_infos;     /* frame info for each input */

    /* members related to the input/output surfaces */
    int                 in_mem_mode;
    int                 out_mem_mode;
    QSVFrame           *in_frame_list;
    QSVFrame           *out_frame_list;
    int                 nb_surface_ptrs_in;
    int                 nb_surface_ptrs_out;
    mfxFrameSurface1  **surface_ptrs_in;
    mfxFrameSurface1  **surface_ptrs_out;

    /* MFX VPP extension parameters */
    mfxExtOpaqueSurfaceAlloc opaque_alloc;
    mfxExtBuffer      **ext_buffers;
    int                 nb_ext_buffers;
};

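/* device handle types queried from the parent session; the first one found
 * is reused for the VPP session */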
static const mfxHandleType handle_types[] = {
    MFX_HANDLE_VA_DISPLAY,
    MFX_HANDLE_D3D9_DEVICE_MANAGER,
    MFX_HANDLE_D3D11_DEVICE,
};

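/* timestamps passed to libmfx are expressed in this 90kHz timebase */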
static const AVRational default_tb = { 1, 90000 };

static const struct {
    int         mfx_iopattern;
    const char *desc;
} qsv_iopatterns[] = {
    { MFX_IOPATTERN_IN_VIDEO_MEMORY,   "input is video memory surface"   },
    { MFX_IOPATTERN_IN_SYSTEM_MEMORY,  "input is system memory surface"  },
    { MFX_IOPATTERN_IN_OPAQUE_MEMORY,  "input is opaque memory surface"  },
    { MFX_IOPATTERN_OUT_VIDEO_MEMORY,  "output is video memory surface"  },
    { MFX_IOPATTERN_OUT_SYSTEM_MEMORY, "output is system memory surface" },
    { MFX_IOPATTERN_OUT_OPAQUE_MEMORY, "output is opaque memory surface" },
};

int ff_qsvvpp_print_iopattern(void *log_ctx, int mfx_iopattern,
                              const char *extra_string)
{
    const char *desc = NULL;

    for (int i = 0; i < FF_ARRAY_ELEMS(qsv_iopatterns); i++) {
        if (qsv_iopatterns[i].mfx_iopattern == mfx_iopattern) {
            desc = qsv_iopatterns[i].desc;
        }
    }
    if (!desc)
        desc = "unknown iopattern";

    av_log(log_ctx, AV_LOG_VERBOSE, "%s: %s\n", extra_string, desc);
    return 0;
}

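/* mapping of mfxStatus codes to AVERROR codes and human-readable descriptions */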
static const struct {
    mfxStatus   mfxerr;
    int         averr;
    const char *desc;
} qsv_errors[] = {
    { MFX_ERR_NONE,                     0,                "success" },
    { MFX_ERR_UNKNOWN,                  AVERROR_UNKNOWN,  "unknown error" },
    { MFX_ERR_NULL_PTR,                 AVERROR(EINVAL),  "NULL pointer" },
    { MFX_ERR_UNSUPPORTED,              AVERROR(ENOSYS),  "unsupported" },
    { MFX_ERR_MEMORY_ALLOC,             AVERROR(ENOMEM),  "failed to allocate memory" },
    { MFX_ERR_NOT_ENOUGH_BUFFER,        AVERROR(ENOMEM),  "insufficient input/output buffer" },
    { MFX_ERR_INVALID_HANDLE,           AVERROR(EINVAL),  "invalid handle" },
    { MFX_ERR_LOCK_MEMORY,              AVERROR(EIO),     "failed to lock the memory block" },
    { MFX_ERR_NOT_INITIALIZED,          AVERROR_BUG,      "not initialized" },
    { MFX_ERR_NOT_FOUND,                AVERROR(ENOSYS),  "specified object was not found" },
    /* the following 3 errors should always be handled explicitly, so those "mappings"
     * are for completeness only */
    { MFX_ERR_MORE_DATA,                AVERROR_UNKNOWN,  "expect more data at input" },
    { MFX_ERR_MORE_SURFACE,             AVERROR_UNKNOWN,  "expect more surface at output" },
    { MFX_ERR_MORE_BITSTREAM,           AVERROR_UNKNOWN,  "expect more bitstream at output" },
    { MFX_ERR_ABORTED,                  AVERROR_UNKNOWN,  "operation aborted" },
    { MFX_ERR_DEVICE_LOST,              AVERROR(EIO),     "device lost" },
    { MFX_ERR_INCOMPATIBLE_VIDEO_PARAM, AVERROR(EINVAL),  "incompatible video parameters" },
    { MFX_ERR_INVALID_VIDEO_PARAM,      AVERROR(EINVAL),  "invalid video parameters" },
    { MFX_ERR_UNDEFINED_BEHAVIOR,       AVERROR_BUG,      "undefined behavior" },
    { MFX_ERR_DEVICE_FAILED,            AVERROR(EIO),     "device failed" },
    { MFX_ERR_INCOMPATIBLE_AUDIO_PARAM, AVERROR(EINVAL),  "incompatible audio parameters" },
    { MFX_ERR_INVALID_AUDIO_PARAM,      AVERROR(EINVAL),  "invalid audio parameters" },

    { MFX_WRN_IN_EXECUTION,             0,                "operation in execution" },
    { MFX_WRN_DEVICE_BUSY,              0,                "device busy" },
    { MFX_WRN_VIDEO_PARAM_CHANGED,      0,                "video parameters changed" },
    { MFX_WRN_PARTIAL_ACCELERATION,     0,                "partial acceleration" },
    { MFX_WRN_INCOMPATIBLE_VIDEO_PARAM, 0,                "incompatible video parameters" },
    { MFX_WRN_VALUE_NOT_CHANGED,        0,                "value is saturated" },
    { MFX_WRN_OUT_OF_RANGE,             0,                "value out of range" },
    { MFX_WRN_FILTER_SKIPPED,           0,                "filter skipped" },
    { MFX_WRN_INCOMPATIBLE_AUDIO_PARAM, 0,                "incompatible audio parameters" },
};

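/* translate an mfxStatus into the matching AVERROR code and description */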
static int qsv_map_error(mfxStatus mfx_err, const char **desc)
{
    int i;
    for (i = 0; i < FF_ARRAY_ELEMS(qsv_errors); i++) {
        if (qsv_errors[i].mfxerr == mfx_err) {
            if (desc)
                *desc = qsv_errors[i].desc;
            return qsv_errors[i].averr;
        }
    }
    if (desc)
        *desc = "unknown error";
    return AVERROR_UNKNOWN;
}

int ff_qsvvpp_print_error(void *log_ctx, mfxStatus err,
                          const char *error_string)
{
    const char *desc;
    int ret;
    ret = qsv_map_error(err, &desc);
    av_log(log_ctx, AV_LOG_ERROR, "%s: %s (%d)\n", error_string, desc, err);
    return ret;
}

int ff_qsvvpp_print_warning(void *log_ctx, mfxStatus err,
                            const char *warning_string)
{
    const char *desc;
    int ret;
    ret = qsv_map_error(err, &desc);
    av_log(log_ctx, AV_LOG_WARNING, "%s: %s (%d)\n", warning_string, desc, err);
    return ret;
}

/* functions for frameAlloc */
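/* They are registered through MFXVideoCORE_SetFrameAllocator() and simply
 * expose the MemIds of the existing input/output surface pools to libmfx. */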
static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
                             mfxFrameAllocResponse *resp)
{
    QSVVPPContext *s = pthis;
    int i;

    if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
        !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
        !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
        return MFX_ERR_UNSUPPORTED;

    if (req->Type & MFX_MEMTYPE_FROM_VPPIN) {
        resp->mids = av_mallocz(s->nb_surface_ptrs_in * sizeof(*resp->mids));
        if (!resp->mids)
            return MFX_ERR_MEMORY_ALLOC;

        for (i = 0; i < s->nb_surface_ptrs_in; i++)
            resp->mids[i] = s->surface_ptrs_in[i]->Data.MemId;

        resp->NumFrameActual = s->nb_surface_ptrs_in;
    } else {
        resp->mids = av_mallocz(s->nb_surface_ptrs_out * sizeof(*resp->mids));
        if (!resp->mids)
            return MFX_ERR_MEMORY_ALLOC;

        for (i = 0; i < s->nb_surface_ptrs_out; i++)
            resp->mids[i] = s->surface_ptrs_out[i]->Data.MemId;

        resp->NumFrameActual = s->nb_surface_ptrs_out;
    }

    return MFX_ERR_NONE;
}

static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
{
    av_freep(&resp->mids);
    return MFX_ERR_NONE;
}

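/* mapping video memory into system memory is not supported here, so Lock and
 * Unlock are stubs; GetHDL simply passes the MemId through as the handle */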
static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}

static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}

static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
{
    *hdl = mid;
    return MFX_ERR_NONE;
}

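/* map an AVPixelFormat to the corresponding mfx FourCC, defaulting to NV12 */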
static int pix_fmt_to_mfx_fourcc(int format)
{
    switch (format) {
    case AV_PIX_FMT_YUV420P:
        return MFX_FOURCC_YV12;
    case AV_PIX_FMT_NV12:
        return MFX_FOURCC_NV12;
    case AV_PIX_FMT_YUYV422:
        return MFX_FOURCC_YUY2;
    case AV_PIX_FMT_BGRA:
        return MFX_FOURCC_RGB4;
    }

    return MFX_FOURCC_NV12;
}

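/* point the data pointers of an mfxFrameSurface1 at the plane(s) of a
 * system-memory AVFrame */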
static int map_frame_to_surface(AVFrame *frame, mfxFrameSurface1 *surface)
{
    switch (frame->format) {
    case AV_PIX_FMT_NV12:
    case AV_PIX_FMT_P010:
        surface->Data.Y  = frame->data[0];
        surface->Data.UV = frame->data[1];
        break;
    case AV_PIX_FMT_YUV420P:
        surface->Data.Y = frame->data[0];
        surface->Data.U = frame->data[1];
        surface->Data.V = frame->data[2];
        break;
    case AV_PIX_FMT_YUYV422:
        surface->Data.Y = frame->data[0];
        surface->Data.U = frame->data[0] + 1;
        surface->Data.V = frame->data[0] + 3;
        break;
    case AV_PIX_FMT_RGB32:
        surface->Data.B = frame->data[0];
        surface->Data.G = frame->data[0] + 1;
        surface->Data.R = frame->data[0] + 2;
        surface->Data.A = frame->data[0] + 3;
        break;
    default:
        return MFX_ERR_UNSUPPORTED;
    }
    surface->Data.Pitch = frame->linesize[0];

    return 0;
}

/* fill the surface info */
static int fill_frameinfo_by_link(mfxFrameInfo *frameinfo, AVFilterLink *link)
{
    enum AVPixelFormat        pix_fmt;
    AVHWFramesContext        *frames_ctx;
    AVQSVFramesContext       *frames_hwctx;
    const AVPixFmtDescriptor *desc;

    if (link->format == AV_PIX_FMT_QSV) {
        if (!link->hw_frames_ctx)
            return AVERROR(EINVAL);

        frames_ctx   = (AVHWFramesContext *)link->hw_frames_ctx->data;
        frames_hwctx = frames_ctx->hwctx;
        *frameinfo   = frames_hwctx->surfaces[0].Info;
    } else {
        pix_fmt = link->format;
        desc = av_pix_fmt_desc_get(pix_fmt);
        if (!desc)
            return AVERROR_BUG;

        frameinfo->CropX          = 0;
        frameinfo->CropY          = 0;
        frameinfo->Width          = FFALIGN(link->w, 32);
        frameinfo->Height         = FFALIGN(link->h, 32);
        frameinfo->PicStruct      = MFX_PICSTRUCT_PROGRESSIVE;
        frameinfo->FourCC         = pix_fmt_to_mfx_fourcc(pix_fmt);
        frameinfo->BitDepthLuma   = desc->comp[0].depth;
        frameinfo->BitDepthChroma = desc->comp[0].depth;
        frameinfo->Shift          = desc->comp[0].depth > 8;
        if (desc->log2_chroma_w && desc->log2_chroma_h)
            frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV420;
        else if (desc->log2_chroma_w)
            frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV422;
        else
            frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV444;
    }

    frameinfo->CropW         = link->w;
    frameinfo->CropH         = link->h;
    frameinfo->FrameRateExtN = link->frame_rate.num;
    frameinfo->FrameRateExtD = link->frame_rate.den;
    frameinfo->AspectRatioW  = link->sample_aspect_ratio.num ? link->sample_aspect_ratio.num : 1;
    frameinfo->AspectRatioH  = link->sample_aspect_ratio.den ? link->sample_aspect_ratio.den : 1;

    return 0;
}

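/* unref AVFrames whose surfaces are no longer locked by the SDK so their list
 * entries can be reused */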
static void clear_unused_frames(QSVFrame *list)
{
    while (list) {
        if (list->surface && !list->surface->Data.Locked) {
            list->surface = NULL;
            av_frame_free(&list->frame);
        }
        list = list->next;
    }
}

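/* free an entire frame list; used when the context is torn down */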
static void clear_frame_list(QSVFrame **list)
{
    while (*list) {
        QSVFrame *frame;

        frame = *list;
        *list = (*list)->next;
        av_frame_free(&frame->frame);
        av_freep(&frame);
    }
}

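/* return the first unused entry in the list, allocating and linking a new one
 * if all entries are busy */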
static QSVFrame *get_free_frame(QSVFrame **list)
{
    QSVFrame *out = *list;

    for (; out; out = out->next) {
        if (!out->surface)
            break;
    }

    if (!out) {
        out = av_mallocz(sizeof(*out));
        if (!out) {
            av_log(NULL, AV_LOG_ERROR, "Can't alloc new output frame.\n");
            return NULL;
        }
        out->next = *list;
        *list     = out;
    }

    return out;
}

/* get the input surface */
static QSVFrame *submit_frame(QSVVPPContext *s, AVFilterLink *inlink, AVFrame *picref)
{
    QSVFrame        *qsv_frame;
    AVFilterContext *ctx = inlink->dst;

    clear_unused_frames(s->in_frame_list);

    qsv_frame = get_free_frame(&s->in_frame_list);
    if (!qsv_frame)
        return NULL;

    /* Turn the AVFrame into an mfxFrameSurface1.
     * For video/opaque memory mode, pix_fmt is AV_PIX_FMT_QSV and the
     * mfxFrameSurface1 is stored in AVFrame->data[3];
     * for system memory mode, the raw video data is stored in the AVFrame,
     * so we have to map it into an mfxFrameSurface1. */
    if (!IS_SYSTEM_MEMORY(s->in_mem_mode)) {
        if (picref->format != AV_PIX_FMT_QSV) {
            av_log(ctx, AV_LOG_ERROR, "QSVVPP gets a wrong frame.\n");
            return NULL;
        }
        qsv_frame->frame = av_frame_clone(picref);
        if (!qsv_frame->frame)
            return NULL;
        qsv_frame->surface = (mfxFrameSurface1 *)qsv_frame->frame->data[3];
    } else {
        /* make a copy if the input is not padded as libmfx requires */
        if (picref->height & 31 || picref->linesize[0] & 31) {
            qsv_frame->frame = ff_get_video_buffer(inlink,
                                                   FFALIGN(inlink->w, 32),
                                                   FFALIGN(inlink->h, 32));
            if (!qsv_frame->frame)
                return NULL;

            qsv_frame->frame->width  = picref->width;
            qsv_frame->frame->height = picref->height;

            if (av_frame_copy(qsv_frame->frame, picref) < 0) {
                av_frame_free(&qsv_frame->frame);
                return NULL;
            }

            av_frame_copy_props(qsv_frame->frame, picref);
        } else {
            qsv_frame->frame = av_frame_clone(picref);
            if (!qsv_frame->frame)
                return NULL;
        }

        if (map_frame_to_surface(qsv_frame->frame,
                                 &qsv_frame->surface_internal) < 0) {
            av_log(ctx, AV_LOG_ERROR, "Unsupported frame.\n");
            return NULL;
        }
        qsv_frame->surface = &qsv_frame->surface_internal;
    }

    qsv_frame->surface->Info           = s->frame_infos[FF_INLINK_IDX(inlink)];
    qsv_frame->surface->Data.TimeStamp = av_rescale_q(qsv_frame->frame->pts,
                                                      inlink->time_base, default_tb);

    qsv_frame->surface->Info.PicStruct =
        !qsv_frame->frame->interlaced_frame ? MFX_PICSTRUCT_PROGRESSIVE :
        (qsv_frame->frame->top_field_first ? MFX_PICSTRUCT_FIELD_TFF :
                                             MFX_PICSTRUCT_FIELD_BFF);
    if (qsv_frame->frame->repeat_pict == 1)
        qsv_frame->surface->Info.PicStruct |= MFX_PICSTRUCT_FIELD_REPEATED;
    else if (qsv_frame->frame->repeat_pict == 2)
        qsv_frame->surface->Info.PicStruct |= MFX_PICSTRUCT_FRAME_DOUBLING;
    else if (qsv_frame->frame->repeat_pict == 4)
        qsv_frame->surface->Info.PicStruct |= MFX_PICSTRUCT_FRAME_TRIPLING;

    return qsv_frame;
}

/* get the output surface */
static QSVFrame *query_frame(QSVVPPContext *s, AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    QSVFrame        *out_frame;
    int              ret;

    clear_unused_frames(s->out_frame_list);

    out_frame = get_free_frame(&s->out_frame_list);
    if (!out_frame)
        return NULL;

    /* For video memory, get a hw frame;
     * for system memory, get a sw frame and map it into an mfx surface. */
    if (!IS_SYSTEM_MEMORY(s->out_mem_mode)) {
        out_frame->frame = av_frame_alloc();
        if (!out_frame->frame)
            return NULL;

        ret = av_hwframe_get_buffer(outlink->hw_frames_ctx, out_frame->frame, 0);
        if (ret < 0) {
            av_log(ctx, AV_LOG_ERROR, "Can't allocate a surface.\n");
            return NULL;
        }

        out_frame->surface = (mfxFrameSurface1 *)out_frame->frame->data[3];
    } else {
        /* Get a frame with aligned dimensions:
         * libmfx requires system memory to be 128x64 aligned. */
        out_frame->frame = ff_get_video_buffer(outlink,
                                               FFALIGN(outlink->w, 128),
                                               FFALIGN(outlink->h, 64));
        if (!out_frame->frame)
            return NULL;

        out_frame->frame->width  = outlink->w;
        out_frame->frame->height = outlink->h;

        ret = map_frame_to_surface(out_frame->frame,
                                   &out_frame->surface_internal);
        if (ret < 0)
            return NULL;

        out_frame->surface = &out_frame->surface_internal;
    }

    out_frame->surface->Info = s->vpp_param.vpp.Out;

    return out_frame;
}

/* create the QSV session */
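/* This derives the input/output memory modes from the filter links, creates
 * the output AVHWFramesContext when the output format is AV_PIX_FMT_QSV, and
 * opens a VPP session that reuses the device handle of the session found in
 * the hardware device/frames context. */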
static int init_vpp_session(AVFilterContext *avctx, QSVVPPContext *s)
{
    AVFilterLink       *inlink  = avctx->inputs[0];
    AVFilterLink       *outlink = avctx->outputs[0];
    AVQSVFramesContext *in_frames_hwctx  = NULL;
    AVQSVFramesContext *out_frames_hwctx = NULL;

    AVBufferRef        *device_ref;
    AVHWDeviceContext  *device_ctx;
    AVQSVDeviceContext *device_hwctx;
    mfxHDL              handle;
    mfxHandleType       handle_type;
    mfxVersion          ver;
    mfxIMPL             impl;
    int                 ret, i;

    if (inlink->hw_frames_ctx) {
        AVHWFramesContext *frames_ctx = (AVHWFramesContext *)inlink->hw_frames_ctx->data;

        device_ref      = frames_ctx->device_ref;
        in_frames_hwctx = frames_ctx->hwctx;

        s->in_mem_mode = in_frames_hwctx->frame_type;

        s->surface_ptrs_in = av_mallocz_array(in_frames_hwctx->nb_surfaces,
                                              sizeof(*s->surface_ptrs_in));
        if (!s->surface_ptrs_in)
            return AVERROR(ENOMEM);

        for (i = 0; i < in_frames_hwctx->nb_surfaces; i++)
            s->surface_ptrs_in[i] = in_frames_hwctx->surfaces + i;

        s->nb_surface_ptrs_in = in_frames_hwctx->nb_surfaces;
    } else if (avctx->hw_device_ctx) {
        device_ref     = avctx->hw_device_ctx;
        s->in_mem_mode = MFX_MEMTYPE_SYSTEM_MEMORY;
    } else {
        av_log(avctx, AV_LOG_ERROR, "No hw context provided.\n");
        return AVERROR(EINVAL);
    }

    device_ctx   = (AVHWDeviceContext *)device_ref->data;
    device_hwctx = device_ctx->hwctx;

    if (outlink->format == AV_PIX_FMT_QSV) {
        AVHWFramesContext *out_frames_ctx;
        AVBufferRef *out_frames_ref = av_hwframe_ctx_alloc(device_ref);
        if (!out_frames_ref)
            return AVERROR(ENOMEM);

        s->out_mem_mode = IS_OPAQUE_MEMORY(s->in_mem_mode) ?
                          MFX_MEMTYPE_OPAQUE_FRAME :
                          MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;

        out_frames_ctx   = (AVHWFramesContext *)out_frames_ref->data;
        out_frames_hwctx = out_frames_ctx->hwctx;

        out_frames_ctx->format            = AV_PIX_FMT_QSV;
        out_frames_ctx->width             = FFALIGN(outlink->w, 32);
        out_frames_ctx->height            = FFALIGN(outlink->h, 32);
        out_frames_ctx->sw_format         = s->out_sw_format;
        out_frames_ctx->initial_pool_size = 64;
        if (avctx->extra_hw_frames > 0)
            out_frames_ctx->initial_pool_size += avctx->extra_hw_frames;
        out_frames_hwctx->frame_type      = s->out_mem_mode;

        ret = av_hwframe_ctx_init(out_frames_ref);
        if (ret < 0) {
            av_buffer_unref(&out_frames_ref);
            av_log(avctx, AV_LOG_ERROR, "Error creating frames_ctx for output pad.\n");
            return ret;
        }

        s->surface_ptrs_out = av_mallocz_array(out_frames_hwctx->nb_surfaces,
                                               sizeof(*s->surface_ptrs_out));
        if (!s->surface_ptrs_out) {
            av_buffer_unref(&out_frames_ref);
            return AVERROR(ENOMEM);
        }

        for (i = 0; i < out_frames_hwctx->nb_surfaces; i++)
            s->surface_ptrs_out[i] = out_frames_hwctx->surfaces + i;
        s->nb_surface_ptrs_out = out_frames_hwctx->nb_surfaces;

        av_buffer_unref(&outlink->hw_frames_ctx);
        outlink->hw_frames_ctx = out_frames_ref;
    } else
        s->out_mem_mode = MFX_MEMTYPE_SYSTEM_MEMORY;

    /* extract the properties of the "master" session given to us */
    ret = MFXQueryIMPL(device_hwctx->session, &impl);
    if (ret == MFX_ERR_NONE)
        ret = MFXQueryVersion(device_hwctx->session, &ver);
    if (ret != MFX_ERR_NONE) {
        av_log(avctx, AV_LOG_ERROR, "Error querying the session attributes\n");
        return AVERROR_UNKNOWN;
    }

    for (i = 0; i < FF_ARRAY_ELEMS(handle_types); i++) {
        ret = MFXVideoCORE_GetHandle(device_hwctx->session, handle_types[i], &handle);
        if (ret == MFX_ERR_NONE) {
            handle_type = handle_types[i];
            break;
        }
    }

    if (ret < 0)
        return ff_qsvvpp_print_error(avctx, ret, "Error getting the session handle");
    else if (ret > 0) {
        ff_qsvvpp_print_warning(avctx, ret, "Warning in getting the session handle");
        return AVERROR_UNKNOWN;
    }

    /* create a "slave" session with those same properties, to be used for vpp */
    ret = MFXInit(impl, &ver, &s->session);
    if (ret < 0)
        return ff_qsvvpp_print_error(avctx, ret, "Error initializing a session");
    else if (ret > 0) {
        ff_qsvvpp_print_warning(avctx, ret, "Warning in session initialization");
        return AVERROR_UNKNOWN;
    }

    if (handle) {
        ret = MFXVideoCORE_SetHandle(s->session, handle_type, handle);
        if (ret != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    if (QSV_RUNTIME_VERSION_ATLEAST(ver, 1, 25)) {
        ret = MFXJoinSession(device_hwctx->session, s->session);
        if (ret != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    if (IS_OPAQUE_MEMORY(s->in_mem_mode) || IS_OPAQUE_MEMORY(s->out_mem_mode)) {
        s->opaque_alloc.In.Surfaces   = s->surface_ptrs_in;
        s->opaque_alloc.In.NumSurface = s->nb_surface_ptrs_in;
        s->opaque_alloc.In.Type       = s->in_mem_mode;

        s->opaque_alloc.Out.Surfaces   = s->surface_ptrs_out;
        s->opaque_alloc.Out.NumSurface = s->nb_surface_ptrs_out;
        s->opaque_alloc.Out.Type       = s->out_mem_mode;

        s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
        s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);
    } else if (IS_VIDEO_MEMORY(s->in_mem_mode) || IS_VIDEO_MEMORY(s->out_mem_mode)) {
        mfxFrameAllocator frame_allocator = {
            .pthis  = s,
            .Alloc  = frame_alloc,
            .Lock   = frame_lock,
            .Unlock = frame_unlock,
            .GetHDL = frame_get_hdl,
            .Free   = frame_free,
        };

        ret = MFXVideoCORE_SetFrameAllocator(s->session, &frame_allocator);
        if (ret != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    return 0;
}

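/* Allocate the VPP context, fill the per-input mfxFrameInfo and the IOPattern
 * according to the negotiated memory modes, then initialize the VPP pipeline. */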
int ff_qsvvpp_create(AVFilterContext *avctx, QSVVPPContext **vpp, QSVVPPParam *param)
{
    int i;
    int ret;
    QSVVPPContext *s;

    s = av_mallocz(sizeof(*s));
    if (!s)
        return AVERROR(ENOMEM);

    s->filter_frame = param->filter_frame;
    if (!s->filter_frame)
        s->filter_frame = ff_filter_frame;
    s->out_sw_format = param->out_sw_format;

    /* create the vpp session */
    ret = init_vpp_session(avctx, s);
    if (ret < 0)
        goto failed;

    s->frame_infos = av_mallocz_array(avctx->nb_inputs, sizeof(*s->frame_infos));
    if (!s->frame_infos) {
        ret = AVERROR(ENOMEM);
        goto failed;
    }

    /* Init each input's information */
    for (i = 0; i < avctx->nb_inputs; i++) {
        ret = fill_frameinfo_by_link(&s->frame_infos[i], avctx->inputs[i]);
        if (ret < 0)
            goto failed;
    }

    /* Update input's frame info according to crop */
    for (i = 0; i < param->num_crop; i++) {
        QSVVPPCrop *crop = param->crop + i;
        if (crop->in_idx >= avctx->nb_inputs) {
            ret = AVERROR(EINVAL);
            goto failed;
        }
        s->frame_infos[crop->in_idx].CropX = crop->x;
        s->frame_infos[crop->in_idx].CropY = crop->y;
        s->frame_infos[crop->in_idx].CropW = crop->w;
        s->frame_infos[crop->in_idx].CropH = crop->h;
    }

    s->vpp_param.vpp.In = s->frame_infos[0];

    ret = fill_frameinfo_by_link(&s->vpp_param.vpp.Out, avctx->outputs[0]);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to get frame info from link.\n");
        goto failed;
    }

    if (IS_OPAQUE_MEMORY(s->in_mem_mode) || IS_OPAQUE_MEMORY(s->out_mem_mode)) {
        s->nb_ext_buffers = param->num_ext_buf + 1;
        s->ext_buffers = av_mallocz_array(s->nb_ext_buffers, sizeof(*s->ext_buffers));
        if (!s->ext_buffers) {
            ret = AVERROR(ENOMEM);
            goto failed;
        }

        s->ext_buffers[0] = (mfxExtBuffer *)&s->opaque_alloc;
        for (i = 1; i < s->nb_ext_buffers; i++)
            s->ext_buffers[i] = param->ext_buf[i - 1];
        s->vpp_param.ExtParam    = s->ext_buffers;
        s->vpp_param.NumExtParam = s->nb_ext_buffers;
    } else {
        s->vpp_param.NumExtParam = param->num_ext_buf;
        s->vpp_param.ExtParam    = param->ext_buf;
    }

    s->vpp_param.AsyncDepth = 1;

    if (IS_SYSTEM_MEMORY(s->in_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_SYSTEM_MEMORY;
    else if (IS_VIDEO_MEMORY(s->in_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_VIDEO_MEMORY;
    else if (IS_OPAQUE_MEMORY(s->in_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_OPAQUE_MEMORY;

    if (IS_SYSTEM_MEMORY(s->out_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
    else if (IS_VIDEO_MEMORY(s->out_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_VIDEO_MEMORY;
    else if (IS_OPAQUE_MEMORY(s->out_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_OPAQUE_MEMORY;

    /* Print input memory mode */
    ff_qsvvpp_print_iopattern(avctx, s->vpp_param.IOPattern & 0x0F, "VPP");
    /* Print output memory mode */
    ff_qsvvpp_print_iopattern(avctx, s->vpp_param.IOPattern & 0xF0, "VPP");
    ret = MFXVideoVPP_Init(s->session, &s->vpp_param);
    if (ret < 0) {
        ret = ff_qsvvpp_print_error(avctx, ret, "Failed to create a qsvvpp");
        goto failed;
    } else if (ret > 0)
        ff_qsvvpp_print_warning(avctx, ret, "Warning when creating qsvvpp");

    *vpp = s;
    return 0;

failed:
    ff_qsvvpp_free(&s);

    return ret;
}

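/* close the VPP session and release every resource owned by the context */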
int ff_qsvvpp_free(QSVVPPContext **vpp)
{
    QSVVPPContext *s = *vpp;

    if (!s)
        return 0;

    if (s->session) {
        MFXVideoVPP_Close(s->session);
        MFXClose(s->session);
    }

    /* release all the resources */
    clear_frame_list(&s->in_frame_list);
    clear_frame_list(&s->out_frame_list);
    av_freep(&s->surface_ptrs_in);
    av_freep(&s->surface_ptrs_out);
    av_freep(&s->ext_buffers);
    av_freep(&s->frame_infos);
    av_freep(vpp);

    return 0;
}

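/* Submit one input frame to VPP and hand every frame it produces to the
 * filter_frame() callback; a single input may yield several outputs, which is
 * why the loop keeps running while MFX_ERR_MORE_SURFACE is returned. */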
int ff_qsvvpp_filter_frame(QSVVPPContext *s, AVFilterLink *inlink, AVFrame *picref)
{
    AVFilterContext *ctx     = inlink->dst;
    AVFilterLink    *outlink = ctx->outputs[0];
    mfxSyncPoint     sync;
    QSVFrame        *in_frame, *out_frame;
    int              ret, filter_ret;

    in_frame = submit_frame(s, inlink, picref);
    if (!in_frame) {
        av_log(ctx, AV_LOG_ERROR, "Failed to submit frame on input[%d]\n",
               FF_INLINK_IDX(inlink));
        return AVERROR(ENOMEM);
    }

    do {
        out_frame = query_frame(s, outlink);
        if (!out_frame) {
            av_log(ctx, AV_LOG_ERROR, "Failed to query an output frame.\n");
            return AVERROR(ENOMEM);
        }

        do {
            ret = MFXVideoVPP_RunFrameVPPAsync(s->session, in_frame->surface,
                                               out_frame->surface, NULL, &sync);
            if (ret == MFX_WRN_DEVICE_BUSY)
                av_usleep(500);
        } while (ret == MFX_WRN_DEVICE_BUSY);

        if (ret < 0 && ret != MFX_ERR_MORE_SURFACE) {
            /* Ignore more_data error */
            if (ret == MFX_ERR_MORE_DATA)
                ret = AVERROR(EAGAIN);
            break;
        }

        if (MFXVideoCORE_SyncOperation(s->session, sync, 1000) < 0)
            av_log(ctx, AV_LOG_WARNING, "Sync failed.\n");

        out_frame->frame->pts = av_rescale_q(out_frame->surface->Data.TimeStamp,
                                             default_tb, outlink->time_base);

        filter_ret = s->filter_frame(outlink, out_frame->frame);
        if (filter_ret < 0) {
            av_frame_free(&out_frame->frame);
            ret = filter_ret;
            break;
        }
        out_frame->frame = NULL;
    } while (ret == MFX_ERR_MORE_SURFACE);

    return ret;
}