/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Intel Quick Sync Video VPP base function
 */

#include "libavutil/common.h"
#include "libavutil/mathematics.h"
#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_qsv.h"
#include "libavutil/time.h"
#include "libavutil/pixdesc.h"

#include "internal.h"
#include "qsvvpp.h"
#include "video.h"

#define IS_VIDEO_MEMORY(mode)   (mode & (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET | \
                                         MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET))
#define IS_OPAQUE_MEMORY(mode)  (mode & MFX_MEMTYPE_OPAQUE_FRAME)
#define IS_SYSTEM_MEMORY(mode)  (mode & MFX_MEMTYPE_SYSTEM_MEMORY)
#define MFX_IMPL_VIA_MASK(impl) (0x0f00 & (impl))

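/* libmfx timestamps are expressed in a fixed 90 kHz timebase; frame PTS are
 * rescaled to this timebase when a frame is submitted and back to the link
 * timebase on output. */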
static const AVRational default_tb = { 1, 90000 };

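/* One in-flight VPP operation: the sync point returned by
 * MFXVideoVPP_RunFrameVPPAsync() together with the output frame it will
 * produce once synchronized. */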
typedef struct QSVAsyncFrame {
    mfxSyncPoint sync;
    QSVFrame    *frame;
} QSVAsyncFrame;

static const struct {
    int mfx_iopattern;
    const char *desc;
} qsv_iopatterns[] = {
    {MFX_IOPATTERN_IN_VIDEO_MEMORY,   "input is video memory surface"   },
    {MFX_IOPATTERN_IN_SYSTEM_MEMORY,  "input is system memory surface"  },
    {MFX_IOPATTERN_IN_OPAQUE_MEMORY,  "input is opaque memory surface"  },
    {MFX_IOPATTERN_OUT_VIDEO_MEMORY,  "output is video memory surface"  },
    {MFX_IOPATTERN_OUT_SYSTEM_MEMORY, "output is system memory surface" },
    {MFX_IOPATTERN_OUT_OPAQUE_MEMORY, "output is opaque memory surface" },
};

int ff_qsvvpp_print_iopattern(void *log_ctx, int mfx_iopattern,
                              const char *extra_string)
{
    const char *desc = NULL;

    for (int i = 0; i < FF_ARRAY_ELEMS(qsv_iopatterns); i++) {
        if (qsv_iopatterns[i].mfx_iopattern == mfx_iopattern) {
            desc = qsv_iopatterns[i].desc;
        }
    }
    if (!desc)
        desc = "unknown iopattern";

    av_log(log_ctx, AV_LOG_VERBOSE, "%s: %s\n", extra_string, desc);
    return 0;
}

static const struct {
    mfxStatus   mfxerr;
    int         averr;
    const char *desc;
} qsv_errors[] = {
    { MFX_ERR_NONE,                     0,               "success"                          },
    { MFX_ERR_UNKNOWN,                  AVERROR_UNKNOWN, "unknown error"                    },
    { MFX_ERR_NULL_PTR,                 AVERROR(EINVAL), "NULL pointer"                     },
    { MFX_ERR_UNSUPPORTED,              AVERROR(ENOSYS), "unsupported"                      },
    { MFX_ERR_MEMORY_ALLOC,             AVERROR(ENOMEM), "failed to allocate memory"        },
    { MFX_ERR_NOT_ENOUGH_BUFFER,        AVERROR(ENOMEM), "insufficient input/output buffer" },
    { MFX_ERR_INVALID_HANDLE,           AVERROR(EINVAL), "invalid handle"                   },
    { MFX_ERR_LOCK_MEMORY,              AVERROR(EIO),    "failed to lock the memory block"  },
    { MFX_ERR_NOT_INITIALIZED,          AVERROR_BUG,     "not initialized"                  },
    { MFX_ERR_NOT_FOUND,                AVERROR(ENOSYS), "specified object was not found"   },
    /* the following 3 errors should always be handled explicitly, so those "mappings"
     * are for completeness only */
    { MFX_ERR_MORE_DATA,                AVERROR_UNKNOWN, "expect more data at input"        },
    { MFX_ERR_MORE_SURFACE,             AVERROR_UNKNOWN, "expect more surface at output"    },
    { MFX_ERR_MORE_BITSTREAM,           AVERROR_UNKNOWN, "expect more bitstream at output"  },
    { MFX_ERR_ABORTED,                  AVERROR_UNKNOWN, "operation aborted"                },
    { MFX_ERR_DEVICE_LOST,              AVERROR(EIO),    "device lost"                      },
    { MFX_ERR_INCOMPATIBLE_VIDEO_PARAM, AVERROR(EINVAL), "incompatible video parameters"    },
    { MFX_ERR_INVALID_VIDEO_PARAM,      AVERROR(EINVAL), "invalid video parameters"         },
    { MFX_ERR_UNDEFINED_BEHAVIOR,       AVERROR_BUG,     "undefined behavior"               },
    { MFX_ERR_DEVICE_FAILED,            AVERROR(EIO),    "device failed"                    },
    { MFX_ERR_INCOMPATIBLE_AUDIO_PARAM, AVERROR(EINVAL), "incompatible audio parameters"    },
    { MFX_ERR_INVALID_AUDIO_PARAM,      AVERROR(EINVAL), "invalid audio parameters"         },

    { MFX_WRN_IN_EXECUTION,             0,               "operation in execution"           },
    { MFX_WRN_DEVICE_BUSY,              0,               "device busy"                      },
    { MFX_WRN_VIDEO_PARAM_CHANGED,      0,               "video parameters changed"         },
    { MFX_WRN_PARTIAL_ACCELERATION,     0,               "partial acceleration"             },
    { MFX_WRN_INCOMPATIBLE_VIDEO_PARAM, 0,               "incompatible video parameters"    },
    { MFX_WRN_VALUE_NOT_CHANGED,        0,               "value is saturated"               },
    { MFX_WRN_OUT_OF_RANGE,             0,               "value out of range"               },
    { MFX_WRN_FILTER_SKIPPED,           0,               "filter skipped"                   },
    { MFX_WRN_INCOMPATIBLE_AUDIO_PARAM, 0,               "incompatible audio parameters"    },
};

static int qsv_map_error(mfxStatus mfx_err, const char **desc)
{
    int i;
    for (i = 0; i < FF_ARRAY_ELEMS(qsv_errors); i++) {
        if (qsv_errors[i].mfxerr == mfx_err) {
            if (desc)
                *desc = qsv_errors[i].desc;
            return qsv_errors[i].averr;
        }
    }
    if (desc)
        *desc = "unknown error";
    return AVERROR_UNKNOWN;
}

int ff_qsvvpp_print_error(void *log_ctx, mfxStatus err,
                          const char *error_string)
{
    const char *desc;
    int ret;
    ret = qsv_map_error(err, &desc);
    av_log(log_ctx, AV_LOG_ERROR, "%s: %s (%d)\n", error_string, desc, err);
    return ret;
}

int ff_qsvvpp_print_warning(void *log_ctx, mfxStatus err,
                            const char *warning_string)
{
    const char *desc;
    int ret;
    ret = qsv_map_error(err, &desc);
    av_log(log_ctx, AV_LOG_WARNING, "%s: %s (%d)\n", warning_string, desc, err);
    return ret;
}

/* functions for frameAlloc */
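/* The callbacks below implement a minimal mfxFrameAllocator for the video
 * memory case: instead of allocating anything, frame_alloc() hands libmfx the
 * MemIds of the surfaces already owned by the input/output AVHWFramesContext,
 * and frame_get_hdl() translates a MemId back into the underlying handle pair.
 * Lock/Unlock are unsupported because these surfaces are never mapped to
 * system memory through this allocator. */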
static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
                             mfxFrameAllocResponse *resp)
{
    QSVVPPContext *s = pthis;
    int i;

    if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
        !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
        !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
        return MFX_ERR_UNSUPPORTED;

    if (req->Type & MFX_MEMTYPE_FROM_VPPIN) {
        resp->mids = av_mallocz(s->nb_surface_ptrs_in * sizeof(*resp->mids));
        if (!resp->mids)
            return AVERROR(ENOMEM);

        for (i = 0; i < s->nb_surface_ptrs_in; i++)
            resp->mids[i] = s->surface_ptrs_in[i]->Data.MemId;

        resp->NumFrameActual = s->nb_surface_ptrs_in;
    } else {
        resp->mids = av_mallocz(s->nb_surface_ptrs_out * sizeof(*resp->mids));
        if (!resp->mids)
            return AVERROR(ENOMEM);

        for (i = 0; i < s->nb_surface_ptrs_out; i++)
            resp->mids[i] = s->surface_ptrs_out[i]->Data.MemId;

        resp->NumFrameActual = s->nb_surface_ptrs_out;
    }

    return MFX_ERR_NONE;
}

static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
{
    av_freep(&resp->mids);
    return MFX_ERR_NONE;
}

static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}

static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}

static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
{
    mfxHDLPair *pair_dst = (mfxHDLPair*)hdl;
    mfxHDLPair *pair_src = (mfxHDLPair*)mid;

    pair_dst->first = pair_src->first;

    if (pair_src->second != (mfxMemId)MFX_INFINITE)
        pair_dst->second = pair_src->second;
    return MFX_ERR_NONE;
}

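/* Map an AVPixelFormat to the mfx FourCC libmfx expects; unknown formats fall
 * back to NV12. */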
static int pix_fmt_to_mfx_fourcc(int format)
{
    switch (format) {
    case AV_PIX_FMT_YUV420P:
        return MFX_FOURCC_YV12;
    case AV_PIX_FMT_NV12:
        return MFX_FOURCC_NV12;
    case AV_PIX_FMT_YUYV422:
        return MFX_FOURCC_YUY2;
    case AV_PIX_FMT_BGRA:
        return MFX_FOURCC_RGB4;
    }

    return MFX_FOURCC_NV12;
}

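/* Point a system-memory mfxFrameSurface1 at the plane/component data of an
 * AVFrame; no pixel data is copied, only the Data pointers and Pitch are set. */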
static int map_frame_to_surface(AVFrame *frame, mfxFrameSurface1 *surface)
{
    switch (frame->format) {
    case AV_PIX_FMT_NV12:
    case AV_PIX_FMT_P010:
        surface->Data.Y  = frame->data[0];
        surface->Data.UV = frame->data[1];
        break;
    case AV_PIX_FMT_YUV420P:
        surface->Data.Y = frame->data[0];
        surface->Data.U = frame->data[1];
        surface->Data.V = frame->data[2];
        break;
    case AV_PIX_FMT_YUYV422:
        surface->Data.Y = frame->data[0];
        surface->Data.U = frame->data[0] + 1;
        surface->Data.V = frame->data[0] + 3;
        break;
    case AV_PIX_FMT_RGB32:
        surface->Data.B = frame->data[0];
        surface->Data.G = frame->data[0] + 1;
        surface->Data.R = frame->data[0] + 2;
        surface->Data.A = frame->data[0] + 3;
        break;
    default:
        return MFX_ERR_UNSUPPORTED;
    }
    surface->Data.Pitch = frame->linesize[0];

    return 0;
}

/* fill the surface info */
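/* For hardware (AV_PIX_FMT_QSV) links, the mfxFrameInfo is copied verbatim
 * from the first surface of the attached AVHWFramesContext; for software
 * links it is derived from the link properties, with width/height padded to
 * the 32-pixel alignment libmfx expects. Crop, frame rate and aspect ratio
 * are then filled from the link in both cases. */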
static int fill_frameinfo_by_link(mfxFrameInfo *frameinfo, AVFilterLink *link)
{
    enum AVPixelFormat pix_fmt;
    AVHWFramesContext *frames_ctx;
    AVQSVFramesContext *frames_hwctx;
    const AVPixFmtDescriptor *desc;

    if (link->format == AV_PIX_FMT_QSV) {
        if (!link->hw_frames_ctx)
            return AVERROR(EINVAL);

        frames_ctx   = (AVHWFramesContext *)link->hw_frames_ctx->data;
        frames_hwctx = frames_ctx->hwctx;
        *frameinfo   = frames_hwctx->surfaces[0].Info;
    } else {
        pix_fmt = link->format;
        desc = av_pix_fmt_desc_get(pix_fmt);
        if (!desc)
            return AVERROR_BUG;

        frameinfo->CropX          = 0;
        frameinfo->CropY          = 0;
        frameinfo->Width          = FFALIGN(link->w, 32);
        frameinfo->Height         = FFALIGN(link->h, 32);
        frameinfo->PicStruct      = MFX_PICSTRUCT_PROGRESSIVE;
        frameinfo->FourCC         = pix_fmt_to_mfx_fourcc(pix_fmt);
        frameinfo->BitDepthLuma   = desc->comp[0].depth;
        frameinfo->BitDepthChroma = desc->comp[0].depth;
        frameinfo->Shift          = desc->comp[0].depth > 8;
        if (desc->log2_chroma_w && desc->log2_chroma_h)
            frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV420;
        else if (desc->log2_chroma_w)
            frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV422;
        else
            frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV444;
    }

    frameinfo->CropW         = link->w;
    frameinfo->CropH         = link->h;
    frameinfo->FrameRateExtN = link->frame_rate.num;
    frameinfo->FrameRateExtD = link->frame_rate.den;
    frameinfo->AspectRatioW  = link->sample_aspect_ratio.num ? link->sample_aspect_ratio.num : 1;
    frameinfo->AspectRatioH  = link->sample_aspect_ratio.den ? link->sample_aspect_ratio.den : 1;

    return 0;
}

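/* Frames submitted to or produced by VPP are kept in singly linked lists so
 * they stay valid until libmfx is done with them; entries are recycled once
 * they are no longer queued and the surface is not locked. */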
static void clear_unused_frames(QSVFrame *list)
{
    while (list) {
        /* list->queued == 1 means the frame is no longer cached in the VPP
         * process, so it can be released to the pool. */
        if ((list->queued == 1) && !list->surface.Data.Locked) {
            av_frame_free(&list->frame);
            list->queued = 0;
        }
        list = list->next;
    }
}

static void clear_frame_list(QSVFrame **list)
{
    while (*list) {
        QSVFrame *frame;

        frame = *list;
        *list = (*list)->next;
        av_frame_free(&frame->frame);
        av_freep(&frame);
    }
}

static QSVFrame *get_free_frame(QSVFrame **list)
{
    QSVFrame *out = *list;

    for (; out; out = out->next) {
        if (!out->queued) {
            out->queued = 1;
            break;
        }
    }

    if (!out) {
        out = av_mallocz(sizeof(*out));
        if (!out) {
            av_log(NULL, AV_LOG_ERROR, "Can't alloc new output frame.\n");
            return NULL;
        }
        out->queued = 1;
        out->next   = *list;
        *list       = out;
    }

    return out;
}

/* get the input surface */
static QSVFrame *submit_frame(QSVVPPContext *s, AVFilterLink *inlink, AVFrame *picref)
{
    QSVFrame *qsv_frame;
    AVFilterContext *ctx = inlink->dst;

    clear_unused_frames(s->in_frame_list);

    qsv_frame = get_free_frame(&s->in_frame_list);
    if (!qsv_frame)
        return NULL;

    /* Turn AVFrame into mfxFrameSurface1.
     * For video/opaque memory mode, pix_fmt is AV_PIX_FMT_QSV, and
     * mfxFrameSurface1 is stored in AVFrame->data[3];
     * for system memory mode, raw video data is stored in
     * AVFrame, we should map it into mfxFrameSurface1.
     */
    if (!IS_SYSTEM_MEMORY(s->in_mem_mode)) {
        if (picref->format != AV_PIX_FMT_QSV) {
            av_log(ctx, AV_LOG_ERROR, "QSVVPP gets a wrong frame.\n");
            return NULL;
        }
        qsv_frame->frame   = av_frame_clone(picref);
        qsv_frame->surface = *(mfxFrameSurface1 *)qsv_frame->frame->data[3];
    } else {
        /* make a copy if the input is not padded as libmfx requires */
        if (picref->height & 31 || picref->linesize[0] & 31) {
            qsv_frame->frame = ff_get_video_buffer(inlink,
                                                   FFALIGN(inlink->w, 32),
                                                   FFALIGN(inlink->h, 32));
            if (!qsv_frame->frame)
                return NULL;

            qsv_frame->frame->width  = picref->width;
            qsv_frame->frame->height = picref->height;

            if (av_frame_copy(qsv_frame->frame, picref) < 0) {
                av_frame_free(&qsv_frame->frame);
                return NULL;
            }

            av_frame_copy_props(qsv_frame->frame, picref);
        } else
            qsv_frame->frame = av_frame_clone(picref);

        if (map_frame_to_surface(qsv_frame->frame,
                                 &qsv_frame->surface) < 0) {
            av_log(ctx, AV_LOG_ERROR, "Unsupported frame.\n");
            return NULL;
        }
    }

    qsv_frame->surface.Info           = s->frame_infos[FF_INLINK_IDX(inlink)];
    qsv_frame->surface.Data.TimeStamp = av_rescale_q(qsv_frame->frame->pts,
                                                     inlink->time_base, default_tb);

    qsv_frame->surface.Info.PicStruct =
            !qsv_frame->frame->interlaced_frame ? MFX_PICSTRUCT_PROGRESSIVE :
            (qsv_frame->frame->top_field_first ? MFX_PICSTRUCT_FIELD_TFF :
                                                 MFX_PICSTRUCT_FIELD_BFF);
    if (qsv_frame->frame->repeat_pict == 1)
        qsv_frame->surface.Info.PicStruct |= MFX_PICSTRUCT_FIELD_REPEATED;
    else if (qsv_frame->frame->repeat_pict == 2)
        qsv_frame->surface.Info.PicStruct |= MFX_PICSTRUCT_FRAME_DOUBLING;
    else if (qsv_frame->frame->repeat_pict == 4)
        qsv_frame->surface.Info.PicStruct |= MFX_PICSTRUCT_FRAME_TRIPLING;

    return qsv_frame;
}

/* get the output surface */
static QSVFrame *query_frame(QSVVPPContext *s, AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    QSVFrame *out_frame;
    int ret;

    clear_unused_frames(s->out_frame_list);

    out_frame = get_free_frame(&s->out_frame_list);
    if (!out_frame)
        return NULL;

    /* For video memory, get a hw frame;
     * For system memory, get a sw frame and map it into a mfx_surface. */
    if (!IS_SYSTEM_MEMORY(s->out_mem_mode)) {
        out_frame->frame = av_frame_alloc();
        if (!out_frame->frame)
            return NULL;

        ret = av_hwframe_get_buffer(outlink->hw_frames_ctx, out_frame->frame, 0);
        if (ret < 0) {
            av_log(ctx, AV_LOG_ERROR, "Can't allocate a surface.\n");
            return NULL;
        }

        out_frame->surface = *(mfxFrameSurface1 *)out_frame->frame->data[3];
    } else {
        /* Get a frame with aligned dimensions; libmfx requires system memory
         * to be 128x64 aligned. */
        out_frame->frame = ff_get_video_buffer(outlink,
                                               FFALIGN(outlink->w, 128),
                                               FFALIGN(outlink->h, 64));
        if (!out_frame->frame)
            return NULL;

        out_frame->frame->width  = outlink->w;
        out_frame->frame->height = outlink->h;

        ret = map_frame_to_surface(out_frame->frame,
                                   &out_frame->surface);
        if (ret < 0)
            return NULL;
    }

    out_frame->surface.Info = s->vpp_param.vpp.Out;

    return out_frame;
}

/* create the QSV session */
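/* The device given to the filter already owns a "master" mfxSession. This
 * helper queries its implementation, API version and device handle, creates a
 * second session with the same properties for the VPP work, shares the handle
 * (and joins the sessions when the runtime is new enough), and installs the
 * external frame allocator when video memory is used on either side. */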
static int init_vpp_session(AVFilterContext *avctx, QSVVPPContext *s)
{
    AVFilterLink *inlink = avctx->inputs[0];
    AVFilterLink *outlink = avctx->outputs[0];
    AVQSVFramesContext *in_frames_hwctx = NULL;
    AVQSVFramesContext *out_frames_hwctx = NULL;

    AVBufferRef *device_ref;
    AVHWDeviceContext *device_ctx;
    AVQSVDeviceContext *device_hwctx;
    mfxHDL handle;
    mfxHandleType handle_type;
    mfxVersion ver;
    mfxIMPL impl;
    int ret, i;

    if (inlink->hw_frames_ctx) {
        AVHWFramesContext *frames_ctx = (AVHWFramesContext *)inlink->hw_frames_ctx->data;

        device_ref      = frames_ctx->device_ref;
        in_frames_hwctx = frames_ctx->hwctx;

        s->in_mem_mode = in_frames_hwctx->frame_type;

        s->surface_ptrs_in = av_calloc(in_frames_hwctx->nb_surfaces,
                                       sizeof(*s->surface_ptrs_in));
        if (!s->surface_ptrs_in)
            return AVERROR(ENOMEM);

        for (i = 0; i < in_frames_hwctx->nb_surfaces; i++)
            s->surface_ptrs_in[i] = in_frames_hwctx->surfaces + i;

        s->nb_surface_ptrs_in = in_frames_hwctx->nb_surfaces;
    } else if (avctx->hw_device_ctx) {
        device_ref     = avctx->hw_device_ctx;
        s->in_mem_mode = MFX_MEMTYPE_SYSTEM_MEMORY;
    } else {
        av_log(avctx, AV_LOG_ERROR, "No hw context provided.\n");
        return AVERROR(EINVAL);
    }

    device_ctx   = (AVHWDeviceContext *)device_ref->data;
    device_hwctx = device_ctx->hwctx;

    if (outlink->format == AV_PIX_FMT_QSV) {
        AVHWFramesContext *out_frames_ctx;
        AVBufferRef *out_frames_ref = av_hwframe_ctx_alloc(device_ref);
        if (!out_frames_ref)
            return AVERROR(ENOMEM);

        s->out_mem_mode = IS_OPAQUE_MEMORY(s->in_mem_mode) ?
                          MFX_MEMTYPE_OPAQUE_FRAME :
                          MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET | MFX_MEMTYPE_FROM_VPPOUT;

        out_frames_ctx   = (AVHWFramesContext *)out_frames_ref->data;
        out_frames_hwctx = out_frames_ctx->hwctx;

        out_frames_ctx->format            = AV_PIX_FMT_QSV;
        out_frames_ctx->width             = FFALIGN(outlink->w, 32);
        out_frames_ctx->height            = FFALIGN(outlink->h, 32);
        out_frames_ctx->sw_format         = s->out_sw_format;
        out_frames_ctx->initial_pool_size = 64;
        if (avctx->extra_hw_frames > 0)
            out_frames_ctx->initial_pool_size += avctx->extra_hw_frames;
        out_frames_hwctx->frame_type      = s->out_mem_mode;

        ret = av_hwframe_ctx_init(out_frames_ref);
        if (ret < 0) {
            av_buffer_unref(&out_frames_ref);
            av_log(avctx, AV_LOG_ERROR, "Error creating frames_ctx for output pad.\n");
            return ret;
        }

        s->surface_ptrs_out = av_calloc(out_frames_hwctx->nb_surfaces,
                                        sizeof(*s->surface_ptrs_out));
        if (!s->surface_ptrs_out) {
            av_buffer_unref(&out_frames_ref);
            return AVERROR(ENOMEM);
        }

        for (i = 0; i < out_frames_hwctx->nb_surfaces; i++)
            s->surface_ptrs_out[i] = out_frames_hwctx->surfaces + i;
        s->nb_surface_ptrs_out = out_frames_hwctx->nb_surfaces;

        av_buffer_unref(&outlink->hw_frames_ctx);
        outlink->hw_frames_ctx = out_frames_ref;
    } else
        s->out_mem_mode = MFX_MEMTYPE_SYSTEM_MEMORY;

    /* extract the properties of the "master" session given to us */
    ret = MFXQueryIMPL(device_hwctx->session, &impl);
    if (ret == MFX_ERR_NONE)
        ret = MFXQueryVersion(device_hwctx->session, &ver);
    if (ret != MFX_ERR_NONE) {
        av_log(avctx, AV_LOG_ERROR, "Error querying the session attributes\n");
        return AVERROR_UNKNOWN;
    }

    if (MFX_IMPL_VIA_VAAPI == MFX_IMPL_VIA_MASK(impl)) {
        handle_type = MFX_HANDLE_VA_DISPLAY;
    } else if (MFX_IMPL_VIA_D3D11 == MFX_IMPL_VIA_MASK(impl)) {
        handle_type = MFX_HANDLE_D3D11_DEVICE;
    } else if (MFX_IMPL_VIA_D3D9 == MFX_IMPL_VIA_MASK(impl)) {
        handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
    } else {
        av_log(avctx, AV_LOG_ERROR, "Error unsupported handle type\n");
        return AVERROR_UNKNOWN;
    }

    ret = MFXVideoCORE_GetHandle(device_hwctx->session, handle_type, &handle);
    if (ret < 0)
        return ff_qsvvpp_print_error(avctx, ret, "Error getting the session handle");
    else if (ret > 0) {
        ff_qsvvpp_print_warning(avctx, ret, "Warning in getting the session handle");
        return AVERROR_UNKNOWN;
    }

    /* create a "slave" session with those same properties, to be used for vpp */
    ret = MFXInit(impl, &ver, &s->session);
    if (ret < 0)
        return ff_qsvvpp_print_error(avctx, ret, "Error initializing a session");
    else if (ret > 0) {
        ff_qsvvpp_print_warning(avctx, ret, "Warning in session initialization");
        return AVERROR_UNKNOWN;
    }

    if (handle) {
        ret = MFXVideoCORE_SetHandle(s->session, handle_type, handle);
        if (ret != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    if (QSV_RUNTIME_VERSION_ATLEAST(ver, 1, 25)) {
        ret = MFXJoinSession(device_hwctx->session, s->session);
        if (ret != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    if (IS_OPAQUE_MEMORY(s->in_mem_mode) || IS_OPAQUE_MEMORY(s->out_mem_mode)) {
        s->opaque_alloc.In.Surfaces   = s->surface_ptrs_in;
        s->opaque_alloc.In.NumSurface = s->nb_surface_ptrs_in;
        s->opaque_alloc.In.Type       = s->in_mem_mode;

        s->opaque_alloc.Out.Surfaces   = s->surface_ptrs_out;
        s->opaque_alloc.Out.NumSurface = s->nb_surface_ptrs_out;
        s->opaque_alloc.Out.Type       = s->out_mem_mode;

        s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
        s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);
    } else if (IS_VIDEO_MEMORY(s->in_mem_mode) || IS_VIDEO_MEMORY(s->out_mem_mode)) {
        mfxFrameAllocator frame_allocator = {
            .pthis  = s,
            .Alloc  = frame_alloc,
            .Lock   = frame_lock,
            .Unlock = frame_unlock,
            .GetHDL = frame_get_hdl,
            .Free   = frame_free,
        };

        ret = MFXVideoCORE_SetFrameAllocator(s->session, &frame_allocator);
        if (ret != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    return 0;
}

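/* Typical usage from a QSV filter, sketched here for orientation (the exact
 * shape depends on the individual filter; "vppctx" and the parameter values
 * below are illustrative only, not taken from this file):
 *
 *     QSVVPPParam param = { .filter_frame  = NULL,  // use ff_filter_frame
 *                           .out_sw_format = AV_PIX_FMT_NV12,
 *                           .async_depth   = 4 };
 *     ret = ff_qsvvpp_create(ctx, &vppctx, &param);      // at config time
 *     ...
 *     ret = ff_qsvvpp_filter_frame(vppctx, inlink, in);  // per input frame
 *     ...
 *     ff_qsvvpp_free(&vppctx);                           // in uninit
 */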
int ff_qsvvpp_create(AVFilterContext *avctx, QSVVPPContext **vpp, QSVVPPParam *param)
{
    int i;
    int ret;
    QSVVPPContext *s;

    s = av_mallocz(sizeof(*s));
    if (!s)
        return AVERROR(ENOMEM);

    s->filter_frame = param->filter_frame;
    if (!s->filter_frame)
        s->filter_frame = ff_filter_frame;
    s->out_sw_format = param->out_sw_format;

    /* create the vpp session */
    ret = init_vpp_session(avctx, s);
    if (ret < 0)
        goto failed;

    s->frame_infos = av_calloc(avctx->nb_inputs, sizeof(*s->frame_infos));
    if (!s->frame_infos) {
        ret = AVERROR(ENOMEM);
        goto failed;
    }

    /* Init each input's information */
    for (i = 0; i < avctx->nb_inputs; i++) {
        ret = fill_frameinfo_by_link(&s->frame_infos[i], avctx->inputs[i]);
        if (ret < 0)
            goto failed;
    }

    /* Update input's frame info according to crop */
    for (i = 0; i < param->num_crop; i++) {
        QSVVPPCrop *crop = param->crop + i;
        if (crop->in_idx > avctx->nb_inputs) {
            ret = AVERROR(EINVAL);
            goto failed;
        }
        s->frame_infos[crop->in_idx].CropX = crop->x;
        s->frame_infos[crop->in_idx].CropY = crop->y;
        s->frame_infos[crop->in_idx].CropW = crop->w;
        s->frame_infos[crop->in_idx].CropH = crop->h;
    }

    s->vpp_param.vpp.In = s->frame_infos[0];

    ret = fill_frameinfo_by_link(&s->vpp_param.vpp.Out, avctx->outputs[0]);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to get frame info from link.\n");
        goto failed;
    }

    if (IS_OPAQUE_MEMORY(s->in_mem_mode) || IS_OPAQUE_MEMORY(s->out_mem_mode)) {
        s->nb_ext_buffers = param->num_ext_buf + 1;
        s->ext_buffers = av_calloc(s->nb_ext_buffers, sizeof(*s->ext_buffers));
        if (!s->ext_buffers) {
            ret = AVERROR(ENOMEM);
            goto failed;
        }

        s->ext_buffers[0] = (mfxExtBuffer *)&s->opaque_alloc;
        /* slot 0 holds the opaque allocation buffer; the caller's buffers follow */
        for (i = 0; i < param->num_ext_buf; i++)
            s->ext_buffers[i + 1] = param->ext_buf[i];
        s->vpp_param.ExtParam    = s->ext_buffers;
        s->vpp_param.NumExtParam = s->nb_ext_buffers;
    } else {
        s->vpp_param.NumExtParam = param->num_ext_buf;
        s->vpp_param.ExtParam    = param->ext_buf;
    }

    s->got_frame = 0;

    /** keep fifo size at least 1. Even when async_depth is 0, fifo is used. */
    s->async_fifo  = av_fifo_alloc2(param->async_depth + 1, sizeof(QSVAsyncFrame), 0);
    s->async_depth = param->async_depth;
    if (!s->async_fifo) {
        ret = AVERROR(ENOMEM);
        goto failed;
    }

    s->vpp_param.AsyncDepth = param->async_depth;

    if (IS_SYSTEM_MEMORY(s->in_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_SYSTEM_MEMORY;
    else if (IS_VIDEO_MEMORY(s->in_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_VIDEO_MEMORY;
    else if (IS_OPAQUE_MEMORY(s->in_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_OPAQUE_MEMORY;

    if (IS_SYSTEM_MEMORY(s->out_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
    else if (IS_VIDEO_MEMORY(s->out_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_VIDEO_MEMORY;
    else if (IS_OPAQUE_MEMORY(s->out_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_OPAQUE_MEMORY;

    /* Print input memory mode */
    ff_qsvvpp_print_iopattern(avctx, s->vpp_param.IOPattern & 0x0F, "VPP");
    /* Print output memory mode */
    ff_qsvvpp_print_iopattern(avctx, s->vpp_param.IOPattern & 0xF0, "VPP");
    ret = MFXVideoVPP_Init(s->session, &s->vpp_param);
    if (ret < 0) {
        ret = ff_qsvvpp_print_error(avctx, ret, "Failed to create a qsvvpp");
        goto failed;
    } else if (ret > 0)
        ff_qsvvpp_print_warning(avctx, ret, "Warning when creating qsvvpp");

    *vpp = s;
    return 0;

failed:
    ff_qsvvpp_free(&s);

    return ret;
}

int ff_qsvvpp_free(QSVVPPContext **vpp)
{
    QSVVPPContext *s = *vpp;

    if (!s)
        return 0;

    if (s->session) {
        MFXVideoVPP_Close(s->session);
        MFXClose(s->session);
    }

    /* release all the resources */
    clear_frame_list(&s->in_frame_list);
    clear_frame_list(&s->out_frame_list);
    av_freep(&s->surface_ptrs_in);
    av_freep(&s->surface_ptrs_out);
    av_freep(&s->ext_buffers);
    av_freep(&s->frame_infos);
    av_fifo_freep2(&s->async_fifo);
    av_freep(vpp);

    return 0;
}

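/* Filter one input frame (or drain pending output once s->eof is set): the
 * input is wrapped into an mfxFrameSurface1, RunFrameVPPAsync is called
 * (retrying while the device is busy), and each returned sync point is queued
 * in async_fifo together with its output frame. Once more than async_depth
 * operations are pending, the oldest one is synchronized and passed to
 * filter_frame(), so up to async_depth VPP operations stay in flight. */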
int ff_qsvvpp_filter_frame(QSVVPPContext *s, AVFilterLink *inlink, AVFrame *picref)
{
    AVFilterContext *ctx = inlink->dst;
    AVFilterLink *outlink = ctx->outputs[0];
    QSVAsyncFrame aframe;
    mfxSyncPoint sync;
    QSVFrame *in_frame, *out_frame;
    int ret, filter_ret;

    while (s->eof && av_fifo_read(s->async_fifo, &aframe, 1) >= 0) {
        if (MFXVideoCORE_SyncOperation(s->session, aframe.sync, 1000) < 0)
            av_log(ctx, AV_LOG_WARNING, "Sync failed.\n");

        filter_ret = s->filter_frame(outlink, aframe.frame->frame);
        if (filter_ret < 0) {
            av_frame_free(&aframe.frame->frame);
            return filter_ret;
        }
        aframe.frame->queued--;
        s->got_frame = 1;
        aframe.frame->frame = NULL;
    }

    if (!picref)
        return 0;

    in_frame = submit_frame(s, inlink, picref);
    if (!in_frame) {
        av_log(ctx, AV_LOG_ERROR, "Failed to submit frame on input[%d]\n",
               FF_INLINK_IDX(inlink));
        return AVERROR(ENOMEM);
    }

    do {
        out_frame = query_frame(s, outlink);
        if (!out_frame) {
            av_log(ctx, AV_LOG_ERROR, "Failed to query an output frame.\n");
            return AVERROR(ENOMEM);
        }

        do {
            ret = MFXVideoVPP_RunFrameVPPAsync(s->session, &in_frame->surface,
                                               &out_frame->surface, NULL, &sync);
            if (ret == MFX_WRN_DEVICE_BUSY)
                av_usleep(500);
        } while (ret == MFX_WRN_DEVICE_BUSY);

        if (ret < 0 && ret != MFX_ERR_MORE_SURFACE) {
            /* Ignore more_data error */
            if (ret == MFX_ERR_MORE_DATA)
                return AVERROR(EAGAIN);
            break;
        }
        out_frame->frame->pts = av_rescale_q(out_frame->surface.Data.TimeStamp,
                                             default_tb, outlink->time_base);

        out_frame->queued++;
        aframe = (QSVAsyncFrame){ sync, out_frame };
        av_fifo_write(s->async_fifo, &aframe, 1);

        if (av_fifo_can_read(s->async_fifo) > s->async_depth) {
            av_fifo_read(s->async_fifo, &aframe, 1);

            do {
                ret = MFXVideoCORE_SyncOperation(s->session, aframe.sync, 1000);
            } while (ret == MFX_WRN_IN_EXECUTION);

            filter_ret = s->filter_frame(outlink, aframe.frame->frame);
            if (filter_ret < 0) {
                av_frame_free(&aframe.frame->frame);
                return filter_ret;
            }

            aframe.frame->queued--;
            s->got_frame = 1;
            aframe.frame->frame = NULL;
        }
    } while (ret == MFX_ERR_MORE_SURFACE);

    if (ret < 0)
        return ff_qsvvpp_print_error(ctx, ret, "Error running VPP");
    else if (ret > 0)
        ff_qsvvpp_print_warning(ctx, ret, "Warning in running VPP");

    return 0;
}