/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include <string.h>

#include "libavutil/avassert.h"
#include "libavutil/mem.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"

#include "avfilter.h"
#include "framesync.h"
#include "formats.h"
#include "internal.h"
#include "vaapi_vpp.h"

typedef struct OverlayVAAPIContext {
    VAAPIVPPContext  vpp_ctx; /**< must be the first field */
    FFFrameSync      fs;
    int              overlay_ox;
    int              overlay_oy;
    int              overlay_ow;
    int              overlay_oh;
    float            alpha;
} OverlayVAAPIContext;

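/* Every input and the output of this filter carry VAAPI hardware frames only. */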
static int overlay_vaapi_query_formats(AVFilterContext *ctx)
{
    int ret;
    enum {
        MAIN    = 0,
        OVERLAY = 1,
    };

    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_VAAPI,
        AV_PIX_FMT_NONE
    };

    ret = ff_formats_ref(ff_make_format_list(pix_fmts), &ctx->inputs[MAIN]->outcfg.formats);
    if (ret < 0)
        return ret;

    ret = ff_formats_ref(ff_make_format_list(pix_fmts), &ctx->inputs[OVERLAY]->outcfg.formats);
    if (ret < 0)
        return ret;

    ret = ff_formats_ref(ff_make_format_list(pix_fmts), &ctx->outputs[0]->incfg.formats);
    if (ret < 0)
        return ret;

    return 0;
}

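/* Query the video processing pipeline capabilities and verify that the VAAPI
 * driver supports blending with a global alpha value. */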
static int overlay_vaapi_build_filter_params(AVFilterContext *avctx)
{
    VAAPIVPPContext *vpp_ctx   = avctx->priv;
    VAStatus vas;
    int support_flag;
    VAProcPipelineCaps pipeline_caps;

    memset(&pipeline_caps, 0, sizeof(pipeline_caps));
    vas = vaQueryVideoProcPipelineCaps(vpp_ctx->hwctx->display,
                                       vpp_ctx->va_context,
                                       NULL, 0,
                                       &pipeline_caps);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to query pipeline "
               "caps: %d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }

    if (!pipeline_caps.blend_flags) {
        av_log(avctx, AV_LOG_ERROR, "VAAPI driver doesn't support overlay\n");
        return AVERROR(EINVAL);
    }

    support_flag = pipeline_caps.blend_flags & VA_BLEND_GLOBAL_ALPHA;
    if (!support_flag) {
        av_log(avctx, AV_LOG_ERROR, "VAAPI driver doesn't support global alpha blending\n");
        return AVERROR(EINVAL);
    }

    return 0;
}

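/* Submit the main and overlay pipeline parameter buffers and render both onto
 * the output surface within a single vaBeginPicture()/vaEndPicture() pass. */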
static int overlay_vaapi_render_picture(AVFilterContext *avctx,
                                        VAProcPipelineParameterBuffer *params,
                                        VAProcPipelineParameterBuffer *subpic_params,
                                        AVFrame *output_frame)
{
    VAAPIVPPContext *ctx   = avctx->priv;
    VASurfaceID output_surface;
    VABufferID params_id;
    VABufferID subpic_params_id;
    VAStatus vas;
    int err = 0;

    output_surface = (VASurfaceID)(uintptr_t)output_frame->data[3];

    vas = vaBeginPicture(ctx->hwctx->display,
                         ctx->va_context, output_surface);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to attach new picture: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                         VAProcPipelineParameterBufferType,
                         sizeof(*params), 1, params, &params_id);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create parameter buffer: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_after_begin;
    }
    av_log(avctx, AV_LOG_DEBUG, "Pipeline parameter buffer is %#x.\n",
           params_id);

    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                         VAProcPipelineParameterBufferType,
                         sizeof(*subpic_params), 1, subpic_params, &subpic_params_id);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create subpicture parameter buffer: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_after_begin;
    }
    av_log(avctx, AV_LOG_DEBUG, "Pipeline subpic parameter buffer is %#x.\n",
           subpic_params_id);

    vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
                          &params_id, 1);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to render parameter buffer: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_after_begin;
    }

    vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
                          &subpic_params_id, 1);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to render subpic parameter buffer: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_after_begin;
    }

    vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to start picture processing: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_after_render;
    }

    if (CONFIG_VAAPI_1 || ctx->hwctx->driver_quirks &
        AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS) {
        vas = vaDestroyBuffer(ctx->hwctx->display, params_id);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to free parameter buffer: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            // And ignore.
        }
    }

    return 0;

    // We want to make sure that if vaBeginPicture has been called, we also
    // call vaRenderPicture and vaEndPicture.  These calls may well fail or
    // do something else nasty, but once we're in this failure case there
    // isn't much else we can do.
fail_after_begin:
    vaRenderPicture(ctx->hwctx->display, ctx->va_context, &params_id, 1);
fail_after_render:
    vaEndPicture(ctx->hwctx->display, ctx->va_context);
fail:
    return err;
}

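/* Framesync callback: fetch the current main and overlay frames, fill one
 * pipeline parameter buffer for each, position the overlay according to the
 * x/y/w/h options, blend it with the global alpha and send the composited
 * frame downstream. */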
static int overlay_vaapi_blend(FFFrameSync *fs)
{
    AVFilterContext    *avctx = fs->parent;
    AVFilterLink     *outlink = avctx->outputs[0];
    OverlayVAAPIContext *ctx  = avctx->priv;
    VAAPIVPPContext *vpp_ctx  = avctx->priv;
    AVFrame *input_main, *input_overlay;
    AVFrame *output;
    VAProcPipelineParameterBuffer params, subpic_params;
    VABlendState blend_state; /**< Blend State */
    VARectangle overlay_region, output_region;
    int err;

    err = overlay_vaapi_build_filter_params(avctx);
    if (err < 0)
        return err;

    err = ff_framesync_get_frame(fs, 0, &input_main, 0);
    if (err < 0)
        return err;
    err = ff_framesync_get_frame(fs, 1, &input_overlay, 0);
    if (err < 0)
        return err;

    av_log(avctx, AV_LOG_DEBUG, "Filter main: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(input_main->format),
           input_main->width, input_main->height, input_main->pts);

    av_log(avctx, AV_LOG_DEBUG, "Filter overlay: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(input_overlay->format),
           input_overlay->width, input_overlay->height, input_overlay->pts);

    if (vpp_ctx->va_context == VA_INVALID_ID)
        return AVERROR(EINVAL);

    output = ff_get_video_buffer(outlink, outlink->w, outlink->h);
    if (!output) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    err = av_frame_copy_props(output, input_main);
    if (err < 0)
        goto fail;

    err = ff_vaapi_vpp_init_params(avctx, &params,
                                   input_main, output);
    if (err < 0)
        goto fail;

    overlay_region = (VARectangle) {
        .x      = ctx->overlay_ox,
        .y      = ctx->overlay_oy,
        .width  = ctx->overlay_ow ? ctx->overlay_ow : input_overlay->width,
        .height = ctx->overlay_oh ? ctx->overlay_oh : input_overlay->height,
    };

    output_region = (VARectangle) {
        .x      = 0,
        .y      = 0,
        .width  = output->width,
        .height = output->height,
    };

    if (overlay_region.x + overlay_region.width > input_main->width ||
        overlay_region.y + overlay_region.height > input_main->height) {
        av_log(avctx, AV_LOG_WARNING,
               "The overlay image exceeds the scope of the main image; "
               "the overlay will be cropped to fit within the main image.\n");
    }

    params.filters     = &vpp_ctx->filter_buffers[0];
    params.num_filters = vpp_ctx->nb_filter_buffers;

    params.output_region = &output_region;
    params.output_background_color = VAAPI_VPP_BACKGROUND_BLACK;

    memcpy(&subpic_params, &params, sizeof(subpic_params));

    blend_state.flags = VA_BLEND_GLOBAL_ALPHA;
    blend_state.global_alpha = ctx->alpha;
    subpic_params.blend_state = &blend_state;

    subpic_params.surface = (VASurfaceID)(uintptr_t)input_overlay->data[3];
    subpic_params.output_region = &overlay_region;

    err = overlay_vaapi_render_picture(avctx, &params, &subpic_params, output);
    if (err < 0)
        goto fail;

    av_log(avctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(output->format),
           output->width, output->height, output->pts);

    return ff_filter_frame(outlink, output);

fail:
    av_frame_free(&output);
    return err;
}

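/* Set up framesync over both inputs, with the main input acting as the
 * primary synchronisation source. */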
static int overlay_vaapi_init_framesync(AVFilterContext *avctx)
{
    OverlayVAAPIContext *ctx = avctx->priv;
    int ret, i;

    ctx->fs.on_event = overlay_vaapi_blend;
    ctx->fs.opaque   = ctx;
    ret = ff_framesync_init(&ctx->fs, avctx, avctx->nb_inputs);
    if (ret < 0)
        return ret;

    for (i = 0; i < avctx->nb_inputs; i++) {
        FFFrameSyncIn *in = &ctx->fs.in[i];
        in->before    = EXT_STOP;
        in->after     = EXT_INFINITY;
        in->sync      = i ? 1 : 2;
        in->time_base = avctx->inputs[i]->time_base;
    }

    return ff_framesync_configure(&ctx->fs);
}

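/* The output link takes its dimensions from the main input. */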
static int overlay_vaapi_config_output(AVFilterLink *outlink)
{
    AVFilterContext  *avctx  = outlink->src;
    OverlayVAAPIContext *ctx = avctx->priv;
    VAAPIVPPContext *vpp_ctx = avctx->priv;
    int err;

    err = overlay_vaapi_init_framesync(avctx);
    if (err < 0)
        return err;

    vpp_ctx->output_width  = avctx->inputs[0]->w;
    vpp_ctx->output_height = avctx->inputs[0]->h;

    err = ff_vaapi_vpp_config_output(outlink);
    if (err < 0)
        return err;

    err = ff_framesync_init_dualinput(&ctx->fs, avctx);
    if (err < 0)
        return err;

    return ff_framesync_configure(&ctx->fs);
}

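/* Initialise the common VAAPI VPP context; AV_PIX_FMT_NONE leaves the output
 * format to be chosen at configuration time. */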
static av_cold int overlay_vaapi_init(AVFilterContext *avctx)
{
    VAAPIVPPContext *vpp_ctx = avctx->priv;

    ff_vaapi_vpp_ctx_init(avctx);
    vpp_ctx->output_format = AV_PIX_FMT_NONE;

    return 0;
}

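/* All input/output scheduling is delegated to framesync. */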
static int overlay_vaapi_activate(AVFilterContext *avctx)
{
    OverlayVAAPIContext *ctx = avctx->priv;

    return ff_framesync_activate(&ctx->fs);
}

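/* Release framesync and VAAPI VPP resources. */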
static av_cold void overlay_vaapi_uninit(AVFilterContext *avctx)
{
    OverlayVAAPIContext *ctx = avctx->priv;

    ff_framesync_uninit(&ctx->fs);
    ff_vaapi_vpp_ctx_uninit(avctx);
}

#define OFFSET(x) offsetof(OverlayVAAPIContext, x)
#define FLAGS (AV_OPT_FLAG_FILTERING_PARAM | AV_OPT_FLAG_VIDEO_PARAM)
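/* A width or height of 0 keeps the overlay frame's own dimensions. */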
static const AVOption overlay_vaapi_options[] = {
    { "x", "Overlay x position",
      OFFSET(overlay_ox), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, .flags = FLAGS },
    { "y", "Overlay y position",
      OFFSET(overlay_oy), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, .flags = FLAGS },
    { "w", "Overlay width",
      OFFSET(overlay_ow), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, .flags = FLAGS },
    { "h", "Overlay height",
      OFFSET(overlay_oh), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, .flags = FLAGS },
    { "alpha", "Overlay global alpha",
      OFFSET(alpha), AV_OPT_TYPE_FLOAT, { .dbl = 0.0 }, 0.0, 1.0, .flags = FLAGS },
    { NULL },
};

AVFILTER_DEFINE_CLASS(overlay_vaapi);

static const AVFilterPad overlay_vaapi_inputs[] = {
    {
        .name             = "main",
        .type             = AVMEDIA_TYPE_VIDEO,
        .get_buffer.video = ff_default_get_video_buffer,
        .config_props     = &ff_vaapi_vpp_config_input,
    },
    {
        .name             = "overlay",
        .type             = AVMEDIA_TYPE_VIDEO,
        .get_buffer.video = ff_default_get_video_buffer,
    },
};

static const AVFilterPad overlay_vaapi_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = &overlay_vaapi_config_output,
    },
};

const AVFilter ff_vf_overlay_vaapi = {
    .name            = "overlay_vaapi",
    .description     = NULL_IF_CONFIG_SMALL("Overlay one video on top of another"),
    .priv_size       = sizeof(OverlayVAAPIContext),
    .priv_class      = &overlay_vaapi_class,
    .init            = &overlay_vaapi_init,
    .uninit          = &overlay_vaapi_uninit,
    .activate        = &overlay_vaapi_activate,
    FILTER_INPUTS(overlay_vaapi_inputs),
    FILTER_OUTPUTS(overlay_vaapi_outputs),
    FILTER_QUERY_FUNC(overlay_vaapi_query_formats),
    .flags_internal  = FF_FILTER_FLAG_HWFRAME_AWARE,
};