/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "config.h"

#include <stdint.h>
#include <string.h>

#include <vdpau/vdpau.h>

#include "buffer.h"
#include "common.h"
#include "hwcontext.h"
#include "hwcontext_internal.h"
#include "hwcontext_vdpau.h"
#include "mem.h"
#include "pixfmt.h"
#include "pixdesc.h"

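/*
 * Rough usage sketch (illustrative only, not part of this file): the code
 * below is normally reached through the generic hwcontext API, e.g.
 *
 *     AVBufferRef *device_ref = NULL;
 *     int err = av_hwdevice_ctx_create(&device_ref, AV_HWDEVICE_TYPE_VDPAU,
 *                                      NULL, NULL, 0);
 *     // ...allocate an AVHWFramesContext on top of device_ref; calls to
 *     // av_hwframe_transfer_data() then end up in the transfer callbacks
 *     // defined below...
 *     av_buffer_unref(&device_ref);
 */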
typedef struct VDPAUPixFmtMap {
    VdpYCbCrFormat vdpau_fmt;
    enum AVPixelFormat pix_fmt;
} VDPAUPixFmtMap;

static const VDPAUPixFmtMap pix_fmts_420[] = {
    { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV12    },
    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV420P },
#ifdef VDP_YCBCR_FORMAT_P016
    { VDP_YCBCR_FORMAT_P016, AV_PIX_FMT_P016    },
    { VDP_YCBCR_FORMAT_P010, AV_PIX_FMT_P010    },
#endif
    { 0,                     AV_PIX_FMT_NONE,   },
};

static const VDPAUPixFmtMap pix_fmts_422[] = {
    { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV16    },
    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV422P },
    { VDP_YCBCR_FORMAT_UYVY, AV_PIX_FMT_UYVY422 },
    { VDP_YCBCR_FORMAT_YUYV, AV_PIX_FMT_YUYV422 },
    { 0,                     AV_PIX_FMT_NONE,   },
};

static const VDPAUPixFmtMap pix_fmts_444[] = {
#ifdef VDP_YCBCR_FORMAT_Y_U_V_444
    { VDP_YCBCR_FORMAT_Y_U_V_444,    AV_PIX_FMT_YUV444P   },
#endif
#ifdef VDP_YCBCR_FORMAT_P016
    { VDP_YCBCR_FORMAT_Y_U_V_444_16, AV_PIX_FMT_YUV444P16 },
#endif
    { 0,                             AV_PIX_FMT_NONE,     },
};

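/*
 * One entry per supported surface configuration: the VDPAU chroma type, the
 * sw_format used for the frames context, and the list of YCbCr formats to
 * probe for get/put-bits transfers.
 */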
static const struct {
    VdpChromaType chroma_type;
    enum AVPixelFormat frames_sw_format;
    const VDPAUPixFmtMap *map;
} vdpau_pix_fmts[] = {
    { VDP_CHROMA_TYPE_420, AV_PIX_FMT_YUV420P, pix_fmts_420 },
    { VDP_CHROMA_TYPE_422, AV_PIX_FMT_YUV422P, pix_fmts_422 },
    { VDP_CHROMA_TYPE_444, AV_PIX_FMT_YUV444P, pix_fmts_444 },
#ifdef VDP_YCBCR_FORMAT_P016
    { VDP_CHROMA_TYPE_420_16, AV_PIX_FMT_YUV420P10, pix_fmts_420 },
    { VDP_CHROMA_TYPE_420_16, AV_PIX_FMT_YUV420P12, pix_fmts_420 },
    { VDP_CHROMA_TYPE_422_16, AV_PIX_FMT_YUV422P10, pix_fmts_422 },
    { VDP_CHROMA_TYPE_444_16, AV_PIX_FMT_YUV444P10, pix_fmts_444 },
    { VDP_CHROMA_TYPE_444_16, AV_PIX_FMT_YUV444P12, pix_fmts_444 },
#endif
};

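/*
 * Per-device state: the resolved VDPAU entry points and, for each entry of
 * vdpau_pix_fmts, the list of pixel formats this device actually supports
 * for transfers.
 */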
typedef struct VDPAUDeviceContext {
    VdpVideoSurfaceQueryGetPutBitsYCbCrCapabilities *get_transfer_caps;
    VdpVideoSurfaceGetBitsYCbCr                     *get_data;
    VdpVideoSurfacePutBitsYCbCr                     *put_data;
    VdpVideoSurfaceCreate                           *surf_create;
    VdpVideoSurfaceDestroy                          *surf_destroy;

    enum AVPixelFormat *pix_fmts[FF_ARRAY_ELEMS(vdpau_pix_fmts)];
    int              nb_pix_fmts[FF_ARRAY_ELEMS(vdpau_pix_fmts)];
} VDPAUDeviceContext;

typedef struct VDPAUFramesContext {
    VdpVideoSurfaceGetBitsYCbCr *get_data;
    VdpVideoSurfacePutBitsYCbCr *put_data;
    VdpChromaType chroma_type;
    int chroma_idx;

    const enum AVPixelFormat *pix_fmts;
    int                       nb_pix_fmts;
} VDPAUFramesContext;

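/* Count the entries in a format map, excluding the AV_PIX_FMT_NONE terminator. */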
static int count_pixfmts(const VDPAUPixFmtMap *map)
{
    int count = 0;
    while (map->pix_fmt != AV_PIX_FMT_NONE) {
        map++;
        count++;
    }
    return count;
}

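/*
 * For every chroma type in vdpau_pix_fmts, query which YCbCr formats the
 * device can read back or upload and store the resulting
 * AV_PIX_FMT_NONE-terminated list (and its length) in the device private
 * context.
 */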
static int vdpau_init_pixmfts(AVHWDeviceContext *ctx)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
    VDPAUDeviceContext    *priv = ctx->internal->priv;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++) {
        const VDPAUPixFmtMap *map = vdpau_pix_fmts[i].map;
        int nb_pix_fmts;

        nb_pix_fmts = count_pixfmts(map);
        priv->pix_fmts[i] = av_malloc_array(nb_pix_fmts + 1, sizeof(*priv->pix_fmts[i]));
        if (!priv->pix_fmts[i])
            return AVERROR(ENOMEM);

        nb_pix_fmts = 0;
        while (map->pix_fmt != AV_PIX_FMT_NONE) {
            VdpBool supported;
            VdpStatus err = priv->get_transfer_caps(hwctx->device, vdpau_pix_fmts[i].chroma_type,
                                                    map->vdpau_fmt, &supported);
            if (err == VDP_STATUS_OK && supported)
                priv->pix_fmts[i][nb_pix_fmts++] = map->pix_fmt;
            map++;
        }
        priv->pix_fmts[i][nb_pix_fmts++] = AV_PIX_FMT_NONE;
        priv->nb_pix_fmts[i]             = nb_pix_fmts;
    }

    return 0;
}

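/* Resolve a VDPAU function pointer through get_proc_address(); expects
 * 'hwctx', 'ctx' and 'err' to exist in the calling scope. */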
#define GET_CALLBACK(id, result)                                                \
do {                                                                            \
    void *tmp;                                                                  \
    err = hwctx->get_proc_address(hwctx->device, id, &tmp);                     \
    if (err != VDP_STATUS_OK) {                                                 \
        av_log(ctx, AV_LOG_ERROR, "Error getting the " #id " callback.\n");     \
        return AVERROR_UNKNOWN;                                                 \
    }                                                                           \
    result = tmp;                                                               \
} while (0)

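/* Device init: resolve the VDPAU callbacks we need and probe the supported
 * transfer formats. */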
static int vdpau_device_init(AVHWDeviceContext *ctx)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
    VDPAUDeviceContext   *priv  = ctx->internal->priv;
    VdpStatus             err;
    int                   ret;

    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_QUERY_GET_PUT_BITS_Y_CB_CR_CAPABILITIES,
                 priv->get_transfer_caps);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_GET_BITS_Y_CB_CR, priv->get_data);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_PUT_BITS_Y_CB_CR, priv->put_data);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_CREATE,           priv->surf_create);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_DESTROY,          priv->surf_destroy);

    ret = vdpau_init_pixmfts(ctx);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error querying the supported pixel formats\n");
        return ret;
    }

    return 0;
}

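/* Free the per-chroma-type pixel format lists built in vdpau_init_pixmfts(). */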
static void vdpau_device_uninit(AVHWDeviceContext *ctx)
{
    VDPAUDeviceContext *priv = ctx->internal->priv;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++)
        av_freep(&priv->pix_fmts[i]);
}

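/* Report AV_PIX_FMT_VDPAU as the only hw format and, as sw formats, every
 * chroma type for which at least one transfer format was found. */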
static int vdpau_frames_get_constraints(AVHWDeviceContext *ctx,
                                        const void *hwconfig,
                                        AVHWFramesConstraints *constraints)
{
    VDPAUDeviceContext   *priv  = ctx->internal->priv;
    int nb_sw_formats = 0;
    int i;

    constraints->valid_sw_formats = av_malloc_array(FF_ARRAY_ELEMS(vdpau_pix_fmts) + 1,
                                                    sizeof(*constraints->valid_sw_formats));
    if (!constraints->valid_sw_formats)
        return AVERROR(ENOMEM);

    for (i = 0; i < FF_ARRAY_ELEMS(vdpau_pix_fmts); i++) {
        if (priv->nb_pix_fmts[i] > 1)
            constraints->valid_sw_formats[nb_sw_formats++] = vdpau_pix_fmts[i].frames_sw_format;
    }
    constraints->valid_sw_formats[nb_sw_formats] = AV_PIX_FMT_NONE;

    constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
    if (!constraints->valid_hw_formats)
        return AVERROR(ENOMEM);

    constraints->valid_hw_formats[0] = AV_PIX_FMT_VDPAU;
    constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;

    return 0;
}

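/* AVBuffer free callback: the buffer data is the VdpVideoSurface handle itself. */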
static void vdpau_buffer_free(void *opaque, uint8_t *data)
{
    AVHWFramesContext          *ctx = opaque;
    VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv;
    VdpVideoSurface            surf = (VdpVideoSurface)(uintptr_t)data;

    device_priv->surf_destroy(surf);
}

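/* Pool allocator: create a VdpVideoSurface and wrap its handle in an AVBufferRef. */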
static AVBufferRef *vdpau_pool_alloc(void *opaque, buffer_size_t size)
{
    AVHWFramesContext             *ctx = opaque;
    VDPAUFramesContext           *priv = ctx->internal->priv;
    AVVDPAUDeviceContext *device_hwctx = ctx->device_ctx->hwctx;
    VDPAUDeviceContext    *device_priv = ctx->device_ctx->internal->priv;

    AVBufferRef *ret;
    VdpVideoSurface surf;
    VdpStatus err;

    err = device_priv->surf_create(device_hwctx->device, priv->chroma_type,
                                   ctx->width, ctx->height, &surf);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "Error allocating a VDPAU video surface\n");
        return NULL;
    }

    ret = av_buffer_create((uint8_t*)(uintptr_t)surf, sizeof(surf),
                           vdpau_buffer_free, ctx, AV_BUFFER_FLAG_READONLY);
    if (!ret) {
        device_priv->surf_destroy(surf);
        return NULL;
    }

    return ret;
}

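/* Pick the chroma type matching sw_format, cache the transfer callbacks and,
 * if the caller did not supply a pool, set up an internal surface pool. */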
static int vdpau_frames_init(AVHWFramesContext *ctx)
{
    VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv;
    VDPAUFramesContext        *priv = ctx->internal->priv;

    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(vdpau_pix_fmts); i++) {
        if (vdpau_pix_fmts[i].frames_sw_format == ctx->sw_format) {
            priv->chroma_type = vdpau_pix_fmts[i].chroma_type;
            priv->chroma_idx  = i;
            priv->pix_fmts    = device_priv->pix_fmts[i];
            priv->nb_pix_fmts = device_priv->nb_pix_fmts[i];
            break;
        }
    }
    if (priv->nb_pix_fmts < 2) {
        av_log(ctx, AV_LOG_ERROR, "Unsupported sw format: %s\n",
               av_get_pix_fmt_name(ctx->sw_format));
        return AVERROR(ENOSYS);
    }

    if (!ctx->pool) {
        ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(VdpVideoSurface), ctx,
                                                            vdpau_pool_alloc, NULL);
        if (!ctx->internal->pool_internal)
            return AVERROR(ENOMEM);
    }

    priv->get_data = device_priv->get_data;
    priv->put_data = device_priv->put_data;

    return 0;
}

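/* Get a surface from the pool; as usual for AV_PIX_FMT_VDPAU frames, the
 * VdpVideoSurface handle is carried in data[3]. */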
static int vdpau_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
{
    frame->buf[0] = av_buffer_pool_get(ctx->pool);
    if (!frame->buf[0])
        return AVERROR(ENOMEM);

    frame->data[3] = frame->buf[0]->data;
    frame->format  = AV_PIX_FMT_VDPAU;
    frame->width   = ctx->width;
    frame->height  = ctx->height;

    return 0;
}

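/* Return a copy of the AV_PIX_FMT_NONE-terminated format list probed for this
 * frames context's chroma type. */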
static int vdpau_transfer_get_formats(AVHWFramesContext *ctx,
                                      enum AVHWFrameTransferDirection dir,
                                      enum AVPixelFormat **formats)
{
    VDPAUFramesContext *priv  = ctx->internal->priv;

    enum AVPixelFormat *fmts;

    if (priv->nb_pix_fmts == 1) {
        av_log(ctx, AV_LOG_ERROR,
               "No target formats are supported for this chroma type\n");
        return AVERROR(ENOSYS);
    }

    fmts = av_malloc_array(priv->nb_pix_fmts, sizeof(*fmts));
    if (!fmts)
        return AVERROR(ENOMEM);

    memcpy(fmts, priv->pix_fmts, sizeof(*fmts) * (priv->nb_pix_fmts));
    *formats = fmts;

    return 0;
}

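/* Download: map dst->format to a VdpYCbCrFormat, swap the U/V plane pointers
 * for the formats VDPAU expects in YV12-style plane order, then read the
 * surface back with VdpVideoSurfaceGetBitsYCbCr. */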
static int vdpau_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,
                                    const AVFrame *src)
{
    VDPAUFramesContext *priv = ctx->internal->priv;
    VdpVideoSurface     surf = (VdpVideoSurface)(uintptr_t)src->data[3];

    void *data[3];
    uint32_t linesize[3];

    const VDPAUPixFmtMap *map;
    VdpYCbCrFormat vdpau_format;
    VdpStatus err;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(data) && dst->data[i]; i++) {
        data[i] = dst->data[i];
        if (dst->linesize[i] < 0 || dst->linesize[i] > UINT32_MAX) {
            av_log(ctx, AV_LOG_ERROR,
                   "The linesize %d cannot be represented as uint32\n",
                   dst->linesize[i]);
            return AVERROR(ERANGE);
        }
        linesize[i] = dst->linesize[i];
    }

    map = vdpau_pix_fmts[priv->chroma_idx].map;
    for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
        if (map[i].pix_fmt == dst->format) {
            vdpau_format = map[i].vdpau_fmt;
            break;
        }
    }
    if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
        av_log(ctx, AV_LOG_ERROR,
               "Unsupported target pixel format: %s\n",
               av_get_pix_fmt_name(dst->format));
        return AVERROR(EINVAL);
    }

    if ((vdpau_format == VDP_YCBCR_FORMAT_YV12)
#ifdef VDP_YCBCR_FORMAT_Y_U_V_444
            || (vdpau_format == VDP_YCBCR_FORMAT_Y_U_V_444)
#endif
#ifdef VDP_YCBCR_FORMAT_P016
            || (vdpau_format == VDP_YCBCR_FORMAT_Y_U_V_444_16)
#endif
            )
        FFSWAP(void*, data[1], data[2]);

    err = priv->get_data(surf, vdpau_format, data, linesize);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "Error retrieving the data from a VDPAU surface\n");
        return AVERROR_UNKNOWN;
    }

    return 0;
}

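/* Upload counterpart of vdpau_transfer_data_from(): map src->format to a
 * VdpYCbCrFormat, swap the U/V plane pointers where needed, and write the
 * planes with VdpVideoSurfacePutBitsYCbCr. */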
static int vdpau_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
                                  const AVFrame *src)
{
    VDPAUFramesContext *priv = ctx->internal->priv;
    VdpVideoSurface     surf = (VdpVideoSurface)(uintptr_t)dst->data[3];

    const void *data[3];
    uint32_t linesize[3];

    const VDPAUPixFmtMap *map;
    VdpYCbCrFormat vdpau_format;
    VdpStatus err;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(data) && src->data[i]; i++) {
        data[i] = src->data[i];
        if (src->linesize[i] < 0 || src->linesize[i] > UINT32_MAX) {
            av_log(ctx, AV_LOG_ERROR,
                   "The linesize %d cannot be represented as uint32\n",
                   src->linesize[i]);
            return AVERROR(ERANGE);
        }
        linesize[i] = src->linesize[i];
    }

    map = vdpau_pix_fmts[priv->chroma_idx].map;
    for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
        if (map[i].pix_fmt == src->format) {
            vdpau_format = map[i].vdpau_fmt;
            break;
        }
    }
    if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
        av_log(ctx, AV_LOG_ERROR,
               "Unsupported source pixel format: %s\n",
               av_get_pix_fmt_name(src->format));
        return AVERROR(EINVAL);
    }

    if ((vdpau_format == VDP_YCBCR_FORMAT_YV12)
#ifdef VDP_YCBCR_FORMAT_Y_U_V_444
            || (vdpau_format == VDP_YCBCR_FORMAT_Y_U_V_444)
#endif
            )
        FFSWAP(const void*, data[1], data[2]);

    err = priv->put_data(surf, vdpau_format, data, linesize);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "Error uploading the data to a VDPAU surface\n");
        return AVERROR_UNKNOWN;
    }

    return 0;
}

#if HAVE_VDPAU_X11
#include <vdpau/vdpau_x11.h>
#include <X11/Xlib.h>

typedef struct VDPAUDevicePriv {
    VdpDeviceDestroy *device_destroy;
    Display *dpy;
} VDPAUDevicePriv;

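/* Device cleanup: destroy the VdpDevice and close the X11 display we opened. */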
static void vdpau_device_free(AVHWDeviceContext *ctx)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
    VDPAUDevicePriv       *priv = ctx->user_opaque;

    if (priv->device_destroy)
        priv->device_destroy(hwctx->device);
    if (priv->dpy)
        XCloseDisplay(priv->dpy);
    av_freep(&priv);
}

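/* Standalone device creation: open the X11 display named by 'device' (or the
 * default one if NULL) and create a VdpDevice on its default screen. */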
static int vdpau_device_create(AVHWDeviceContext *ctx, const char *device,
                               AVDictionary *opts, int flags)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;

    VDPAUDevicePriv *priv;
    VdpStatus err;
    VdpGetInformationString *get_information_string;
    const char *display, *vendor;

    priv = av_mallocz(sizeof(*priv));
    if (!priv)
        return AVERROR(ENOMEM);

    ctx->user_opaque = priv;
    ctx->free        = vdpau_device_free;

    priv->dpy = XOpenDisplay(device);
    if (!priv->dpy) {
        av_log(ctx, AV_LOG_ERROR, "Cannot open the X11 display %s.\n",
               XDisplayName(device));
        return AVERROR_UNKNOWN;
    }
    display = XDisplayString(priv->dpy);

    err = vdp_device_create_x11(priv->dpy, XDefaultScreen(priv->dpy),
                                &hwctx->device, &hwctx->get_proc_address);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "VDPAU device creation on X11 display %s failed.\n",
               display);
        return AVERROR_UNKNOWN;
    }

    GET_CALLBACK(VDP_FUNC_ID_GET_INFORMATION_STRING, get_information_string);
    GET_CALLBACK(VDP_FUNC_ID_DEVICE_DESTROY,         priv->device_destroy);

    get_information_string(&vendor);
    av_log(ctx, AV_LOG_VERBOSE, "Successfully created a VDPAU device (%s) on "
           "X11 display %s\n", vendor, display);

    return 0;
}
#endif

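/* Hook this implementation into the generic hwcontext infrastructure. */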
const HWContextType ff_hwcontext_type_vdpau = {
    .type                 = AV_HWDEVICE_TYPE_VDPAU,
    .name                 = "VDPAU",

    .device_hwctx_size    = sizeof(AVVDPAUDeviceContext),
    .device_priv_size     = sizeof(VDPAUDeviceContext),
    .frames_priv_size     = sizeof(VDPAUFramesContext),

#if HAVE_VDPAU_X11
    .device_create        = vdpau_device_create,
#endif
    .device_init          = vdpau_device_init,
    .device_uninit        = vdpau_device_uninit,
    .frames_get_constraints = vdpau_frames_get_constraints,
    .frames_init          = vdpau_frames_init,
    .frames_get_buffer    = vdpau_get_buffer,
    .transfer_get_formats = vdpau_transfer_get_formats,
    .transfer_data_to     = vdpau_transfer_data_to,
    .transfer_data_from   = vdpau_transfer_data_from,

    .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_VDPAU, AV_PIX_FMT_NONE },
};