/*
 * Video Decode and Presentation API for UNIX (VDPAU) is used for
 * HW decode acceleration for MPEG-1/2, MPEG-4 ASP, H.264 and VC-1.
 *
 * Copyright (c) 2008 NVIDIA
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "config_components.h"

#include <limits.h>

#include "avcodec.h"
#include "decode.h"
#include "internal.h"
#include "mpegvideodec.h"
#include "vc1.h"
#include "vdpau.h"
#include "vdpau_internal.h"

// XXX: at the time of adding this ifdefery, av_assert* wasn't used outside.
// When dropping it, make sure no other av_assert* uses were added since then.

/**
 * @addtogroup VDPAU_Decoding
 *
 * @{
 */

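/* Map a VdpStatus return code to a negative AVERROR value
 * (0 for VDP_STATUS_OK). */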
static int vdpau_error(VdpStatus status)
{
    switch (status) {
    case VDP_STATUS_OK:
        return 0;
    case VDP_STATUS_NO_IMPLEMENTATION:
        return AVERROR(ENOSYS);
    case VDP_STATUS_DISPLAY_PREEMPTED:
        return AVERROR(EIO);
    case VDP_STATUS_INVALID_HANDLE:
        return AVERROR(EBADF);
    case VDP_STATUS_INVALID_POINTER:
        return AVERROR(EFAULT);
    case VDP_STATUS_RESOURCES:
        return AVERROR(ENOBUFS);
    case VDP_STATUS_HANDLE_DEVICE_MISMATCH:
        return AVERROR(EXDEV);
    case VDP_STATUS_ERROR:
        return AVERROR(EIO);
    default:
        return AVERROR(EINVAL);
    }
}

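/* Thin compatibility wrapper; simply forwards to av_vdpau_alloc_context(). */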
AVVDPAUContext *av_alloc_vdpaucontext(void)
{
    return av_vdpau_alloc_context();
}

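/* Accessor generator; instantiated below to provide
 * av_vdpau_hwaccel_get_render2() / av_vdpau_hwaccel_set_render2()
 * for AVVDPAUContext.render2. */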
#define MAKE_ACCESSORS(str, name, type, field) \
    type av_##name##_get_##field(const str *s) { return s->field; } \
    void av_##name##_set_##field(str *s, type v) { s->field = v; }
MAKE_ACCESSORS(AVVDPAUContext, vdpau_hwaccel, AVVDPAU_Render2, render2)

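/* Derive the VDPAU chroma type and the aligned surface size from the codec's
 * software pixel format and coded dimensions; returns AVERROR(ENOSYS) for
 * pixel formats VDPAU cannot represent. */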
int av_vdpau_get_surface_parameters(AVCodecContext *avctx,
                                    VdpChromaType *type,
                                    uint32_t *width, uint32_t *height)
{
    VdpChromaType t;
    uint32_t w = avctx->coded_width;
    uint32_t h = avctx->coded_height;

    /* See <vdpau/vdpau.h> for per-type alignment constraints. */
    switch (avctx->sw_pix_fmt) {
    case AV_PIX_FMT_YUV420P:
    case AV_PIX_FMT_YUVJ420P:
    case AV_PIX_FMT_YUV420P10:
    case AV_PIX_FMT_YUV420P12:
        t = VDP_CHROMA_TYPE_420;
        w = (w + 1) & ~1;
        h = (h + 3) & ~3;
        break;
    case AV_PIX_FMT_YUV422P:
    case AV_PIX_FMT_YUVJ422P:
        t = VDP_CHROMA_TYPE_422;
        w = (w + 1) & ~1;
        h = (h + 1) & ~1;
        break;
    case AV_PIX_FMT_YUV444P:
    case AV_PIX_FMT_YUVJ444P:
    case AV_PIX_FMT_YUV444P10:
    case AV_PIX_FMT_YUV444P12:
        t = VDP_CHROMA_TYPE_444;
        h = (h + 1) & ~1;
        break;
    default:
        return AVERROR(ENOSYS);
    }

    if (type)
        *type = t;
    if (width)
        *width = w;
    if (height)
        *height = h;
    return 0;
}

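/* Fill the provided AVHWFramesContext with the pixel format and surface
 * dimensions this codec context requires. */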
int ff_vdpau_common_frame_params(AVCodecContext *avctx,
                                 AVBufferRef *hw_frames_ctx)
{
    AVHWFramesContext *hw_frames = (AVHWFramesContext*)hw_frames_ctx->data;
    VdpChromaType type;
    uint32_t width;
    uint32_t height;

    if (av_vdpau_get_surface_parameters(avctx, &type, &width, &height))
        return AVERROR(EINVAL);

    hw_frames->format    = AV_PIX_FMT_VDPAU;
    hw_frames->sw_format = avctx->sw_pix_fmt;
    hw_frames->width     = width;
    hw_frames->height    = height;

    return 0;
}

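/* Common hwaccel init: resolve the required VDPAU entry points through
 * get_proc_address, verify surface and decoder capabilities for the requested
 * profile/level, and create the VdpDecoder, unless the application already
 * supplied its own decoder through an AVVDPAUContext. */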
int ff_vdpau_common_init(AVCodecContext *avctx, VdpDecoderProfile profile,
                         int level)
{
    VDPAUHWContext *hwctx = avctx->hwaccel_context;
    VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;
    VdpVideoSurfaceQueryCapabilities *surface_query_caps;
    VdpDecoderQueryCapabilities *decoder_query_caps;
    VdpDecoderCreate *create;
    VdpGetInformationString *info;
    const char *info_string;
    void *func;
    VdpStatus status;
    VdpBool supported;
    uint32_t max_level, max_mb, max_width, max_height;
    VdpChromaType type;
    uint32_t width;
    uint32_t height;
    int ret;

    vdctx->width            = UINT32_MAX;
    vdctx->height           = UINT32_MAX;

    if (av_vdpau_get_surface_parameters(avctx, &type, &width, &height))
        return AVERROR(ENOSYS);

    if (hwctx) {
        hwctx->reset            = 0;

        if (hwctx->context.decoder != VDP_INVALID_HANDLE) {
            vdctx->decoder = hwctx->context.decoder;
            vdctx->render  = hwctx->context.render;
            vdctx->device  = VDP_INVALID_HANDLE;
            return 0; /* Decoder created by user */
        }

        vdctx->device           = hwctx->device;
        vdctx->get_proc_address = hwctx->get_proc_address;

        if (hwctx->flags & AV_HWACCEL_FLAG_IGNORE_LEVEL)
            level = 0;

        if (!(hwctx->flags & AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH) &&
            type != VDP_CHROMA_TYPE_420)
            return AVERROR(ENOSYS);
    } else {
        AVHWFramesContext *frames_ctx;
        AVVDPAUDeviceContext *dev_ctx;

        ret = ff_decode_get_hw_frames_ctx(avctx, AV_HWDEVICE_TYPE_VDPAU);
        if (ret < 0)
            return ret;

        frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
        dev_ctx = frames_ctx->device_ctx->hwctx;

        vdctx->device           = dev_ctx->device;
        vdctx->get_proc_address = dev_ctx->get_proc_address;

        if (avctx->hwaccel_flags & AV_HWACCEL_FLAG_IGNORE_LEVEL)
            level = 0;
    }

    if (level < 0)
        return AVERROR(ENOTSUP);

    status = vdctx->get_proc_address(vdctx->device,
                                     VDP_FUNC_ID_GET_INFORMATION_STRING,
                                     &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        info = func;

    status = info(&info_string);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    if (avctx->codec_id == AV_CODEC_ID_HEVC && strncmp(info_string, "NVIDIA ", 7) == 0 &&
        !(avctx->hwaccel_flags & AV_HWACCEL_FLAG_ALLOW_PROFILE_MISMATCH)) {
        int driver_version = 0;
        sscanf(info_string, "NVIDIA VDPAU Driver Shared Library  %d", &driver_version);
        if (driver_version < 410) {
            av_log(avctx, AV_LOG_VERBOSE, "HEVC with NVIDIA VDPAU drivers is buggy, skipping.\n");
            return AVERROR(ENOTSUP);
        }
    }

    status = vdctx->get_proc_address(vdctx->device,
                                     VDP_FUNC_ID_VIDEO_SURFACE_QUERY_CAPABILITIES,
                                     &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        surface_query_caps = func;

    status = surface_query_caps(vdctx->device, type, &supported,
                                &max_width, &max_height);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    if (supported != VDP_TRUE ||
        max_width < width || max_height < height)
        return AVERROR(ENOTSUP);

    status = vdctx->get_proc_address(vdctx->device,
                                     VDP_FUNC_ID_DECODER_QUERY_CAPABILITIES,
                                     &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        decoder_query_caps = func;

    status = decoder_query_caps(vdctx->device, profile, &supported, &max_level,
                                &max_mb, &max_width, &max_height);
#ifdef VDP_DECODER_PROFILE_H264_CONSTRAINED_BASELINE
    if ((status != VDP_STATUS_OK || supported != VDP_TRUE) && profile == VDP_DECODER_PROFILE_H264_CONSTRAINED_BASELINE) {
        profile = VDP_DECODER_PROFILE_H264_MAIN;
        status = decoder_query_caps(vdctx->device, profile, &supported,
                                    &max_level, &max_mb,
                                    &max_width, &max_height);
    }
#endif
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);

    if (supported != VDP_TRUE || max_level < level ||
        max_width < width || max_height < height)
        return AVERROR(ENOTSUP);

    status = vdctx->get_proc_address(vdctx->device, VDP_FUNC_ID_DECODER_CREATE,
                                     &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        create = func;

    status = vdctx->get_proc_address(vdctx->device, VDP_FUNC_ID_DECODER_RENDER,
                                     &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        vdctx->render = func;

    status = create(vdctx->device, profile, width, height, avctx->refs,
                    &vdctx->decoder);
    if (status == VDP_STATUS_OK) {
        vdctx->width  = avctx->coded_width;
        vdctx->height = avctx->coded_height;
    }

    return vdpau_error(status);
}

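/* Destroy the VdpDecoder created by ff_vdpau_common_init(); a no-op when the
 * decoder is owned by the application or was never created. */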
int ff_vdpau_common_uninit(AVCodecContext *avctx)
{
    VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;
    VdpDecoderDestroy *destroy;
    void *func;
    VdpStatus status;

    if (vdctx->device == VDP_INVALID_HANDLE)
        return 0; /* Decoder created and destroyed by user */
    if (vdctx->width == UINT32_MAX && vdctx->height == UINT32_MAX)
        return 0;

    status = vdctx->get_proc_address(vdctx->device,
                                     VDP_FUNC_ID_DECODER_DESTROY, &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        destroy = func;

    status = destroy(vdctx->decoder);
    return vdpau_error(status);
}

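/* Recreate the decoder if the coded dimensions changed or the application
 * requested a reset via av_vdpau_bind_context(). */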
static int ff_vdpau_common_reinit(AVCodecContext *avctx)
{
    VDPAUHWContext *hwctx = avctx->hwaccel_context;
    VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;

    if (vdctx->device == VDP_INVALID_HANDLE)
        return 0; /* Decoder created by user */
    if (avctx->coded_width == vdctx->width &&
        avctx->coded_height == vdctx->height && (!hwctx || !hwctx->reset))
        return 0;

    avctx->hwaccel->uninit(avctx);
    return avctx->hwaccel->init(avctx);
}

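/* Reset the per-picture list of bitstream buffers at the start of a frame. */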
int ff_vdpau_common_start_frame(struct vdpau_picture_context *pic_ctx,
                                av_unused const uint8_t *buffer,
                                av_unused uint32_t size)
{
    pic_ctx->bitstream_buffers_allocated = 0;
    pic_ctx->bitstream_buffers_used      = 0;
    pic_ctx->bitstream_buffers           = NULL;
    return 0;
}

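/* Submit the accumulated bitstream buffers to the driver, or to the
 * application's render2 callback when one is installed, then free the list. */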
int ff_vdpau_common_end_frame(AVCodecContext *avctx, AVFrame *frame,
                              struct vdpau_picture_context *pic_ctx)
{
    VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;
    AVVDPAUContext *hwctx = avctx->hwaccel_context;
    VdpVideoSurface surf = ff_vdpau_get_surface_id(frame);
    VdpStatus status;
    int val;

    val = ff_vdpau_common_reinit(avctx);
    if (val < 0)
        return val;

    if (hwctx && !hwctx->render && hwctx->render2) {
        status = hwctx->render2(avctx, frame, (void *)&pic_ctx->info,
                                pic_ctx->bitstream_buffers_used, pic_ctx->bitstream_buffers);
    } else {
        status = vdctx->render(vdctx->decoder, surf, &pic_ctx->info,
                               pic_ctx->bitstream_buffers_used,
                               pic_ctx->bitstream_buffers);
    }

    av_freep(&pic_ctx->bitstream_buffers);

    return vdpau_error(status);
}

#if CONFIG_MPEG1_VDPAU_HWACCEL || \
    CONFIG_MPEG2_VDPAU_HWACCEL || CONFIG_MPEG4_VDPAU_HWACCEL || \
    CONFIG_VC1_VDPAU_HWACCEL   || CONFIG_WMV3_VDPAU_HWACCEL
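/* End-of-frame handler shared by the MPEG-1/2/4 and VC-1/WMV3 hwaccels:
 * render the current picture and report the completed band. */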
int ff_vdpau_mpeg_end_frame(AVCodecContext *avctx)
{
    MpegEncContext *s = avctx->priv_data;
    Picture *pic = s->current_picture_ptr;
    struct vdpau_picture_context *pic_ctx = pic->hwaccel_picture_private;
    int val;

    val = ff_vdpau_common_end_frame(avctx, pic->f, pic_ctx);
    if (val < 0)
        return val;

    ff_mpeg_draw_horiz_band(s, 0, s->avctx->height);
    return 0;
}
#endif

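/* Append one bitstream buffer, pointing into the input packet, to the
 * picture's VdpBitstreamBuffer array, growing the array as needed. */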
int ff_vdpau_add_buffer(struct vdpau_picture_context *pic_ctx,
                        const uint8_t *buf, uint32_t size)
{
    VdpBitstreamBuffer *buffers = pic_ctx->bitstream_buffers;

    buffers = av_fast_realloc(buffers, &pic_ctx->bitstream_buffers_allocated,
                              (pic_ctx->bitstream_buffers_used + 1) * sizeof(*buffers));
    if (!buffers)
        return AVERROR(ENOMEM);

    pic_ctx->bitstream_buffers = buffers;
    buffers += pic_ctx->bitstream_buffers_used++;

    buffers->struct_version  = VDP_BITSTREAM_BUFFER_VERSION;
    buffers->bitstream       = buf;
    buffers->bitstream_bytes = size;
    return 0;
}

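/* Allocate the hardware context; the public AVVDPAUContext is the leading
 * member of the larger internal VDPAUHWContext, so the full internal struct
 * is allocated here. */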
AVVDPAUContext *av_vdpau_alloc_context(void)
{
    return av_mallocz(sizeof(VDPAUHWContext));
}

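/*
 * Bind an application-provided VdpDevice and VdpGetProcAddress to the codec
 * context, rejecting unknown flags and marking the context for
 * (re)initialization.
 *
 * A minimal usage sketch (assuming the application already created a
 * VdpDevice, e.g. with vdp_device_create_x11(); error handling omitted):
 *
 *     VdpDevice device;
 *     VdpGetProcAddress *get_proc_address;
 *     // ... obtain 'device' and 'get_proc_address' from libvdpau ...
 *     if (av_vdpau_bind_context(avctx, device, get_proc_address,
 *                               AV_HWACCEL_FLAG_IGNORE_LEVEL) < 0)
 *         ; // fall back to software decoding
 */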
int av_vdpau_bind_context(AVCodecContext *avctx, VdpDevice device,
                          VdpGetProcAddress *get_proc, unsigned flags)
{
    VDPAUHWContext *hwctx;

    if (flags & ~(AV_HWACCEL_FLAG_IGNORE_LEVEL|AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH))
        return AVERROR(EINVAL);

    if (av_reallocp(&avctx->hwaccel_context, sizeof(*hwctx)))
        return AVERROR(ENOMEM);

    hwctx = avctx->hwaccel_context;

    memset(hwctx, 0, sizeof(*hwctx));
    hwctx->context.decoder  = VDP_INVALID_HANDLE;
    hwctx->device           = device;
    hwctx->get_proc_address = get_proc;
    hwctx->flags            = flags;
    hwctx->reset            = 1;
    return 0;
}

/* @}*/