/*
 * Copyright (c) 2000,2001 Fabrice Bellard
 * Copyright (c) 2006 Luca Abeni
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Video4Linux2 grab interface
 *
 * Part of this file is based on the V4L2 video capture example
 * (http://linuxtv.org/downloads/v4l-dvb-apis/capture-example.html)
 *
 * Thanks to Michael Niedermayer for providing the mapping between
 * V4L2_PIX_FMT_* and AV_PIX_FMT_*
 */

#include <stdatomic.h>

#include "libavutil/avassert.h"
#include "libavutil/avstring.h"
#include "libavutil/imgutils.h"
#include "libavutil/parseutils.h"
#include "libavutil/pixdesc.h"
#include "libavutil/time.h"
#include "libavcodec/codec_desc.h"
#include "libavformat/demux.h"
#include "libavformat/internal.h"
#include "avdevice.h"
#include "timefilter.h"
#include "v4l2-common.h"
#include <dirent.h>

#if CONFIG_LIBV4L2
#include <libv4l2.h>
#endif

static const int desired_video_buffers = 256;

#define V4L_ALLFORMATS  3
#define V4L_RAWFORMATS  1
#define V4L_COMPFORMATS 2

/**
 * Return timestamps to the user exactly as returned by the kernel
 */
#define V4L_TS_DEFAULT  0
/**
 * Autodetect the kind of timestamps returned by the kernel and convert to
 * absolute (wall clock) timestamps.
 */
#define V4L_TS_ABS      1
/**
 * Assume kernel timestamps are from the monotonic clock and convert to
 * absolute timestamps.
 */
#define V4L_TS_MONO2ABS 2

/**
 * Once the kind of timestamps returned by the kernel has been detected,
 * the value of the timefilter (NULL or not) determines whether a conversion
 * takes place.
 */
#define V4L_TS_CONVERT_READY V4L_TS_DEFAULT

struct video_data {
    AVClass *class;
    int fd;
    int pixelformat; /* V4L2_PIX_FMT_* */
    int width, height;
    int frame_size;
    int interlaced;
    int top_field_first;
    int ts_mode;
    TimeFilter *timefilter;
    int64_t last_time_m;

    int buffers;
    atomic_int buffers_queued;
    void **buf_start;
    unsigned int *buf_len;
    char *standard;
    v4l2_std_id std_id;
    int channel;
    char *pixel_format; /**< Set by a private option. */
    int list_format;    /**< Set by a private option. */
    int list_standard;  /**< Set by a private option. */
    char *framerate;    /**< Set by a private option. */

    int use_libv4l2;
    int (*open_f)(const char *file, int oflag, ...);
    int (*close_f)(int fd);
    int (*dup_f)(int fd);
#ifdef __GLIBC__
    int (*ioctl_f)(int fd, unsigned long int request, ...);
#else
    int (*ioctl_f)(int fd, int request, ...);
#endif
    ssize_t (*read_f)(int fd, void *buffer, size_t n);
    void *(*mmap_f)(void *start, size_t length, int prot, int flags, int fd, int64_t offset);
    int (*munmap_f)(void *_start, size_t length);
};

struct buff_data {
    struct video_data *s;
    int index;
};
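/**
 * Open the device node, install either the plain libc or the libv4l2
 * syscall wrappers, and verify that the device supports video capture
 * with streaming I/O. Returns the file descriptor or an AVERROR code.
 */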
static int device_open(AVFormatContext *ctx, const char* device_path)
{
    struct video_data *s = ctx->priv_data;
    struct v4l2_capability cap;
    int fd;
    int err;
    int flags = O_RDWR;

#define SET_WRAPPERS(prefix) do {       \
    s->open_f   = prefix ## open;       \
    s->close_f  = prefix ## close;      \
    s->dup_f    = prefix ## dup;        \
    s->ioctl_f  = prefix ## ioctl;      \
    s->read_f   = prefix ## read;       \
    s->mmap_f   = prefix ## mmap;       \
    s->munmap_f = prefix ## munmap;     \
} while (0)

    if (s->use_libv4l2) {
#if CONFIG_LIBV4L2
        SET_WRAPPERS(v4l2_);
#else
        av_log(ctx, AV_LOG_ERROR, "libavdevice is not built with libv4l2 support.\n");
        return AVERROR(EINVAL);
#endif
    } else {
        SET_WRAPPERS();
    }

#define v4l2_open   s->open_f
#define v4l2_close  s->close_f
#define v4l2_dup    s->dup_f
#define v4l2_ioctl  s->ioctl_f
#define v4l2_read   s->read_f
#define v4l2_mmap   s->mmap_f
#define v4l2_munmap s->munmap_f

    if (ctx->flags & AVFMT_FLAG_NONBLOCK) {
        flags |= O_NONBLOCK;
    }

    fd = v4l2_open(device_path, flags, 0);
    if (fd < 0) {
        err = AVERROR(errno);
        av_log(ctx, AV_LOG_ERROR, "Cannot open video device %s: %s\n",
               device_path, av_err2str(err));
        return err;
    }

    if (v4l2_ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
        err = AVERROR(errno);
        av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYCAP): %s\n",
               av_err2str(err));
        goto fail;
    }

    av_log(ctx, AV_LOG_VERBOSE, "fd:%d capabilities:%x\n",
           fd, cap.capabilities);

    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
        av_log(ctx, AV_LOG_ERROR, "Not a video capture device.\n");
        err = AVERROR(ENODEV);
        goto fail;
    }

    if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
        av_log(ctx, AV_LOG_ERROR,
               "The device does not support the streaming I/O method.\n");
        err = AVERROR(ENOSYS);
        goto fail;
    }

    return fd;

fail:
    v4l2_close(fd);
    return err;
}

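/**
 * Negotiate frame size and pixel format with the driver via VIDIOC_S_FMT.
 * The values actually accepted by the driver are written back to *width and
 * *height; returns AVERROR(EINVAL) if the pixel format was not accepted.
 */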
static int device_init(AVFormatContext *ctx, int *width, int *height,
                       uint32_t pixelformat)
{
    struct video_data *s = ctx->priv_data;
    struct v4l2_format fmt = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };
    int res = 0;

    fmt.fmt.pix.width = *width;
    fmt.fmt.pix.height = *height;
    fmt.fmt.pix.pixelformat = pixelformat;
    fmt.fmt.pix.field = V4L2_FIELD_ANY;

    /* Some drivers will fail and return EINVAL when the pixelformat
       is not supported (even if type field is valid and supported) */
    if (v4l2_ioctl(s->fd, VIDIOC_S_FMT, &fmt) < 0)
        res = AVERROR(errno);

    if ((*width != fmt.fmt.pix.width) || (*height != fmt.fmt.pix.height)) {
        av_log(ctx, AV_LOG_INFO,
               "The V4L2 driver changed the video from %dx%d to %dx%d\n",
               *width, *height, fmt.fmt.pix.width, fmt.fmt.pix.height);
        *width = fmt.fmt.pix.width;
        *height = fmt.fmt.pix.height;
    }

    if (pixelformat != fmt.fmt.pix.pixelformat) {
        av_log(ctx, AV_LOG_DEBUG,
               "The V4L2 driver changed the pixel format "
               "from 0x%08X to 0x%08X\n",
               pixelformat, fmt.fmt.pix.pixelformat);
        res = AVERROR(EINVAL);
    }

    if (fmt.fmt.pix.field == V4L2_FIELD_INTERLACED) {
        av_log(ctx, AV_LOG_DEBUG,
               "The V4L2 driver is using the interlaced mode\n");
        s->interlaced = 1;
    }

    return res;
}

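/**
 * Return 1 if the current TV standard suggests top-field-first content
 * (anything but NTSC), 0 otherwise or if the standard cannot be queried.
 */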
static int first_field(const struct video_data *s)
{
    int res;
    v4l2_std_id std;

    res = v4l2_ioctl(s->fd, VIDIOC_G_STD, &std);
    if (res < 0)
        return 0;
    if (std & V4L2_STD_NTSC)
        return 0;

    return 1;
}

#if HAVE_STRUCT_V4L2_FRMIVALENUM_DISCRETE
static void list_framesizes(AVFormatContext *ctx, uint32_t pixelformat)
{
    const struct video_data *s = ctx->priv_data;
    struct v4l2_frmsizeenum vfse = { .pixel_format = pixelformat };

    while(!v4l2_ioctl(s->fd, VIDIOC_ENUM_FRAMESIZES, &vfse)) {
        switch (vfse.type) {
        case V4L2_FRMSIZE_TYPE_DISCRETE:
            av_log(ctx, AV_LOG_INFO, " %ux%u",
                   vfse.discrete.width, vfse.discrete.height);
        break;
        case V4L2_FRMSIZE_TYPE_CONTINUOUS:
        case V4L2_FRMSIZE_TYPE_STEPWISE:
            av_log(ctx, AV_LOG_INFO, " {%u-%u, %u}x{%u-%u, %u}",
                   vfse.stepwise.min_width,
                   vfse.stepwise.max_width,
                   vfse.stepwise.step_width,
                   vfse.stepwise.min_height,
                   vfse.stepwise.max_height,
                   vfse.stepwise.step_height);
        }
        vfse.index++;
    }
}
#endif

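/**
 * Enumerate the formats supported by the device with VIDIOC_ENUM_FMT and
 * print them, filtered by type (V4L_RAWFORMATS, V4L_COMPFORMATS or both).
 */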
static void list_formats(AVFormatContext *ctx, int type)
{
    const struct video_data *s = ctx->priv_data;
    struct v4l2_fmtdesc vfd = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };

    while(!v4l2_ioctl(s->fd, VIDIOC_ENUM_FMT, &vfd)) {
        enum AVCodecID codec_id = ff_fmt_v4l2codec(vfd.pixelformat);
        enum AVPixelFormat pix_fmt = ff_fmt_v4l2ff(vfd.pixelformat, codec_id);

        vfd.index++;

        if (!(vfd.flags & V4L2_FMT_FLAG_COMPRESSED) &&
            type & V4L_RAWFORMATS) {
            const char *fmt_name = av_get_pix_fmt_name(pix_fmt);
            av_log(ctx, AV_LOG_INFO, "Raw       : %11s : %20s :",
                   fmt_name ? fmt_name : "Unsupported",
                   vfd.description);
        } else if (vfd.flags & V4L2_FMT_FLAG_COMPRESSED &&
                   type & V4L_COMPFORMATS) {
            const AVCodecDescriptor *desc = avcodec_descriptor_get(codec_id);
            av_log(ctx, AV_LOG_INFO, "Compressed: %11s : %20s :",
                   desc ? desc->name : "Unsupported",
                   vfd.description);
        } else {
            continue;
        }

#ifdef V4L2_FMT_FLAG_EMULATED
        if (vfd.flags & V4L2_FMT_FLAG_EMULATED)
            av_log(ctx, AV_LOG_INFO, " Emulated :");
#endif
#if HAVE_STRUCT_V4L2_FRMIVALENUM_DISCRETE
        list_framesizes(ctx, vfd.pixelformat);
#endif
        av_log(ctx, AV_LOG_INFO, "\n");
    }
}

static void list_standards(AVFormatContext *ctx)
{
    int ret;
    struct video_data *s = ctx->priv_data;
    struct v4l2_standard standard;

    if (s->std_id == 0)
        return;

    for (standard.index = 0; ; standard.index++) {
        if (v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) {
            ret = AVERROR(errno);
            if (ret == AVERROR(EINVAL)) {
                break;
            } else {
                av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_ENUMSTD): %s\n", av_err2str(ret));
                return;
            }
        }
        av_log(ctx, AV_LOG_INFO, "%2d, %16"PRIx64", %s\n",
               standard.index, (uint64_t)standard.id, standard.name);
    }
}

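/**
 * Request up to desired_video_buffers capture buffers from the driver and
 * mmap() each of them into the process address space. At least two buffers
 * are required for streaming to work.
 */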
static int mmap_init(AVFormatContext *ctx)
{
    int i, res;
    struct video_data *s = ctx->priv_data;
    struct v4l2_requestbuffers req = {
        .type   = V4L2_BUF_TYPE_VIDEO_CAPTURE,
        .count  = desired_video_buffers,
        .memory = V4L2_MEMORY_MMAP
    };

    if (v4l2_ioctl(s->fd, VIDIOC_REQBUFS, &req) < 0) {
        res = AVERROR(errno);
        av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_REQBUFS): %s\n", av_err2str(res));
        return res;
    }

    if (req.count < 2) {
        av_log(ctx, AV_LOG_ERROR, "Insufficient buffer memory\n");
        return AVERROR(ENOMEM);
    }
    s->buffers = req.count;
    s->buf_start = av_malloc_array(s->buffers, sizeof(void *));
    if (!s->buf_start) {
        av_log(ctx, AV_LOG_ERROR, "Cannot allocate buffer pointers\n");
        return AVERROR(ENOMEM);
    }
    s->buf_len = av_malloc_array(s->buffers, sizeof(unsigned int));
    if (!s->buf_len) {
        av_log(ctx, AV_LOG_ERROR, "Cannot allocate buffer sizes\n");
        av_freep(&s->buf_start);
        return AVERROR(ENOMEM);
    }

    for (i = 0; i < req.count; i++) {
        struct v4l2_buffer buf = {
            .type   = V4L2_BUF_TYPE_VIDEO_CAPTURE,
            .index  = i,
            .memory = V4L2_MEMORY_MMAP
        };
        if (v4l2_ioctl(s->fd, VIDIOC_QUERYBUF, &buf) < 0) {
            res = AVERROR(errno);
            av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYBUF): %s\n", av_err2str(res));
            return res;
        }

        s->buf_len[i] = buf.length;
        if (s->frame_size > 0 && s->buf_len[i] < s->frame_size) {
            av_log(ctx, AV_LOG_ERROR,
                   "buf_len[%d] = %d < expected frame size %d\n",
                   i, s->buf_len[i], s->frame_size);
            return AVERROR(ENOMEM);
        }
        s->buf_start[i] = v4l2_mmap(NULL, buf.length,
                               PROT_READ | PROT_WRITE, MAP_SHARED,
                               s->fd, buf.m.offset);

        if (s->buf_start[i] == MAP_FAILED) {
            res = AVERROR(errno);
            av_log(ctx, AV_LOG_ERROR, "mmap: %s\n", av_err2str(res));
            return res;
        }
    }

    return 0;
}

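/**
 * Hand a buffer back to the driver with VIDIOC_QBUF and account for it in
 * buffers_queued.
 */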
static int enqueue_buffer(struct video_data *s, struct v4l2_buffer *buf)
{
    int res = 0;

    if (v4l2_ioctl(s->fd, VIDIOC_QBUF, buf) < 0) {
        res = AVERROR(errno);
        av_log(NULL, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n", av_err2str(res));
    } else {
        atomic_fetch_add(&s->buffers_queued, 1);
    }

    return res;
}

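/**
 * AVBuffer free callback: when a packet that wraps a mmapped buffer is
 * released, requeue the underlying V4L2 buffer so the driver can reuse it.
 */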
static void mmap_release_buffer(void *opaque, uint8_t *data)
{
    struct v4l2_buffer buf = { 0 };
    struct buff_data *buf_descriptor = opaque;
    struct video_data *s = buf_descriptor->s;

    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.index = buf_descriptor->index;
    av_free(buf_descriptor);

    enqueue_buffer(s, &buf);
}

#if HAVE_CLOCK_GETTIME && defined(CLOCK_MONOTONIC)
static int64_t av_gettime_monotonic(void)
{
    return av_gettime_relative();
}
#endif

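/**
 * Compare the first kernel timestamp against the wall clock and the
 * monotonic clock to detect which one the driver uses, then set up the
 * time filter needed to convert monotonic timestamps to absolute ones.
 */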
static int init_convert_timestamp(AVFormatContext *ctx, int64_t ts)
{
    struct video_data *s = ctx->priv_data;
    int64_t now;

    now = av_gettime();
    if (s->ts_mode == V4L_TS_ABS &&
        ts <= now + 1 * AV_TIME_BASE && ts >= now - 10 * AV_TIME_BASE) {
        av_log(ctx, AV_LOG_INFO, "Detected absolute timestamps\n");
        s->ts_mode = V4L_TS_CONVERT_READY;
        return 0;
    }
#if HAVE_CLOCK_GETTIME && defined(CLOCK_MONOTONIC)
    if (ctx->streams[0]->avg_frame_rate.num) {
        now = av_gettime_monotonic();
        if (s->ts_mode == V4L_TS_MONO2ABS ||
            (ts <= now + 1 * AV_TIME_BASE && ts >= now - 10 * AV_TIME_BASE)) {
            AVRational tb = {AV_TIME_BASE, 1};
            int64_t period = av_rescale_q(1, tb, ctx->streams[0]->avg_frame_rate);
            av_log(ctx, AV_LOG_INFO, "Detected monotonic timestamps, converting\n");
            /* microseconds instead of seconds, MHz instead of Hz */
            s->timefilter = ff_timefilter_new(1, period, 1.0E-6);
            if (!s->timefilter)
                return AVERROR(ENOMEM);
            s->ts_mode = V4L_TS_CONVERT_READY;
            return 0;
        }
    }
#endif
    av_log(ctx, AV_LOG_ERROR, "Unknown timestamps\n");
    return AVERROR(EIO);
}

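/**
 * Convert a dequeued buffer timestamp to a wall-clock timestamp. If a
 * timestamp mode other than the default was requested, the first call runs
 * the clock detection in init_convert_timestamp(); afterwards the time
 * filter, if any, converts monotonic timestamps to absolute ones.
 */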
static int convert_timestamp(AVFormatContext *ctx, int64_t *ts)
{
    struct video_data *s = ctx->priv_data;

    if (s->ts_mode) {
        int r = init_convert_timestamp(ctx, *ts);
        if (r < 0)
            return r;
    }
#if HAVE_CLOCK_GETTIME && defined(CLOCK_MONOTONIC)
    if (s->timefilter) {
        int64_t nowa = av_gettime();
        int64_t nowm = av_gettime_monotonic();
        ff_timefilter_update(s->timefilter, nowa, nowm - s->last_time_m);
        s->last_time_m = nowm;
        *ts = ff_timefilter_eval(s->timefilter, *ts - nowm);
    }
#endif
    return 0;
}

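/**
 * Dequeue one filled buffer with VIDIOC_DQBUF and wrap it into an AVPacket.
 * While plenty of buffers are still queued the packet references the mmapped
 * memory directly (zero copy) and the buffer is requeued when the packet is
 * freed; when the queue runs low the data is copied and the buffer requeued
 * immediately.
 */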
static int mmap_read_frame(AVFormatContext *ctx, AVPacket *pkt)
{
    struct video_data *s = ctx->priv_data;
    struct v4l2_buffer buf = {
        .type   = V4L2_BUF_TYPE_VIDEO_CAPTURE,
        .memory = V4L2_MEMORY_MMAP
    };
    struct timeval buf_ts;
    int res;

    pkt->size = 0;

    /* FIXME: Some special treatment might be needed in case of loss of signal... */
    while ((res = v4l2_ioctl(s->fd, VIDIOC_DQBUF, &buf)) < 0 && (errno == EINTR));
    if (res < 0) {
        if (errno == EAGAIN)
            return AVERROR(EAGAIN);

        res = AVERROR(errno);
        av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_DQBUF): %s\n",
               av_err2str(res));
        return res;
    }

    buf_ts = buf.timestamp;

    if (buf.index >= s->buffers) {
        av_log(ctx, AV_LOG_ERROR, "Invalid buffer index received.\n");
        return AVERROR(EINVAL);
    }
    atomic_fetch_add(&s->buffers_queued, -1);
    // always keep at least one buffer queued
    av_assert0(atomic_load(&s->buffers_queued) >= 1);

#ifdef V4L2_BUF_FLAG_ERROR
    if (buf.flags & V4L2_BUF_FLAG_ERROR) {
        av_log(ctx, AV_LOG_WARNING,
               "Dequeued v4l2 buffer contains corrupted data (%d bytes).\n",
               buf.bytesused);
        buf.bytesused = 0;
    } else
#endif
    {
        /* CPIA is a compressed format and we don't know the exact number of bytes
         * used by a frame, so set it here as the driver announces it. */
        if (ctx->video_codec_id == AV_CODEC_ID_CPIA)
            s->frame_size = buf.bytesused;

        if (s->frame_size > 0 && buf.bytesused != s->frame_size) {
            av_log(ctx, AV_LOG_WARNING,
                   "Dequeued v4l2 buffer contains %d bytes, but %d were expected. Flags: 0x%08X.\n",
                   buf.bytesused, s->frame_size, buf.flags);
            buf.bytesused = 0;
        }
    }

    /* Image is at s->buf_start[buf.index] */
    if (atomic_load(&s->buffers_queued) == FFMAX(s->buffers / 8, 1)) {
        /* when we start getting low on queued buffers, fall back on copying data */
        res = av_new_packet(pkt, buf.bytesused);
        if (res < 0) {
            av_log(ctx, AV_LOG_ERROR, "Error allocating a packet.\n");
            enqueue_buffer(s, &buf);
            return res;
        }
        memcpy(pkt->data, s->buf_start[buf.index], buf.bytesused);

        res = enqueue_buffer(s, &buf);
        if (res) {
            av_packet_unref(pkt);
            return res;
        }
    } else {
        struct buff_data *buf_descriptor;

        pkt->data     = s->buf_start[buf.index];
        pkt->size     = buf.bytesused;

        buf_descriptor = av_malloc(sizeof(struct buff_data));
        if (!buf_descriptor) {
            /* Something went wrong... Since av_malloc() failed, we cannot even
             * allocate a buffer for memcpying into it
             */
            av_log(ctx, AV_LOG_ERROR, "Failed to allocate a buffer descriptor\n");
            enqueue_buffer(s, &buf);

            return AVERROR(ENOMEM);
        }
        buf_descriptor->index = buf.index;
        buf_descriptor->s     = s;

        pkt->buf = av_buffer_create(pkt->data, pkt->size, mmap_release_buffer,
                                    buf_descriptor, 0);
        if (!pkt->buf) {
            av_log(ctx, AV_LOG_ERROR, "Failed to create a buffer\n");
            enqueue_buffer(s, &buf);
            av_freep(&buf_descriptor);
            return AVERROR(ENOMEM);
        }
    }
    pkt->pts = buf_ts.tv_sec * INT64_C(1000000) + buf_ts.tv_usec;
    convert_timestamp(ctx, &pkt->pts);

    return pkt->size;
}

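/**
 * Queue all mmapped buffers and start streaming with VIDIOC_STREAMON.
 */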
static int mmap_start(AVFormatContext *ctx)
{
    struct video_data *s = ctx->priv_data;
    enum v4l2_buf_type type;
    int i, res;

    for (i = 0; i < s->buffers; i++) {
        struct v4l2_buffer buf = {
            .type   = V4L2_BUF_TYPE_VIDEO_CAPTURE,
            .index  = i,
            .memory = V4L2_MEMORY_MMAP
        };

        if (v4l2_ioctl(s->fd, VIDIOC_QBUF, &buf) < 0) {
            res = AVERROR(errno);
            av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QBUF): %s\n",
                   av_err2str(res));
            return res;
        }
    }
    atomic_store(&s->buffers_queued, s->buffers);

    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (v4l2_ioctl(s->fd, VIDIOC_STREAMON, &type) < 0) {
        res = AVERROR(errno);
        av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_STREAMON): %s\n",
               av_err2str(res));
        return res;
    }

    return 0;
}

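/**
 * Stop streaming and unmap all capture buffers.
 */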
static void mmap_close(struct video_data *s)
{
    enum v4l2_buf_type type;
    int i;

    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    /* We do not check for the result, because we could
     * not do anything about it anyway...
     */
    v4l2_ioctl(s->fd, VIDIOC_STREAMOFF, &type);
    for (i = 0; i < s->buffers; i++) {
        v4l2_munmap(s->buf_start[i], s->buf_len[i]);
    }
    av_freep(&s->buf_start);
    av_freep(&s->buf_len);
}

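/**
 * Apply the user-supplied TV standard and frame rate to the device and
 * propagate the resulting time per frame to the stream's frame rate fields.
 */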
static int v4l2_set_parameters(AVFormatContext *ctx)
{
    struct video_data *s = ctx->priv_data;
    struct v4l2_standard standard = { 0 };
    struct v4l2_streamparm streamparm = { 0 };
    struct v4l2_fract *tpf;
    AVRational framerate_q = { 0 };
    int i, ret;

    if (s->framerate &&
        (ret = av_parse_video_rate(&framerate_q, s->framerate)) < 0) {
        av_log(ctx, AV_LOG_ERROR, "Could not parse framerate '%s'.\n",
               s->framerate);
        return ret;
    }

    if (s->standard) {
        if (s->std_id) {
            ret = 0;
            av_log(ctx, AV_LOG_DEBUG, "Setting standard: %s\n", s->standard);
            /* set tv standard */
            for (i = 0; ; i++) {
                standard.index = i;
                if (v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) {
                    ret = AVERROR(errno);
                    break;
                }
                if (!av_strcasecmp(standard.name, s->standard))
                    break;
            }
            if (ret < 0) {
                av_log(ctx, AV_LOG_ERROR, "Unknown or unsupported standard '%s'\n", s->standard);
                return ret;
            }

            if (v4l2_ioctl(s->fd, VIDIOC_S_STD, &standard.id) < 0) {
                ret = AVERROR(errno);
                av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_S_STD): %s\n", av_err2str(ret));
                return ret;
            }
        } else {
            av_log(ctx, AV_LOG_WARNING,
                   "This device does not support any standard\n");
        }
    }

    /* get standard */
    if (v4l2_ioctl(s->fd, VIDIOC_G_STD, &s->std_id) == 0) {
        tpf = &standard.frameperiod;
        for (i = 0; ; i++) {
            standard.index = i;
            if (v4l2_ioctl(s->fd, VIDIOC_ENUMSTD, &standard) < 0) {
                ret = AVERROR(errno);
                if (ret == AVERROR(EINVAL)
#ifdef ENODATA
                    || ret == AVERROR(ENODATA)
#endif
                ) {
                    tpf = &streamparm.parm.capture.timeperframe;
                    break;
                }
                av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_ENUMSTD): %s\n", av_err2str(ret));
                return ret;
            }
            if (standard.id == s->std_id) {
                av_log(ctx, AV_LOG_DEBUG,
                       "Current standard: %s, id: %"PRIx64", frameperiod: %d/%d\n",
                       standard.name, (uint64_t)standard.id, tpf->numerator, tpf->denominator);
                break;
            }
        }
    } else {
        tpf = &streamparm.parm.capture.timeperframe;
    }

    streamparm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (v4l2_ioctl(s->fd, VIDIOC_G_PARM, &streamparm) < 0) {
        ret = AVERROR(errno);
        av_log(ctx, AV_LOG_WARNING, "ioctl(VIDIOC_G_PARM): %s\n", av_err2str(ret));
    } else if (framerate_q.num && framerate_q.den) {
        if (streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) {
            tpf = &streamparm.parm.capture.timeperframe;

            av_log(ctx, AV_LOG_DEBUG, "Setting time per frame to %d/%d\n",
                   framerate_q.den, framerate_q.num);
            tpf->numerator   = framerate_q.den;
            tpf->denominator = framerate_q.num;

            if (v4l2_ioctl(s->fd, VIDIOC_S_PARM, &streamparm) < 0) {
                ret = AVERROR(errno);
                av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_S_PARM): %s\n",
                       av_err2str(ret));
                return ret;
            }

            if (framerate_q.num != tpf->denominator ||
                framerate_q.den != tpf->numerator) {
                av_log(ctx, AV_LOG_INFO,
                       "The driver changed the time per frame from "
                       "%d/%d to %d/%d\n",
                       framerate_q.den, framerate_q.num,
                       tpf->numerator, tpf->denominator);
            }
        } else {
            av_log(ctx, AV_LOG_WARNING,
                   "The driver does not permit changing the time per frame\n");
        }
    }
    if (tpf->denominator > 0 && tpf->numerator > 0) {
        ctx->streams[0]->avg_frame_rate.num = tpf->denominator;
        ctx->streams[0]->avg_frame_rate.den = tpf->numerator;
        ctx->streams[0]->r_frame_rate = ctx->streams[0]->avg_frame_rate;
    } else
        av_log(ctx, AV_LOG_WARNING, "Time per frame unknown\n");

    return 0;
}

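/**
 * Try to configure the device for the requested pixel format / codec; if
 * that fails with EINVAL, walk ff_fmt_conversion_table and pick the first
 * format the driver accepts.
 */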
static int device_try_init(AVFormatContext *ctx,
                           enum AVPixelFormat pix_fmt,
                           int *width,
                           int *height,
                           uint32_t *desired_format,
                           enum AVCodecID *codec_id)
{
    int ret, i;

    *desired_format = ff_fmt_ff2v4l(pix_fmt, ctx->video_codec_id);

    if (*desired_format) {
        ret = device_init(ctx, width, height, *desired_format);
        if (ret < 0) {
            *desired_format = 0;
            if (ret != AVERROR(EINVAL))
                return ret;
        }
    }

    if (!*desired_format) {
        for (i = 0; ff_fmt_conversion_table[i].codec_id != AV_CODEC_ID_NONE; i++) {
            if (ctx->video_codec_id == AV_CODEC_ID_NONE ||
                ff_fmt_conversion_table[i].codec_id == ctx->video_codec_id) {
                av_log(ctx, AV_LOG_DEBUG, "Trying to set codec:%s pix_fmt:%s\n",
                       avcodec_get_name(ff_fmt_conversion_table[i].codec_id),
                       (char *)av_x_if_null(av_get_pix_fmt_name(ff_fmt_conversion_table[i].ff_fmt), "none"));

                *desired_format = ff_fmt_conversion_table[i].v4l2_fmt;
                ret = device_init(ctx, width, height, *desired_format);
                if (ret >= 0)
                    break;
                else if (ret != AVERROR(EINVAL))
                    return ret;
                *desired_format = 0;
            }
        }

        if (*desired_format == 0) {
            av_log(ctx, AV_LOG_ERROR, "Cannot find a proper format for "
                   "codec '%s' (id %d), pixel format '%s' (id %d)\n",
                   avcodec_get_name(ctx->video_codec_id), ctx->video_codec_id,
                   (char *)av_x_if_null(av_get_pix_fmt_name(pix_fmt), "none"), pix_fmt);
            ret = AVERROR(EINVAL);
        }
    }

    *codec_id = ff_fmt_v4l2codec(*desired_format);
    if (*codec_id == AV_CODEC_ID_NONE)
        av_assert0(ret == AVERROR(EINVAL));
    return ret;
}

static int v4l2_read_probe(const AVProbeData *p)
{
    if (av_strstart(p->filename, "/dev/video", NULL))
        return AVPROBE_SCORE_MAX - 1;
    return 0;
}

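/**
 * Demuxer read_header callback: open the device, select the input channel,
 * negotiate format, frame size and frame rate, start streaming and set up
 * the single video stream exposed to libavformat.
 */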
static int v4l2_read_header(AVFormatContext *ctx)
{
    struct video_data *s = ctx->priv_data;
    AVStream *st;
    int res = 0;
    uint32_t desired_format;
    enum AVCodecID codec_id = AV_CODEC_ID_NONE;
    enum AVPixelFormat pix_fmt = AV_PIX_FMT_NONE;
    struct v4l2_input input = { 0 };

    st = avformat_new_stream(ctx, NULL);
    if (!st)
        return AVERROR(ENOMEM);

#if CONFIG_LIBV4L2
    /* silence libv4l2 logging. if fopen() fails v4l2_log_file will be NULL
       and errors will get sent to stderr */
    if (s->use_libv4l2)
        v4l2_log_file = fopen("/dev/null", "w");
#endif

    s->fd = device_open(ctx, ctx->url);
    if (s->fd < 0)
        return s->fd;

    if (s->channel != -1) {
        /* set video input */
        av_log(ctx, AV_LOG_DEBUG, "Selecting input_channel: %d\n", s->channel);
        if (v4l2_ioctl(s->fd, VIDIOC_S_INPUT, &s->channel) < 0) {
            res = AVERROR(errno);
            av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_S_INPUT): %s\n", av_err2str(res));
            goto fail;
        }
    } else {
        /* get current video input */
        if (v4l2_ioctl(s->fd, VIDIOC_G_INPUT, &s->channel) < 0) {
            res = AVERROR(errno);
            av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_G_INPUT): %s\n", av_err2str(res));
            goto fail;
        }
    }

    /* enum input */
    input.index = s->channel;
    if (v4l2_ioctl(s->fd, VIDIOC_ENUMINPUT, &input) < 0) {
        res = AVERROR(errno);
        av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_ENUMINPUT): %s\n", av_err2str(res));
        goto fail;
    }
    s->std_id = input.std;
    av_log(ctx, AV_LOG_DEBUG, "Current input_channel: %d, input_name: %s, input_std: %"PRIx64"\n",
           s->channel, input.name, (uint64_t)input.std);

    if (s->list_format) {
        list_formats(ctx, s->list_format);
        res = AVERROR_EXIT;
        goto fail;
    }

    if (s->list_standard) {
        list_standards(ctx);
        res = AVERROR_EXIT;
        goto fail;
    }

    avpriv_set_pts_info(st, 64, 1, 1000000); /* 64 bits pts in us */

    if (s->pixel_format) {
        const AVCodecDescriptor *desc = avcodec_descriptor_get_by_name(s->pixel_format);

        if (desc)
            ctx->video_codec_id = desc->id;

        pix_fmt = av_get_pix_fmt(s->pixel_format);

        if (pix_fmt == AV_PIX_FMT_NONE && !desc) {
            av_log(ctx, AV_LOG_ERROR, "No such input format: %s.\n",
                   s->pixel_format);

            res = AVERROR(EINVAL);
            goto fail;
        }
    }

    if (!s->width && !s->height) {
        struct v4l2_format fmt = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };

        av_log(ctx, AV_LOG_VERBOSE,
               "Querying the device for the current frame size\n");
        if (v4l2_ioctl(s->fd, VIDIOC_G_FMT, &fmt) < 0) {
            res = AVERROR(errno);
            av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_G_FMT): %s\n",
                   av_err2str(res));
            goto fail;
        }

        s->width  = fmt.fmt.pix.width;
        s->height = fmt.fmt.pix.height;
        av_log(ctx, AV_LOG_VERBOSE,
               "Setting frame size to %dx%d\n", s->width, s->height);
    }

    res = device_try_init(ctx, pix_fmt, &s->width, &s->height, &desired_format, &codec_id);
    if (res < 0)
        goto fail;

    /* If no pixel_format was specified, the codec_id was not known up
     * until now. Set video_codec_id in the context, as codec_id will
     * not be available outside this function
     */
    if (codec_id != AV_CODEC_ID_NONE && ctx->video_codec_id == AV_CODEC_ID_NONE)
        ctx->video_codec_id = codec_id;

    if ((res = av_image_check_size(s->width, s->height, 0, ctx)) < 0)
        goto fail;

    s->pixelformat = desired_format;

    if ((res = v4l2_set_parameters(ctx)) < 0)
        goto fail;

    st->codecpar->format = ff_fmt_v4l2ff(desired_format, codec_id);
    if (st->codecpar->format != AV_PIX_FMT_NONE)
        s->frame_size = av_image_get_buffer_size(st->codecpar->format,
                                                 s->width, s->height, 1);

    if ((res = mmap_init(ctx)) ||
        (res = mmap_start(ctx)) < 0)
            goto fail;

    s->top_field_first = first_field(s);

    st->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
    st->codecpar->codec_id = codec_id;
    if (codec_id == AV_CODEC_ID_RAWVIDEO)
        st->codecpar->codec_tag =
            avcodec_pix_fmt_to_codec_tag(st->codecpar->format);
    else if (codec_id == AV_CODEC_ID_H264) {
        avpriv_stream_set_need_parsing(st, AVSTREAM_PARSE_FULL_ONCE);
    }
    if (desired_format == V4L2_PIX_FMT_YVU420)
        st->codecpar->codec_tag = MKTAG('Y', 'V', '1', '2');
    else if (desired_format == V4L2_PIX_FMT_YVU410)
        st->codecpar->codec_tag = MKTAG('Y', 'V', 'U', '9');
    st->codecpar->width = s->width;
    st->codecpar->height = s->height;
    if (st->avg_frame_rate.den)
        st->codecpar->bit_rate = s->frame_size * av_q2d(st->avg_frame_rate) * 8;

    return 0;

fail:
    v4l2_close(s->fd);
    return res;
}

static int v4l2_read_packet(AVFormatContext *ctx, AVPacket *pkt)
{
    int res;

    if ((res = mmap_read_frame(ctx, pkt)) < 0) {
        return res;
    }

    return pkt->size;
}

static int v4l2_read_close(AVFormatContext *ctx)
{
    struct video_data *s = ctx->priv_data;

    if (atomic_load(&s->buffers_queued) != s->buffers)
        av_log(ctx, AV_LOG_WARNING, "Some buffers are still owned by the caller on "
               "close.\n");

    mmap_close(s);

    v4l2_close(s->fd);
    return 0;
}

static int v4l2_is_v4l_dev(const char *name)
{
    return !strncmp(name, "video", 5) ||
           !strncmp(name, "radio", 5) ||
           !strncmp(name, "vbi", 3) ||
           !strncmp(name, "v4l-subdev", 10);
}

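/**
 * Scan /dev for V4L device nodes, query each one with VIDIOC_QUERYCAP and
 * report the devices that can be opened as AVDeviceInfo entries.
 */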
static int v4l2_get_device_list(AVFormatContext *ctx, AVDeviceInfoList *device_list)
{
    struct video_data *s = ctx->priv_data;
    DIR *dir;
    struct dirent *entry;
    int ret = 0;

    if (!device_list)
        return AVERROR(EINVAL);

    dir = opendir("/dev");
    if (!dir) {
        ret = AVERROR(errno);
        av_log(ctx, AV_LOG_ERROR, "Couldn't open the directory: %s\n", av_err2str(ret));
        return ret;
    }
    while ((entry = readdir(dir))) {
        AVDeviceInfo *device = NULL;
        struct v4l2_capability cap;
        int fd = -1, size;
        char device_name[256];

        if (!v4l2_is_v4l_dev(entry->d_name))
            continue;

        size = snprintf(device_name, sizeof(device_name), "/dev/%s", entry->d_name);
        if (size >= sizeof(device_name)) {
            av_log(ctx, AV_LOG_ERROR, "Device name too long.\n");
            ret = AVERROR(ENOSYS);
            break;
        }

        if ((fd = device_open(ctx, device_name)) < 0)
            continue;

        if (v4l2_ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
            ret = AVERROR(errno);
            av_log(ctx, AV_LOG_ERROR, "ioctl(VIDIOC_QUERYCAP): %s\n", av_err2str(ret));
            goto fail;
        }

        device = av_mallocz(sizeof(AVDeviceInfo));
        if (!device) {
            ret = AVERROR(ENOMEM);
            goto fail;
        }
        device->device_name = av_strdup(device_name);
        device->device_description = av_strdup(cap.card);
        if (!device->device_name || !device->device_description) {
            ret = AVERROR(ENOMEM);
            goto fail;
        }

        if ((ret = av_dynarray_add_nofree(&device_list->devices,
                                          &device_list->nb_devices, device)) < 0)
            goto fail;

        v4l2_close(fd);
        continue;

      fail:
        if (device) {
            av_freep(&device->device_name);
            av_freep(&device->device_description);
            av_freep(&device);
        }
        v4l2_close(fd);
        break;
    }
    closedir(dir);
    return ret;
}

#define OFFSET(x) offsetof(struct video_data, x)
#define DEC AV_OPT_FLAG_DECODING_PARAM

static const AVOption options[] = {
    { "standard",     "set TV standard, used only by analog frame grabber",       OFFSET(standard),     AV_OPT_TYPE_STRING, {.str = NULL }, 0, 0,       DEC },
    { "channel",      "set TV channel, used only by frame grabber",               OFFSET(channel),      AV_OPT_TYPE_INT,    {.i64 = -1 },  -1, INT_MAX, DEC },
    { "video_size",   "set frame size",                                           OFFSET(width),        AV_OPT_TYPE_IMAGE_SIZE, {.str = NULL},  0, 0,   DEC },
    { "pixel_format", "set preferred pixel format",                               OFFSET(pixel_format), AV_OPT_TYPE_STRING, {.str = NULL},  0, 0,       DEC },
    { "input_format", "set preferred pixel format (for raw video) or codec name", OFFSET(pixel_format), AV_OPT_TYPE_STRING, {.str = NULL},  0, 0,       DEC },
    { "framerate",    "set frame rate",                                           OFFSET(framerate),    AV_OPT_TYPE_STRING, {.str = NULL},  0, 0,       DEC },

    { "list_formats", "list available formats and exit",                          OFFSET(list_format),  AV_OPT_TYPE_INT,    {.i64 = 0 },  0, INT_MAX, DEC, "list_formats" },
    { "all",          "show all available formats",                               OFFSET(list_format),  AV_OPT_TYPE_CONST,  {.i64 = V4L_ALLFORMATS  },    0, INT_MAX, DEC, "list_formats" },
    { "raw",          "show only non-compressed formats",                         OFFSET(list_format),  AV_OPT_TYPE_CONST,  {.i64 = V4L_RAWFORMATS  },    0, INT_MAX, DEC, "list_formats" },
    { "compressed",   "show only compressed formats",                             OFFSET(list_format),  AV_OPT_TYPE_CONST,  {.i64 = V4L_COMPFORMATS },    0, INT_MAX, DEC, "list_formats" },

    { "list_standards", "list supported standards and exit",                      OFFSET(list_standard), AV_OPT_TYPE_INT,   {.i64 = 0 },  0, 1, DEC, "list_standards" },
    { "all",            "show all supported standards",                           OFFSET(list_standard), AV_OPT_TYPE_CONST, {.i64 = 1 },  0, 0, DEC, "list_standards" },

    { "timestamps",   "set type of timestamps for grabbed frames",                OFFSET(ts_mode),      AV_OPT_TYPE_INT,    {.i64 = 0 }, 0, 2, DEC, "timestamps" },
    { "ts",           "set type of timestamps for grabbed frames",                OFFSET(ts_mode),      AV_OPT_TYPE_INT,    {.i64 = 0 }, 0, 2, DEC, "timestamps" },
    { "default",      "use timestamps from the kernel",                           OFFSET(ts_mode),      AV_OPT_TYPE_CONST,  {.i64 = V4L_TS_DEFAULT  }, 0, 2, DEC, "timestamps" },
    { "abs",          "use absolute timestamps (wall clock)",                     OFFSET(ts_mode),      AV_OPT_TYPE_CONST,  {.i64 = V4L_TS_ABS      }, 0, 2, DEC, "timestamps" },
    { "mono2abs",     "force conversion from monotonic to absolute timestamps",   OFFSET(ts_mode),      AV_OPT_TYPE_CONST,  {.i64 = V4L_TS_MONO2ABS }, 0, 2, DEC, "timestamps" },
    { "use_libv4l2",  "use libv4l2 (v4l-utils) conversion functions",             OFFSET(use_libv4l2),  AV_OPT_TYPE_BOOL,   {.i64 = 0}, 0, 1, DEC },
    { NULL },
};

static const AVClass v4l2_class = {
    .class_name = "V4L2 indev",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
    .category   = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT,
};

const AVInputFormat ff_v4l2_demuxer = {
    .name           = "video4linux2,v4l2",
    .long_name      = NULL_IF_CONFIG_SMALL("Video4Linux2 device grab"),
    .priv_data_size = sizeof(struct video_data),
    .read_probe     = v4l2_read_probe,
    .read_header    = v4l2_read_header,
    .read_packet    = v4l2_read_packet,
    .read_close     = v4l2_read_close,
    .get_device_list = v4l2_get_device_list,
    .flags          = AVFMT_NOFILE,
    .priv_class     = &v4l2_class,
};