1 /* GStreamer
2 *
3 * Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
4 * 2006 Edgard Lima <edgard.lima@gmail.com>
5 *
6 * gstv4l2object.c: base class for V4L2 elements
7 *
8 * This library is free software; you can redistribute it and/or modify
9 * it under the terms of the GNU Library General Public License as published
10 * by the Free Software Foundation; either version 2 of the License, or
11 * (at your option) any later version. This library is distributed in the hope
12 * that it will be useful, but WITHOUT ANY WARRANTY; without even the
13 * implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
14 * PURPOSE. See the GNU Library General Public License for more details.
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the Free Software
17 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301,
18 * USA.
19 */
20
21 #ifdef HAVE_CONFIG_H
22 #include <config.h>
23 #endif
24
25 #include <sys/stat.h>
26 #include <fcntl.h>
27 #include <errno.h>
28 #include <string.h>
29 #include <sys/mman.h>
30 #include <sys/ioctl.h>
31
32
33 #ifdef HAVE_GUDEV
34 #include <gudev/gudev.h>
35 #endif
36
37 #include "ext/videodev2.h"
38 #include "gstv4l2object.h"
39 #include "gstv4l2tuner.h"
40 #include "gstv4l2colorbalance.h"
41
42 #include "gst/gst-i18n-plugin.h"
43
44 #include <gst/video/video.h>
45 #include <gst/allocators/gstdmabuf.h>
46
47 GST_DEBUG_CATEGORY_EXTERN (v4l2_debug);
48 #define GST_CAT_DEFAULT v4l2_debug
49
50 #define DEFAULT_PROP_DEVICE_NAME NULL
51 #define DEFAULT_PROP_DEVICE_FD -1
52 #define DEFAULT_PROP_FLAGS 0
53 #define DEFAULT_PROP_TV_NORM 0
54 #define DEFAULT_PROP_IO_MODE GST_V4L2_IO_AUTO
55
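/* Presumed fallback buffer size for encoded (compressed) streams, where the
 * frame size cannot be derived from the negotiated caps. */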
56 #define ENCODED_BUFFER_SIZE (2 * 1024 * 1024)
57
58 enum
59 {
60 PROP_0,
61 V4L2_STD_OBJECT_PROPS,
62 };
63
64 /*
65 * common format / caps utilities:
66 */
67 typedef enum
68 {
69 GST_V4L2_RAW = 1 << 0,
70 GST_V4L2_CODEC = 1 << 1,
71 GST_V4L2_TRANSPORT = 1 << 2,
72 GST_V4L2_NO_PARSE = 1 << 3,
73 GST_V4L2_ALL = 0xffff
74 } GstV4L2FormatFlags;
75
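/* One entry per supported fourcc:
 * format:     the V4L2 fourcc (V4L2_PIX_FMT_*)
 * dimensions: whether width/height/framerate ranges apply when the fourcc is
 *             turned into caps (see gst_v4l2_object_v4l2fourcc_to_structure)
 * flags:      category used to filter the table in the caps helpers */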
76 typedef struct
77 {
78 guint32 format;
79 gboolean dimensions;
80 GstV4L2FormatFlags flags;
81 } GstV4L2FormatDesc;
82
83 static const GstV4L2FormatDesc gst_v4l2_formats[] = {
84 /* RGB formats */
85 {V4L2_PIX_FMT_RGB332, TRUE, GST_V4L2_RAW},
86 {V4L2_PIX_FMT_ARGB555, TRUE, GST_V4L2_RAW},
87 {V4L2_PIX_FMT_XRGB555, TRUE, GST_V4L2_RAW},
88 {V4L2_PIX_FMT_ARGB555X, TRUE, GST_V4L2_RAW},
89 {V4L2_PIX_FMT_XRGB555X, TRUE, GST_V4L2_RAW},
90 {V4L2_PIX_FMT_RGB565, TRUE, GST_V4L2_RAW},
91 {V4L2_PIX_FMT_RGB565X, TRUE, GST_V4L2_RAW},
92 {V4L2_PIX_FMT_BGR666, TRUE, GST_V4L2_RAW},
93 {V4L2_PIX_FMT_BGR24, TRUE, GST_V4L2_RAW},
94 {V4L2_PIX_FMT_RGB24, TRUE, GST_V4L2_RAW},
95 {V4L2_PIX_FMT_ABGR32, TRUE, GST_V4L2_RAW},
96 {V4L2_PIX_FMT_XBGR32, TRUE, GST_V4L2_RAW},
97 {V4L2_PIX_FMT_BGRA32, TRUE, GST_V4L2_RAW},
98 {V4L2_PIX_FMT_BGRX32, TRUE, GST_V4L2_RAW},
99 {V4L2_PIX_FMT_RGBA32, TRUE, GST_V4L2_RAW},
100 {V4L2_PIX_FMT_RGBX32, TRUE, GST_V4L2_RAW},
101 {V4L2_PIX_FMT_ARGB32, TRUE, GST_V4L2_RAW},
102 {V4L2_PIX_FMT_XRGB32, TRUE, GST_V4L2_RAW},
103
104 /* Deprecated Packed RGB Image Formats (alpha ambiguity) */
105 {V4L2_PIX_FMT_RGB444, TRUE, GST_V4L2_RAW},
106 {V4L2_PIX_FMT_RGB555, TRUE, GST_V4L2_RAW},
107 {V4L2_PIX_FMT_RGB555X, TRUE, GST_V4L2_RAW},
108 {V4L2_PIX_FMT_BGR32, TRUE, GST_V4L2_RAW},
109 {V4L2_PIX_FMT_RGB32, TRUE, GST_V4L2_RAW},
110
111 /* Grey formats */
112 {V4L2_PIX_FMT_GREY, TRUE, GST_V4L2_RAW},
113 {V4L2_PIX_FMT_Y4, TRUE, GST_V4L2_RAW},
114 {V4L2_PIX_FMT_Y6, TRUE, GST_V4L2_RAW},
115 {V4L2_PIX_FMT_Y10, TRUE, GST_V4L2_RAW},
116 {V4L2_PIX_FMT_Y12, TRUE, GST_V4L2_RAW},
117 {V4L2_PIX_FMT_Y16, TRUE, GST_V4L2_RAW},
118 {V4L2_PIX_FMT_Y16_BE, TRUE, GST_V4L2_RAW},
119 {V4L2_PIX_FMT_Y10BPACK, TRUE, GST_V4L2_RAW},
120
121 /* Palette formats */
122 {V4L2_PIX_FMT_PAL8, TRUE, GST_V4L2_RAW},
123
124 /* Chrominance formats */
125 {V4L2_PIX_FMT_UV8, TRUE, GST_V4L2_RAW},
126
127 /* Luminance+Chrominance formats */
128 {V4L2_PIX_FMT_YVU410, TRUE, GST_V4L2_RAW},
129 {V4L2_PIX_FMT_YVU420, TRUE, GST_V4L2_RAW},
130 {V4L2_PIX_FMT_YVU420M, TRUE, GST_V4L2_RAW},
131 {V4L2_PIX_FMT_YUYV, TRUE, GST_V4L2_RAW},
132 {V4L2_PIX_FMT_YYUV, TRUE, GST_V4L2_RAW},
133 {V4L2_PIX_FMT_YVYU, TRUE, GST_V4L2_RAW},
134 {V4L2_PIX_FMT_UYVY, TRUE, GST_V4L2_RAW},
135 {V4L2_PIX_FMT_VYUY, TRUE, GST_V4L2_RAW},
136 {V4L2_PIX_FMT_YUV422P, TRUE, GST_V4L2_RAW},
137 {V4L2_PIX_FMT_YUV411P, TRUE, GST_V4L2_RAW},
138 {V4L2_PIX_FMT_Y41P, TRUE, GST_V4L2_RAW},
139 {V4L2_PIX_FMT_YUV444, TRUE, GST_V4L2_RAW},
140 {V4L2_PIX_FMT_YUV555, TRUE, GST_V4L2_RAW},
141 {V4L2_PIX_FMT_YUV565, TRUE, GST_V4L2_RAW},
142 {V4L2_PIX_FMT_YUV32, TRUE, GST_V4L2_RAW},
143 {V4L2_PIX_FMT_YUV410, TRUE, GST_V4L2_RAW},
144 {V4L2_PIX_FMT_YUV420, TRUE, GST_V4L2_RAW},
145 {V4L2_PIX_FMT_YUV420M, TRUE, GST_V4L2_RAW},
146 {V4L2_PIX_FMT_HI240, TRUE, GST_V4L2_RAW},
147 {V4L2_PIX_FMT_HM12, TRUE, GST_V4L2_RAW},
148 {V4L2_PIX_FMT_M420, TRUE, GST_V4L2_RAW},
149
150 /* two planes -- one Y, one Cr + Cb interleaved */
151 {V4L2_PIX_FMT_NV12, TRUE, GST_V4L2_RAW},
152 {V4L2_PIX_FMT_NV12M, TRUE, GST_V4L2_RAW},
153 {V4L2_PIX_FMT_NV12MT, TRUE, GST_V4L2_RAW},
154 {V4L2_PIX_FMT_NV12MT_16X16, TRUE, GST_V4L2_RAW},
155 {V4L2_PIX_FMT_NV21, TRUE, GST_V4L2_RAW},
156 {V4L2_PIX_FMT_NV21M, TRUE, GST_V4L2_RAW},
157 {V4L2_PIX_FMT_NV16, TRUE, GST_V4L2_RAW},
158 {V4L2_PIX_FMT_NV16M, TRUE, GST_V4L2_RAW},
159 {V4L2_PIX_FMT_NV61, TRUE, GST_V4L2_RAW},
160 {V4L2_PIX_FMT_NV61M, TRUE, GST_V4L2_RAW},
161 {V4L2_PIX_FMT_NV24, TRUE, GST_V4L2_RAW},
162 {V4L2_PIX_FMT_NV42, TRUE, GST_V4L2_RAW},
163
164 /* Bayer formats - see http://www.siliconimaging.com/RGB%20Bayer.htm */
165 {V4L2_PIX_FMT_SBGGR8, TRUE, GST_V4L2_RAW},
166 {V4L2_PIX_FMT_SGBRG8, TRUE, GST_V4L2_RAW},
167 {V4L2_PIX_FMT_SGRBG8, TRUE, GST_V4L2_RAW},
168 {V4L2_PIX_FMT_SRGGB8, TRUE, GST_V4L2_RAW},
169
170 /* compressed formats */
171 {V4L2_PIX_FMT_MJPEG, FALSE, GST_V4L2_CODEC},
172 {V4L2_PIX_FMT_JPEG, FALSE, GST_V4L2_CODEC},
173 {V4L2_PIX_FMT_PJPG, FALSE, GST_V4L2_CODEC},
174 {V4L2_PIX_FMT_DV, FALSE, GST_V4L2_TRANSPORT},
175 {V4L2_PIX_FMT_MPEG, FALSE, GST_V4L2_TRANSPORT},
176 {V4L2_PIX_FMT_FWHT, FALSE, GST_V4L2_CODEC},
177 {V4L2_PIX_FMT_H264, FALSE, GST_V4L2_CODEC},
178 {V4L2_PIX_FMT_H264_NO_SC, FALSE, GST_V4L2_CODEC},
179 {V4L2_PIX_FMT_H264_MVC, FALSE, GST_V4L2_CODEC},
180 {V4L2_PIX_FMT_HEVC, FALSE, GST_V4L2_CODEC},
181 {V4L2_PIX_FMT_H263, FALSE, GST_V4L2_CODEC},
182 {V4L2_PIX_FMT_MPEG1, FALSE, GST_V4L2_CODEC},
183 {V4L2_PIX_FMT_MPEG2, FALSE, GST_V4L2_CODEC},
184 {V4L2_PIX_FMT_MPEG4, FALSE, GST_V4L2_CODEC},
185 {V4L2_PIX_FMT_XVID, FALSE, GST_V4L2_CODEC},
186 {V4L2_PIX_FMT_VC1_ANNEX_G, FALSE, GST_V4L2_CODEC},
187 {V4L2_PIX_FMT_VC1_ANNEX_L, FALSE, GST_V4L2_CODEC},
188 {V4L2_PIX_FMT_VP8, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
189 {V4L2_PIX_FMT_VP9, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE},
190
191 /* Vendor-specific formats */
192 {V4L2_PIX_FMT_WNVA, TRUE, GST_V4L2_CODEC},
193 {V4L2_PIX_FMT_SN9C10X, TRUE, GST_V4L2_CODEC},
194 {V4L2_PIX_FMT_PWC1, TRUE, GST_V4L2_CODEC},
195 {V4L2_PIX_FMT_PWC2, TRUE, GST_V4L2_CODEC},
196 };
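/* Note: an entry above only contributes to the template caps if it is also
 * mapped in gst_v4l2_object_v4l2fourcc_to_bare_struct() below; unmapped
 * fourccs are skipped when the caps are built. */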
197
198 #define GST_V4L2_FORMAT_COUNT (G_N_ELEMENTS (gst_v4l2_formats))
199
200 static GSList *gst_v4l2_object_get_format_list (GstV4l2Object * v4l2object);
201
202
203 #define GST_TYPE_V4L2_DEVICE_FLAGS (gst_v4l2_device_get_type ())
204 static GType
205 gst_v4l2_device_get_type (void)
206 {
207 static GType v4l2_device_type = 0;
208
209 if (v4l2_device_type == 0) {
210 static const GFlagsValue values[] = {
211 {V4L2_CAP_VIDEO_CAPTURE, "Device supports video capture", "capture"},
212 {V4L2_CAP_VIDEO_OUTPUT, "Device supports video playback", "output"},
213 {V4L2_CAP_VIDEO_OVERLAY, "Device supports video overlay", "overlay"},
214
215       {V4L2_CAP_VBI_CAPTURE, "Device supports VBI capture", "vbi-capture"},
216       {V4L2_CAP_VBI_OUTPUT, "Device supports VBI output", "vbi-output"},
217
218 {V4L2_CAP_TUNER, "Device has a tuner or modulator", "tuner"},
219 {V4L2_CAP_AUDIO, "Device has audio inputs or outputs", "audio"},
220
221 {0, NULL, NULL}
222 };
223
224 v4l2_device_type =
225 g_flags_register_static ("GstV4l2DeviceTypeFlags", values);
226 }
227
228 return v4l2_device_type;
229 }
230
231 #define GST_TYPE_V4L2_TV_NORM (gst_v4l2_tv_norm_get_type ())
232 static GType
233 gst_v4l2_tv_norm_get_type (void)
234 {
235 static GType v4l2_tv_norm = 0;
236
237 if (!v4l2_tv_norm) {
238 static const GEnumValue tv_norms[] = {
239 {0, "none", "none"},
240
241 {V4L2_STD_NTSC, "NTSC", "NTSC"},
242 {V4L2_STD_NTSC_M, "NTSC-M", "NTSC-M"},
243 {V4L2_STD_NTSC_M_JP, "NTSC-M-JP", "NTSC-M-JP"},
244 {V4L2_STD_NTSC_M_KR, "NTSC-M-KR", "NTSC-M-KR"},
245 {V4L2_STD_NTSC_443, "NTSC-443", "NTSC-443"},
246
247 {V4L2_STD_PAL, "PAL", "PAL"},
248 {V4L2_STD_PAL_BG, "PAL-BG", "PAL-BG"},
249 {V4L2_STD_PAL_B, "PAL-B", "PAL-B"},
250 {V4L2_STD_PAL_B1, "PAL-B1", "PAL-B1"},
251 {V4L2_STD_PAL_G, "PAL-G", "PAL-G"},
252 {V4L2_STD_PAL_H, "PAL-H", "PAL-H"},
253 {V4L2_STD_PAL_I, "PAL-I", "PAL-I"},
254 {V4L2_STD_PAL_DK, "PAL-DK", "PAL-DK"},
255 {V4L2_STD_PAL_D, "PAL-D", "PAL-D"},
256 {V4L2_STD_PAL_D1, "PAL-D1", "PAL-D1"},
257 {V4L2_STD_PAL_K, "PAL-K", "PAL-K"},
258 {V4L2_STD_PAL_M, "PAL-M", "PAL-M"},
259 {V4L2_STD_PAL_N, "PAL-N", "PAL-N"},
260 {V4L2_STD_PAL_Nc, "PAL-Nc", "PAL-Nc"},
261 {V4L2_STD_PAL_60, "PAL-60", "PAL-60"},
262
263 {V4L2_STD_SECAM, "SECAM", "SECAM"},
264 {V4L2_STD_SECAM_B, "SECAM-B", "SECAM-B"},
265 {V4L2_STD_SECAM_G, "SECAM-G", "SECAM-G"},
266 {V4L2_STD_SECAM_H, "SECAM-H", "SECAM-H"},
267 {V4L2_STD_SECAM_DK, "SECAM-DK", "SECAM-DK"},
268 {V4L2_STD_SECAM_D, "SECAM-D", "SECAM-D"},
269 {V4L2_STD_SECAM_K, "SECAM-K", "SECAM-K"},
270 {V4L2_STD_SECAM_K1, "SECAM-K1", "SECAM-K1"},
271 {V4L2_STD_SECAM_L, "SECAM-L", "SECAM-L"},
272 {V4L2_STD_SECAM_LC, "SECAM-Lc", "SECAM-Lc"},
273
274 {0, NULL, NULL}
275 };
276
277 v4l2_tv_norm = g_enum_register_static ("V4L2_TV_norms", tv_norms);
278 }
279
280 return v4l2_tv_norm;
281 }
282
283 GType
284 gst_v4l2_io_mode_get_type (void)
285 {
286 static GType v4l2_io_mode = 0;
287
288 if (!v4l2_io_mode) {
289 static const GEnumValue io_modes[] = {
290 {GST_V4L2_IO_AUTO, "GST_V4L2_IO_AUTO", "auto"},
291 {GST_V4L2_IO_RW, "GST_V4L2_IO_RW", "rw"},
292 {GST_V4L2_IO_MMAP, "GST_V4L2_IO_MMAP", "mmap"},
293 {GST_V4L2_IO_USERPTR, "GST_V4L2_IO_USERPTR", "userptr"},
294 {GST_V4L2_IO_DMABUF, "GST_V4L2_IO_DMABUF", "dmabuf"},
295 {GST_V4L2_IO_DMABUF_IMPORT, "GST_V4L2_IO_DMABUF_IMPORT",
296 "dmabuf-import"},
297
298 {0, NULL, NULL}
299 };
300 v4l2_io_mode = g_enum_register_static ("GstV4l2IOMode", io_modes);
301 }
302 return v4l2_io_mode;
303 }
304
305 void
306 gst_v4l2_object_install_properties_helper (GObjectClass * gobject_class,
307 const char *default_device)
308 {
309 g_object_class_install_property (gobject_class, PROP_DEVICE,
310 g_param_spec_string ("device", "Device", "Device location",
311 default_device, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
312 g_object_class_install_property (gobject_class, PROP_DEVICE_NAME,
313 g_param_spec_string ("device-name", "Device name",
314 "Name of the device", DEFAULT_PROP_DEVICE_NAME,
315 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
316 g_object_class_install_property (gobject_class, PROP_DEVICE_FD,
317 g_param_spec_int ("device-fd", "File descriptor",
318 "File descriptor of the device", -1, G_MAXINT, DEFAULT_PROP_DEVICE_FD,
319 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
320 g_object_class_install_property (gobject_class, PROP_FLAGS,
321 g_param_spec_flags ("flags", "Flags", "Device type flags",
322 GST_TYPE_V4L2_DEVICE_FLAGS, DEFAULT_PROP_FLAGS,
323 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
324
325 /**
326 * GstV4l2Src:brightness:
327 *
328 * Picture brightness, or more precisely, the black level
329 */
330 g_object_class_install_property (gobject_class, PROP_BRIGHTNESS,
331 g_param_spec_int ("brightness", "Brightness",
332 "Picture brightness, or more precisely, the black level", G_MININT,
333 G_MAXINT, 0,
334 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
335 /**
336 * GstV4l2Src:contrast:
337 *
338 * Picture contrast or luma gain
339 */
340 g_object_class_install_property (gobject_class, PROP_CONTRAST,
341 g_param_spec_int ("contrast", "Contrast",
342 "Picture contrast or luma gain", G_MININT,
343 G_MAXINT, 0,
344 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
345 /**
346 * GstV4l2Src:saturation:
347 *
348 * Picture color saturation or chroma gain
349 */
350 g_object_class_install_property (gobject_class, PROP_SATURATION,
351 g_param_spec_int ("saturation", "Saturation",
352 "Picture color saturation or chroma gain", G_MININT,
353 G_MAXINT, 0,
354 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
355 /**
356 * GstV4l2Src:hue:
357 *
358 * Hue or color balance
359 */
360 g_object_class_install_property (gobject_class, PROP_HUE,
361 g_param_spec_int ("hue", "Hue",
362 "Hue or color balance", G_MININT,
363 G_MAXINT, 0,
364 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE));
365
366 /**
367 * GstV4l2Src:norm:
368 *
369 * TV norm
370 */
371 g_object_class_install_property (gobject_class, PROP_TV_NORM,
372 g_param_spec_enum ("norm", "TV norm",
373 "video standard",
374 GST_TYPE_V4L2_TV_NORM, DEFAULT_PROP_TV_NORM,
375 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
376
377 /**
378 * GstV4l2Src:io-mode:
379 *
380 * IO Mode
381 */
382 g_object_class_install_property (gobject_class, PROP_IO_MODE,
383 g_param_spec_enum ("io-mode", "IO mode",
384 "I/O mode",
385 GST_TYPE_V4L2_IO_MODE, DEFAULT_PROP_IO_MODE,
386 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
387
388 /**
389 * GstV4l2Src:extra-controls:
390 *
391 * Additional v4l2 controls for the device. The controls are identified
392 * by the control name (lowercase with '_' for any non-alphanumeric
393 * characters).
394 *
395 * Since: 1.2
396 */
397 g_object_class_install_property (gobject_class, PROP_EXTRA_CONTROLS,
398 g_param_spec_boxed ("extra-controls", "Extra Controls",
399 "Extra v4l2 controls (CIDs) for the device",
400 GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
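  /* Illustrative example (not taken from this file): on the command line the
   * structure can be given in serialized form, e.g.
   *   v4l2src extra-controls="controls,brightness=128" ! ...
   * where each field name is the lowercased control name described above. */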
401
402 /**
403 * GstV4l2Src:pixel-aspect-ratio:
404 *
405 * The pixel aspect ratio of the device. This overwrites the pixel aspect
406 * ratio queried from the device.
407 *
408 * Since: 1.2
409 */
410 g_object_class_install_property (gobject_class, PROP_PIXEL_ASPECT_RATIO,
411 g_param_spec_string ("pixel-aspect-ratio", "Pixel Aspect Ratio",
412 "Overwrite the pixel aspect ratio of the device", "1/1",
413 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
414
415 /**
416 * GstV4l2Src:force-aspect-ratio:
417 *
418 * When enabled, the pixel aspect ratio queried from the device or set
419 * with the pixel-aspect-ratio property will be enforced.
420 *
421 * Since: 1.2
422 */
423 g_object_class_install_property (gobject_class, PROP_FORCE_ASPECT_RATIO,
424 g_param_spec_boolean ("force-aspect-ratio", "Force aspect ratio",
425 "When enabled, the pixel aspect ratio will be enforced", TRUE,
426 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
427
428 gst_type_mark_as_plugin_api (GST_TYPE_V4L2_DEVICE_FLAGS, 0);
429 gst_type_mark_as_plugin_api (GST_TYPE_V4L2_TV_NORM, 0);
430 gst_type_mark_as_plugin_api (GST_TYPE_V4L2_IO_MODE, 0);
431 }
432
433 void
434 gst_v4l2_object_install_m2m_properties_helper (GObjectClass * gobject_class)
435 {
436 g_object_class_install_property (gobject_class, PROP_DEVICE,
437 g_param_spec_string ("device", "Device", "Device location",
438 NULL, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
439
440 g_object_class_install_property (gobject_class, PROP_DEVICE_NAME,
441 g_param_spec_string ("device-name", "Device name",
442 "Name of the device", DEFAULT_PROP_DEVICE_NAME,
443 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
444
445 g_object_class_install_property (gobject_class, PROP_DEVICE_FD,
446 g_param_spec_int ("device-fd", "File descriptor",
447 "File descriptor of the device", -1, G_MAXINT, DEFAULT_PROP_DEVICE_FD,
448 G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
449
450 g_object_class_install_property (gobject_class, PROP_OUTPUT_IO_MODE,
451 g_param_spec_enum ("output-io-mode", "Output IO mode",
452 "Output side I/O mode (matches sink pad)",
453 GST_TYPE_V4L2_IO_MODE, DEFAULT_PROP_IO_MODE,
454 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
455
456 g_object_class_install_property (gobject_class, PROP_CAPTURE_IO_MODE,
457 g_param_spec_enum ("capture-io-mode", "Capture IO mode",
458 "Capture I/O mode (matches src pad)",
459 GST_TYPE_V4L2_IO_MODE, DEFAULT_PROP_IO_MODE,
460 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
461
462 g_object_class_install_property (gobject_class, PROP_EXTRA_CONTROLS,
463 g_param_spec_boxed ("extra-controls", "Extra Controls",
464 "Extra v4l2 controls (CIDs) for the device",
465 GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
466 }
467
468 /* Support for 32-bit off_t; this wrapper casts the off_t offset to gint64 */
469 #ifdef HAVE_LIBV4L2
470 #if SIZEOF_OFF_T < 8
471
472 static gpointer
473 v4l2_mmap_wrapper (gpointer start, gsize length, gint prot, gint flags, gint fd,
474 off_t offset)
475 {
476 return v4l2_mmap (start, length, prot, flags, fd, (gint64) offset);
477 }
478
479 #define v4l2_mmap v4l2_mmap_wrapper
480
481 #endif /* SIZEOF_OFF_T < 8 */
482 #endif /* HAVE_LIBV4L2 */
483
484 GstV4l2Object *
485 gst_v4l2_object_new (GstElement * element,
486 GstObject * debug_object,
487 enum v4l2_buf_type type,
488 const char *default_device,
489 GstV4l2GetInOutFunction get_in_out_func,
490 GstV4l2SetInOutFunction set_in_out_func,
491 GstV4l2UpdateFpsFunction update_fps_func)
492 {
493 GstV4l2Object *v4l2object;
494
495 /*
496 * some default values
497 */
498 v4l2object = g_new0 (GstV4l2Object, 1);
499
500 v4l2object->type = type;
501 v4l2object->formats = NULL;
502
503 v4l2object->element = element;
504 v4l2object->dbg_obj = debug_object;
505 v4l2object->get_in_out_func = get_in_out_func;
506 v4l2object->set_in_out_func = set_in_out_func;
507 v4l2object->update_fps_func = update_fps_func;
508
509 v4l2object->video_fd = -1;
510 v4l2object->active = FALSE;
511 v4l2object->videodev = g_strdup (default_device);
512
513 v4l2object->norms = NULL;
514 v4l2object->channels = NULL;
515 v4l2object->colors = NULL;
516
517 v4l2object->keep_aspect = TRUE;
518
519 v4l2object->n_v4l2_planes = 0;
520
521 v4l2object->no_initial_format = FALSE;
522
523 /* We now disable libv4l2 by default, but have an env to enable it. */
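  /* e.g. running with GST_V4L2_USE_LIBV4L2=1 in the environment selects the
   * libv4l2 wrappers below (only available when built with libv4l2). */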
524 #ifdef HAVE_LIBV4L2
525 if (g_getenv ("GST_V4L2_USE_LIBV4L2")) {
526 v4l2object->fd_open = v4l2_fd_open;
527 v4l2object->close = v4l2_close;
528 v4l2object->dup = v4l2_dup;
529 v4l2object->ioctl = v4l2_ioctl;
530 v4l2object->read = v4l2_read;
531 v4l2object->mmap = v4l2_mmap;
532 v4l2object->munmap = v4l2_munmap;
533 } else
534 #endif
535 {
536 v4l2object->fd_open = NULL;
537 v4l2object->close = close;
538 v4l2object->dup = dup;
539 v4l2object->ioctl = ioctl;
540 v4l2object->read = read;
541 v4l2object->mmap = mmap;
542 v4l2object->munmap = munmap;
543 }
544
545 return v4l2object;
546 }
547
548 static gboolean gst_v4l2_object_clear_format_list (GstV4l2Object * v4l2object);
549
550
551 void
552 gst_v4l2_object_destroy (GstV4l2Object * v4l2object)
553 {
554 g_return_if_fail (v4l2object != NULL);
555
556 g_free (v4l2object->videodev);
557 g_free (v4l2object->par);
558 g_free (v4l2object->channel);
559
560 if (v4l2object->formats) {
561 gst_v4l2_object_clear_format_list (v4l2object);
562 }
563
564 if (v4l2object->probed_caps) {
565 gst_caps_unref (v4l2object->probed_caps);
566 }
567
568 if (v4l2object->extra_controls) {
569 gst_structure_free (v4l2object->extra_controls);
570 }
571
572 g_free (v4l2object);
573 }
574
575
576 static gboolean
577 gst_v4l2_object_clear_format_list (GstV4l2Object * v4l2object)
578 {
579 g_slist_foreach (v4l2object->formats, (GFunc) g_free, NULL);
580 g_slist_free (v4l2object->formats);
581 v4l2object->formats = NULL;
582
583 return TRUE;
584 }
585
586 static gint
587 gst_v4l2_object_prop_to_cid (guint prop_id)
588 {
589 gint cid = -1;
590
591 switch (prop_id) {
592 case PROP_BRIGHTNESS:
593 cid = V4L2_CID_BRIGHTNESS;
594 break;
595 case PROP_CONTRAST:
596 cid = V4L2_CID_CONTRAST;
597 break;
598 case PROP_SATURATION:
599 cid = V4L2_CID_SATURATION;
600 break;
601 case PROP_HUE:
602 cid = V4L2_CID_HUE;
603 break;
604 default:
605 GST_WARNING ("unmapped property id: %d", prop_id);
606 }
607 return cid;
608 }
609
610
611 gboolean
612 gst_v4l2_object_set_property_helper (GstV4l2Object * v4l2object,
613 guint prop_id, const GValue * value, GParamSpec * pspec)
614 {
615 switch (prop_id) {
616 case PROP_DEVICE:
617 g_free (v4l2object->videodev);
618 v4l2object->videodev = g_value_dup_string (value);
619 break;
620 case PROP_BRIGHTNESS:
621 case PROP_CONTRAST:
622 case PROP_SATURATION:
623 case PROP_HUE:
624 {
625 gint cid = gst_v4l2_object_prop_to_cid (prop_id);
626
627 if (cid != -1) {
628 if (GST_V4L2_IS_OPEN (v4l2object)) {
629 gst_v4l2_set_attribute (v4l2object, cid, g_value_get_int (value));
630 }
631 }
632 return TRUE;
633 }
634 break;
635 case PROP_TV_NORM:
636 v4l2object->tv_norm = g_value_get_enum (value);
637 break;
638 #if 0
639 case PROP_CHANNEL:
640 if (GST_V4L2_IS_OPEN (v4l2object)) {
641 GstTuner *tuner = GST_TUNER (v4l2object->element);
642 GstTunerChannel *channel = gst_tuner_find_channel_by_name (tuner,
643 (gchar *) g_value_get_string (value));
644
645 if (channel) {
646 /* like gst_tuner_set_channel (tuner, channel)
647 without g_object_notify */
648 gst_v4l2_tuner_set_channel (v4l2object, channel);
649 }
650 } else {
651 g_free (v4l2object->channel);
652 v4l2object->channel = g_value_dup_string (value);
653 }
654 break;
655 case PROP_FREQUENCY:
656 if (GST_V4L2_IS_OPEN (v4l2object)) {
657 GstTuner *tuner = GST_TUNER (v4l2object->element);
658 GstTunerChannel *channel = gst_tuner_get_channel (tuner);
659
660 if (channel &&
661 GST_TUNER_CHANNEL_HAS_FLAG (channel, GST_TUNER_CHANNEL_FREQUENCY)) {
662 /* like
663 gst_tuner_set_frequency (tuner, channel, g_value_get_ulong (value))
664 without g_object_notify */
665 gst_v4l2_tuner_set_frequency (v4l2object, channel,
666 g_value_get_ulong (value));
667 }
668 } else {
669 v4l2object->frequency = g_value_get_ulong (value);
670 }
671 break;
672 #endif
673
674 case PROP_IO_MODE:
675 v4l2object->req_mode = g_value_get_enum (value);
676 break;
677 case PROP_CAPTURE_IO_MODE:
678 g_return_val_if_fail (!V4L2_TYPE_IS_OUTPUT (v4l2object->type), FALSE);
679 v4l2object->req_mode = g_value_get_enum (value);
680 break;
681 case PROP_OUTPUT_IO_MODE:
682 g_return_val_if_fail (V4L2_TYPE_IS_OUTPUT (v4l2object->type), FALSE);
683 v4l2object->req_mode = g_value_get_enum (value);
684 break;
685 case PROP_EXTRA_CONTROLS:{
686 const GstStructure *s = gst_value_get_structure (value);
687
688 if (v4l2object->extra_controls)
689 gst_structure_free (v4l2object->extra_controls);
690
691 v4l2object->extra_controls = s ? gst_structure_copy (s) : NULL;
692 if (GST_V4L2_IS_OPEN (v4l2object))
693 gst_v4l2_set_controls (v4l2object, v4l2object->extra_controls);
694 break;
695 }
696 case PROP_PIXEL_ASPECT_RATIO:
697 if (v4l2object->par) {
698 g_value_unset (v4l2object->par);
699 g_free (v4l2object->par);
700 }
701 v4l2object->par = g_new0 (GValue, 1);
702 g_value_init (v4l2object->par, GST_TYPE_FRACTION);
703 if (!g_value_transform (value, v4l2object->par)) {
704 g_warning ("Could not transform string to aspect ratio");
705 gst_value_set_fraction (v4l2object->par, 1, 1);
706 }
707
708 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "set PAR to %d/%d",
709 gst_value_get_fraction_numerator (v4l2object->par),
710 gst_value_get_fraction_denominator (v4l2object->par));
711 break;
712 case PROP_FORCE_ASPECT_RATIO:
713 v4l2object->keep_aspect = g_value_get_boolean (value);
714 break;
715 default:
716 return FALSE;
717 break;
718 }
719 return TRUE;
720 }
721
722
723 gboolean
724 gst_v4l2_object_get_property_helper (GstV4l2Object * v4l2object,
725 guint prop_id, GValue * value, GParamSpec * pspec)
726 {
727 switch (prop_id) {
728 case PROP_DEVICE:
729 g_value_set_string (value, v4l2object->videodev);
730 break;
731 case PROP_DEVICE_NAME:
732 {
733 const guchar *name = NULL;
734
735 if (GST_V4L2_IS_OPEN (v4l2object))
736 name = v4l2object->vcap.card;
737
738 g_value_set_string (value, (gchar *) name);
739 break;
740 }
741 case PROP_DEVICE_FD:
742 {
743 if (GST_V4L2_IS_OPEN (v4l2object))
744 g_value_set_int (value, v4l2object->video_fd);
745 else
746 g_value_set_int (value, DEFAULT_PROP_DEVICE_FD);
747 break;
748 }
749 case PROP_FLAGS:
750 {
751 guint flags = 0;
752
753 if (GST_V4L2_IS_OPEN (v4l2object)) {
754 flags |= v4l2object->device_caps &
755 (V4L2_CAP_VIDEO_CAPTURE |
756 V4L2_CAP_VIDEO_OUTPUT |
757 V4L2_CAP_VIDEO_OVERLAY |
758 V4L2_CAP_VBI_CAPTURE |
759 V4L2_CAP_VBI_OUTPUT | V4L2_CAP_TUNER | V4L2_CAP_AUDIO);
760
761 if (v4l2object->device_caps & V4L2_CAP_VIDEO_CAPTURE_MPLANE)
762 flags |= V4L2_CAP_VIDEO_CAPTURE;
763
764 if (v4l2object->device_caps & V4L2_CAP_VIDEO_OUTPUT_MPLANE)
765 flags |= V4L2_CAP_VIDEO_OUTPUT;
766 }
767 g_value_set_flags (value, flags);
768 break;
769 }
770 case PROP_BRIGHTNESS:
771 case PROP_CONTRAST:
772 case PROP_SATURATION:
773 case PROP_HUE:
774 {
775 gint cid = gst_v4l2_object_prop_to_cid (prop_id);
776
777 if (cid != -1) {
778 if (GST_V4L2_IS_OPEN (v4l2object)) {
779 gint v;
780 if (gst_v4l2_get_attribute (v4l2object, cid, &v)) {
781 g_value_set_int (value, v);
782 }
783 }
784 }
785 return TRUE;
786 }
787 break;
788 case PROP_TV_NORM:
789 g_value_set_enum (value, v4l2object->tv_norm);
790 break;
791 case PROP_IO_MODE:
792 g_value_set_enum (value, v4l2object->req_mode);
793 break;
794 case PROP_CAPTURE_IO_MODE:
795 g_return_val_if_fail (!V4L2_TYPE_IS_OUTPUT (v4l2object->type), FALSE);
796 g_value_set_enum (value, v4l2object->req_mode);
797 break;
798 case PROP_OUTPUT_IO_MODE:
799 g_return_val_if_fail (V4L2_TYPE_IS_OUTPUT (v4l2object->type), FALSE);
800 g_value_set_enum (value, v4l2object->req_mode);
801 break;
802 case PROP_EXTRA_CONTROLS:
803 gst_value_set_structure (value, v4l2object->extra_controls);
804 break;
805 case PROP_PIXEL_ASPECT_RATIO:
806 if (v4l2object->par)
807 g_value_transform (v4l2object->par, value);
808 break;
809 case PROP_FORCE_ASPECT_RATIO:
810 g_value_set_boolean (value, v4l2object->keep_aspect);
811 break;
812 default:
813 return FALSE;
814 break;
815 }
816 return TRUE;
817 }
818
819 static void
820 gst_v4l2_get_driver_min_buffers (GstV4l2Object * v4l2object)
821 {
822 struct v4l2_control control = { 0, };
823
824 g_return_if_fail (GST_V4L2_IS_OPEN (v4l2object));
825
826 if (V4L2_TYPE_IS_OUTPUT (v4l2object->type))
827 control.id = V4L2_CID_MIN_BUFFERS_FOR_OUTPUT;
828 else
829 control.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
830
831 if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_CTRL, &control) == 0) {
832 GST_DEBUG_OBJECT (v4l2object->dbg_obj,
833 "driver requires a minimum of %d buffers", control.value);
834 v4l2object->min_buffers = control.value;
835 } else {
836 v4l2object->min_buffers = 0;
837 }
838 }
839
840 static void
841 gst_v4l2_set_defaults (GstV4l2Object * v4l2object)
842 {
843 GstTunerNorm *norm = NULL;
844 GstTunerChannel *channel = NULL;
845 GstTuner *tuner;
846
847 if (!GST_IS_TUNER (v4l2object->element))
848 return;
849
850 tuner = GST_TUNER (v4l2object->element);
851
852 if (v4l2object->tv_norm)
853 norm = gst_v4l2_tuner_get_norm_by_std_id (v4l2object, v4l2object->tv_norm);
854 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "tv_norm=0x%" G_GINT64_MODIFIER "x, "
855 "norm=%p", (guint64) v4l2object->tv_norm, norm);
856 if (norm) {
857 gst_tuner_set_norm (tuner, norm);
858 } else {
859 norm =
860 GST_TUNER_NORM (gst_tuner_get_norm (GST_TUNER (v4l2object->element)));
861 if (norm) {
862 v4l2object->tv_norm =
863 gst_v4l2_tuner_get_std_id_by_norm (v4l2object, norm);
864 gst_tuner_norm_changed (tuner, norm);
865 }
866 }
867
868 if (v4l2object->channel)
869 channel = gst_tuner_find_channel_by_name (tuner, v4l2object->channel);
870 if (channel) {
871 gst_tuner_set_channel (tuner, channel);
872 } else {
873 channel =
874 GST_TUNER_CHANNEL (gst_tuner_get_channel (GST_TUNER
875 (v4l2object->element)));
876 if (channel) {
877 g_free (v4l2object->channel);
878 v4l2object->channel = g_strdup (channel->label);
879 gst_tuner_channel_changed (tuner, channel);
880 }
881 }
882
883 if (channel
884 && GST_TUNER_CHANNEL_HAS_FLAG (channel, GST_TUNER_CHANNEL_FREQUENCY)) {
885 if (v4l2object->frequency != 0) {
886 gst_tuner_set_frequency (tuner, channel, v4l2object->frequency);
887 } else {
888 v4l2object->frequency = gst_tuner_get_frequency (tuner, channel);
889 if (v4l2object->frequency == 0) {
890 /* guess */
891 gst_tuner_set_frequency (tuner, channel, 1000);
892 } else {
893 }
894 }
895 }
896 }
897
898 gboolean
899 gst_v4l2_object_open (GstV4l2Object * v4l2object, GstV4l2Error * error)
900 {
901 if (gst_v4l2_open (v4l2object, error))
902 gst_v4l2_set_defaults (v4l2object);
903 else
904 return FALSE;
905
906 return TRUE;
907 }
908
909 gboolean
910 gst_v4l2_object_open_shared (GstV4l2Object * v4l2object, GstV4l2Object * other)
911 {
912 gboolean ret;
913
914 ret = gst_v4l2_dup (v4l2object, other);
915
916 return ret;
917 }
918
919 gboolean
920 gst_v4l2_object_close (GstV4l2Object * v4l2object)
921 {
922 if (!gst_v4l2_close (v4l2object))
923 return FALSE;
924
925 gst_caps_replace (&v4l2object->probed_caps, NULL);
926
927 /* reset our copy of the device caps */
928 v4l2object->device_caps = 0;
929
930 if (v4l2object->formats) {
931 gst_v4l2_object_clear_format_list (v4l2object);
932 }
933
934 if (v4l2object->par) {
935 g_value_unset (v4l2object->par);
936 g_free (v4l2object->par);
937 v4l2object->par = NULL;
938 }
939
940 if (v4l2object->channel) {
941 g_free (v4l2object->channel);
942 v4l2object->channel = NULL;
943 }
944
945 return TRUE;
946 }
947
948 static struct v4l2_fmtdesc *
949 gst_v4l2_object_get_format_from_fourcc (GstV4l2Object * v4l2object,
950 guint32 fourcc)
951 {
952 struct v4l2_fmtdesc *fmt;
953 GSList *walk;
954
955 if (fourcc == 0)
956 return NULL;
957
958 walk = gst_v4l2_object_get_format_list (v4l2object);
959 while (walk) {
960 fmt = (struct v4l2_fmtdesc *) walk->data;
961 if (fmt->pixelformat == fourcc)
962 return fmt;
963 /* special case for jpeg */
964 if (fmt->pixelformat == V4L2_PIX_FMT_MJPEG ||
965 fmt->pixelformat == V4L2_PIX_FMT_JPEG ||
966 fmt->pixelformat == V4L2_PIX_FMT_PJPG) {
967 if (fourcc == V4L2_PIX_FMT_JPEG || fourcc == V4L2_PIX_FMT_MJPEG ||
968 fourcc == V4L2_PIX_FMT_PJPG) {
969 return fmt;
970 }
971 }
972 walk = g_slist_next (walk);
973 }
974
975 return NULL;
976 }
977
978
979
980 /* completely made-up ranking; the values themselves are meaningless */
981 /* These ranks MUST be X such that X<<15 fits on a signed int - see
982 the comment at the end of gst_v4l2_object_format_get_rank. */
983 #define YUV_BASE_RANK 1000
984 #define JPEG_BASE_RANK 500
985 #define DV_BASE_RANK 200
986 #define RGB_BASE_RANK 100
987 #define YUV_ODD_BASE_RANK 50
988 #define RGB_ODD_BASE_RANK 25
989 #define BAYER_BASE_RANK 15
990 #define S910_BASE_RANK 10
991 #define GREY_BASE_RANK 5
992 #define PWC_BASE_RANK 1
993
994 static gint
995 gst_v4l2_object_format_get_rank (const struct v4l2_fmtdesc *fmt)
996 {
997 guint32 fourcc = fmt->pixelformat;
998 gboolean emulated = ((fmt->flags & V4L2_FMT_FLAG_EMULATED) != 0);
999 gint rank = 0;
1000
1001 switch (fourcc) {
1002 case V4L2_PIX_FMT_MJPEG:
1003 case V4L2_PIX_FMT_PJPG:
1004 rank = JPEG_BASE_RANK;
1005 break;
1006 case V4L2_PIX_FMT_JPEG:
1007 rank = JPEG_BASE_RANK + 1;
1008 break;
1009 case V4L2_PIX_FMT_MPEG: /* MPEG */
1010 rank = JPEG_BASE_RANK + 2;
1011 break;
1012
1013 case V4L2_PIX_FMT_RGB332:
1014 case V4L2_PIX_FMT_ARGB555:
1015 case V4L2_PIX_FMT_XRGB555:
1016 case V4L2_PIX_FMT_RGB555:
1017 case V4L2_PIX_FMT_ARGB555X:
1018 case V4L2_PIX_FMT_XRGB555X:
1019 case V4L2_PIX_FMT_RGB555X:
1020 case V4L2_PIX_FMT_BGR666:
1021 case V4L2_PIX_FMT_RGB565:
1022 case V4L2_PIX_FMT_RGB565X:
1023 case V4L2_PIX_FMT_RGB444:
1024 case V4L2_PIX_FMT_Y4:
1025 case V4L2_PIX_FMT_Y6:
1026 case V4L2_PIX_FMT_Y10:
1027 case V4L2_PIX_FMT_Y12:
1028 case V4L2_PIX_FMT_Y10BPACK:
1029 case V4L2_PIX_FMT_YUV555:
1030 case V4L2_PIX_FMT_YUV565:
1031 case V4L2_PIX_FMT_YUV32:
1032 case V4L2_PIX_FMT_NV12MT_16X16:
1033 case V4L2_PIX_FMT_NV42:
1034 case V4L2_PIX_FMT_H264_MVC:
1035 rank = RGB_ODD_BASE_RANK;
1036 break;
1037
1038 case V4L2_PIX_FMT_RGB24:
1039 case V4L2_PIX_FMT_BGR24:
1040 rank = RGB_BASE_RANK - 1;
1041 break;
1042
1043 case V4L2_PIX_FMT_RGB32:
1044 case V4L2_PIX_FMT_BGR32:
1045 case V4L2_PIX_FMT_ABGR32:
1046 case V4L2_PIX_FMT_XBGR32:
1047 case V4L2_PIX_FMT_BGRA32:
1048 case V4L2_PIX_FMT_BGRX32:
1049 case V4L2_PIX_FMT_RGBA32:
1050 case V4L2_PIX_FMT_RGBX32:
1051 case V4L2_PIX_FMT_ARGB32:
1052 case V4L2_PIX_FMT_XRGB32:
1053 rank = RGB_BASE_RANK;
1054 break;
1055
1056 case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
1057 rank = GREY_BASE_RANK;
1058 break;
1059
1060 case V4L2_PIX_FMT_NV12MT: /* NV12 64x32 tile */
1061 case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
1062 case V4L2_PIX_FMT_NV21M: /* Same as NV21 */
1063 case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */
1064 case V4L2_PIX_FMT_HI240: /* 8 8-bit color */
1065 case V4L2_PIX_FMT_NV16: /* 16 Y/CbCr 4:2:2 */
1066 case V4L2_PIX_FMT_NV16M: /* Same as NV16 */
1067 case V4L2_PIX_FMT_NV61: /* 16 Y/CrCb 4:2:2 */
1068 case V4L2_PIX_FMT_NV61M: /* Same as NV61 */
1069 case V4L2_PIX_FMT_NV24: /* 24 Y/CrCb 4:4:4 */
1070 rank = YUV_ODD_BASE_RANK;
1071 break;
1072
1073 case V4L2_PIX_FMT_YVU410: /* YVU9, 9 bits per pixel */
1074 rank = YUV_BASE_RANK + 3;
1075 break;
1076 case V4L2_PIX_FMT_YUV410: /* YUV9, 9 bits per pixel */
1077 rank = YUV_BASE_RANK + 2;
1078 break;
1079 case V4L2_PIX_FMT_YUV420: /* I420, 12 bits per pixel */
1080 case V4L2_PIX_FMT_YUV420M:
1081 rank = YUV_BASE_RANK + 7;
1082 break;
1083 case V4L2_PIX_FMT_NV12: /* Y/CbCr 4:2:0, 12 bits per pixel */
1084 case V4L2_PIX_FMT_NV12M: /* Same as NV12 */
1085 rank = YUV_BASE_RANK + 8;
1086 break;
1087 case V4L2_PIX_FMT_YUYV: /* YUY2, 16 bits per pixel */
1088 rank = YUV_BASE_RANK + 10;
1089 break;
1090 case V4L2_PIX_FMT_YVU420: /* YV12, 12 bits per pixel */
1091 rank = YUV_BASE_RANK + 6;
1092 break;
1093 case V4L2_PIX_FMT_UYVY: /* UYVY, 16 bits per pixel */
1094 rank = YUV_BASE_RANK + 9;
1095 break;
1096 case V4L2_PIX_FMT_YUV444:
1097 rank = YUV_BASE_RANK + 6;
1098 break;
1099 case V4L2_PIX_FMT_Y41P: /* Y41P, 12 bits per pixel */
1100 rank = YUV_BASE_RANK + 5;
1101 break;
1102 case V4L2_PIX_FMT_YUV411P: /* Y41B, 12 bits per pixel */
1103 rank = YUV_BASE_RANK + 4;
1104 break;
1105 case V4L2_PIX_FMT_YUV422P: /* Y42B, 16 bits per pixel */
1106 rank = YUV_BASE_RANK + 8;
1107 break;
1108
1109 case V4L2_PIX_FMT_DV:
1110 rank = DV_BASE_RANK;
1111 break;
1112
1113 case V4L2_PIX_FMT_WNVA: /* Winnov hw compress */
1114 rank = 0;
1115 break;
1116
1117 case V4L2_PIX_FMT_SBGGR8:
1118 case V4L2_PIX_FMT_SGBRG8:
1119 case V4L2_PIX_FMT_SGRBG8:
1120 case V4L2_PIX_FMT_SRGGB8:
1121 rank = BAYER_BASE_RANK;
1122 break;
1123
1124 case V4L2_PIX_FMT_SN9C10X:
1125 rank = S910_BASE_RANK;
1126 break;
1127
1128 case V4L2_PIX_FMT_PWC1:
1129 rank = PWC_BASE_RANK;
1130 break;
1131 case V4L2_PIX_FMT_PWC2:
1132 rank = PWC_BASE_RANK;
1133 break;
1134
1135 default:
1136 rank = 0;
1137 break;
1138 }
1139
1140 /* All ranks are below 1<<15 so a shift by 15
1141 * will a) make all non-emulated formats larger
1142    * than emulated ones and b) not overflow
1143 */
1144 if (!emulated)
1145 rank <<= 15;
1146
1147 return rank;
1148 }
1149
1150
1151
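/* GCompareFunc for g_slist_insert_sorted (): sorts by descending rank, so
 * higher-ranked (preferred) formats end up first in the format list. */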
1152 static gint
1153 format_cmp_func (gconstpointer a, gconstpointer b)
1154 {
1155 const struct v4l2_fmtdesc *fa = a;
1156 const struct v4l2_fmtdesc *fb = b;
1157
1158 if (fa->pixelformat == fb->pixelformat)
1159 return 0;
1160
1161 return gst_v4l2_object_format_get_rank (fb) -
1162 gst_v4l2_object_format_get_rank (fa);
1163 }
1164
1165 /******************************************************
1166 * gst_v4l2_object_fill_format_list():
1167 * create list of supported capture formats
1168 * return value: TRUE on success, FALSE on error
1169 ******************************************************/
1170 static gboolean
1171 gst_v4l2_object_fill_format_list (GstV4l2Object * v4l2object,
1172 enum v4l2_buf_type type)
1173 {
1174 gint n;
1175 struct v4l2_fmtdesc *format;
1176
1177 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "getting src format enumerations");
1178
1179 /* format enumeration */
1180 for (n = 0;; n++) {
1181 format = g_new0 (struct v4l2_fmtdesc, 1);
1182
1183 format->index = n;
1184 format->type = type;
1185
1186 if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_ENUM_FMT, format) < 0) {
1187 if (errno == EINVAL) {
1188 g_free (format);
1189 break; /* end of enumeration */
1190 } else {
1191 goto failed;
1192 }
1193 }
1194
1195 GST_LOG_OBJECT (v4l2object->dbg_obj, "index: %u", format->index);
1196 GST_LOG_OBJECT (v4l2object->dbg_obj, "type: %d", format->type);
1197 GST_LOG_OBJECT (v4l2object->dbg_obj, "flags: %08x", format->flags);
1198 GST_LOG_OBJECT (v4l2object->dbg_obj, "description: '%s'",
1199 format->description);
1200 GST_LOG_OBJECT (v4l2object->dbg_obj, "pixelformat: %" GST_FOURCC_FORMAT,
1201 GST_FOURCC_ARGS (format->pixelformat));
1202
1203 /* sort formats according to our preference; we do this, because caps
1204 * are probed in the order the formats are in the list, and the order of
1205 * formats in the final probed caps matters for things like fixation */
1206 v4l2object->formats = g_slist_insert_sorted (v4l2object->formats, format,
1207 (GCompareFunc) format_cmp_func);
1208 }
1209
1210 #ifndef GST_DISABLE_GST_DEBUG
1211 {
1212 GSList *l;
1213
1214 GST_INFO_OBJECT (v4l2object->dbg_obj, "got %d format(s):", n);
1215 for (l = v4l2object->formats; l != NULL; l = l->next) {
1216 format = l->data;
1217
1218 GST_INFO_OBJECT (v4l2object->dbg_obj,
1219 " %" GST_FOURCC_FORMAT "%s", GST_FOURCC_ARGS (format->pixelformat),
1220 ((format->flags & V4L2_FMT_FLAG_EMULATED)) ? " (emulated)" : "");
1221 }
1222 }
1223 #endif
1224
1225 return TRUE;
1226
1227 /* ERRORS */
1228 failed:
1229 {
1230 g_free (format);
1231
1232     if (!v4l2object->element)
1233 return FALSE;
1234
1235 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
1236 (_("Failed to enumerate possible video formats device '%s' can work "
1237 "with"), v4l2object->videodev),
1238 ("Failed to get number %d in pixelformat enumeration for %s. (%d - %s)",
1239 n, v4l2object->videodev, errno, g_strerror (errno)));
1240
1241 return FALSE;
1242 }
1243 }
1244
1245 /*
1246 * Get the list of supported capture formats, a list of
1247 * `struct v4l2_fmtdesc`.
1248 */
1249 static GSList *
1250 gst_v4l2_object_get_format_list (GstV4l2Object * v4l2object)
1251 {
1252 if (!v4l2object->formats) {
1253
1254 /* check usual way */
1255 gst_v4l2_object_fill_format_list (v4l2object, v4l2object->type);
1256
1257     /* if the driver supports multi-planar and the format list is
1258      * still empty, work around driver bugs by also enumerating
1259      * formats as if the device did not support the multi-planar
1260      * API */
1261 if (!v4l2object->formats) {
1262 switch (v4l2object->type) {
1263 case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
1264 gst_v4l2_object_fill_format_list (v4l2object,
1265 V4L2_BUF_TYPE_VIDEO_CAPTURE);
1266 break;
1267
1268 case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
1269 gst_v4l2_object_fill_format_list (v4l2object,
1270 V4L2_BUF_TYPE_VIDEO_OUTPUT);
1271 break;
1272
1273 default:
1274 break;
1275 }
1276 }
1277 }
1278 return v4l2object->formats;
1279 }
1280
1281 static GstVideoFormat
1282 gst_v4l2_object_v4l2fourcc_to_video_format (guint32 fourcc)
1283 {
1284 GstVideoFormat format;
1285
1286 switch (fourcc) {
1287 case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
1288 format = GST_VIDEO_FORMAT_GRAY8;
1289 break;
1290 case V4L2_PIX_FMT_Y16:
1291 format = GST_VIDEO_FORMAT_GRAY16_LE;
1292 break;
1293 case V4L2_PIX_FMT_Y16_BE:
1294 format = GST_VIDEO_FORMAT_GRAY16_BE;
1295 break;
1296 case V4L2_PIX_FMT_XRGB555:
1297 case V4L2_PIX_FMT_RGB555:
1298 format = GST_VIDEO_FORMAT_RGB15;
1299 break;
1300 case V4L2_PIX_FMT_XRGB555X:
1301 case V4L2_PIX_FMT_RGB555X:
1302 format = GST_VIDEO_FORMAT_BGR15;
1303 break;
1304 case V4L2_PIX_FMT_RGB565:
1305 format = GST_VIDEO_FORMAT_RGB16;
1306 break;
1307 case V4L2_PIX_FMT_RGB24:
1308 format = GST_VIDEO_FORMAT_RGB;
1309 break;
1310 case V4L2_PIX_FMT_BGR24:
1311 format = GST_VIDEO_FORMAT_BGR;
1312 break;
1313 case V4L2_PIX_FMT_XRGB32:
1314 case V4L2_PIX_FMT_RGB32:
1315 format = GST_VIDEO_FORMAT_xRGB;
1316 break;
1317 case V4L2_PIX_FMT_RGBX32:
1318 format = GST_VIDEO_FORMAT_RGBx;
1319 break;
1320 case V4L2_PIX_FMT_XBGR32:
1321 case V4L2_PIX_FMT_BGR32:
1322 format = GST_VIDEO_FORMAT_BGRx;
1323 break;
1324 case V4L2_PIX_FMT_BGRX32:
1325 format = GST_VIDEO_FORMAT_xBGR;
1326 break;
1327 case V4L2_PIX_FMT_ABGR32:
1328 format = GST_VIDEO_FORMAT_BGRA;
1329 break;
1330 case V4L2_PIX_FMT_BGRA32:
1331 format = GST_VIDEO_FORMAT_ABGR;
1332 break;
1333 case V4L2_PIX_FMT_RGBA32:
1334 format = GST_VIDEO_FORMAT_RGBA;
1335 break;
1336 case V4L2_PIX_FMT_ARGB32:
1337 format = GST_VIDEO_FORMAT_ARGB;
1338 break;
1339 case V4L2_PIX_FMT_NV12:
1340 case V4L2_PIX_FMT_NV12M:
1341 format = GST_VIDEO_FORMAT_NV12;
1342 break;
1343 case V4L2_PIX_FMT_NV12MT:
1344 format = GST_VIDEO_FORMAT_NV12_64Z32;
1345 break;
1346 case V4L2_PIX_FMT_NV21:
1347 case V4L2_PIX_FMT_NV21M:
1348 format = GST_VIDEO_FORMAT_NV21;
1349 break;
1350 case V4L2_PIX_FMT_YVU410:
1351 format = GST_VIDEO_FORMAT_YVU9;
1352 break;
1353 case V4L2_PIX_FMT_YUV410:
1354 format = GST_VIDEO_FORMAT_YUV9;
1355 break;
1356 case V4L2_PIX_FMT_YUV420:
1357 case V4L2_PIX_FMT_YUV420M:
1358 format = GST_VIDEO_FORMAT_I420;
1359 break;
1360 case V4L2_PIX_FMT_YUYV:
1361 format = GST_VIDEO_FORMAT_YUY2;
1362 break;
1363 case V4L2_PIX_FMT_YVU420:
1364 format = GST_VIDEO_FORMAT_YV12;
1365 break;
1366 case V4L2_PIX_FMT_UYVY:
1367 format = GST_VIDEO_FORMAT_UYVY;
1368 break;
1369 case V4L2_PIX_FMT_YUV411P:
1370 format = GST_VIDEO_FORMAT_Y41B;
1371 break;
1372 case V4L2_PIX_FMT_YUV422P:
1373 format = GST_VIDEO_FORMAT_Y42B;
1374 break;
1375 case V4L2_PIX_FMT_YVYU:
1376 format = GST_VIDEO_FORMAT_YVYU;
1377 break;
1378 case V4L2_PIX_FMT_NV16:
1379 case V4L2_PIX_FMT_NV16M:
1380 format = GST_VIDEO_FORMAT_NV16;
1381 break;
1382 case V4L2_PIX_FMT_NV61:
1383 case V4L2_PIX_FMT_NV61M:
1384 format = GST_VIDEO_FORMAT_NV61;
1385 break;
1386 case V4L2_PIX_FMT_NV24:
1387 format = GST_VIDEO_FORMAT_NV24;
1388 break;
1389 default:
1390 format = GST_VIDEO_FORMAT_UNKNOWN;
1391 break;
1392 }
1393
1394 return format;
1395 }
1396
1397 static gboolean
1398 gst_v4l2_object_v4l2fourcc_is_rgb (guint32 fourcc)
1399 {
1400 gboolean ret = FALSE;
1401
1402 switch (fourcc) {
1403 case V4L2_PIX_FMT_XRGB555:
1404 case V4L2_PIX_FMT_RGB555:
1405 case V4L2_PIX_FMT_XRGB555X:
1406 case V4L2_PIX_FMT_RGB555X:
1407 case V4L2_PIX_FMT_RGB565:
1408 case V4L2_PIX_FMT_RGB24:
1409 case V4L2_PIX_FMT_BGR24:
1410 case V4L2_PIX_FMT_XRGB32:
1411 case V4L2_PIX_FMT_RGB32:
1412 case V4L2_PIX_FMT_RGBA32:
1413 case V4L2_PIX_FMT_RGBX32:
1414 case V4L2_PIX_FMT_XBGR32:
1415 case V4L2_PIX_FMT_BGR32:
1416 case V4L2_PIX_FMT_BGRA32:
1417 case V4L2_PIX_FMT_BGRX32:
1418 case V4L2_PIX_FMT_ABGR32:
1419 case V4L2_PIX_FMT_ARGB32:
1420 case V4L2_PIX_FMT_SBGGR8:
1421 case V4L2_PIX_FMT_SGBRG8:
1422 case V4L2_PIX_FMT_SGRBG8:
1423 case V4L2_PIX_FMT_SRGGB8:
1424 ret = TRUE;
1425 break;
1426 default:
1427 break;
1428 }
1429
1430 return ret;
1431 }
1432
1433 static GstStructure *
1434 gst_v4l2_object_v4l2fourcc_to_bare_struct (guint32 fourcc)
1435 {
1436 GstStructure *structure = NULL;
1437
1438 switch (fourcc) {
1439 case V4L2_PIX_FMT_MJPEG: /* Motion-JPEG */
1440 case V4L2_PIX_FMT_PJPG: /* Progressive-JPEG */
1441 case V4L2_PIX_FMT_JPEG: /* JFIF JPEG */
1442 structure = gst_structure_new_empty ("image/jpeg");
1443 break;
1444 case V4L2_PIX_FMT_MPEG1:
1445 structure = gst_structure_new ("video/mpeg",
1446 "mpegversion", G_TYPE_INT, 1, NULL);
1447 break;
1448 case V4L2_PIX_FMT_MPEG2:
1449 structure = gst_structure_new ("video/mpeg",
1450 "mpegversion", G_TYPE_INT, 2, NULL);
1451 break;
1452 case V4L2_PIX_FMT_MPEG4:
1453 case V4L2_PIX_FMT_XVID:
1454 structure = gst_structure_new ("video/mpeg",
1455 "mpegversion", G_TYPE_INT, 4, "systemstream",
1456 G_TYPE_BOOLEAN, FALSE, NULL);
1457 break;
1458 case V4L2_PIX_FMT_FWHT:
1459 structure = gst_structure_new_empty ("video/x-fwht");
1460 break;
1461 case V4L2_PIX_FMT_H263:
1462 structure = gst_structure_new ("video/x-h263",
1463 "variant", G_TYPE_STRING, "itu", NULL);
1464 break;
1465 case V4L2_PIX_FMT_H264: /* H.264 */
1466 structure = gst_structure_new ("video/x-h264",
1467 "stream-format", G_TYPE_STRING, "byte-stream", "alignment",
1468 G_TYPE_STRING, "au", NULL);
1469 break;
1470 case V4L2_PIX_FMT_H264_NO_SC:
1471 structure = gst_structure_new ("video/x-h264",
1472 "stream-format", G_TYPE_STRING, "avc", "alignment",
1473 G_TYPE_STRING, "au", NULL);
1474 break;
1475 case V4L2_PIX_FMT_HEVC: /* H.265 */
1476 structure = gst_structure_new ("video/x-h265",
1477 "stream-format", G_TYPE_STRING, "byte-stream", "alignment",
1478 G_TYPE_STRING, "au", NULL);
1479 break;
1480 case V4L2_PIX_FMT_VC1_ANNEX_G:
1481 case V4L2_PIX_FMT_VC1_ANNEX_L:
1482 structure = gst_structure_new ("video/x-wmv",
1483 "wmvversion", G_TYPE_INT, 3, "format", G_TYPE_STRING, "WVC1", NULL);
1484 break;
1485 case V4L2_PIX_FMT_VP8:
1486 structure = gst_structure_new_empty ("video/x-vp8");
1487 break;
1488 case V4L2_PIX_FMT_VP9:
1489 structure = gst_structure_new_empty ("video/x-vp9");
1490 break;
1491 case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
1492 case V4L2_PIX_FMT_Y16:
1493 case V4L2_PIX_FMT_Y16_BE:
1494 case V4L2_PIX_FMT_XRGB555:
1495 case V4L2_PIX_FMT_RGB555:
1496 case V4L2_PIX_FMT_XRGB555X:
1497 case V4L2_PIX_FMT_RGB555X:
1498 case V4L2_PIX_FMT_RGB565:
1499 case V4L2_PIX_FMT_RGB24:
1500 case V4L2_PIX_FMT_BGR24:
1501 case V4L2_PIX_FMT_RGB32:
1502 case V4L2_PIX_FMT_XRGB32:
1503 case V4L2_PIX_FMT_ARGB32:
1504 case V4L2_PIX_FMT_RGBX32:
1505 case V4L2_PIX_FMT_RGBA32:
1506 case V4L2_PIX_FMT_BGR32:
1507 case V4L2_PIX_FMT_BGRX32:
1508 case V4L2_PIX_FMT_BGRA32:
1509 case V4L2_PIX_FMT_XBGR32:
1510 case V4L2_PIX_FMT_ABGR32:
1511 case V4L2_PIX_FMT_NV12: /* 12 Y/CbCr 4:2:0 */
1512 case V4L2_PIX_FMT_NV12M:
1513 case V4L2_PIX_FMT_NV12MT:
1514 case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
1515 case V4L2_PIX_FMT_NV21M:
1516 case V4L2_PIX_FMT_NV16: /* 16 Y/CbCr 4:2:2 */
1517 case V4L2_PIX_FMT_NV16M:
1518 case V4L2_PIX_FMT_NV61: /* 16 Y/CrCb 4:2:2 */
1519 case V4L2_PIX_FMT_NV61M:
1520 case V4L2_PIX_FMT_NV24: /* 24 Y/CrCb 4:4:4 */
1521 case V4L2_PIX_FMT_YVU410:
1522 case V4L2_PIX_FMT_YUV410:
1523 case V4L2_PIX_FMT_YUV420: /* I420/IYUV */
1524 case V4L2_PIX_FMT_YUV420M:
1525 case V4L2_PIX_FMT_YUYV:
1526 case V4L2_PIX_FMT_YVU420:
1527 case V4L2_PIX_FMT_UYVY:
1528 case V4L2_PIX_FMT_YUV422P:
1529 case V4L2_PIX_FMT_YVYU:
1530 case V4L2_PIX_FMT_YUV411P:{
1531 GstVideoFormat format;
1532 format = gst_v4l2_object_v4l2fourcc_to_video_format (fourcc);
1533 if (format != GST_VIDEO_FORMAT_UNKNOWN)
1534 structure = gst_structure_new ("video/x-raw",
1535 "format", G_TYPE_STRING, gst_video_format_to_string (format), NULL);
1536 break;
1537 }
1538 case V4L2_PIX_FMT_DV:
1539 structure =
1540 gst_structure_new ("video/x-dv", "systemstream", G_TYPE_BOOLEAN, TRUE,
1541 NULL);
1542 break;
1543 case V4L2_PIX_FMT_MPEG: /* MPEG */
1544 structure = gst_structure_new ("video/mpegts",
1545 "systemstream", G_TYPE_BOOLEAN, TRUE, NULL);
1546 break;
1547 case V4L2_PIX_FMT_WNVA: /* Winnov hw compress */
1548 break;
1549 case V4L2_PIX_FMT_SBGGR8:
1550 case V4L2_PIX_FMT_SGBRG8:
1551 case V4L2_PIX_FMT_SGRBG8:
1552 case V4L2_PIX_FMT_SRGGB8:
1553 structure = gst_structure_new ("video/x-bayer", "format", G_TYPE_STRING,
1554 fourcc == V4L2_PIX_FMT_SBGGR8 ? "bggr" :
1555 fourcc == V4L2_PIX_FMT_SGBRG8 ? "gbrg" :
1556 fourcc == V4L2_PIX_FMT_SGRBG8 ? "grbg" :
1557 /* fourcc == V4L2_PIX_FMT_SRGGB8 ? */ "rggb", NULL);
1558 break;
1559 case V4L2_PIX_FMT_SN9C10X:
1560 structure = gst_structure_new_empty ("video/x-sonix");
1561 break;
1562 case V4L2_PIX_FMT_PWC1:
1563 structure = gst_structure_new_empty ("video/x-pwc1");
1564 break;
1565 case V4L2_PIX_FMT_PWC2:
1566 structure = gst_structure_new_empty ("video/x-pwc2");
1567 break;
1568 case V4L2_PIX_FMT_RGB332:
1569 case V4L2_PIX_FMT_BGR666:
1570 case V4L2_PIX_FMT_ARGB555X:
1571 case V4L2_PIX_FMT_RGB565X:
1572 case V4L2_PIX_FMT_RGB444:
1573 case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */
1574 case V4L2_PIX_FMT_HI240: /* 8 8-bit color */
1575 case V4L2_PIX_FMT_Y4:
1576 case V4L2_PIX_FMT_Y6:
1577 case V4L2_PIX_FMT_Y10:
1578 case V4L2_PIX_FMT_Y12:
1579 case V4L2_PIX_FMT_Y10BPACK:
1580 case V4L2_PIX_FMT_YUV444:
1581 case V4L2_PIX_FMT_YUV555:
1582 case V4L2_PIX_FMT_YUV565:
1583 case V4L2_PIX_FMT_Y41P:
1584 case V4L2_PIX_FMT_YUV32:
1585 case V4L2_PIX_FMT_NV12MT_16X16:
1586 case V4L2_PIX_FMT_NV42:
1587 case V4L2_PIX_FMT_H264_MVC:
1588 default:
1589 GST_DEBUG ("Unsupported fourcc 0x%08x %" GST_FOURCC_FORMAT,
1590 fourcc, GST_FOURCC_ARGS (fourcc));
1591 break;
1592 }
1593
1594 return structure;
1595 }
1596
1597 GstStructure *
1598 gst_v4l2_object_v4l2fourcc_to_structure (guint32 fourcc)
1599 {
1600 GstStructure *template;
1601 gint i;
1602
1603 template = gst_v4l2_object_v4l2fourcc_to_bare_struct (fourcc);
1604
1605 if (template == NULL)
1606 goto done;
1607
1608 for (i = 0; i < GST_V4L2_FORMAT_COUNT; i++) {
1609 if (gst_v4l2_formats[i].format != fourcc)
1610 continue;
1611
1612 if (gst_v4l2_formats[i].dimensions) {
1613 gst_structure_set (template,
1614 "width", GST_TYPE_INT_RANGE, 1, GST_V4L2_MAX_SIZE,
1615 "height", GST_TYPE_INT_RANGE, 1, GST_V4L2_MAX_SIZE,
1616 "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
1617 }
1618 break;
1619 }
1620
1621 done:
1622 return template;
1623 }
1624
1625 /* Add an 'alternate' interlace-mode variant of the caps with the Interlaced caps feature */
1626 static void
1627 add_alternate_variant (GstV4l2Object * v4l2object, GstCaps * caps,
1628 GstStructure * structure)
1629 {
1630 GstStructure *alt_s;
1631
1632 if (v4l2object && v4l2object->never_interlaced)
1633 return;
1634
1635 if (!gst_structure_has_name (structure, "video/x-raw"))
1636 return;
1637
1638 alt_s = gst_structure_copy (structure);
1639 gst_structure_set (alt_s, "interlace-mode", G_TYPE_STRING, "alternate", NULL);
1640
1641 gst_caps_append_structure_full (caps, alt_s,
1642 gst_caps_features_new (GST_CAPS_FEATURE_FORMAT_INTERLACED, NULL));
1643 }
1644
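/* Build template caps for every table entry matching @flags; interlaced
 * "alternate" variants are collected separately and merged in after the
 * progressive caps. */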
1645 static GstCaps *
1646 gst_v4l2_object_get_caps_helper (GstV4L2FormatFlags flags)
1647 {
1648 GstStructure *structure;
1649 GstCaps *caps, *caps_interlaced;
1650 guint i;
1651
1652 caps = gst_caps_new_empty ();
1653 caps_interlaced = gst_caps_new_empty ();
1654 for (i = 0; i < GST_V4L2_FORMAT_COUNT; i++) {
1655
1656 if ((gst_v4l2_formats[i].flags & flags) == 0)
1657 continue;
1658
1659 structure =
1660 gst_v4l2_object_v4l2fourcc_to_bare_struct (gst_v4l2_formats[i].format);
1661
1662 if (structure) {
1663 GstStructure *alt_s = NULL;
1664
1665 if (gst_v4l2_formats[i].dimensions) {
1666 gst_structure_set (structure,
1667 "width", GST_TYPE_INT_RANGE, 1, GST_V4L2_MAX_SIZE,
1668 "height", GST_TYPE_INT_RANGE, 1, GST_V4L2_MAX_SIZE,
1669 "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
1670 }
1671
1672 switch (gst_v4l2_formats[i].format) {
1673 case V4L2_PIX_FMT_RGB32:
1674 alt_s = gst_structure_copy (structure);
1675 gst_structure_set (alt_s, "format", G_TYPE_STRING, "ARGB", NULL);
1676 break;
1677 case V4L2_PIX_FMT_BGR32:
1678 alt_s = gst_structure_copy (structure);
1679           gst_structure_set (alt_s, "format", G_TYPE_STRING, "BGRA", NULL);
          break;
1680         default:
1681 break;
1682 }
1683
1684 gst_caps_append_structure (caps, structure);
1685
1686 if (alt_s) {
1687 gst_caps_append_structure (caps, alt_s);
1688 add_alternate_variant (NULL, caps_interlaced, alt_s);
1689 }
1690
1691 add_alternate_variant (NULL, caps_interlaced, structure);
1692 }
1693 }
1694
1695 caps = gst_caps_simplify (caps);
1696 caps_interlaced = gst_caps_simplify (caps_interlaced);
1697
1698 return gst_caps_merge (caps, caps_interlaced);
1699 }
1700
1701 GstCaps *
1702 gst_v4l2_object_get_all_caps (void)
1703 {
1704 static GstCaps *caps = NULL;
1705
1706 if (g_once_init_enter (&caps)) {
1707 GstCaps *all_caps = gst_v4l2_object_get_caps_helper (GST_V4L2_ALL);
1708 GST_MINI_OBJECT_FLAG_SET (all_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
1709 g_once_init_leave (&caps, all_caps);
1710 }
1711
1712 return caps;
1713 }
1714
1715 GstCaps *
1716 gst_v4l2_object_get_raw_caps (void)
1717 {
1718 static GstCaps *caps = NULL;
1719
1720 if (g_once_init_enter (&caps)) {
1721 GstCaps *raw_caps = gst_v4l2_object_get_caps_helper (GST_V4L2_RAW);
1722 GST_MINI_OBJECT_FLAG_SET (raw_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
1723 g_once_init_leave (&caps, raw_caps);
1724 }
1725
1726 return caps;
1727 }
1728
1729 GstCaps *
1730 gst_v4l2_object_get_codec_caps (void)
1731 {
1732 static GstCaps *caps = NULL;
1733
1734 if (g_once_init_enter (&caps)) {
1735 GstCaps *codec_caps = gst_v4l2_object_get_caps_helper (GST_V4L2_CODEC);
1736 GST_MINI_OBJECT_FLAG_SET (codec_caps, GST_MINI_OBJECT_FLAG_MAY_BE_LEAKED);
1737 g_once_init_leave (&caps, codec_caps);
1738 }
1739
1740 return caps;
1741 }
1742
1743 /* collect data for the given caps
1744  * @caps: given input caps
1745  * @format: location for the matching v4l2 format description
1746  * @info: location for the GstVideoInfo parsed from @caps, carrying the
1747  *        width/height, framerate and the expected frame size
1748  *        (0 if unknown)
1749  */
1750 static gboolean
1751 gst_v4l2_object_get_caps_info (GstV4l2Object * v4l2object, GstCaps * caps,
1752 struct v4l2_fmtdesc **format, GstVideoInfo * info)
1753 {
1754 GstStructure *structure;
1755 guint32 fourcc = 0, fourcc_nc = 0;
1756 const gchar *mimetype;
1757 struct v4l2_fmtdesc *fmt = NULL;
1758
1759 structure = gst_caps_get_structure (caps, 0);
1760
1761 mimetype = gst_structure_get_name (structure);
1762
1763 if (!gst_video_info_from_caps (info, caps))
1764 goto invalid_format;
1765
1766 if (g_str_equal (mimetype, "video/x-raw")) {
1767 switch (GST_VIDEO_INFO_FORMAT (info)) {
1768 case GST_VIDEO_FORMAT_I420:
1769 fourcc = V4L2_PIX_FMT_YUV420;
1770 fourcc_nc = V4L2_PIX_FMT_YUV420M;
1771 break;
1772 case GST_VIDEO_FORMAT_YUY2:
1773 fourcc = V4L2_PIX_FMT_YUYV;
1774 break;
1775 case GST_VIDEO_FORMAT_UYVY:
1776 fourcc = V4L2_PIX_FMT_UYVY;
1777 break;
1778 case GST_VIDEO_FORMAT_YV12:
1779 fourcc = V4L2_PIX_FMT_YVU420;
1780 break;
1781 case GST_VIDEO_FORMAT_Y41B:
1782 fourcc = V4L2_PIX_FMT_YUV411P;
1783 break;
1784 case GST_VIDEO_FORMAT_Y42B:
1785 fourcc = V4L2_PIX_FMT_YUV422P;
1786 break;
1787 case GST_VIDEO_FORMAT_NV12:
1788 fourcc = V4L2_PIX_FMT_NV12;
1789 fourcc_nc = V4L2_PIX_FMT_NV12M;
1790 break;
1791 case GST_VIDEO_FORMAT_NV12_64Z32:
1792 fourcc_nc = V4L2_PIX_FMT_NV12MT;
1793 break;
1794 case GST_VIDEO_FORMAT_NV21:
1795 fourcc = V4L2_PIX_FMT_NV21;
1796 fourcc_nc = V4L2_PIX_FMT_NV21M;
1797 break;
1798 case GST_VIDEO_FORMAT_NV16:
1799 fourcc = V4L2_PIX_FMT_NV16;
1800 fourcc_nc = V4L2_PIX_FMT_NV16M;
1801 break;
1802 case GST_VIDEO_FORMAT_NV61:
1803 fourcc = V4L2_PIX_FMT_NV61;
1804 fourcc_nc = V4L2_PIX_FMT_NV61M;
1805 break;
1806 case GST_VIDEO_FORMAT_NV24:
1807 fourcc = V4L2_PIX_FMT_NV24;
1808 break;
1809 case GST_VIDEO_FORMAT_YVYU:
1810 fourcc = V4L2_PIX_FMT_YVYU;
1811 break;
1812 case GST_VIDEO_FORMAT_RGB15:
1813 fourcc = V4L2_PIX_FMT_RGB555;
1814 fourcc_nc = V4L2_PIX_FMT_XRGB555;
1815 break;
1816 case GST_VIDEO_FORMAT_RGB16:
1817 fourcc = V4L2_PIX_FMT_RGB565;
1818 break;
1819 case GST_VIDEO_FORMAT_RGB:
1820 fourcc = V4L2_PIX_FMT_RGB24;
1821 break;
1822 case GST_VIDEO_FORMAT_BGR:
1823 fourcc = V4L2_PIX_FMT_BGR24;
1824 break;
1825 case GST_VIDEO_FORMAT_xRGB:
1826 fourcc = V4L2_PIX_FMT_RGB32;
1827 fourcc_nc = V4L2_PIX_FMT_XRGB32;
1828 break;
1829 case GST_VIDEO_FORMAT_RGBx:
1830 fourcc = V4L2_PIX_FMT_RGBX32;
1831 break;
1832 case GST_VIDEO_FORMAT_ARGB:
1833 fourcc = V4L2_PIX_FMT_RGB32;
1834 fourcc_nc = V4L2_PIX_FMT_ARGB32;
1835 break;
1836 case GST_VIDEO_FORMAT_RGBA:
1837 fourcc = V4L2_PIX_FMT_RGBA32;
1838 break;
1839 case GST_VIDEO_FORMAT_BGRx:
1840 fourcc = V4L2_PIX_FMT_BGR32;
1841 fourcc_nc = V4L2_PIX_FMT_XBGR32;
1842 break;
1843 case GST_VIDEO_FORMAT_xBGR:
1844 fourcc = V4L2_PIX_FMT_BGRX32;
1845 break;
1846 case GST_VIDEO_FORMAT_BGRA:
1847 fourcc = V4L2_PIX_FMT_BGR32;
1848 fourcc_nc = V4L2_PIX_FMT_ABGR32;
1849 break;
1850 case GST_VIDEO_FORMAT_ABGR:
1851 fourcc = V4L2_PIX_FMT_BGRA32;
1852 break;
1853 case GST_VIDEO_FORMAT_GRAY8:
1854 fourcc = V4L2_PIX_FMT_GREY;
1855 break;
1856 case GST_VIDEO_FORMAT_GRAY16_LE:
1857 fourcc = V4L2_PIX_FMT_Y16;
1858 break;
1859 case GST_VIDEO_FORMAT_GRAY16_BE:
1860 fourcc = V4L2_PIX_FMT_Y16_BE;
1861 break;
1862 case GST_VIDEO_FORMAT_BGR15:
1863 fourcc = V4L2_PIX_FMT_RGB555X;
1864 fourcc_nc = V4L2_PIX_FMT_XRGB555X;
1865 break;
1866 default:
1867 break;
1868 }
1869 } else {
1870 if (g_str_equal (mimetype, "video/mpegts")) {
1871 fourcc = V4L2_PIX_FMT_MPEG;
1872 } else if (g_str_equal (mimetype, "video/x-dv")) {
1873 fourcc = V4L2_PIX_FMT_DV;
1874 } else if (g_str_equal (mimetype, "image/jpeg")) {
1875 fourcc = V4L2_PIX_FMT_JPEG;
1876 } else if (g_str_equal (mimetype, "video/mpeg")) {
1877 gint version;
1878 if (gst_structure_get_int (structure, "mpegversion", &version)) {
1879 switch (version) {
1880 case 1:
1881 fourcc = V4L2_PIX_FMT_MPEG1;
1882 break;
1883 case 2:
1884 fourcc = V4L2_PIX_FMT_MPEG2;
1885 break;
1886 case 4:
1887 fourcc = V4L2_PIX_FMT_MPEG4;
1888 fourcc_nc = V4L2_PIX_FMT_XVID;
1889 break;
1890 default:
1891 break;
1892 }
1893 }
1894 } else if (g_str_equal (mimetype, "video/x-fwht")) {
1895 fourcc = V4L2_PIX_FMT_FWHT;
1896 } else if (g_str_equal (mimetype, "video/x-h263")) {
1897 fourcc = V4L2_PIX_FMT_H263;
1898 } else if (g_str_equal (mimetype, "video/x-h264")) {
1899 const gchar *stream_format =
1900 gst_structure_get_string (structure, "stream-format");
1901 if (!g_strcmp0 (stream_format, "avc"))
1902 fourcc = V4L2_PIX_FMT_H264_NO_SC;
1903 else
1904 fourcc = V4L2_PIX_FMT_H264;
1905 } else if (g_str_equal (mimetype, "video/x-h265")) {
1906 fourcc = V4L2_PIX_FMT_HEVC;
1907 } else if (g_str_equal (mimetype, "video/x-vp8")) {
1908 fourcc = V4L2_PIX_FMT_VP8;
1909 } else if (g_str_equal (mimetype, "video/x-vp9")) {
1910 fourcc = V4L2_PIX_FMT_VP9;
1911 } else if (g_str_equal (mimetype, "video/x-bayer")) {
1912 const gchar *format = gst_structure_get_string (structure, "format");
1913 if (format) {
1914 if (!g_ascii_strcasecmp (format, "bggr"))
1915 fourcc = V4L2_PIX_FMT_SBGGR8;
1916 else if (!g_ascii_strcasecmp (format, "gbrg"))
1917 fourcc = V4L2_PIX_FMT_SGBRG8;
1918 else if (!g_ascii_strcasecmp (format, "grbg"))
1919 fourcc = V4L2_PIX_FMT_SGRBG8;
1920 else if (!g_ascii_strcasecmp (format, "rggb"))
1921 fourcc = V4L2_PIX_FMT_SRGGB8;
1922 }
1923 } else if (g_str_equal (mimetype, "video/x-sonix")) {
1924 fourcc = V4L2_PIX_FMT_SN9C10X;
1925 } else if (g_str_equal (mimetype, "video/x-pwc1")) {
1926 fourcc = V4L2_PIX_FMT_PWC1;
1927 } else if (g_str_equal (mimetype, "video/x-pwc2")) {
1928 fourcc = V4L2_PIX_FMT_PWC2;
1929 }
1930 }
1931
1932
1933 /* Prefer the non-contiguous if supported */
1934 v4l2object->prefered_non_contiguous = TRUE;
1935
1936 if (fourcc_nc)
1937 fmt = gst_v4l2_object_get_format_from_fourcc (v4l2object, fourcc_nc);
1938 else if (fourcc == 0)
1939 goto unhandled_format;
1940
1941 if (fmt == NULL) {
1942 fmt = gst_v4l2_object_get_format_from_fourcc (v4l2object, fourcc);
1943 v4l2object->prefered_non_contiguous = FALSE;
1944 }
1945
1946 if (fmt == NULL)
1947 goto unsupported_format;
1948
1949 *format = fmt;
1950
1951 return TRUE;
1952
1953 /* ERRORS */
1954 invalid_format:
1955 {
1956 GST_DEBUG_OBJECT (v4l2object, "invalid format");
1957 return FALSE;
1958 }
1959 unhandled_format:
1960 {
1961 GST_DEBUG_OBJECT (v4l2object, "unhandled format");
1962 return FALSE;
1963 }
1964 unsupported_format:
1965 {
1966 GST_DEBUG_OBJECT (v4l2object, "unsupported format");
1967 return FALSE;
1968 }
1969 }
1970
1971 static gboolean
1972 gst_v4l2_object_get_nearest_size (GstV4l2Object * v4l2object,
1973 guint32 pixelformat, gint * width, gint * height);
1974
1975 static void
1976 gst_v4l2_object_add_aspect_ratio (GstV4l2Object * v4l2object, GstStructure * s)
1977 {
1978 if (v4l2object->keep_aspect && v4l2object->par)
1979 gst_structure_set_value (s, "pixel-aspect-ratio", v4l2object->par);
1980 }
1981
1982 /* returns TRUE if the value was changed in place, otherwise FALSE */
1983 static gboolean
1984 gst_v4l2src_value_simplify (GValue * val)
1985 {
1986 /* simplify list of one value to one value */
1987 if (GST_VALUE_HOLDS_LIST (val) && gst_value_list_get_size (val) == 1) {
1988 const GValue *list_val;
1989 GValue new_val = G_VALUE_INIT;
1990
1991 list_val = gst_value_list_get_value (val, 0);
1992 g_value_init (&new_val, G_VALUE_TYPE (list_val));
1993 g_value_copy (list_val, &new_val);
1994 g_value_unset (val);
1995 *val = new_val;
1996 return TRUE;
1997 }
1998
1999 return FALSE;
2000 }
2001
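/* Map a v4l2_field value reported by the driver to a GstVideoInterlaceMode.
* V4L2_FIELD_ANY is never a valid answer from a driver and is flagged as a
* driver bug; unrecognized values are rejected. */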
2002 static gboolean
2003 gst_v4l2_object_get_interlace_mode (enum v4l2_field field,
2004 GstVideoInterlaceMode * interlace_mode)
2005 {
2006 switch (field) {
2007 case V4L2_FIELD_ANY:
2008 GST_ERROR
2009 ("Driver bug detected - check driver with v4l2-compliance from http://git.linuxtv.org/v4l-utils.git\n");
2010 return FALSE;
2011 case V4L2_FIELD_NONE:
2012 *interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
2013 return TRUE;
2014 case V4L2_FIELD_INTERLACED:
2015 case V4L2_FIELD_INTERLACED_TB:
2016 case V4L2_FIELD_INTERLACED_BT:
2017 *interlace_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
2018 return TRUE;
2019 case V4L2_FIELD_ALTERNATE:
2020 *interlace_mode = GST_VIDEO_INTERLACE_MODE_ALTERNATE;
2021 return TRUE;
2022 default:
2023 GST_ERROR ("Unknown enum v4l2_field %d", field);
2024 return FALSE;
2025 }
2026 }
2027
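/* Translate the colorspace, quantization, ycbcr_enc and xfer_func carried in
* @fmt into a GstVideoColorimetry. The colorspace selects the defaults, which
* are then overridden by any non-default quantization, matrix and transfer
* values reported by the driver. */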
2028 static gboolean
2029 gst_v4l2_object_get_colorspace (GstV4l2Object * v4l2object,
2030 struct v4l2_format *fmt, GstVideoColorimetry * cinfo)
2031 {
2032 gboolean is_rgb =
2033 gst_v4l2_object_v4l2fourcc_is_rgb (fmt->fmt.pix.pixelformat);
2034 enum v4l2_colorspace colorspace;
2035 enum v4l2_quantization range;
2036 enum v4l2_ycbcr_encoding matrix;
2037 enum v4l2_xfer_func transfer;
2038 gboolean ret = TRUE;
2039
2040 if (V4L2_TYPE_IS_MULTIPLANAR (fmt->type)) {
2041 colorspace = fmt->fmt.pix_mp.colorspace;
2042 range = fmt->fmt.pix_mp.quantization;
2043 matrix = fmt->fmt.pix_mp.ycbcr_enc;
2044 transfer = fmt->fmt.pix_mp.xfer_func;
2045 } else {
2046 colorspace = fmt->fmt.pix.colorspace;
2047 range = fmt->fmt.pix.quantization;
2048 matrix = fmt->fmt.pix.ycbcr_enc;
2049 transfer = fmt->fmt.pix.xfer_func;
2050 }
2051
2052 /* First step: set the colorimetry defaults for each colorspace */
2053 switch (colorspace) {
2054 case V4L2_COLORSPACE_SMPTE170M:
2055 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2056 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
2057 cinfo->transfer = GST_VIDEO_TRANSFER_BT601;
2058 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_SMPTE170M;
2059 break;
2060 case V4L2_COLORSPACE_REC709:
2061 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2062 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT709;
2063 cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
2064 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT709;
2065 break;
2066 case V4L2_COLORSPACE_SRGB:
2067 case V4L2_COLORSPACE_JPEG:
2068 cinfo->range = GST_VIDEO_COLOR_RANGE_0_255;
2069 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
2070 cinfo->transfer = GST_VIDEO_TRANSFER_SRGB;
2071 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT709;
2072 break;
2073 case V4L2_COLORSPACE_OPRGB:
2074 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2075 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
2076 cinfo->transfer = GST_VIDEO_TRANSFER_ADOBERGB;
2077 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_ADOBERGB;
2078 break;
2079 case V4L2_COLORSPACE_BT2020:
2080 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2081 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT2020;
2082 cinfo->transfer = GST_VIDEO_TRANSFER_BT2020_12;
2083 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT2020;
2084 break;
2085 case V4L2_COLORSPACE_SMPTE240M:
2086 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2087 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_SMPTE240M;
2088 cinfo->transfer = GST_VIDEO_TRANSFER_SMPTE240M;
2089 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_SMPTE240M;
2090 break;
2091 case V4L2_COLORSPACE_470_SYSTEM_M:
2092 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2093 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
2094 cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
2095 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT470M;
2096 break;
2097 case V4L2_COLORSPACE_470_SYSTEM_BG:
2098 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2099 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
2100 cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
2101 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT470BG;
2102 break;
2103 case V4L2_COLORSPACE_RAW:
2104 /* Explicitly unknown */
2105 cinfo->range = GST_VIDEO_COLOR_RANGE_UNKNOWN;
2106 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_UNKNOWN;
2107 cinfo->transfer = GST_VIDEO_TRANSFER_UNKNOWN;
2108 cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_UNKNOWN;
2109 break;
2110 default:
2111 GST_DEBUG ("Unknown enum v4l2_colorspace %d", colorspace);
2112 ret = FALSE;
2113 break;
2114 }
2115
2116 if (!ret)
2117 goto done;
2118
2119 /* Second step, apply any custom variation */
2120 switch (range) {
2121 case V4L2_QUANTIZATION_FULL_RANGE:
2122 cinfo->range = GST_VIDEO_COLOR_RANGE_0_255;
2123 break;
2124 case V4L2_QUANTIZATION_LIM_RANGE:
2125 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2126 break;
2127 case V4L2_QUANTIZATION_DEFAULT:
2128 /* replicated V4L2_MAP_QUANTIZATION_DEFAULT macro behavior */
2129 if (is_rgb && colorspace == V4L2_COLORSPACE_BT2020)
2130 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2131 else if (is_rgb || matrix == V4L2_YCBCR_ENC_XV601
2132 || matrix == V4L2_YCBCR_ENC_XV709
2133 || colorspace == V4L2_COLORSPACE_JPEG)
2134 cinfo->range = GST_VIDEO_COLOR_RANGE_0_255;
2135 else
2136 cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
2137 break;
2138 default:
2139 GST_WARNING ("Unknown enum v4l2_quantization value %d", range);
2140 cinfo->range = GST_VIDEO_COLOR_RANGE_UNKNOWN;
2141 break;
2142 }
2143
2144 switch (matrix) {
2145 case V4L2_YCBCR_ENC_XV601:
2146 case V4L2_YCBCR_ENC_SYCC:
2147 GST_FIXME ("XV601 and SYCC not defined, assuming 601");
2148 /* fallthrough */
2149 case V4L2_YCBCR_ENC_601:
2150 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
2151 break;
2152 case V4L2_YCBCR_ENC_XV709:
2153 GST_FIXME ("XV709 not defined, assuming 709");
2154 /* fallthrough */
2155 case V4L2_YCBCR_ENC_709:
2156 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT709;
2157 break;
2158 case V4L2_YCBCR_ENC_BT2020_CONST_LUM:
2159 GST_FIXME ("BT2020 with constant luma is not defined, assuming BT2020");
2160 /* fallthrough */
2161 case V4L2_YCBCR_ENC_BT2020:
2162 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT2020;
2163 break;
2164 case V4L2_YCBCR_ENC_SMPTE240M:
2165 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_SMPTE240M;
2166 break;
2167 case V4L2_YCBCR_ENC_DEFAULT:
2168 /* nothing, just use defaults for colorspace */
2169 break;
2170 default:
2171 GST_WARNING ("Unknown enum v4l2_ycbcr_encoding value %d", matrix);
2172 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_UNKNOWN;
2173 break;
2174 }
2175
2176 /* Set the identity matrix for R'G'B' formats to avoid confusion. This is
2177 * only cosmetic though, as it is now properly ignored by the video info
2178 * API and by videoconvert. */
2179 if (is_rgb)
2180 cinfo->matrix = GST_VIDEO_COLOR_MATRIX_RGB;
2181
2182 switch (transfer) {
2183 case V4L2_XFER_FUNC_709:
2184 if (colorspace == V4L2_COLORSPACE_BT2020 && fmt->fmt.pix.height >= 2160)
2185 cinfo->transfer = GST_VIDEO_TRANSFER_BT2020_12;
2186 else if (colorspace == V4L2_COLORSPACE_SMPTE170M)
2187 cinfo->transfer = GST_VIDEO_TRANSFER_BT601;
2188 else
2189 cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
2190
2191 if (v4l2object->transfer)
2192 cinfo->transfer = v4l2object->transfer;
2193 break;
2194 case V4L2_XFER_FUNC_SRGB:
2195 cinfo->transfer = GST_VIDEO_TRANSFER_SRGB;
2196 break;
2197 case V4L2_XFER_FUNC_OPRGB:
2198 cinfo->transfer = GST_VIDEO_TRANSFER_ADOBERGB;
2199 break;
2200 case V4L2_XFER_FUNC_SMPTE240M:
2201 cinfo->transfer = GST_VIDEO_TRANSFER_SMPTE240M;
2202 break;
2203 case V4L2_XFER_FUNC_NONE:
2204 cinfo->transfer = GST_VIDEO_TRANSFER_GAMMA10;
2205 break;
2206 case V4L2_XFER_FUNC_SMPTE2084:
2207 cinfo->transfer = GST_VIDEO_TRANSFER_SMPTE2084;
2208 break;
2209 case V4L2_XFER_FUNC_DEFAULT:
2210 /* nothing, just use defaults for colorspace */
2211 break;
2212 default:
2213 GST_WARNING ("Unknown enum v4l2_xfer_func value %d", transfer);
2214 cinfo->transfer = GST_VIDEO_TRANSFER_UNKNOWN;
2215 break;
2216 }
2217
2218 done:
2219 return ret;
2220 }
2221
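/* Query VIDIOC_G_PARM and, for capture types, store the inverted
* timeperframe fraction as the framerate of @info. */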
2222 static gboolean
2223 gst_v4l2_object_get_streamparm (GstV4l2Object * v4l2object, GstVideoInfo * info)
2224 {
2225 struct v4l2_streamparm streamparm;
2226 memset (&streamparm, 0x00, sizeof (struct v4l2_streamparm));
2227 streamparm.type = v4l2object->type;
2228 if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_PARM, &streamparm) < 0) {
2229 GST_WARNING_OBJECT (v4l2object->dbg_obj, "VIDIOC_G_PARM failed");
2230 return FALSE;
2231 }
2232 if ((streamparm.parm.capture.timeperframe.numerator != 0)
2233 && (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE
2234 || v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)) {
2235 GST_VIDEO_INFO_FPS_N (info) =
2236 streamparm.parm.capture.timeperframe.denominator;
2237 GST_VIDEO_INFO_FPS_D (info) =
2238 streamparm.parm.capture.timeperframe.numerator;
2239 }
2240 return TRUE;
2241 }
2242
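/* Run VIDIOC_TRY_FMT on a copy of @try_fmt and copy the adjusted format
* back. When the driver does not implement TRY_FMT (ENOTTY) and the object
* is not yet active, VIDIOC_S_FMT is used for probing instead. */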
2243 static int
2244 gst_v4l2_object_try_fmt (GstV4l2Object * v4l2object,
2245 struct v4l2_format *try_fmt)
2246 {
2247 int fd = v4l2object->video_fd;
2248 struct v4l2_format fmt;
2249 int r;
2250
2251 memcpy (&fmt, try_fmt, sizeof (fmt));
2252 r = v4l2object->ioctl (fd, VIDIOC_TRY_FMT, &fmt);
2253
2254 if (r < 0 && errno == ENOTTY) {
2255 /* The driver might not implement TRY_FMT, in which case we will try
2256 S_FMT to probe */
2257 if (GST_V4L2_IS_ACTIVE (v4l2object))
2258 goto error;
2259
2260 memcpy (&fmt, try_fmt, sizeof (fmt));
2261 r = v4l2object->ioctl (fd, VIDIOC_S_FMT, &fmt);
2262 }
2263 memcpy (try_fmt, &fmt, sizeof (fmt));
2264
2265 return r;
2266
2267 error:
2268 memcpy (try_fmt, &fmt, sizeof (fmt));
2269 GST_WARNING_OBJECT (v4l2object->dbg_obj,
2270 "Unable to try format: %s", g_strerror (errno));
2271 return r;
2272 }
2273
2274
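/* Fill the "interlace-mode" field of @s for the given size and format:
* progressive only when the device is known to never be interlaced,
* otherwise every mode (progressive, interleaved, alternate) accepted by
* TRY_FMT, or all of them when skip_try_fmt_probes is set. */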
2275 static void
2276 gst_v4l2_object_add_interlace_mode (GstV4l2Object * v4l2object,
2277 GstStructure * s, guint32 width, guint32 height, guint32 pixelformat)
2278 {
2279 struct v4l2_format fmt;
2280 GValue interlace_formats = { 0, };
2281 enum v4l2_field formats[] = { V4L2_FIELD_NONE,
2282 V4L2_FIELD_INTERLACED, V4L2_FIELD_ALTERNATE
2283 };
2284 gsize i;
2285 GstVideoInterlaceMode interlace_mode, prev = -1;
2286
2287 if (!g_str_equal (gst_structure_get_name (s), "video/x-raw"))
2288 return;
2289
2290 if (v4l2object->never_interlaced) {
2291 gst_structure_set (s, "interlace-mode", G_TYPE_STRING, "progressive", NULL);
2292 return;
2293 }
2294
2295 g_value_init (&interlace_formats, GST_TYPE_LIST);
2296
2297 /* Try thrice - once for NONE, once for INTERLACED and once for ALTERNATE. */
2298 for (i = 0; i < G_N_ELEMENTS (formats); i++) {
2299 memset (&fmt, 0, sizeof (fmt));
2300 fmt.type = v4l2object->type;
2301 fmt.fmt.pix.width = width;
2302 fmt.fmt.pix.height = height;
2303 fmt.fmt.pix.pixelformat = pixelformat;
2304 fmt.fmt.pix.field = formats[i];
2305
2306 if (fmt.fmt.pix.field == V4L2_FIELD_ALTERNATE)
2307 fmt.fmt.pix.height /= 2;
2308
2309 /* If skip_try_fmt_probes is set, it is up to the caller to filter these
2310 * modes against the formats requested by the peer.
2311 * For this negotiation to work with 'alternate' we need the caps to contain
2312 * the feature so we have an intersection with downstream caps.
2313 */
2314 if (!v4l2object->skip_try_fmt_probes
2315 && gst_v4l2_object_try_fmt (v4l2object, &fmt) != 0)
2316 continue;
2317
2318 if (gst_v4l2_object_get_interlace_mode (fmt.fmt.pix.field, &interlace_mode)
2319 && prev != interlace_mode) {
2320 GValue interlace_enum = { 0, };
2321 const gchar *mode_string;
2322 g_value_init (&interlace_enum, G_TYPE_STRING);
2323 mode_string = gst_video_interlace_mode_to_string (interlace_mode);
2324 g_value_set_string (&interlace_enum, mode_string);
2325 gst_value_list_append_and_take_value (&interlace_formats,
2326 &interlace_enum);
2327 prev = interlace_mode;
2328 }
2329 }
2330
2331 if (gst_v4l2src_value_simplify (&interlace_formats)
2332 || gst_value_list_get_size (&interlace_formats) > 0)
2333 gst_structure_take_value (s, "interlace-mode", &interlace_formats);
2334 else
2335 GST_WARNING_OBJECT (v4l2object->dbg_obj, "Failed to determine interlace mode");
2336
2337 return;
2338 }
2339
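/* Append the string form of @cinfo to the GstValue list, skipping
* duplicates. */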
2340 static void
2341 gst_v4l2_object_fill_colorimetry_list (GValue * list,
2342 GstVideoColorimetry * cinfo)
2343 {
2344 GValue colorimetry = G_VALUE_INIT;
2345 guint size;
2346 guint i;
2347 gboolean found = FALSE;
2348
2349 g_value_init (&colorimetry, G_TYPE_STRING);
2350 g_value_take_string (&colorimetry, gst_video_colorimetry_to_string (cinfo));
2351
2352 /* only insert if no duplicate */
2353 size = gst_value_list_get_size (list);
2354 for (i = 0; i < size; i++) {
2355 const GValue *tmp;
2356
2357 tmp = gst_value_list_get_value (list, i);
2358 if (gst_value_compare (&colorimetry, tmp) == GST_VALUE_EQUAL) {
2359 found = TRUE;
2360 break;
2361 }
2362 }
2363
2364 if (!found)
2365 gst_value_list_append_and_take_value (list, &colorimetry);
2366 else
2367 g_value_unset (&colorimetry);
2368 }
2369
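/* Build the "colorimetry" field of @s: first the device default returned by
* TRY_FMT, then every other colorspace the driver accepts unchanged. Only the
* colorspace is iterated (not range/matrix/transfer) to keep the number of
* ioctls low on slow devices. */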
2370 static void
2371 gst_v4l2_object_add_colorspace (GstV4l2Object * v4l2object, GstStructure * s,
2372 guint32 width, guint32 height, guint32 pixelformat)
2373 {
2374 struct v4l2_format fmt;
2375 GValue list = G_VALUE_INIT;
2376 GstVideoColorimetry cinfo;
2377 enum v4l2_colorspace req_cspace;
2378
2379 memset (&fmt, 0, sizeof (fmt));
2380 fmt.type = v4l2object->type;
2381 fmt.fmt.pix.width = width;
2382 fmt.fmt.pix.height = height;
2383 fmt.fmt.pix.pixelformat = pixelformat;
2384
2385 g_value_init (&list, GST_TYPE_LIST);
2386
2387 /* step 1: get device default colorspace and insert it first as
2388 * it should be the preferred one */
2389 if (gst_v4l2_object_try_fmt (v4l2object, &fmt) == 0) {
2390 if (gst_v4l2_object_get_colorspace (v4l2object, &fmt, &cinfo))
2391 gst_v4l2_object_fill_colorimetry_list (&list, &cinfo);
2392 }
2393
2394 /* step 2: probe all colorspaces other than the default.
2395 * We don't probe every colorspace, range, matrix and transfer combination
2396 * to avoid ioctl flooding, which could greatly increase initialization time
2397 * with low-speed devices (UVC...) */
2398 for (req_cspace = V4L2_COLORSPACE_SMPTE170M;
2399 req_cspace <= V4L2_COLORSPACE_RAW; req_cspace++) {
2400 /* V4L2_COLORSPACE_BT878 is deprecated and shall not be used, so skip */
2401 if (req_cspace == V4L2_COLORSPACE_BT878)
2402 continue;
2403
2404 if (V4L2_TYPE_IS_MULTIPLANAR (v4l2object->type))
2405 fmt.fmt.pix_mp.colorspace = req_cspace;
2406 else
2407 fmt.fmt.pix.colorspace = req_cspace;
2408
2409 if (gst_v4l2_object_try_fmt (v4l2object, &fmt) == 0) {
2410 enum v4l2_colorspace colorspace;
2411
2412 if (V4L2_TYPE_IS_MULTIPLANAR (v4l2object->type))
2413 colorspace = fmt.fmt.pix_mp.colorspace;
2414 else
2415 colorspace = fmt.fmt.pix.colorspace;
2416
2417 if (colorspace == req_cspace) {
2418 if (gst_v4l2_object_get_colorspace (v4l2object, &fmt, &cinfo))
2419 gst_v4l2_object_fill_colorimetry_list (&list, &cinfo);
2420 }
2421 }
2422 }
2423
2424 if (gst_value_list_get_size (&list) > 0)
2425 gst_structure_take_value (s, "colorimetry", &list);
2426 else
2427 g_value_unset (&list);
2428
2429 return;
2430 }
2431
2432 /* The frame interval enumeration code first appeared in Linux 2.6.19. */
2433 static GstStructure *
2434 gst_v4l2_object_probe_caps_for_format_and_size (GstV4l2Object * v4l2object,
2435 guint32 pixelformat,
2436 guint32 width, guint32 height, const GstStructure * template)
2437 {
2438 gint fd = v4l2object->video_fd;
2439 struct v4l2_frmivalenum ival;
2440 guint32 num, denom;
2441 GstStructure *s;
2442 GValue rates = { 0, };
2443
2444 memset (&ival, 0, sizeof (struct v4l2_frmivalenum));
2445 ival.index = 0;
2446 ival.pixel_format = pixelformat;
2447 ival.width = width;
2448 ival.height = height;
2449
2450 GST_LOG_OBJECT (v4l2object->dbg_obj,
2451 "get frame interval for %ux%u, %" GST_FOURCC_FORMAT, width, height,
2452 GST_FOURCC_ARGS (pixelformat));
2453
2454 /* keep in mind that v4l2 gives us frame intervals (durations); we invert the
2455 * fraction to get framerate */
2456 if (v4l2object->ioctl (fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) < 0)
2457 goto enum_frameintervals_failed;
2458
2459 if (ival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
2460 GValue rate = { 0, };
2461
2462 g_value_init (&rates, GST_TYPE_LIST);
2463 g_value_init (&rate, GST_TYPE_FRACTION);
2464
2465 do {
2466 num = ival.discrete.numerator;
2467 denom = ival.discrete.denominator;
2468
2469 if (num > G_MAXINT || denom > G_MAXINT) {
2470 /* let us hope we don't get here... */
2471 num >>= 1;
2472 denom >>= 1;
2473 }
2474
2475 GST_LOG_OBJECT (v4l2object->dbg_obj, "adding discrete framerate: %d/%d",
2476 denom, num);
2477
2478 /* swap to get the framerate */
2479 gst_value_set_fraction (&rate, denom, num);
2480 gst_value_list_append_value (&rates, &rate);
2481
2482 ival.index++;
2483 } while (v4l2object->ioctl (fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) >= 0);
2484 } else if (ival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
2485 GValue min = { 0, };
2486 GValue step = { 0, };
2487 GValue max = { 0, };
2488 gboolean added = FALSE;
2489 guint32 minnum, mindenom;
2490 guint32 maxnum, maxdenom;
2491
2492 g_value_init (&rates, GST_TYPE_LIST);
2493
2494 g_value_init (&min, GST_TYPE_FRACTION);
2495 g_value_init (&step, GST_TYPE_FRACTION);
2496 g_value_init (&max, GST_TYPE_FRACTION);
2497
2498 /* get the min */
2499 minnum = ival.stepwise.min.numerator;
2500 mindenom = ival.stepwise.min.denominator;
2501 if (minnum > G_MAXINT || mindenom > G_MAXINT) {
2502 minnum >>= 1;
2503 mindenom >>= 1;
2504 }
2505 GST_LOG_OBJECT (v4l2object->dbg_obj, "stepwise min frame interval: %d/%d",
2506 minnum, mindenom);
2507 gst_value_set_fraction (&min, minnum, mindenom);
2508
2509 /* get the max */
2510 maxnum = ival.stepwise.max.numerator;
2511 maxdenom = ival.stepwise.max.denominator;
2512 if (maxnum > G_MAXINT || maxdenom > G_MAXINT) {
2513 maxnum >>= 1;
2514 maxdenom >>= 1;
2515 }
2516
2517 GST_LOG_OBJECT (v4l2object->dbg_obj, "stepwise max frame interval: %d/%d",
2518 maxnum, maxdenom);
2519 gst_value_set_fraction (&max, maxnum, maxdenom);
2520
2521 /* get the step */
2522 num = ival.stepwise.step.numerator;
2523 denom = ival.stepwise.step.denominator;
2524 if (num > G_MAXINT || denom > G_MAXINT) {
2525 num >>= 1;
2526 denom >>= 1;
2527 }
2528
2529 if (num == 0 || denom == 0) {
2530 /* in this case we have a wrong fraction or no step, set the step to max
2531 * so that we only add the min value in the loop below */
2532 num = maxnum;
2533 denom = maxdenom;
2534 }
2535
2536 /* since we only have gst_value_fraction_subtract and not add, negate the
2537 * numerator */
2538 GST_LOG_OBJECT (v4l2object->dbg_obj, "stepwise step frame interval: %d/%d",
2539 num, denom);
2540 gst_value_set_fraction (&step, -num, denom);
2541
2542 while (gst_value_compare (&min, &max) != GST_VALUE_GREATER_THAN) {
2543 GValue rate = { 0, };
2544
2545 num = gst_value_get_fraction_numerator (&min);
2546 denom = gst_value_get_fraction_denominator (&min);
2547 GST_LOG_OBJECT (v4l2object->dbg_obj, "adding stepwise framerate: %d/%d",
2548 denom, num);
2549
2550 /* invert to get the framerate */
2551 g_value_init (&rate, GST_TYPE_FRACTION);
2552 gst_value_set_fraction (&rate, denom, num);
2553 gst_value_list_append_value (&rates, &rate);
2554 added = TRUE;
2555
2556 /* we're actually adding because step was negated above. This is because
2557 * there is no _add function... */
2558 if (!gst_value_fraction_subtract (&min, &min, &step)) {
2559 GST_WARNING_OBJECT (v4l2object->dbg_obj, "could not step fraction!");
2560 break;
2561 }
2562 }
2563 if (!added) {
2564 /* no range was added, leave the default range from the template */
2565 GST_WARNING_OBJECT (v4l2object->dbg_obj,
2566 "no range added, leaving default");
2567 g_value_unset (&rates);
2568 }
2569 } else if (ival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) {
2570 guint32 maxnum, maxdenom;
2571
2572 g_value_init (&rates, GST_TYPE_FRACTION_RANGE);
2573
2574 num = ival.stepwise.min.numerator;
2575 denom = ival.stepwise.min.denominator;
2576 if (num > G_MAXINT || denom > G_MAXINT) {
2577 num >>= 1;
2578 denom >>= 1;
2579 }
2580
2581 maxnum = ival.stepwise.max.numerator;
2582 maxdenom = ival.stepwise.max.denominator;
2583 if (maxnum > G_MAXINT || maxdenom > G_MAXINT) {
2584 maxnum >>= 1;
2585 maxdenom >>= 1;
2586 }
2587
2588 GST_LOG_OBJECT (v4l2object->dbg_obj,
2589 "continuous frame interval %d/%d to %d/%d", maxdenom, maxnum, denom,
2590 num);
2591
2592 gst_value_set_fraction_range_full (&rates, maxdenom, maxnum, denom, num);
2593 } else {
2594 goto unknown_type;
2595 }
2596
2597 return_data:
2598 s = gst_structure_copy (template);
2599 gst_structure_set (s, "width", G_TYPE_INT, (gint) width,
2600 "height", G_TYPE_INT, (gint) height, NULL);
2601
2602 gst_v4l2_object_add_aspect_ratio (v4l2object, s);
2603
2604 if (!v4l2object->skip_try_fmt_probes) {
2605 gst_v4l2_object_add_interlace_mode (v4l2object, s, width, height,
2606 pixelformat);
2607 gst_v4l2_object_add_colorspace (v4l2object, s, width, height, pixelformat);
2608 }
2609
2610 if (G_IS_VALUE (&rates)) {
2611 gst_v4l2src_value_simplify (&rates);
2612 /* only change the framerate on the template when we have a valid probed new
2613 * value */
2614 gst_structure_take_value (s, "framerate", &rates);
2615 } else if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
2616 v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
2617 gst_structure_set (s, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT,
2618 1, NULL);
2619 }
2620 return s;
2621
2622 /* ERRORS */
2623 enum_frameintervals_failed:
2624 {
2625 GST_DEBUG_OBJECT (v4l2object->dbg_obj,
2626 "Unable to enumerate intervals for %" GST_FOURCC_FORMAT "@%ux%u",
2627 GST_FOURCC_ARGS (pixelformat), width, height);
2628 goto return_data;
2629 }
2630 unknown_type:
2631 {
2632 /* I don't see how this is actually an error, we ignore the format then */
2633 GST_WARNING_OBJECT (v4l2object->dbg_obj,
2634 "Unknown frame interval type at %" GST_FOURCC_FORMAT "@%ux%u: %u",
2635 GST_FOURCC_ARGS (pixelformat), width, height, ival.type);
2636 return NULL;
2637 }
2638 }
2639
2640 static gint
2641 sort_by_frame_size (GstStructure * s1, GstStructure * s2)
2642 {
2643 int w1, h1, w2, h2;
2644
2645 gst_structure_get_int (s1, "width", &w1);
2646 gst_structure_get_int (s1, "height", &h1);
2647 gst_structure_get_int (s2, "width", &w2);
2648 gst_structure_get_int (s2, "height", &h2);
2649
2650 /* I think it's safe to assume that this won't overflow for a while */
2651 return ((w2 * h2) - (w1 * h1));
2652 }
2653
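/* Append @s to @caps, splitting out an extra structure carrying the
* format:Interlaced caps feature whenever the interlace-mode includes
* "alternate". */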
2654 static void
2655 check_alternate_and_append_struct (GstCaps * caps, GstStructure * s)
2656 {
2657 const GValue *mode;
2658
2659 mode = gst_structure_get_value (s, "interlace-mode");
2660 if (!mode)
2661 goto done;
2662
2663 if (G_VALUE_HOLDS_STRING (mode)) {
2664 /* Add the INTERLACED feature if the mode is alternate */
2665 if (!g_strcmp0 (gst_structure_get_string (s, "interlace-mode"),
2666 "alternate")) {
2667 GstCapsFeatures *feat;
2668
2669 feat = gst_caps_features_new (GST_CAPS_FEATURE_FORMAT_INTERLACED, NULL);
2670 gst_caps_set_features (caps, gst_caps_get_size (caps) - 1, feat);
2671 }
2672 } else if (GST_VALUE_HOLDS_LIST (mode)) {
2673 /* If the mode is a list containing alternate, remove it from the list and add a
2674 * variant with interlace-mode=alternate and the INTERLACED feature. */
2675 GValue alter = G_VALUE_INIT;
2676 GValue inter = G_VALUE_INIT;
2677
2678 g_value_init (&alter, G_TYPE_STRING);
2679 g_value_set_string (&alter, "alternate");
2680
2681 /* Cannot use gst_value_can_intersect() as it requires args to have the
2682 * same type. */
2683 if (gst_value_intersect (&inter, mode, &alter)) {
2684 GValue minus_alter = G_VALUE_INIT;
2685 GstStructure *copy;
2686
2687 gst_value_subtract (&minus_alter, mode, &alter);
2688 gst_structure_take_value (s, "interlace-mode", &minus_alter);
2689
2690 copy = gst_structure_copy (s);
2691 gst_structure_take_value (copy, "interlace-mode", &inter);
2692 gst_caps_append_structure_full (caps, copy,
2693 gst_caps_features_new (GST_CAPS_FEATURE_FORMAT_INTERLACED, NULL));
2694 }
2695 g_value_unset (&alter);
2696 }
2697
2698 done:
2699 gst_caps_append_structure (caps, s);
2700 }
2701
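/* Final fixup of a probed structure before it is appended to @caps: codec
* formats that require parsing are marked parsed=true on output queues, and
* an ARGB/BGRA variant is added for the ambiguous RGB32/BGR32 formats when
* the device exposes an alpha component. */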
2702 static void
2703 gst_v4l2_object_update_and_append (GstV4l2Object * v4l2object,
2704 guint32 format, GstCaps * caps, GstStructure * s)
2705 {
2706 GstStructure *alt_s = NULL;
2707
2708 /* Encoded streams on the output queue need to be parsed */
2709 if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT ||
2710 v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
2711 gint i = 0;
2712
2713 for (; i < GST_V4L2_FORMAT_COUNT; i++) {
2714 if (format == gst_v4l2_formats[i].format &&
2715 gst_v4l2_formats[i].flags & GST_V4L2_CODEC &&
2716 !(gst_v4l2_formats[i].flags & GST_V4L2_NO_PARSE)) {
2717 gst_structure_set (s, "parsed", G_TYPE_BOOLEAN, TRUE, NULL);
2718 break;
2719 }
2720 }
2721 }
2722
2723 if (v4l2object->has_alpha_component &&
2724 (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
2725 v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)) {
2726 switch (format) {
2727 case V4L2_PIX_FMT_RGB32:
2728 alt_s = gst_structure_copy (s);
2729 gst_structure_set (alt_s, "format", G_TYPE_STRING, "ARGB", NULL);
2730 break;
2731 case V4L2_PIX_FMT_BGR32:
2732 alt_s = gst_structure_copy (s);
2733 gst_structure_set (alt_s, "format", G_TYPE_STRING, "BGRA", NULL);
2734 break;
2735 default:
2736 break;
2737 }
2738 }
2739
2740 check_alternate_and_append_struct (caps, s);
2741
2742 if (alt_s) {
2743 check_alternate_and_append_struct (caps, alt_s);
2744 }
2745 }
2746
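/* Enumerate the frame sizes (discrete, stepwise or continuous) supported for
* @pixelformat and turn them into caps based on @template, falling back to a
* probed min/max range when frame size enumeration is unavailable
* (Linux < 2.6.19) or returns nothing. */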
2747 static GstCaps *
2748 gst_v4l2_object_probe_caps_for_format (GstV4l2Object * v4l2object,
2749 guint32 pixelformat, const GstStructure * template)
2750 {
2751 GstCaps *ret = gst_caps_new_empty ();
2752 GstStructure *tmp;
2753 gint fd = v4l2object->video_fd;
2754 struct v4l2_frmsizeenum size;
2755 GList *results = NULL;
2756 guint32 w, h;
2757
2758 if (pixelformat == GST_MAKE_FOURCC ('M', 'P', 'E', 'G')) {
2759 gst_caps_append_structure (ret, gst_structure_copy (template));
2760 return ret;
2761 }
2762
2763 memset (&size, 0, sizeof (struct v4l2_frmsizeenum));
2764 size.index = 0;
2765 size.pixel_format = pixelformat;
2766
2767 GST_DEBUG_OBJECT (v4l2object->dbg_obj,
2768 "Enumerating frame sizes for %" GST_FOURCC_FORMAT,
2769 GST_FOURCC_ARGS (pixelformat));
2770
2771 if (v4l2object->ioctl (fd, VIDIOC_ENUM_FRAMESIZES, &size) < 0)
2772 goto enum_framesizes_failed;
2773
2774 if (size.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
2775 do {
2776 GST_LOG_OBJECT (v4l2object->dbg_obj, "got discrete frame size %dx%d",
2777 size.discrete.width, size.discrete.height);
2778
2779 w = MIN (size.discrete.width, G_MAXINT);
2780 h = MIN (size.discrete.height, G_MAXINT);
2781
2782 if (w && h) {
2783 tmp =
2784 gst_v4l2_object_probe_caps_for_format_and_size (v4l2object,
2785 pixelformat, w, h, template);
2786
2787 if (tmp)
2788 results = g_list_prepend (results, tmp);
2789 }
2790
2791 size.index++;
2792 } while (v4l2object->ioctl (fd, VIDIOC_ENUM_FRAMESIZES, &size) >= 0);
2793 GST_DEBUG_OBJECT (v4l2object->dbg_obj,
2794 "done iterating discrete frame sizes");
2795 } else if (size.type == V4L2_FRMSIZE_TYPE_STEPWISE) {
2796 guint32 maxw, maxh, step_w, step_h;
2797
2798 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "we have stepwise frame sizes:");
2799 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min width: %d",
2800 size.stepwise.min_width);
2801 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min height: %d",
2802 size.stepwise.min_height);
2803 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "max width: %d",
2804 size.stepwise.max_width);
2805 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min height: %d",
2806 size.stepwise.max_height);
2807 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "step width: %d",
2808 size.stepwise.step_width);
2809 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "step height: %d",
2810 size.stepwise.step_height);
2811
2812 w = MAX (size.stepwise.min_width, 1);
2813 h = MAX (size.stepwise.min_height, 1);
2814 maxw = MIN (size.stepwise.max_width, G_MAXINT);
2815 maxh = MIN (size.stepwise.max_height, G_MAXINT);
2816
2817 step_w = MAX (size.stepwise.step_width, 1);
2818 step_h = MAX (size.stepwise.step_height, 1);
2819
2820 /* FIXME: check for sanity and that min/max are multiples of the steps */
2821
2822 /* we only query details for the max width/height since it's likely the
2823 * most restricted if there are any resolution-dependent restrictions */
2824 tmp = gst_v4l2_object_probe_caps_for_format_and_size (v4l2object,
2825 pixelformat, maxw, maxh, template);
2826
2827 if (tmp) {
2828 GValue step_range = G_VALUE_INIT;
2829
2830 g_value_init (&step_range, GST_TYPE_INT_RANGE);
2831 gst_value_set_int_range_step (&step_range, w, maxw, step_w);
2832 gst_structure_set_value (tmp, "width", &step_range);
2833
2834 gst_value_set_int_range_step (&step_range, h, maxh, step_h);
2835 gst_structure_take_value (tmp, "height", &step_range);
2836
2837 /* no point using the results list here, since there's only one struct */
2838 gst_v4l2_object_update_and_append (v4l2object, pixelformat, ret, tmp);
2839 }
2840 } else if (size.type == V4L2_FRMSIZE_TYPE_CONTINUOUS) {
2841 guint32 maxw, maxh;
2842
2843 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "we have continuous frame sizes:");
2844 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min width: %d",
2845 size.stepwise.min_width);
2846 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min height: %d",
2847 size.stepwise.min_height);
2848 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "max width: %d",
2849 size.stepwise.max_width);
2850 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min height: %d",
2851 size.stepwise.max_height);
2852
2853 w = MAX (size.stepwise.min_width, 1);
2854 h = MAX (size.stepwise.min_height, 1);
2855 maxw = MIN (size.stepwise.max_width, G_MAXINT);
2856 maxh = MIN (size.stepwise.max_height, G_MAXINT);
2857
2858 tmp =
2859 gst_v4l2_object_probe_caps_for_format_and_size (v4l2object, pixelformat,
2860 w, h, template);
2861 if (tmp) {
2862 gst_structure_set (tmp, "width", GST_TYPE_INT_RANGE, (gint) w,
2863 (gint) maxw, "height", GST_TYPE_INT_RANGE, (gint) h, (gint) maxh,
2864 NULL);
2865
2866 /* no point using the results list here, since there's only one struct */
2867 gst_v4l2_object_update_and_append (v4l2object, pixelformat, ret, tmp);
2868 }
2869 } else {
2870 goto unknown_type;
2871 }
2872
2873 /* we use an intermediary list to store and then sort the results of the
2874 * probing because we can't make any assumptions about the order in which
2875 * the driver will give us the sizes, but we want the final caps to contain
2876 * the results starting with the highest resolution and having the lowest
2877 * resolution last, since order in caps matters for things like fixation. */
2878 results = g_list_sort (results, (GCompareFunc) sort_by_frame_size);
2879 while (results != NULL) {
2880 gst_v4l2_object_update_and_append (v4l2object, pixelformat, ret,
2881 results->data);
2882 results = g_list_delete_link (results, results);
2883 }
2884
2885 if (gst_caps_is_empty (ret))
2886 goto enum_framesizes_no_results;
2887
2888 return ret;
2889
2890 /* ERRORS */
2891 enum_framesizes_failed:
2892 {
2893 /* I don't see how this is actually an error */
2894 GST_DEBUG_OBJECT (v4l2object->dbg_obj,
2895 "Failed to enumerate frame sizes for pixelformat %" GST_FOURCC_FORMAT
2896 " (%s)", GST_FOURCC_ARGS (pixelformat), g_strerror (errno));
2897 goto default_frame_sizes;
2898 }
2899 enum_framesizes_no_results:
2900 {
2901 /* it's possible that VIDIOC_ENUM_FRAMESIZES is defined but the driver in
2902 * question doesn't actually support it yet */
2903 GST_DEBUG_OBJECT (v4l2object->dbg_obj,
2904 "No results for pixelformat %" GST_FOURCC_FORMAT
2905 " enumerating frame sizes, trying fallback",
2906 GST_FOURCC_ARGS (pixelformat));
2907 goto default_frame_sizes;
2908 }
2909 unknown_type:
2910 {
2911 GST_WARNING_OBJECT (v4l2object->dbg_obj,
2912 "Unknown frame sizeenum type for pixelformat %" GST_FOURCC_FORMAT
2913 ": %u", GST_FOURCC_ARGS (pixelformat), size.type);
2914 goto default_frame_sizes;
2915 }
2916
2917 default_frame_sizes:
2918 {
2919 gint min_w, max_w, min_h, max_h, fix_num = 0, fix_denom = 0;
2920
2921 /* This code is for Linux < 2.6.19 */
2922 min_w = min_h = 1;
2923 max_w = max_h = GST_V4L2_MAX_SIZE;
2924 if (!gst_v4l2_object_get_nearest_size (v4l2object, pixelformat, &min_w,
2925 &min_h)) {
2926 GST_WARNING_OBJECT (v4l2object->dbg_obj,
2927 "Could not probe minimum capture size for pixelformat %"
2928 GST_FOURCC_FORMAT, GST_FOURCC_ARGS (pixelformat));
2929 }
2930 if (!gst_v4l2_object_get_nearest_size (v4l2object, pixelformat, &max_w,
2931 &max_h)) {
2932 GST_WARNING_OBJECT (v4l2object->dbg_obj,
2933 "Could not probe maximum capture size for pixelformat %"
2934 GST_FOURCC_FORMAT, GST_FOURCC_ARGS (pixelformat));
2935 }
2936 if (min_w == 0 || min_h == 0)
2937 min_w = min_h = 1;
2938 if (max_w == 0 || max_h == 0)
2939 max_w = max_h = GST_V4L2_MAX_SIZE;
2940
2941 /* Since we can't get framerate directly, try to use the current norm */
2942 if (v4l2object->tv_norm && v4l2object->norms) {
2943 GList *norms;
2944 GstTunerNorm *norm = NULL;
2945 GstTunerNorm *current =
2946 gst_v4l2_tuner_get_norm_by_std_id (v4l2object, v4l2object->tv_norm);
2947
2948 for (norms = v4l2object->norms; norms != NULL; norms = norms->next) {
2949 norm = (GstTunerNorm *) norms->data;
2950 if (!strcmp (norm->label, current->label))
2951 break;
2952 }
2953 /* If it's possible, set framerate to that (discrete) value */
2954 if (norm) {
2955 fix_num = gst_value_get_fraction_numerator (&norm->framerate);
2956 fix_denom = gst_value_get_fraction_denominator (&norm->framerate);
2957 }
2958 }
2959
2960 tmp = gst_structure_copy (template);
2961 if (fix_num) {
2962 gst_structure_set (tmp, "framerate", GST_TYPE_FRACTION, fix_num,
2963 fix_denom, NULL);
2964 } else if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
2965 v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
2966 /* if norm can't be used, copy the template framerate */
2967 gst_structure_set (tmp, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1,
2968 G_MAXINT, 1, NULL);
2969 }
2970
2971 if (min_w == max_w)
2972 gst_structure_set (tmp, "width", G_TYPE_INT, max_w, NULL);
2973 else
2974 gst_structure_set (tmp, "width", GST_TYPE_INT_RANGE, min_w, max_w, NULL);
2975
2976 if (min_h == max_h)
2977 gst_structure_set (tmp, "height", G_TYPE_INT, max_h, NULL);
2978 else
2979 gst_structure_set (tmp, "height", GST_TYPE_INT_RANGE, min_h, max_h, NULL);
2980
2981 gst_v4l2_object_add_aspect_ratio (v4l2object, tmp);
2982
2983 /* We could consider setting interlace mode from min and max. */
2984 gst_v4l2_object_add_interlace_mode (v4l2object, tmp, max_w, max_h,
2985 pixelformat);
2986
2987 if (!v4l2object->skip_try_fmt_probes) {
2988 gint probed_w, probed_h;
2989 if (v4l2object->info.width >= min_w && v4l2object->info.width <= max_w &&
2990 v4l2object->info.height >= min_h
2991 && v4l2object->info.height <= max_h) {
2992 probed_w = v4l2object->info.width;
2993 probed_h = v4l2object->info.height;
2994 } else {
2995 probed_w = max_w;
2996 probed_h = max_h;
2997 }
2998 /* We could also check the colorspace for the min size, in case it depends
2999 * on the size. But then, min and max might not be enough */
3000 gst_v4l2_object_add_colorspace (v4l2object, tmp, probed_w, probed_h,
3001 pixelformat);
3002 }
3003
3004 gst_v4l2_object_update_and_append (v4l2object, pixelformat, ret, tmp);
3005 return ret;
3006 }
3007 }
3008
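/* Let the driver adjust *width and *height to the nearest size it supports
* for @pixelformat, using TRY_FMT with V4L2_FIELD_ANY. Fails if the field
* returned by the driver cannot be mapped to a GstVideoInterlaceMode. */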
3009 static gboolean
3010 gst_v4l2_object_get_nearest_size (GstV4l2Object * v4l2object,
3011 guint32 pixelformat, gint * width, gint * height)
3012 {
3013 struct v4l2_format fmt;
3014 gboolean ret = FALSE;
3015 GstVideoInterlaceMode interlace_mode;
3016
3017 g_return_val_if_fail (width != NULL, FALSE);
3018 g_return_val_if_fail (height != NULL, FALSE);
3019
3020 GST_LOG_OBJECT (v4l2object->dbg_obj,
3021 "getting nearest size to %dx%d with format %" GST_FOURCC_FORMAT,
3022 *width, *height, GST_FOURCC_ARGS (pixelformat));
3023
3024 memset (&fmt, 0, sizeof (struct v4l2_format));
3025
3026 /* get size delimiters */
3027 memset (&fmt, 0, sizeof (fmt));
3028 fmt.type = v4l2object->type;
3029 fmt.fmt.pix.width = *width;
3030 fmt.fmt.pix.height = *height;
3031 fmt.fmt.pix.pixelformat = pixelformat;
3032 fmt.fmt.pix.field = V4L2_FIELD_ANY;
3033
3034 if (gst_v4l2_object_try_fmt (v4l2object, &fmt) < 0)
3035 goto error;
3036
3037 GST_LOG_OBJECT (v4l2object->dbg_obj,
3038 "got nearest size %dx%d", fmt.fmt.pix.width, fmt.fmt.pix.height);
3039
3040 *width = fmt.fmt.pix.width;
3041 *height = fmt.fmt.pix.height;
3042
3043 if (!gst_v4l2_object_get_interlace_mode (fmt.fmt.pix.field, &interlace_mode)) {
3044 GST_WARNING_OBJECT (v4l2object->dbg_obj,
3045 "Unsupported field type for %" GST_FOURCC_FORMAT "@%ux%u: %u",
3046 GST_FOURCC_ARGS (pixelformat), *width, *height, fmt.fmt.pix.field);
3047 goto error;
3048 }
3049
3050 ret = TRUE;
3051
3052 error:
3053 if (!ret) {
3054 GST_WARNING_OBJECT (v4l2object->dbg_obj,
3055 "Unable to try format: %s", g_strerror (errno));
3056 }
3057
3058 return ret;
3059 }
3060
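/* DMABuf export is not usable when libv4l2 emulates the format; otherwise a
* VIDIOC_EXPBUF call that is expected to fail is issued just to detect
* whether the driver implements buffer export at all (ENOTTY means it
* doesn't). */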
3061 static gboolean
3062 gst_v4l2_object_is_dmabuf_supported (GstV4l2Object * v4l2object)
3063 {
3064 gboolean ret = TRUE;
3065 struct v4l2_exportbuffer expbuf = {
3066 .type = v4l2object->type,
3067 .index = -1,
3068 .plane = -1,
3069 .flags = O_CLOEXEC | O_RDWR,
3070 };
3071
3072 if (v4l2object->fmtdesc &&
3073 v4l2object->fmtdesc->flags & V4L2_FMT_FLAG_EMULATED) {
3074 GST_WARNING_OBJECT (v4l2object->dbg_obj,
3075 "libv4l2 converter detected, disabling DMABuf");
3076 ret = FALSE;
3077 }
3078
3079 /* Expected to fail, but ENOTTY tells us that it is not implemented. */
3080 v4l2object->ioctl (v4l2object->video_fd, VIDIOC_EXPBUF, &expbuf);
3081 if (errno == ENOTTY)
3082 ret = FALSE;
3083
3084 return ret;
3085 }
3086
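/* Select the IO mode (RW, MMAP or DMABUF) from the requested mode and the
* device capabilities, create the buffer pool for @caps and mark the object
* active. */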
3087 static gboolean
3088 gst_v4l2_object_setup_pool (GstV4l2Object * v4l2object, GstCaps * caps)
3089 {
3090 GstV4l2IOMode mode;
3091
3092 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "initializing the %s system",
3093 V4L2_TYPE_IS_OUTPUT (v4l2object->type) ? "output" : "capture");
3094
3095 GST_V4L2_CHECK_OPEN (v4l2object);
3096 GST_V4L2_CHECK_NOT_ACTIVE (v4l2object);
3097
3098 /* find transport */
3099 mode = v4l2object->req_mode;
3100
3101 if (v4l2object->device_caps & V4L2_CAP_READWRITE) {
3102 if (v4l2object->req_mode == GST_V4L2_IO_AUTO)
3103 mode = GST_V4L2_IO_RW;
3104 } else if (v4l2object->req_mode == GST_V4L2_IO_RW)
3105 goto method_not_supported;
3106
3107 if (v4l2object->device_caps & V4L2_CAP_STREAMING) {
3108 if (v4l2object->req_mode == GST_V4L2_IO_AUTO) {
3109 if (!V4L2_TYPE_IS_OUTPUT (v4l2object->type) &&
3110 gst_v4l2_object_is_dmabuf_supported (v4l2object)) {
3111 mode = GST_V4L2_IO_DMABUF;
3112 } else {
3113 mode = GST_V4L2_IO_MMAP;
3114 }
3115 }
3116 } else if (v4l2object->req_mode == GST_V4L2_IO_MMAP ||
3117 v4l2object->req_mode == GST_V4L2_IO_DMABUF)
3118 goto method_not_supported;
3119
3120 /* if still no transport selected, error out */
3121 if (mode == GST_V4L2_IO_AUTO)
3122 goto no_supported_capture_method;
3123
3124 GST_INFO_OBJECT (v4l2object->dbg_obj, "accessing buffers via mode %d", mode);
3125 v4l2object->mode = mode;
3126
3127 /* If min_buffers is not set, the driver either does not support the control or
3128 it has not been asked yet via propose_allocation/decide_allocation. */
3129 if (!v4l2object->min_buffers)
3130 gst_v4l2_get_driver_min_buffers (v4l2object);
3131
3132 /* Map the buffers */
3133 GST_LOG_OBJECT (v4l2object->dbg_obj, "initiating buffer pool");
3134
3135 if (!(v4l2object->pool = gst_v4l2_buffer_pool_new (v4l2object, caps)))
3136 goto buffer_pool_new_failed;
3137
3138 GST_V4L2_SET_ACTIVE (v4l2object);
3139
3140 return TRUE;
3141
3142 /* ERRORS */
3143 buffer_pool_new_failed:
3144 {
3145 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, READ,
3146 (_("Could not map buffers from device '%s'"),
3147 v4l2object->videodev),
3148 ("Failed to create buffer pool: %s", g_strerror (errno)));
3149 return FALSE;
3150 }
3151 method_not_supported:
3152 {
3153 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, READ,
3154 (_("The driver of device '%s' does not support the IO method %d"),
3155 v4l2object->videodev, mode), (NULL));
3156 return FALSE;
3157 }
3158 no_supported_capture_method:
3159 {
3160 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, READ,
3161 (_("The driver of device '%s' does not support any known IO "
3162 "method."), v4l2object->videodev), (NULL));
3163 return FALSE;
3164 }
3165 }
3166
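/* Store @stride for @plane in @info; for tiled formats the stride is encoded
* as a tile count with GST_VIDEO_TILE_MAKE_STRIDE using the padded height. */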
3167 static void
3168 gst_v4l2_object_set_stride (GstVideoInfo * info, GstVideoAlignment * align,
3169 gint plane, gint stride)
3170 {
3171 const GstVideoFormatInfo *finfo = info->finfo;
3172
3173 if (GST_VIDEO_FORMAT_INFO_IS_TILED (finfo)) {
3174 gint x_tiles, y_tiles, ws, hs, tile_height, padded_height;
3175
3176
3177 ws = GST_VIDEO_FORMAT_INFO_TILE_WS (finfo);
3178 hs = GST_VIDEO_FORMAT_INFO_TILE_HS (finfo);
3179 tile_height = 1 << hs;
3180
3181 padded_height = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (finfo, plane,
3182 info->height + align->padding_top + align->padding_bottom);
3183 padded_height = GST_ROUND_UP_N (padded_height, tile_height);
3184
3185 x_tiles = stride >> ws;
3186 y_tiles = padded_height >> hs;
3187 info->stride[plane] = GST_VIDEO_TILE_MAKE_STRIDE (x_tiles, y_tiles);
3188 } else {
3189 info->stride[plane] = stride;
3190 }
3191 }
3192
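/* When the driver exposes a planar format through a single v4l2 plane,
* derive the per-plane strides and offsets from the first plane's
* bytesperline and the padded height. */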
3193 static void
3194 gst_v4l2_object_extrapolate_info (GstV4l2Object * v4l2object,
3195 GstVideoInfo * info, GstVideoAlignment * align, gint stride)
3196 {
3197 const GstVideoFormatInfo *finfo = info->finfo;
3198 gint i, estride, padded_height;
3199 gsize offs = 0;
3200
3201 g_return_if_fail (v4l2object->n_v4l2_planes == 1);
3202
3203 padded_height =
3204 GST_VIDEO_INFO_FIELD_HEIGHT (info) + align->padding_top +
3205 align->padding_bottom;
3206
3207 for (i = 0; i < finfo->n_planes; i++) {
3208 estride = gst_v4l2_object_extrapolate_stride (finfo, i, stride);
3209
3210 gst_v4l2_object_set_stride (info, align, i, estride);
3211
3212 info->offset[i] = offs;
3213 offs += estride *
3214 GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (finfo, i, padded_height);
3215
3216 GST_DEBUG_OBJECT (v4l2object->dbg_obj,
3217 "Extrapolated for plane %d with base stride %d: "
3218 "stride %d, offset %" G_GSIZE_FORMAT, i, stride, info->stride[i],
3219 info->offset[i]);
3220 }
3221
3222 /* Update the image size according to the amount of data we are going to
3223 * read/write. This works around bugs in drivers where the sizeimage
3224 * provided by TRY/S_FMT represents the buffer length (maximum size) rather
3225 * than the expected bytesused (buffer size). */
3226 if (offs < info->size)
3227 info->size = offs;
3228 }
3229
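/* Compute padding, strides, offsets and the image size from the format the
* driver accepted, decide whether GstVideoMeta is needed, and store the
* resulting info, alignment, format and fmtdesc on @v4l2object. */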
3230 static void
3231 gst_v4l2_object_save_format (GstV4l2Object * v4l2object,
3232 struct v4l2_fmtdesc *fmtdesc, struct v4l2_format *format,
3233 GstVideoInfo * info, GstVideoAlignment * align)
3234 {
3235 const GstVideoFormatInfo *finfo = info->finfo;
3236 gboolean standard_stride = TRUE;
3237 gint stride, pstride, padded_width, padded_height, i;
3238
3239 if (GST_VIDEO_INFO_FORMAT (info) == GST_VIDEO_FORMAT_ENCODED) {
3240 v4l2object->n_v4l2_planes = 1;
3241 info->size = format->fmt.pix.sizeimage;
3242 goto store_info;
3243 }
3244
3245 /* adjust right padding */
3246 if (V4L2_TYPE_IS_MULTIPLANAR (v4l2object->type))
3247 stride = format->fmt.pix_mp.plane_fmt[0].bytesperline;
3248 else
3249 stride = format->fmt.pix.bytesperline;
3250
3251 pstride = GST_VIDEO_FORMAT_INFO_PSTRIDE (finfo, 0);
3252 if (pstride) {
3253 padded_width = stride / pstride;
3254 } else {
3255 /* pstride can be 0 for complex formats */
3256 GST_WARNING_OBJECT (v4l2object->element,
3257 "format %s has a pstride of 0, cannot compute padded with",
3258 gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (info)));
3259 padded_width = stride;
3260 }
3261
3262 if (padded_width < format->fmt.pix.width)
3263 GST_WARNING_OBJECT (v4l2object->dbg_obj,
3264 "Driver bug detected, stride (%d) is too small for the width (%d)",
3265 padded_width, format->fmt.pix.width);
3266
3267 align->padding_right = padded_width - info->width - align->padding_left;
3268
3269 /* adjust bottom padding */
3270 padded_height = format->fmt.pix.height;
3271
3272 if (GST_VIDEO_FORMAT_INFO_IS_TILED (finfo)) {
3273 guint hs, tile_height;
3274
3275 hs = GST_VIDEO_FORMAT_INFO_TILE_HS (finfo);
3276 tile_height = 1 << hs;
3277
3278 padded_height = GST_ROUND_UP_N (padded_height, tile_height);
3279 }
3280
3281 align->padding_bottom =
3282 padded_height - GST_VIDEO_INFO_FIELD_HEIGHT (info) - align->padding_top;
3283
3284 /* setup the strides and offset */
3285 if (V4L2_TYPE_IS_MULTIPLANAR (v4l2object->type)) {
3286 struct v4l2_pix_format_mplane *pix_mp = &format->fmt.pix_mp;
3287
3288 /* figure out the frame layout */
3289 v4l2object->n_v4l2_planes = MAX (1, pix_mp->num_planes);
3290 info->size = 0;
3291 for (i = 0; i < v4l2object->n_v4l2_planes; i++) {
3292 stride = pix_mp->plane_fmt[i].bytesperline;
3293
3294 if (info->stride[i] != stride)
3295 standard_stride = FALSE;
3296
3297 gst_v4l2_object_set_stride (info, align, i, stride);
3298 info->offset[i] = info->size;
3299 info->size += pix_mp->plane_fmt[i].sizeimage;
3300 }
3301
3302 /* Extrapolate the strides when a planar format is set in a single v4l2 plane */
3303 if (v4l2object->n_v4l2_planes < finfo->n_planes) {
3304 stride = format->fmt.pix_mp.plane_fmt[0].bytesperline;
3305 gst_v4l2_object_extrapolate_info (v4l2object, info, align, stride);
3306 }
3307 } else {
3308 /* only one plane in non-MPLANE mode */
3309 v4l2object->n_v4l2_planes = 1;
3310 info->size = format->fmt.pix.sizeimage;
3311 stride = format->fmt.pix.bytesperline;
3312
3313 if (info->stride[0] != stride)
3314 standard_stride = FALSE;
3315
3316 gst_v4l2_object_extrapolate_info (v4l2object, info, align, stride);
3317 }
3318
3319 /* adjust the offset to take into account left and top */
3320 if (GST_VIDEO_FORMAT_INFO_IS_TILED (finfo)) {
3321 if ((align->padding_left + align->padding_top) > 0)
3322 GST_WARNING_OBJECT (v4l2object->dbg_obj,
3323 "Left and top padding is not permitted for tiled formats");
3324 } else {
3325 for (i = 0; i < finfo->n_planes; i++) {
3326 gint vedge, hedge;
3327
3328 /* FIXME we assume one plane per component, as this is true for all the
3329 * formats we currently support. */
3330
3331 hedge = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (finfo, i, align->padding_left);
3332 vedge = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (finfo, i, align->padding_top);
3333
3334 info->offset[i] += (vedge * info->stride[i]) +
3335 (hedge * GST_VIDEO_INFO_COMP_PSTRIDE (info, i));
3336 }
3337 }
3338
3339 store_info:
3340 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Got sizeimage %" G_GSIZE_FORMAT,
3341 info->size);
3342
3343 /* to avoid copies we need video meta if there is padding */
3344 v4l2object->need_video_meta =
3345 ((align->padding_top + align->padding_left + align->padding_right +
3346 align->padding_bottom) != 0);
3347
3348 /* ... or if stride is non "standard" */
3349 if (!standard_stride)
3350 v4l2object->need_video_meta = TRUE;
3351
3352 /* ... or also video meta if we use multiple, non-contiguous, planes */
3353 if (v4l2object->n_v4l2_planes > 1)
3354 v4l2object->need_video_meta = TRUE;
3355
3356 v4l2object->info = *info;
3357 v4l2object->align = *align;
3358 v4l2object->format = *format;
3359 v4l2object->fmtdesc = fmtdesc;
3360
3361 /* if we have a framerate pre-calculate duration */
3362 if (info->fps_n > 0 && info->fps_d > 0) {
3363 v4l2object->duration = gst_util_uint64_scale_int (GST_SECOND, info->fps_d,
3364 info->fps_n);
3365 if (GST_VIDEO_INFO_INTERLACE_MODE (info) ==
3366 GST_VIDEO_INTERLACE_MODE_ALTERNATE)
3367 v4l2object->duration /= 2;
3368 } else {
3369 v4l2object->duration = GST_CLOCK_TIME_NONE;
3370 }
3371 }
3372
3373 gint
3374 gst_v4l2_object_extrapolate_stride (const GstVideoFormatInfo * finfo,
3375 gint plane, gint stride)
3376 {
3377 gint estride;
3378
3379 switch (finfo->format) {
3380 case GST_VIDEO_FORMAT_NV12:
3381 case GST_VIDEO_FORMAT_NV12_64Z32:
3382 case GST_VIDEO_FORMAT_NV21:
3383 case GST_VIDEO_FORMAT_NV16:
3384 case GST_VIDEO_FORMAT_NV61:
3385 case GST_VIDEO_FORMAT_NV24:
3386 estride = (plane == 0 ? 1 : 2) *
3387 GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (finfo, plane, stride);
3388 break;
3389 default:
3390 estride = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (finfo, plane, stride);
3391 break;
3392 }
3393
3394 return estride;
3395 }
3396
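/* Pick the v4l2_field matching the interlace mode and field order of
* @info. */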
3397 static enum v4l2_field
3398 get_v4l2_field_for_info (GstVideoInfo * info)
3399 {
3400 if (!GST_VIDEO_INFO_IS_INTERLACED (info))
3401 return V4L2_FIELD_NONE;
3402
3403 if (GST_VIDEO_INFO_INTERLACE_MODE (info) ==
3404 GST_VIDEO_INTERLACE_MODE_ALTERNATE)
3405 return V4L2_FIELD_ALTERNATE;
3406
3407 switch (GST_VIDEO_INFO_FIELD_ORDER (info)) {
3408 case GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST:
3409 return V4L2_FIELD_INTERLACED_TB;
3410 case GST_VIDEO_FIELD_ORDER_BOTTOM_FIELD_FIRST:
3411 return V4L2_FIELD_INTERLACED_BT;
3412 case GST_VIDEO_FIELD_ORDER_UNKNOWN:
3413 default:
3414 return V4L2_FIELD_INTERLACED;
3415 }
3416 }
3417
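/* Check whether the colorimetry in @caps matches @cinfo, filling unknown
* fields of the caps colorimetry from @cinfo and tolerating the incomplete
* 1:4:0:0 colorimetry produced by jpegdec when the device expects full JPEG
* colorimetry. */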
3418 static gboolean
3419 gst_v4l2_video_colorimetry_matches (const GstVideoColorimetry * cinfo,
3420 GstCaps * caps)
3421 {
3422 GstVideoInfo info;
3423 static const GstVideoColorimetry ci_likely_jpeg = {
3424 GST_VIDEO_COLOR_RANGE_0_255, GST_VIDEO_COLOR_MATRIX_BT601,
3425 GST_VIDEO_TRANSFER_UNKNOWN, GST_VIDEO_COLOR_PRIMARIES_UNKNOWN
3426 };
3427 static const GstVideoColorimetry ci_jpeg = {
3428 GST_VIDEO_COLOR_RANGE_0_255, GST_VIDEO_COLOR_MATRIX_BT601,
3429 GST_VIDEO_TRANSFER_SRGB, GST_VIDEO_COLOR_PRIMARIES_BT709
3430 };
3431
3432 if (!gst_video_info_from_caps (&info, caps))
3433 return FALSE;
3434
3435 /* if colorimetry in caps is unknown, use the default one */
3436 if (info.colorimetry.primaries == GST_VIDEO_COLOR_PRIMARIES_UNKNOWN)
3437 info.colorimetry.primaries = cinfo->primaries;
3438 if (info.colorimetry.range == GST_VIDEO_COLOR_RANGE_UNKNOWN)
3439 info.colorimetry.range = cinfo->range;
3440 if (info.colorimetry.matrix == GST_VIDEO_COLOR_MATRIX_UNKNOWN)
3441 info.colorimetry.matrix = cinfo->matrix;
3442 if (info.colorimetry.transfer == GST_VIDEO_TRANSFER_UNKNOWN)
3443 info.colorimetry.transfer = cinfo->transfer;
3444
3445 if (gst_video_colorimetry_is_equal (&info.colorimetry, cinfo))
3446 return TRUE;
3447
3448   /* Allow 1:4:0:0 (range:matrix:transfer:primaries, as produced by jpegdec) if the device expects 1:4:7:1 (full-range sRGB) */
3449 if (gst_video_colorimetry_is_equal (&info.colorimetry, &ci_likely_jpeg)
3450 && gst_video_colorimetry_is_equal (cinfo, &ci_jpeg))
3451 return TRUE;
3452
3453 return FALSE;
3454 }
3455
3456 static const gchar *
3457 field_to_str (enum v4l2_field f)
3458 {
3459 switch (f) {
3460 case V4L2_FIELD_ANY:
3461 return "any";
3462 case V4L2_FIELD_NONE:
3463 return "none";
3464 case V4L2_FIELD_TOP:
3465 return "top";
3466 case V4L2_FIELD_BOTTOM:
3467 return "bottom";
3468 case V4L2_FIELD_INTERLACED:
3469 return "interlaced";
3470 case V4L2_FIELD_SEQ_TB:
3471 return "seq-tb";
3472 case V4L2_FIELD_SEQ_BT:
3473 return "seq-bt";
3474 case V4L2_FIELD_ALTERNATE:
3475 return "alternate";
3476 case V4L2_FIELD_INTERLACED_TB:
3477 return "interlaced-tb";
3478 case V4L2_FIELD_INTERLACED_BT:
3479 return "interlaced-bt";
3480 }
3481
3482 return "unknown";
3483 }
3484
3485 static gboolean
3486 gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps,
3487 gboolean try_only, GstV4l2Error * error)
3488 {
3489 gint fd = v4l2object->video_fd;
3490 struct v4l2_format format;
3491 struct v4l2_streamparm streamparm;
3492 enum v4l2_field field;
3493 guint32 pixelformat;
3494 struct v4l2_fmtdesc *fmtdesc;
3495 GstVideoInfo info;
3496 GstVideoAlignment align;
3497 gint width, height, fps_n, fps_d;
3498 gint n_v4l_planes;
3499 gint i = 0;
3500 gboolean is_mplane;
3501 enum v4l2_colorspace colorspace = 0;
3502 enum v4l2_quantization range = 0;
3503 enum v4l2_ycbcr_encoding matrix = 0;
3504 enum v4l2_xfer_func transfer = 0;
3505 GstStructure *s;
3506 gboolean disable_interlacing = FALSE;
3507 gboolean disable_colorimetry = FALSE;
3508
3509 g_return_val_if_fail (!v4l2object->skip_try_fmt_probes ||
3510 gst_caps_is_writable (caps), FALSE);
3511
3512 GST_V4L2_CHECK_OPEN (v4l2object);
3513 if (!try_only)
3514 GST_V4L2_CHECK_NOT_ACTIVE (v4l2object);
3515
3516 is_mplane = V4L2_TYPE_IS_MULTIPLANAR (v4l2object->type);
3517
3518 gst_video_info_init (&info);
3519 gst_video_alignment_reset (&align);
3520 v4l2object->transfer = GST_VIDEO_TRANSFER_UNKNOWN;
3521
3522 if (!gst_v4l2_object_get_caps_info (v4l2object, caps, &fmtdesc, &info))
3523 goto invalid_caps;
3524
3525 pixelformat = fmtdesc->pixelformat;
3526 width = GST_VIDEO_INFO_WIDTH (&info);
3527 height = GST_VIDEO_INFO_FIELD_HEIGHT (&info);
3528 fps_n = GST_VIDEO_INFO_FPS_N (&info);
3529 fps_d = GST_VIDEO_INFO_FPS_D (&info);
3530
3531   /* if encoded format (GST_VIDEO_INFO_N_PLANES returns 0)
3532    * or if contiguous planes are preferred */
3533 n_v4l_planes = GST_VIDEO_INFO_N_PLANES (&info);
3534 if (!n_v4l_planes || !v4l2object->prefered_non_contiguous)
3535 n_v4l_planes = 1;
3536
3537 field = get_v4l2_field_for_info (&info);
3538 if (field != V4L2_FIELD_NONE)
3539 GST_DEBUG_OBJECT (v4l2object->element, "interlaced video");
3540 else
3541 GST_DEBUG_OBJECT (v4l2object->element, "progressive video");
3542
3543 /* We first pick the main colorspace from the primaries */
3544 switch (info.colorimetry.primaries) {
3545 case GST_VIDEO_COLOR_PRIMARIES_BT709:
3546       /* There are two colorspaces using these primaries; use the range to
3547        * differentiate between them */
3548 if (info.colorimetry.range == GST_VIDEO_COLOR_RANGE_16_235)
3549 colorspace = V4L2_COLORSPACE_REC709;
3550 else
3551 colorspace = V4L2_COLORSPACE_SRGB;
3552 break;
3553 case GST_VIDEO_COLOR_PRIMARIES_BT2020:
3554 colorspace = V4L2_COLORSPACE_BT2020;
3555 break;
3556 case GST_VIDEO_COLOR_PRIMARIES_BT470M:
3557 colorspace = V4L2_COLORSPACE_470_SYSTEM_M;
3558 break;
3559 case GST_VIDEO_COLOR_PRIMARIES_BT470BG:
3560 colorspace = V4L2_COLORSPACE_470_SYSTEM_BG;
3561 break;
3562 case GST_VIDEO_COLOR_PRIMARIES_SMPTE170M:
3563 colorspace = V4L2_COLORSPACE_SMPTE170M;
3564 break;
3565 case GST_VIDEO_COLOR_PRIMARIES_SMPTE240M:
3566 colorspace = V4L2_COLORSPACE_SMPTE240M;
3567 break;
3568
3569 case GST_VIDEO_COLOR_PRIMARIES_FILM:
3570 case GST_VIDEO_COLOR_PRIMARIES_UNKNOWN:
3571 /* We don't know, we will guess */
3572 break;
3573
3574 default:
3575 GST_WARNING_OBJECT (v4l2object->dbg_obj,
3576 "Unknown colorimetry primaries %d", info.colorimetry.primaries);
3577 break;
3578 }
3579
3580 switch (info.colorimetry.range) {
3581 case GST_VIDEO_COLOR_RANGE_0_255:
3582 range = V4L2_QUANTIZATION_FULL_RANGE;
3583 break;
3584 case GST_VIDEO_COLOR_RANGE_16_235:
3585 range = V4L2_QUANTIZATION_LIM_RANGE;
3586 break;
3587 case GST_VIDEO_COLOR_RANGE_UNKNOWN:
3588 /* We let the driver pick a default one */
3589 break;
3590 default:
3591 GST_WARNING_OBJECT (v4l2object->dbg_obj,
3592 "Unknown colorimetry range %d", info.colorimetry.range);
3593 break;
3594 }
3595
3596 switch (info.colorimetry.matrix) {
3597 case GST_VIDEO_COLOR_MATRIX_RGB:
3598 /* Unspecified, leave to default */
3599 break;
3600     /* FCC is about the same as BT601 with fewer digits */
3601 case GST_VIDEO_COLOR_MATRIX_FCC:
3602 case GST_VIDEO_COLOR_MATRIX_BT601:
3603 matrix = V4L2_YCBCR_ENC_601;
3604 break;
3605 case GST_VIDEO_COLOR_MATRIX_BT709:
3606 matrix = V4L2_YCBCR_ENC_709;
3607 break;
3608 case GST_VIDEO_COLOR_MATRIX_SMPTE240M:
3609 matrix = V4L2_YCBCR_ENC_SMPTE240M;
3610 break;
3611 case GST_VIDEO_COLOR_MATRIX_BT2020:
3612 matrix = V4L2_YCBCR_ENC_BT2020;
3613 break;
3614 case GST_VIDEO_COLOR_MATRIX_UNKNOWN:
3615 /* We let the driver pick a default one */
3616 break;
3617 default:
3618 GST_WARNING_OBJECT (v4l2object->dbg_obj,
3619 "Unknown colorimetry matrix %d", info.colorimetry.matrix);
3620 break;
3621 }
3622
3623 switch (info.colorimetry.transfer) {
3624 case GST_VIDEO_TRANSFER_GAMMA18:
3625 case GST_VIDEO_TRANSFER_GAMMA20:
3626 case GST_VIDEO_TRANSFER_GAMMA22:
3627 case GST_VIDEO_TRANSFER_GAMMA28:
3628 GST_WARNING_OBJECT (v4l2object->dbg_obj,
3629 "GAMMA 18, 20, 22, 28 transfer functions not supported");
3630 /* fallthrough */
3631 case GST_VIDEO_TRANSFER_GAMMA10:
3632 transfer = V4L2_XFER_FUNC_NONE;
3633 break;
3634 case GST_VIDEO_TRANSFER_SMPTE2084:
3635 transfer = V4L2_XFER_FUNC_SMPTE2084;
3636 break;
3637 case GST_VIDEO_TRANSFER_BT601:
3638 case GST_VIDEO_TRANSFER_BT2020_12:
3639 case GST_VIDEO_TRANSFER_BT2020_10:
3640 case GST_VIDEO_TRANSFER_BT709:
3641 v4l2object->transfer = info.colorimetry.transfer;
3642 transfer = V4L2_XFER_FUNC_709;
3643 break;
3644 case GST_VIDEO_TRANSFER_SMPTE240M:
3645 transfer = V4L2_XFER_FUNC_SMPTE240M;
3646 break;
3647 case GST_VIDEO_TRANSFER_SRGB:
3648 transfer = V4L2_XFER_FUNC_SRGB;
3649 break;
3650 case GST_VIDEO_TRANSFER_LOG100:
3651 case GST_VIDEO_TRANSFER_LOG316:
3652 GST_WARNING_OBJECT (v4l2object->dbg_obj,
3653 "LOG 100, 316 transfer functions not supported");
3654 /* FIXME No known sensible default, maybe AdobeRGB ? */
3655 break;
3656 case GST_VIDEO_TRANSFER_UNKNOWN:
3657 /* We let the driver pick a default one */
3658 break;
3659 default:
3660 GST_WARNING_OBJECT (v4l2object->dbg_obj,
3661 "Unknown colorimetry transfer %d", info.colorimetry.transfer);
3662 break;
3663 }
3664
3665 if (colorspace == 0) {
3666 /* Try to guess colorspace according to pixelformat and size */
3667 if (GST_VIDEO_INFO_IS_YUV (&info)) {
3668 if (range == V4L2_QUANTIZATION_FULL_RANGE
3669 && matrix == V4L2_YCBCR_ENC_601 && transfer == 0) {
3670 /* Full range BT.601 YCbCr encoding with unknown primaries and transfer
3671 * function most likely is JPEG */
3672 colorspace = V4L2_COLORSPACE_JPEG;
3673 transfer = V4L2_XFER_FUNC_SRGB;
3674 } else {
3675 /* SD streams likely use SMPTE170M and HD streams REC709 */
3676 if (width <= 720 && GST_VIDEO_INFO_HEIGHT (&info) <= 576)
3677 colorspace = V4L2_COLORSPACE_SMPTE170M;
3678 else
3679 colorspace = V4L2_COLORSPACE_REC709;
3680 }
3681 } else if (GST_VIDEO_INFO_IS_RGB (&info)) {
3682 colorspace = V4L2_COLORSPACE_SRGB;
3683 transfer = V4L2_XFER_FUNC_NONE;
3684 }
3685 }
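  /* e.g. (informal): a 1920x1080 YUV stream with fully unknown colorimetry ends
   * up reported to the driver as V4L2_COLORSPACE_REC709 by the heuristic above */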
3686
3687 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Desired format %dx%d, format "
3688 "%" GST_FOURCC_FORMAT " stride: %d", width, height,
3689 GST_FOURCC_ARGS (pixelformat), GST_VIDEO_INFO_PLANE_STRIDE (&info, 0));
3690
3691 memset (&format, 0x00, sizeof (struct v4l2_format));
3692 format.type = v4l2object->type;
3693
3694 if (is_mplane) {
3695 format.type = v4l2object->type;
3696 format.fmt.pix_mp.pixelformat = pixelformat;
3697 format.fmt.pix_mp.width = width;
3698 format.fmt.pix_mp.height = height;
3699 format.fmt.pix_mp.field = field;
3700 format.fmt.pix_mp.num_planes = n_v4l_planes;
3701
3702     /* try to request our preferred stride, but it is not a failure if it is
3703      * not accepted */
3704 for (i = 0; i < n_v4l_planes; i++) {
3705 gint stride = GST_VIDEO_INFO_PLANE_STRIDE (&info, i);
3706
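      /* Note: GStreamer packs tiled strides as x/y tile counts, so convert the
       * x tile count back into a byte stride before handing it to V4L2 */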
3707 if (GST_VIDEO_FORMAT_INFO_IS_TILED (info.finfo))
3708 stride = GST_VIDEO_TILE_X_TILES (stride) <<
3709 GST_VIDEO_FORMAT_INFO_TILE_WS (info.finfo);
3710
3711 format.fmt.pix_mp.plane_fmt[i].bytesperline = stride;
3712 }
3713
3714 if (GST_VIDEO_INFO_FORMAT (&info) == GST_VIDEO_FORMAT_ENCODED)
3715 format.fmt.pix_mp.plane_fmt[0].sizeimage = ENCODED_BUFFER_SIZE;
3716 } else {
3717 gint stride = GST_VIDEO_INFO_PLANE_STRIDE (&info, 0);
3718
3719 format.type = v4l2object->type;
3720
3721 format.fmt.pix.width = width;
3722 format.fmt.pix.height = height;
3723 format.fmt.pix.pixelformat = pixelformat;
3724 format.fmt.pix.field = field;
3725
3726 if (GST_VIDEO_FORMAT_INFO_IS_TILED (info.finfo))
3727 stride = GST_VIDEO_TILE_X_TILES (stride) <<
3728 GST_VIDEO_FORMAT_INFO_TILE_WS (info.finfo);
3729
3730 /* try to ask our preferred stride */
3731 format.fmt.pix.bytesperline = stride;
3732
3733 if (GST_VIDEO_INFO_FORMAT (&info) == GST_VIDEO_FORMAT_ENCODED)
3734 format.fmt.pix.sizeimage = ENCODED_BUFFER_SIZE;
3735 }
3736
3737 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Desired format is %dx%d, format "
3738 "%" GST_FOURCC_FORMAT ", nb planes %d", format.fmt.pix.width,
3739 format.fmt.pix_mp.height,
3740 GST_FOURCC_ARGS (format.fmt.pix.pixelformat),
3741 is_mplane ? format.fmt.pix_mp.num_planes : 1);
3742
3743 #ifndef GST_DISABLE_GST_DEBUG
3744 if (is_mplane) {
3745 for (i = 0; i < format.fmt.pix_mp.num_planes; i++)
3746 GST_DEBUG_OBJECT (v4l2object->dbg_obj, " stride %d",
3747 format.fmt.pix_mp.plane_fmt[i].bytesperline);
3748 } else {
3749 GST_DEBUG_OBJECT (v4l2object->dbg_obj, " stride %d",
3750 format.fmt.pix.bytesperline);
3751 }
3752 #endif
3753
3754 if (is_mplane) {
3755 format.fmt.pix_mp.colorspace = colorspace;
3756 format.fmt.pix_mp.quantization = range;
3757 format.fmt.pix_mp.ycbcr_enc = matrix;
3758 format.fmt.pix_mp.xfer_func = transfer;
3759 } else {
3760 format.fmt.pix.priv = V4L2_PIX_FMT_PRIV_MAGIC;
3761 format.fmt.pix.colorspace = colorspace;
3762 format.fmt.pix.quantization = range;
3763 format.fmt.pix.ycbcr_enc = matrix;
3764 format.fmt.pix.xfer_func = transfer;
3765 }
3766
3767 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Desired colorspace is %d:%d:%d:%d",
3768 colorspace, range, matrix, transfer);
3769
3770 if (try_only) {
3771 if (v4l2object->ioctl (fd, VIDIOC_TRY_FMT, &format) < 0)
3772 goto try_fmt_failed;
3773 } else {
3774 if (v4l2object->ioctl (fd, VIDIOC_S_FMT, &format) < 0)
3775 goto set_fmt_failed;
3776 }
3777
3778 if (is_mplane) {
3779 colorspace = format.fmt.pix_mp.colorspace;
3780 range = format.fmt.pix_mp.quantization;
3781 matrix = format.fmt.pix_mp.ycbcr_enc;
3782 transfer = format.fmt.pix_mp.xfer_func;
3783 } else {
3784 colorspace = format.fmt.pix.colorspace;
3785 range = format.fmt.pix.quantization;
3786 matrix = format.fmt.pix.ycbcr_enc;
3787 transfer = format.fmt.pix.xfer_func;
3788 }
3789
3790 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Got format of %dx%d, format "
3791 "%" GST_FOURCC_FORMAT ", nb planes %d, colorspace %d:%d:%d:%d field: %s",
3792 format.fmt.pix.width, format.fmt.pix_mp.height,
3793 GST_FOURCC_ARGS (format.fmt.pix.pixelformat),
3794 is_mplane ? format.fmt.pix_mp.num_planes : 1,
3795 colorspace, range, matrix, transfer, field_to_str (format.fmt.pix.field));
3796
3797 #ifndef GST_DISABLE_GST_DEBUG
3798 if (is_mplane) {
3799 for (i = 0; i < format.fmt.pix_mp.num_planes; i++)
3800 GST_DEBUG_OBJECT (v4l2object->dbg_obj, " stride %d, sizeimage %d",
3801 format.fmt.pix_mp.plane_fmt[i].bytesperline,
3802 format.fmt.pix_mp.plane_fmt[i].sizeimage);
3803 } else {
3804 GST_DEBUG_OBJECT (v4l2object->dbg_obj, " stride %d, sizeimage %d",
3805 format.fmt.pix.bytesperline, format.fmt.pix.sizeimage);
3806 }
3807 #endif
3808
3809 if (format.fmt.pix.pixelformat != pixelformat)
3810 goto invalid_pixelformat;
3811
3812 /* Only negotiate size with raw data.
3813    * For some codecs the dimensions are *not* in the bitstream (IIRC VC1
3814    * in ASF mode, for example), and there is also no reason for a driver to
3815    * change the size. */
3816 if (info.finfo->format != GST_VIDEO_FORMAT_ENCODED) {
3817 /* We can crop larger images */
3818 if (format.fmt.pix.width < width || format.fmt.pix.height < height)
3819 goto invalid_dimensions;
3820
3821 /* Note, this will be adjusted if upstream has non-centered cropping. */
3822 align.padding_top = 0;
3823 align.padding_bottom = format.fmt.pix.height - height;
3824 align.padding_left = 0;
3825 align.padding_right = format.fmt.pix.width - width;
3826 }
3827
3828 if (is_mplane && format.fmt.pix_mp.num_planes != n_v4l_planes)
3829 goto invalid_planes;
3830
3831 /* used to check colorimetry and interlace mode fields presence */
3832 s = gst_caps_get_structure (caps, 0);
3833
3834 if (gst_v4l2_object_get_interlace_mode (format.fmt.pix.field,
3835 &info.interlace_mode)) {
3836 if (gst_structure_has_field (s, "interlace-mode")) {
3837 if (format.fmt.pix.field != field)
3838 goto invalid_field;
3839 }
3840 } else {
3841     /* The driver (or libv4l2) is misbehaving, so just ignore the interlace-mode
3842      * reported by TRY_FMT */
3843 disable_interlacing = TRUE;
3844 if (gst_structure_has_field (s, "interlace-mode"))
3845 gst_structure_remove_field (s, "interlace-mode");
3846 }
3847
3848 if (gst_v4l2_object_get_colorspace (v4l2object, &format, &info.colorimetry)) {
3849 if (gst_structure_has_field (s, "colorimetry")) {
3850 if (!gst_v4l2_video_colorimetry_matches (&info.colorimetry, caps))
3851 goto invalid_colorimetry;
3852 }
3853 } else {
3854     /* The driver (or libv4l2) is misbehaving, so just ignore the colorimetry
3855      * reported by TRY_FMT */
3856 disable_colorimetry = TRUE;
3857 if (gst_structure_has_field (s, "colorimetry"))
3858 gst_structure_remove_field (s, "colorimetry");
3859 }
3860
3861 /* In case we have skipped the try_fmt probes, we'll need to set the
3862 * interlace-mode and colorimetry back into the caps. */
3863 if (v4l2object->skip_try_fmt_probes) {
3864 if (!disable_interlacing && !gst_structure_has_field (s, "interlace-mode")) {
3865 gst_structure_set (s, "interlace-mode", G_TYPE_STRING,
3866 gst_video_interlace_mode_to_string (info.interlace_mode), NULL);
3867 }
3868 if (!disable_colorimetry && !gst_structure_has_field (s, "colorimetry")) {
3869 gchar *str = gst_video_colorimetry_to_string (&info.colorimetry);
3870 gst_structure_set (s, "colorimetry", G_TYPE_STRING, str, NULL);
3871 g_free (str);
3872 }
3873 }
3874
3875 if (try_only) /* good enough for trying only */
3876 return TRUE;
3877
3878 if (GST_VIDEO_INFO_HAS_ALPHA (&info)) {
3879 struct v4l2_control ctl = { 0, };
3880 ctl.id = V4L2_CID_ALPHA_COMPONENT;
3881 ctl.value = 0xff;
3882
3883 if (v4l2object->ioctl (fd, VIDIOC_S_CTRL, &ctl) < 0)
3884 GST_WARNING_OBJECT (v4l2object->dbg_obj,
3885 "Failed to set alpha component value");
3886 }
3887
3888 /* Is there a reason we require the caller to always specify a framerate? */
3889 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Desired framerate: %u/%u", fps_n,
3890 fps_d);
3891
3892 memset (&streamparm, 0x00, sizeof (struct v4l2_streamparm));
3893 streamparm.type = v4l2object->type;
3894
3895 if (v4l2object->ioctl (fd, VIDIOC_G_PARM, &streamparm) < 0)
3896 goto get_parm_failed;
3897
3898 if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE
3899 || v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
3900 GST_VIDEO_INFO_FPS_N (&info) =
3901 streamparm.parm.capture.timeperframe.denominator;
3902 GST_VIDEO_INFO_FPS_D (&info) =
3903 streamparm.parm.capture.timeperframe.numerator;
3904
3905 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Got capture framerate: %u/%u",
3906 streamparm.parm.capture.timeperframe.denominator,
3907 streamparm.parm.capture.timeperframe.numerator);
3908
3909 /* We used to skip frame rate setup if the camera was already setup
3910 * with the requested frame rate. This breaks some cameras though,
3911 * causing them to not output data (several models of Thinkpad cameras
3912 * have this problem at least).
3913 * So, don't skip. */
3914 GST_LOG_OBJECT (v4l2object->dbg_obj, "Setting capture framerate to %u/%u",
3915 fps_n, fps_d);
3916 /* We want to change the frame rate, so check whether we can. Some cheap USB
3917 * cameras don't have the capability */
3918 if ((streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) == 0) {
3919 GST_DEBUG_OBJECT (v4l2object->dbg_obj,
3920 "Not setting capture framerate (not supported)");
3921 goto done;
3922 }
3923
3924 /* Note: V4L2 wants the frame interval, we have the frame rate */
3925 streamparm.parm.capture.timeperframe.numerator = fps_d;
3926 streamparm.parm.capture.timeperframe.denominator = fps_n;
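    /* e.g. a caps framerate of 30/1 becomes a timeperframe interval of 1/30 s */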
3927
3928     /* some cheap USB cams won't accept any change */
3929 if (v4l2object->ioctl (fd, VIDIOC_S_PARM, &streamparm) < 0)
3930 goto set_parm_failed;
3931
3932 if (streamparm.parm.capture.timeperframe.numerator > 0 &&
3933 streamparm.parm.capture.timeperframe.denominator > 0) {
3934 /* get new values */
3935 fps_d = streamparm.parm.capture.timeperframe.numerator;
3936 fps_n = streamparm.parm.capture.timeperframe.denominator;
3937
3938 GST_INFO_OBJECT (v4l2object->dbg_obj, "Set capture framerate to %u/%u",
3939 fps_n, fps_d);
3940 } else {
3941 /* fix v4l2 capture driver to provide framerate values */
3942 GST_WARNING_OBJECT (v4l2object->dbg_obj,
3943 "Reuse caps framerate %u/%u - fix v4l2 capture driver", fps_n, fps_d);
3944 }
3945
3946 GST_VIDEO_INFO_FPS_N (&info) = fps_n;
3947 GST_VIDEO_INFO_FPS_D (&info) = fps_d;
3948 } else if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT
3949 || v4l2object->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
3950 GST_VIDEO_INFO_FPS_N (&info) =
3951 streamparm.parm.output.timeperframe.denominator;
3952 GST_VIDEO_INFO_FPS_D (&info) =
3953 streamparm.parm.output.timeperframe.numerator;
3954
3955 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Got output framerate: %u/%u",
3956 streamparm.parm.output.timeperframe.denominator,
3957 streamparm.parm.output.timeperframe.numerator);
3958
3959 GST_LOG_OBJECT (v4l2object->dbg_obj, "Setting output framerate to %u/%u",
3960 fps_n, fps_d);
3961 if ((streamparm.parm.output.capability & V4L2_CAP_TIMEPERFRAME) == 0) {
3962 GST_DEBUG_OBJECT (v4l2object->dbg_obj,
3963 "Not setting output framerate (not supported)");
3964 goto done;
3965 }
3966
3967 /* Note: V4L2 wants the frame interval, we have the frame rate */
3968 streamparm.parm.output.timeperframe.numerator = fps_d;
3969 streamparm.parm.output.timeperframe.denominator = fps_n;
3970
3971 if (v4l2object->ioctl (fd, VIDIOC_S_PARM, &streamparm) < 0)
3972 goto set_parm_failed;
3973
3974 if (streamparm.parm.output.timeperframe.numerator > 0 &&
3975 streamparm.parm.output.timeperframe.denominator > 0) {
3976 /* get new values */
3977 fps_d = streamparm.parm.output.timeperframe.numerator;
3978 fps_n = streamparm.parm.output.timeperframe.denominator;
3979
3980 GST_INFO_OBJECT (v4l2object->dbg_obj, "Set output framerate to %u/%u",
3981 fps_n, fps_d);
3982 } else {
3983 /* fix v4l2 output driver to provide framerate values */
3984 GST_WARNING_OBJECT (v4l2object->dbg_obj,
3985 "Reuse caps framerate %u/%u - fix v4l2 output driver", fps_n, fps_d);
3986 }
3987
3988 GST_VIDEO_INFO_FPS_N (&info) = fps_n;
3989 GST_VIDEO_INFO_FPS_D (&info) = fps_d;
3990 }
3991
3992 done:
3993   /* add a boolean return, so we can fail on driver bugs */
3994 gst_v4l2_object_save_format (v4l2object, fmtdesc, &format, &info, &align);
3995
3996 /* now configure the pool */
3997 if (!gst_v4l2_object_setup_pool (v4l2object, caps))
3998 goto pool_failed;
3999
4000 return TRUE;
4001
4002 /* ERRORS */
4003 invalid_caps:
4004 {
4005 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "can't parse caps %" GST_PTR_FORMAT,
4006 caps);
4007
4008 GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
4009 (_("Invalid caps")), ("Can't parse caps %" GST_PTR_FORMAT, caps));
4010 return FALSE;
4011 }
4012 try_fmt_failed:
4013 {
4014 if (errno == EINVAL) {
4015 GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
4016 (_("Device '%s' has no supported format"), v4l2object->videodev),
4017 ("Call to TRY_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
4018 GST_FOURCC_ARGS (pixelformat), width, height,
4019 g_strerror (errno)));
4020 } else {
4021 GST_V4L2_ERROR (error, RESOURCE, FAILED,
4022 (_("Device '%s' failed during initialization"),
4023 v4l2object->videodev),
4024 ("Call to TRY_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
4025 GST_FOURCC_ARGS (pixelformat), width, height,
4026 g_strerror (errno)));
4027 }
4028 return FALSE;
4029 }
4030 set_fmt_failed:
4031 {
4032 if (errno == EBUSY) {
4033 GST_V4L2_ERROR (error, RESOURCE, BUSY,
4034 (_("Device '%s' is busy"), v4l2object->videodev),
4035 ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
4036 GST_FOURCC_ARGS (pixelformat), width, height,
4037 g_strerror (errno)));
4038 } else if (errno == EINVAL) {
4039 GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
4040 (_("Device '%s' has no supported format"), v4l2object->videodev),
4041 ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
4042 GST_FOURCC_ARGS (pixelformat), width, height,
4043 g_strerror (errno)));
4044 } else {
4045 GST_V4L2_ERROR (error, RESOURCE, FAILED,
4046 (_("Device '%s' failed during initialization"),
4047 v4l2object->videodev),
4048 ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
4049 GST_FOURCC_ARGS (pixelformat), width, height,
4050 g_strerror (errno)));
4051 }
4052 return FALSE;
4053 }
4054 invalid_dimensions:
4055 {
4056 GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
4057 (_("Device '%s' cannot capture at %dx%d"),
4058 v4l2object->videodev, width, height),
4059 ("Tried to capture at %dx%d, but device returned size %dx%d",
4060 width, height, format.fmt.pix.width, format.fmt.pix.height));
4061 return FALSE;
4062 }
4063 invalid_pixelformat:
4064 {
4065 GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
4066 (_("Device '%s' cannot capture in the specified format"),
4067 v4l2object->videodev),
4068 ("Tried to capture in %" GST_FOURCC_FORMAT
4069 ", but device returned format" " %" GST_FOURCC_FORMAT,
4070 GST_FOURCC_ARGS (pixelformat),
4071 GST_FOURCC_ARGS (format.fmt.pix.pixelformat)));
4072 return FALSE;
4073 }
4074 invalid_planes:
4075 {
4076 GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
4077         (_("Device '%s' does not support non-contiguous planes"),
4078 v4l2object->videodev),
4079 ("Device wants %d planes", format.fmt.pix_mp.num_planes));
4080 return FALSE;
4081 }
4082 invalid_field:
4083 {
4084 enum v4l2_field wanted_field;
4085
4086 if (is_mplane)
4087 wanted_field = format.fmt.pix_mp.field;
4088 else
4089 wanted_field = format.fmt.pix.field;
4090
4091 GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
4092 (_("Device '%s' does not support %s interlacing"),
4093 v4l2object->videodev,
4094 field == V4L2_FIELD_NONE ? "progressive" : "interleaved"),
4095 ("Device wants %s interlacing",
4096 wanted_field == V4L2_FIELD_NONE ? "progressive" : "interleaved"));
4097 return FALSE;
4098 }
4099 invalid_colorimetry:
4100 {
4101 gchar *wanted_colorimetry;
4102
4103 wanted_colorimetry = gst_video_colorimetry_to_string (&info.colorimetry);
4104
4105 GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
4106 (_("Device '%s' does not support %s colorimetry"),
4107 v4l2object->videodev, gst_structure_get_string (s, "colorimetry")),
4108 ("Device wants %s colorimetry", wanted_colorimetry));
4109
4110 g_free (wanted_colorimetry);
4111 return FALSE;
4112 }
4113 get_parm_failed:
4114 {
4115 /* it's possible that this call is not supported */
4116 if (errno != EINVAL && errno != ENOTTY) {
4117 GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
4118 (_("Could not get parameters on device '%s'"),
4119 v4l2object->videodev), GST_ERROR_SYSTEM);
4120 }
4121 goto done;
4122 }
4123 set_parm_failed:
4124 {
4125 GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
4126 (_("Video device did not accept new frame rate setting.")),
4127 GST_ERROR_SYSTEM);
4128 goto done;
4129 }
4130 pool_failed:
4131 {
4132     /* setup_pool has already posted the error */
4133 return FALSE;
4134 }
4135 }
4136
4137 gboolean
4138 gst_v4l2_object_set_format (GstV4l2Object * v4l2object, GstCaps * caps,
4139 GstV4l2Error * error)
4140 {
4141 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Setting format to %" GST_PTR_FORMAT,
4142 caps);
4143 return gst_v4l2_object_set_format_full (v4l2object, caps, FALSE, error);
4144 }
4145
4146 gboolean
4147 gst_v4l2_object_try_format (GstV4l2Object * v4l2object, GstCaps * caps,
4148 GstV4l2Error * error)
4149 {
4150 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Trying format %" GST_PTR_FORMAT,
4151 caps);
4152 return gst_v4l2_object_set_format_full (v4l2object, caps, TRUE, error);
4153 }
4154
4155 /**
4156 * gst_v4l2_object_acquire_format:
4157 * @v4l2object: the object
4158 * @info: a GstVideoInfo to be filled
4159 *
4160 * Acquire the driver chosen format. This is useful in decoder or encoder elements where
4161 * the output format is chosen by the HW.
4162 *
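 * A minimal usage sketch (hypothetical caller code; the @v4l2object and
 * srcpad variables are assumed to exist and are not part of this file):
 * |[<!-- language="C" -->
 * GstVideoInfo info;
 * GstCaps *caps;
 *
 * if (gst_v4l2_object_acquire_format (v4l2object, &info)) {
 *   caps = gst_video_info_to_caps (&info);
 *   gst_pad_set_caps (srcpad, caps);
 *   gst_caps_unref (caps);
 * }
 * ]|
 *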
4163 * Returns: %TRUE on success, %FALSE on failure.
4164 */
4165 gboolean
4166 gst_v4l2_object_acquire_format (GstV4l2Object * v4l2object, GstVideoInfo * info)
4167 {
4168 struct v4l2_fmtdesc *fmtdesc;
4169 struct v4l2_format fmt;
4170 struct v4l2_crop crop;
4171 struct v4l2_selection sel;
4172 struct v4l2_rect *r = NULL;
4173 GstVideoFormat format;
4174 guint width, height;
4175 GstVideoAlignment align;
4176 GstVideoInterlaceMode interlace_mode;
4177
4178 gst_video_info_init (info);
4179 gst_video_alignment_reset (&align);
4180 v4l2object->transfer = GST_VIDEO_TRANSFER_UNKNOWN;
4181
4182 memset (&fmt, 0x00, sizeof (struct v4l2_format));
4183 fmt.type = v4l2object->type;
4184 if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_FMT, &fmt) < 0)
4185 goto get_fmt_failed;
4186
4187 fmtdesc = gst_v4l2_object_get_format_from_fourcc (v4l2object,
4188 fmt.fmt.pix.pixelformat);
4189 if (fmtdesc == NULL)
4190 goto unsupported_format;
4191
4192   /* No need to care about mplane, the first four params are the same */
4193 format = gst_v4l2_object_v4l2fourcc_to_video_format (fmt.fmt.pix.pixelformat);
4194
4195   /* fails if we do not translate the fmt.pix.pixelformat to GstVideoFormat */
4196 if (format == GST_VIDEO_FORMAT_UNKNOWN)
4197 goto unsupported_format;
4198
4199 if (fmt.fmt.pix.width == 0 || fmt.fmt.pix.height == 0)
4200 goto invalid_dimensions;
4201
4202 width = fmt.fmt.pix.width;
4203 height = fmt.fmt.pix.height;
4204
4205 /* Use the default compose rectangle */
4206 memset (&sel, 0, sizeof (struct v4l2_selection));
4207 sel.type = v4l2object->type;
4208 sel.target = V4L2_SEL_TGT_COMPOSE_DEFAULT;
4209 if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_SELECTION, &sel) >= 0) {
4210 r = &sel.r;
4211 } else {
4212 /* For ancient kernels, fall back to G_CROP */
4213 memset (&crop, 0, sizeof (struct v4l2_crop));
4214 crop.type = v4l2object->type;
4215 if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_CROP, &crop) >= 0)
4216 r = &crop.c;
4217 }
4218 if (r) {
4219 align.padding_left = r->left;
4220 align.padding_top = r->top;
4221 align.padding_right = width - r->width - r->left;
4222 align.padding_bottom = height - r->height - r->top;
4223 width = r->width;
4224 height = r->height;
4225 }
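  /* e.g. (informal): a 1920x1088 coded frame with a 1920x1080 compose rectangle
   * at (0,0) yields padding_bottom = 8 and a reported size of 1920x1080 */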
4226
4227 switch (fmt.fmt.pix.field) {
4228 case V4L2_FIELD_ANY:
4229 case V4L2_FIELD_NONE:
4230 interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
4231 break;
4232 case V4L2_FIELD_INTERLACED:
4233 case V4L2_FIELD_INTERLACED_TB:
4234 case V4L2_FIELD_INTERLACED_BT:
4235 interlace_mode = GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
4236 break;
4237 case V4L2_FIELD_ALTERNATE:
4238 interlace_mode = GST_VIDEO_INTERLACE_MODE_ALTERNATE;
4239 break;
4240 default:
4241 goto unsupported_field;
4242 }
4243
4244 gst_video_info_set_interlaced_format (info, format, interlace_mode, width,
4245 height);
4246
4247 gst_v4l2_object_get_colorspace (v4l2object, &fmt, &info->colorimetry);
4248 gst_v4l2_object_get_streamparm (v4l2object, info);
4249 if ((info->fps_n == 0 && v4l2object->info.fps_d != 0)
4250 && (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE
4251 || v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)) {
4252 info->fps_d = v4l2object->info.fps_d;
4253 info->fps_n = v4l2object->info.fps_n;
4254 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Set capture fps to %d/%d",
4255 info->fps_n, info->fps_d);
4256 }
4257
4258 gst_v4l2_object_save_format (v4l2object, fmtdesc, &fmt, info, &align);
4259
4260 /* Shall we setup the pool ? */
4261
4262 return TRUE;
4263
4264 get_fmt_failed:
4265 {
4266 GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
4267 (_("Video device did not provide output format.")), GST_ERROR_SYSTEM);
4268 return FALSE;
4269 }
4270 invalid_dimensions:
4271 {
4272 GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
4273 (_("Video device returned invalid dimensions.")),
4274 ("Expected non 0 dimensions, got %dx%d", fmt.fmt.pix.width,
4275 fmt.fmt.pix.height));
4276 return FALSE;
4277 }
4278 unsupported_field:
4279 {
4280 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
4281 (_("Video device uses an unsupported interlacing method.")),
4282 ("V4L2 field type %d not supported", fmt.fmt.pix.field));
4283 return FALSE;
4284 }
4285 unsupported_format:
4286 {
4287 GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
4288 (_("Video device uses an unsupported pixel format.")),
4289 ("V4L2 format %" GST_FOURCC_FORMAT " not supported",
4290 GST_FOURCC_ARGS (fmt.fmt.pix.pixelformat)));
4291 return FALSE;
4292 }
4293 }
4294
4295 /**
4296 * gst_v4l2_object_set_crop:
4297 * @obj: the object
4298 * @crop_rect: the region to crop
4299 *
4300 * Crop the video data to the regions specified in the @crop_rect.
4301 *
4302  * For capture devices, this crops the image sensor / video stream provided by
4303 * the V4L2 device.
4304 * For output devices, this crops the memory buffer that GStreamer passed to
4305 * the V4L2 device.
4306 *
4307 * The crop_rect may be modified by the V4L2 device to a region that
4308 * fulfills H/W requirements.
4309 *
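 * A short sketch of a typical call (hypothetical values; @obj is any
 * configured #GstV4l2Object, not taken from this file):
 * |[<!-- language="C" -->
 * struct v4l2_rect rect = { 0, 0, 1280, 720 };
 *
 * if (!gst_v4l2_object_set_crop (obj, &rect))
 *   GST_WARNING ("driver refused the requested crop region");
 * ]|
 *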
4310 * Returns: %TRUE on success, %FALSE on failure.
4311 */
4312 gboolean
4313 gst_v4l2_object_set_crop (GstV4l2Object * obj, struct v4l2_rect * crop_rect)
4314 {
4315 struct v4l2_selection sel = { 0 };
4316 struct v4l2_crop crop = { 0 };
4317
4318 GST_V4L2_CHECK_OPEN (obj);
4319
4320 sel.type = obj->type;
4321 sel.target = V4L2_SEL_TGT_CROP;
4322 sel.flags = 0;
4323 sel.r = *crop_rect;
4324
4325 crop.type = obj->type;
4326 crop.c = sel.r;
4327
4328 GST_DEBUG_OBJECT (obj->dbg_obj,
4329 "Desired cropping left %u, top %u, size %ux%u", crop.c.left, crop.c.top,
4330 crop.c.width, crop.c.height);
4331
4332 if (obj->ioctl (obj->video_fd, VIDIOC_S_SELECTION, &sel) < 0) {
4333 if (errno != ENOTTY) {
4334 GST_WARNING_OBJECT (obj->dbg_obj,
4335 "Failed to set crop rectangle with VIDIOC_S_SELECTION: %s",
4336 g_strerror (errno));
4337 return FALSE;
4338 } else {
4339 if (obj->ioctl (obj->video_fd, VIDIOC_S_CROP, &crop) < 0) {
4340 GST_WARNING_OBJECT (obj->dbg_obj, "VIDIOC_S_CROP failed");
4341 return FALSE;
4342 }
4343
4344 if (obj->ioctl (obj->video_fd, VIDIOC_G_CROP, &crop) < 0) {
4345 GST_WARNING_OBJECT (obj->dbg_obj, "VIDIOC_G_CROP failed");
4346 return FALSE;
4347 }
4348
4349 sel.r = crop.c;
4350 }
4351 }
4352
4353 GST_DEBUG_OBJECT (obj->dbg_obj,
4354 "Got cropping left %u, top %u, size %ux%u", crop.c.left, crop.c.top,
4355 crop.c.width, crop.c.height);
4356
4357 return TRUE;
4358 }
4359
4360 /**
4361 * gst_v4l2_object_setup_padding:
4362 * @obj: v4l2 object
4363 *
4364 * Crop away the padding around the video data as specified
4365  * in the GstVideoAlignment data stored in @obj.
4366 *
4367  * For capture devices, this crops the image sensor / video stream provided by
4368 * the V4L2 device.
4369 * For output devices, this crops the memory buffer that GStreamer passed to
4370 * the V4L2 device.
4371 *
4372 * Returns: %TRUE on success, %FALSE on failure.
4373 */
4374 gboolean
4375 gst_v4l2_object_setup_padding (GstV4l2Object * obj)
4376 {
4377 GstVideoAlignment *align = &obj->align;
4378 struct v4l2_rect crop;
4379
4380 if (align->padding_left + align->padding_top
4381 + align->padding_right + align->padding_bottom == 0) {
4382 GST_DEBUG_OBJECT (obj->dbg_obj, "no cropping needed");
4383 return TRUE;
4384 }
4385
4386 crop.left = align->padding_left;
4387 crop.top = align->padding_top;
4388 crop.width = obj->info.width;
4389 crop.height = GST_VIDEO_INFO_FIELD_HEIGHT (&obj->info);
4390
4391 return gst_v4l2_object_set_crop (obj, &crop);
4392 }
4393
4394 gboolean
4395 gst_v4l2_object_caps_equal (GstV4l2Object * v4l2object, GstCaps * caps)
4396 {
4397 GstStructure *config;
4398 GstCaps *oldcaps;
4399 gboolean ret;
4400
4401 if (!v4l2object->pool)
4402 return FALSE;
4403
4404 config = gst_buffer_pool_get_config (v4l2object->pool);
4405 gst_buffer_pool_config_get_params (config, &oldcaps, NULL, NULL, NULL);
4406
4407 ret = oldcaps && gst_caps_is_equal (caps, oldcaps);
4408
4409 gst_structure_free (config);
4410
4411 return ret;
4412 }
4413
4414 gboolean
4415 gst_v4l2_object_caps_is_subset (GstV4l2Object * v4l2object, GstCaps * caps)
4416 {
4417 GstStructure *config;
4418 GstCaps *oldcaps;
4419 gboolean ret;
4420
4421 if (!v4l2object->pool)
4422 return FALSE;
4423
4424 config = gst_buffer_pool_get_config (v4l2object->pool);
4425 gst_buffer_pool_config_get_params (config, &oldcaps, NULL, NULL, NULL);
4426
4427 ret = oldcaps && gst_caps_is_subset (oldcaps, caps);
4428
4429 gst_structure_free (config);
4430
4431 return ret;
4432 }
4433
4434 GstCaps *
4435 gst_v4l2_object_get_current_caps (GstV4l2Object * v4l2object)
4436 {
4437 GstStructure *config;
4438 GstCaps *oldcaps;
4439
4440 if (!v4l2object->pool)
4441 return NULL;
4442
4443 config = gst_buffer_pool_get_config (v4l2object->pool);
4444 gst_buffer_pool_config_get_params (config, &oldcaps, NULL, NULL, NULL);
4445
4446 if (oldcaps)
4447 gst_caps_ref (oldcaps);
4448
4449 gst_structure_free (config);
4450
4451 return oldcaps;
4452 }
4453
4454 gboolean
4455 gst_v4l2_object_unlock (GstV4l2Object * v4l2object)
4456 {
4457 gboolean ret = TRUE;
4458
4459 GST_LOG_OBJECT (v4l2object->dbg_obj, "start flushing");
4460
4461 if (v4l2object->pool && gst_buffer_pool_is_active (v4l2object->pool))
4462 gst_buffer_pool_set_flushing (v4l2object->pool, TRUE);
4463
4464 return ret;
4465 }
4466
4467 gboolean
4468 gst_v4l2_object_unlock_stop (GstV4l2Object * v4l2object)
4469 {
4470 gboolean ret = TRUE;
4471
4472 GST_LOG_OBJECT (v4l2object->dbg_obj, "stop flushing");
4473
4474 if (v4l2object->pool && gst_buffer_pool_is_active (v4l2object->pool))
4475 gst_buffer_pool_set_flushing (v4l2object->pool, FALSE);
4476
4477 return ret;
4478 }
4479
4480 gboolean
4481 gst_v4l2_object_stop (GstV4l2Object * v4l2object)
4482 {
4483 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "stopping");
4484
4485 if (!GST_V4L2_IS_OPEN (v4l2object))
4486 goto done;
4487 if (!GST_V4L2_IS_ACTIVE (v4l2object))
4488 goto done;
4489
4490 if (v4l2object->pool) {
4491 if (!gst_v4l2_buffer_pool_orphan (&v4l2object->pool)) {
4492 GST_DEBUG_OBJECT (v4l2object->dbg_obj, "deactivating pool");
4493 gst_buffer_pool_set_active (v4l2object->pool, FALSE);
4494 gst_object_unref (v4l2object->pool);
4495 }
4496 v4l2object->pool = NULL;
4497 }
4498
4499 GST_V4L2_SET_INACTIVE (v4l2object);
4500
4501 done:
4502 return TRUE;
4503 }
4504
4505 GstCaps *
4506 gst_v4l2_object_probe_caps (GstV4l2Object * v4l2object, GstCaps * filter)
4507 {
4508 GstCaps *ret;
4509 GSList *walk;
4510 GSList *formats;
4511
4512 formats = gst_v4l2_object_get_format_list (v4l2object);
4513
4514 ret = gst_caps_new_empty ();
4515
4516 if (v4l2object->keep_aspect && !v4l2object->par) {
4517 struct v4l2_cropcap cropcap;
4518
4519 memset (&cropcap, 0, sizeof (cropcap));
4520
4521 cropcap.type = v4l2object->type;
4522 if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_CROPCAP, &cropcap) < 0) {
4523 if (errno != ENOTTY)
4524 GST_WARNING_OBJECT (v4l2object->dbg_obj,
4525 "Failed to probe pixel aspect ratio with VIDIOC_CROPCAP: %s",
4526 g_strerror (errno));
4527 } else if (cropcap.pixelaspect.numerator && cropcap.pixelaspect.denominator) {
4528 v4l2object->par = g_new0 (GValue, 1);
4529 g_value_init (v4l2object->par, GST_TYPE_FRACTION);
4530 gst_value_set_fraction (v4l2object->par, cropcap.pixelaspect.numerator,
4531 cropcap.pixelaspect.denominator);
4532 }
4533 }
4534
4535 for (walk = formats; walk; walk = walk->next) {
4536 struct v4l2_fmtdesc *format;
4537 GstStructure *template;
4538 GstCaps *tmp;
4539
4540 format = (struct v4l2_fmtdesc *) walk->data;
4541
4542 template = gst_v4l2_object_v4l2fourcc_to_bare_struct (format->pixelformat);
4543
4544 if (!template) {
4545 GST_DEBUG_OBJECT (v4l2object->dbg_obj,
4546 "unknown format %" GST_FOURCC_FORMAT,
4547 GST_FOURCC_ARGS (format->pixelformat));
4548 continue;
4549 }
4550
4551 /* If we have a filter, check if we need to probe this format or not */
4552 if (filter) {
4553 GstCaps *format_caps = gst_caps_new_empty ();
4554
4555 gst_caps_append_structure (format_caps, gst_structure_copy (template));
4556
4557 if (!gst_caps_can_intersect (format_caps, filter)) {
4558 gst_caps_unref (format_caps);
4559 gst_structure_free (template);
4560 continue;
4561 }
4562
4563 gst_caps_unref (format_caps);
4564 }
4565
4566 tmp = gst_v4l2_object_probe_caps_for_format (v4l2object,
4567 format->pixelformat, template);
4568 if (tmp) {
4569 gst_caps_append (ret, tmp);
4570
4571 /* Add a variant of the caps with the Interlaced feature so we can negotiate it if needed */
4572 add_alternate_variant (v4l2object, ret, gst_caps_get_structure (ret,
4573 gst_caps_get_size (ret) - 1));
4574 }
4575
4576 gst_structure_free (template);
4577 }
4578
4579 if (filter) {
4580 GstCaps *tmp;
4581
4582 tmp = ret;
4583 ret = gst_caps_intersect_full (filter, ret, GST_CAPS_INTERSECT_FIRST);
4584 gst_caps_unref (tmp);
4585 }
4586
4587 GST_INFO_OBJECT (v4l2object->dbg_obj, "probed caps: %" GST_PTR_FORMAT, ret);
4588
4589 return ret;
4590 }
4591
4592 GstCaps *
4593 gst_v4l2_object_get_caps (GstV4l2Object * v4l2object, GstCaps * filter)
4594 {
4595 GstCaps *ret;
4596
4597 if (v4l2object->probed_caps == NULL)
4598 v4l2object->probed_caps = gst_v4l2_object_probe_caps (v4l2object, NULL);
4599
4600 if (filter) {
4601 ret = gst_caps_intersect_full (filter, v4l2object->probed_caps,
4602 GST_CAPS_INTERSECT_FIRST);
4603 } else {
4604 ret = gst_caps_ref (v4l2object->probed_caps);
4605 }
4606
4607 return ret;
4608 }
4609
4610 static gboolean
4611 gst_v4l2_object_match_buffer_layout (GstV4l2Object * obj, guint n_planes,
4612 gsize offset[GST_VIDEO_MAX_PLANES], gint stride[GST_VIDEO_MAX_PLANES],
4613 gsize buffer_size, guint padded_height)
4614 {
4615 guint p;
4616 gboolean need_fmt_update = FALSE;
4617
4618 if (n_planes != GST_VIDEO_INFO_N_PLANES (&obj->info)) {
4619 GST_WARNING_OBJECT (obj->dbg_obj,
4620         "Cannot match buffers with a different number of planes");
4621 return FALSE;
4622 }
4623
4624 for (p = 0; p < n_planes; p++) {
4625 if (stride[p] < obj->info.stride[p]) {
4626 GST_DEBUG_OBJECT (obj->dbg_obj,
4627 "Not matching as remote stride %i is smaller than %i on plane %u",
4628 stride[p], obj->info.stride[p], p);
4629 return FALSE;
4630 } else if (stride[p] > obj->info.stride[p]) {
4631 GST_LOG_OBJECT (obj->dbg_obj,
4632 "remote stride %i is higher than %i on plane %u",
4633 stride[p], obj->info.stride[p], p);
4634 need_fmt_update = TRUE;
4635 }
4636
4637 if (offset[p] < obj->info.offset[p]) {
4638 GST_DEBUG_OBJECT (obj->dbg_obj,
4639 "Not matching as offset %" G_GSIZE_FORMAT
4640 " is smaller than %" G_GSIZE_FORMAT " on plane %u",
4641 offset[p], obj->info.offset[p], p);
4642 return FALSE;
4643 } else if (offset[p] > obj->info.offset[p]) {
4644 GST_LOG_OBJECT (obj->dbg_obj,
4645 "Remote offset %" G_GSIZE_FORMAT
4646 " is higher than %" G_GSIZE_FORMAT " on plane %u",
4647 offset[p], obj->info.offset[p], p);
4648 need_fmt_update = TRUE;
4649 }
4650
4651 if (padded_height) {
4652 guint fmt_height;
4653
4654 if (V4L2_TYPE_IS_MULTIPLANAR (obj->type))
4655 fmt_height = obj->format.fmt.pix_mp.height;
4656 else
4657 fmt_height = obj->format.fmt.pix.height;
4658
4659 if (padded_height > fmt_height)
4660 need_fmt_update = TRUE;
4661 }
4662 }
4663
4664 if (need_fmt_update) {
4665 struct v4l2_format format;
4666 gint wanted_stride[GST_VIDEO_MAX_PLANES] = { 0, };
4667
4668 format = obj->format;
4669
4670 if (padded_height) {
4671 GST_DEBUG_OBJECT (obj->dbg_obj, "Padded height %u", padded_height);
4672
4673 obj->align.padding_bottom =
4674 padded_height - GST_VIDEO_INFO_FIELD_HEIGHT (&obj->info);
4675 } else {
4676 GST_WARNING_OBJECT (obj->dbg_obj,
4677 "Failed to compute padded height; keep the default one");
4678 padded_height = format.fmt.pix_mp.height;
4679 }
4680
4681 /* update the current format with the stride we want to import from */
4682 if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
4683 guint i;
4684
4685 GST_DEBUG_OBJECT (obj->dbg_obj, "Wanted strides:");
4686
4687 for (i = 0; i < obj->n_v4l2_planes; i++) {
4688 gint plane_stride = stride[i];
4689
4690 if (GST_VIDEO_FORMAT_INFO_IS_TILED (obj->info.finfo))
4691 plane_stride = GST_VIDEO_TILE_X_TILES (plane_stride) <<
4692 GST_VIDEO_FORMAT_INFO_TILE_WS (obj->info.finfo);
4693
4694 format.fmt.pix_mp.plane_fmt[i].bytesperline = plane_stride;
4695 format.fmt.pix_mp.height = padded_height;
4696 wanted_stride[i] = plane_stride;
4697 GST_DEBUG_OBJECT (obj->dbg_obj, " [%u] %i", i, wanted_stride[i]);
4698 }
4699 } else {
4700 gint plane_stride = stride[0];
4701
4702 GST_DEBUG_OBJECT (obj->dbg_obj, "Wanted stride: %i", plane_stride);
4703
4704 if (GST_VIDEO_FORMAT_INFO_IS_TILED (obj->info.finfo))
4705 plane_stride = GST_VIDEO_TILE_X_TILES (plane_stride) <<
4706 GST_VIDEO_FORMAT_INFO_TILE_WS (obj->info.finfo);
4707
4708 format.fmt.pix.bytesperline = plane_stride;
4709 format.fmt.pix.height = padded_height;
4710 wanted_stride[0] = plane_stride;
4711 }
4712
4713 if (obj->ioctl (obj->video_fd, VIDIOC_S_FMT, &format) < 0) {
4714 GST_WARNING_OBJECT (obj->dbg_obj,
4715 "Something went wrong trying to update current format: %s",
4716 g_strerror (errno));
4717 return FALSE;
4718 }
4719
4720 gst_v4l2_object_save_format (obj, obj->fmtdesc, &format, &obj->info,
4721 &obj->align);
4722
4723 if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
4724 guint i;
4725
4726 for (i = 0; i < obj->n_v4l2_planes; i++) {
4727 if (format.fmt.pix_mp.plane_fmt[i].bytesperline != wanted_stride[i]) {
4728 GST_DEBUG_OBJECT (obj->dbg_obj,
4729 "[%i] Driver did not accept the new stride (wants %i, got %i)",
4730 i, wanted_stride[i], format.fmt.pix_mp.plane_fmt[i].bytesperline);
4731 return FALSE;
4732 }
4733 }
4734
4735 if (format.fmt.pix_mp.height != padded_height) {
4736 GST_DEBUG_OBJECT (obj->dbg_obj,
4737 "Driver did not accept the padded height (wants %i, got %i)",
4738 padded_height, format.fmt.pix_mp.height);
4739 }
4740 } else {
4741 if (format.fmt.pix.bytesperline != wanted_stride[0]) {
4742 GST_DEBUG_OBJECT (obj->dbg_obj,
4743 "Driver did not accept the new stride (wants %i, got %i)",
4744 wanted_stride[0], format.fmt.pix.bytesperline);
4745 return FALSE;
4746 }
4747
4748 if (format.fmt.pix.height != padded_height) {
4749 GST_DEBUG_OBJECT (obj->dbg_obj,
4750 "Driver did not accept the padded height (wants %i, got %i)",
4751 padded_height, format.fmt.pix.height);
4752 }
4753 }
4754 }
4755
4756 if (obj->align.padding_bottom) {
4757 /* Crop because of vertical padding */
4758 GST_DEBUG_OBJECT (obj->dbg_obj, "crop because of bottom padding of %d",
4759 obj->align.padding_bottom);
4760 gst_v4l2_object_setup_padding (obj);
4761 }
4762
4763 return TRUE;
4764 }
4765
4766 static gboolean
4767 validate_video_meta_struct (GstV4l2Object * obj, const GstStructure * s)
4768 {
4769 guint i;
4770
4771 for (i = 0; i < gst_structure_n_fields (s); i++) {
4772 const gchar *name = gst_structure_nth_field_name (s, i);
4773
4774 if (!g_str_equal (name, "padding-top")
4775 && !g_str_equal (name, "padding-bottom")
4776 && !g_str_equal (name, "padding-left")
4777 && !g_str_equal (name, "padding-right")) {
4778 GST_WARNING_OBJECT (obj->dbg_obj, "Unknown video meta field: '%s'", name);
4779 return FALSE;
4780 }
4781 }
4782
4783 return TRUE;
4784 }
4785
4786 static gboolean
4787 gst_v4l2_object_match_buffer_layout_from_struct (GstV4l2Object * obj,
4788 const GstStructure * s, GstCaps * caps, guint buffer_size)
4789 {
4790 GstVideoInfo info;
4791 GstVideoAlignment align;
4792 gsize plane_size[GST_VIDEO_MAX_PLANES];
4793
4794 if (!validate_video_meta_struct (obj, s))
4795 return FALSE;
4796
4797 if (!gst_video_info_from_caps (&info, caps)) {
4798 GST_WARNING_OBJECT (obj->dbg_obj, "Failed to create video info");
4799 return FALSE;
4800 }
4801
4802 gst_video_alignment_reset (&align);
4803
4804 gst_structure_get_uint (s, "padding-top", &align.padding_top);
4805 gst_structure_get_uint (s, "padding-bottom", &align.padding_bottom);
4806 gst_structure_get_uint (s, "padding-left", &align.padding_left);
4807 gst_structure_get_uint (s, "padding-right", &align.padding_right);
4808
4809 if (align.padding_top || align.padding_bottom || align.padding_left ||
4810 align.padding_right) {
4811 GST_DEBUG_OBJECT (obj->dbg_obj,
4812 "Upstream requested padding (top: %d bottom: %d left: %d right: %d)",
4813 align.padding_top, align.padding_bottom, align.padding_left,
4814 align.padding_right);
4815 }
4816
4817 if (!gst_video_info_align_full (&info, &align, plane_size)) {
4818 GST_WARNING_OBJECT (obj->dbg_obj, "Failed to align video info");
4819 return FALSE;
4820 }
4821
4822 if (GST_VIDEO_INFO_SIZE (&info) != buffer_size) {
4823 GST_WARNING_OBJECT (obj->dbg_obj,
4824 "Requested buffer size (%d) doesn't match video info size (%"
4825 G_GSIZE_FORMAT ")", buffer_size, GST_VIDEO_INFO_SIZE (&info));
4826 return FALSE;
4827 }
4828
4829 GST_DEBUG_OBJECT (obj->dbg_obj,
4830 "try matching buffer layout requested by downstream");
4831
4832 gst_v4l2_object_match_buffer_layout (obj, GST_VIDEO_INFO_N_PLANES (&info),
4833 info.offset, info.stride, buffer_size,
4834 GST_VIDEO_INFO_PLANE_HEIGHT (&info, 0, plane_size));
4835
4836 return TRUE;
4837 }
4838
4839 gboolean
4840 gst_v4l2_object_decide_allocation (GstV4l2Object * obj, GstQuery * query)
4841 {
4842 GstCaps *caps;
4843 GstBufferPool *pool = NULL, *other_pool = NULL;
4844 GstStructure *config;
4845 guint size, min, max, own_min = 0;
4846 gboolean update;
4847 gboolean has_video_meta;
4848 gboolean can_share_own_pool, pushing_from_our_pool = FALSE;
4849 GstAllocator *allocator = NULL;
4850 GstAllocationParams params = { 0 };
4851 guint video_idx;
4852
4853 GST_DEBUG_OBJECT (obj->dbg_obj, "decide allocation");
4854
4855 g_return_val_if_fail (obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
4856 obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE, FALSE);
4857
4858 gst_query_parse_allocation (query, &caps, NULL);
4859
4860 if (obj->pool == NULL) {
4861 if (!gst_v4l2_object_setup_pool (obj, caps))
4862 goto pool_failed;
4863 }
4864
4865 if (gst_query_get_n_allocation_params (query) > 0)
4866 gst_query_parse_nth_allocation_param (query, 0, &allocator, ¶ms);
4867
4868 if (gst_query_get_n_allocation_pools (query) > 0) {
4869 gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
4870 update = TRUE;
4871 } else {
4872 pool = NULL;
4873 min = max = 0;
4874 size = 0;
4875 update = FALSE;
4876 }
4877
4878 GST_DEBUG_OBJECT (obj->dbg_obj, "allocation: size:%u min:%u max:%u pool:%"
4879 GST_PTR_FORMAT, size, min, max, pool);
4880
4881 has_video_meta =
4882 gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE,
4883 &video_idx);
4884
4885 if (has_video_meta) {
4886 const GstStructure *params;
4887 gst_query_parse_nth_allocation_meta (query, video_idx, ¶ms);
4888
4889 if (params)
4890 gst_v4l2_object_match_buffer_layout_from_struct (obj, params, caps, size);
4891 }
4892
4893 can_share_own_pool = (has_video_meta || !obj->need_video_meta);
4894
4895 gst_v4l2_get_driver_min_buffers (obj);
4896   /* We can't share our own pool if that would exceed the V4L2 capacity (VIDEO_MAX_FRAME) */
4897 if (min + obj->min_buffers + 1 > VIDEO_MAX_FRAME)
4898 can_share_own_pool = FALSE;
4899
4900 /* select a pool */
4901 switch (obj->mode) {
4902 case GST_V4L2_IO_RW:
4903 if (pool) {
4904 /* in READ/WRITE mode, prefer a downstream pool because our own pool
4905 * doesn't help much, we have to write to it as well */
4906 GST_DEBUG_OBJECT (obj->dbg_obj,
4907 "read/write mode: using downstream pool");
4908         /* use the biggest size; when we use our own pool we can't really do any
4909          * other size than what the hardware gives us, but for downstream pools
4910          * we can try */
4911 size = MAX (size, obj->info.size);
4912 } else if (can_share_own_pool) {
4913 /* no downstream pool, use our own then */
4914 GST_DEBUG_OBJECT (obj->dbg_obj,
4915 "read/write mode: no downstream pool, using our own");
4916 pool = gst_object_ref (obj->pool);
4917 size = obj->info.size;
4918 pushing_from_our_pool = TRUE;
4919 }
4920 break;
4921
4922 case GST_V4L2_IO_USERPTR:
4923 case GST_V4L2_IO_DMABUF_IMPORT:
4924 /* in importing mode, prefer our own pool, and pass the other pool to
4925 * our own, so it can serve itself */
4926 if (pool == NULL)
4927 goto no_downstream_pool;
4928 gst_v4l2_buffer_pool_set_other_pool (GST_V4L2_BUFFER_POOL (obj->pool),
4929 pool);
4930 other_pool = pool;
4931 gst_object_unref (pool);
4932 pool = gst_object_ref (obj->pool);
4933 size = obj->info.size;
4934 break;
4935
4936 case GST_V4L2_IO_MMAP:
4937 case GST_V4L2_IO_DMABUF:
4938 /* in streaming mode, prefer our own pool */
4939 /* Check if we can use it ... */
4940 if (can_share_own_pool) {
4941 if (pool)
4942 gst_object_unref (pool);
4943 pool = gst_object_ref (obj->pool);
4944 size = obj->info.size;
4945 GST_DEBUG_OBJECT (obj->dbg_obj,
4946 "streaming mode: using our own pool %" GST_PTR_FORMAT, pool);
4947 pushing_from_our_pool = TRUE;
4948 } else if (pool) {
4949 GST_DEBUG_OBJECT (obj->dbg_obj,
4950 "streaming mode: copying to downstream pool %" GST_PTR_FORMAT,
4951 pool);
4952 } else {
4953 GST_DEBUG_OBJECT (obj->dbg_obj,
4954 "streaming mode: no usable pool, copying to generic pool");
4955 size = MAX (size, obj->info.size);
4956 }
4957 break;
4958 case GST_V4L2_IO_AUTO:
4959 default:
4960 GST_WARNING_OBJECT (obj->dbg_obj, "unhandled mode");
4961 break;
4962 }
4963
4964 if (size == 0)
4965 goto no_size;
4966
4967 /* If pushing from our own pool, configure it with queried minimum,
4968 * otherwise use the minimum required */
4969 if (pushing_from_our_pool) {
4970     /* When pushing from our own pool, we need what the downstream pool wants
4971      * in order to fill the pipeline, plus the minimum the driver requires for
4972      * decoding, plus 2 more, so we don't end up with everything downstream or
4973      * held by the decoder. We account 2 buffers for v4l2 so that when one is
4974      * being pushed downstream the other one can already be queued for the next
4975      * frame. */
4976 own_min = min + obj->min_buffers + 2;
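    /* e.g. (informal): with a downstream minimum of 2 and a driver minimum of 4,
     * own_min works out to 8 buffers here */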
4977
4978     /* If no allocation parameters were provided, allow for a few more
4979      * buffers and enable the copy threshold */
4980 if (!update) {
4981 own_min += 2;
4982 gst_v4l2_buffer_pool_copy_at_threshold (GST_V4L2_BUFFER_POOL (pool),
4983 TRUE);
4984 } else {
4985 gst_v4l2_buffer_pool_copy_at_threshold (GST_V4L2_BUFFER_POOL (pool),
4986 FALSE);
4987 }
4988
4989 } else {
4990     /* In this case we'll have to configure two buffer pools. For our own buffer
4991      * pool, we'll need what the driver requires and one more, so we can dequeue */
4992 own_min = obj->min_buffers + 1;
4993 own_min = MAX (own_min, GST_V4L2_MIN_BUFFERS (obj));
4994
4995 /* for the downstream pool, we keep what downstream wants, though ensure
4996 * at least a minimum if downstream didn't suggest anything (we are
4997 * expecting the base class to create a default one for the context) */
4998 min = MAX (min, GST_V4L2_MIN_BUFFERS (obj));
4999
5000 /* To import we need the other pool to hold at least own_min */
5001 if (obj->pool == pool)
5002 min += own_min;
5003 }
5004
5005 /* Request a bigger max, if one was suggested but it's too small */
5006 if (max != 0)
5007 max = MAX (min, max);
5008
5009 /* First step, configure our own pool */
5010 config = gst_buffer_pool_get_config (obj->pool);
5011
5012 if (obj->need_video_meta || has_video_meta) {
5013 GST_DEBUG_OBJECT (obj->dbg_obj, "activate Video Meta");
5014 gst_buffer_pool_config_add_option (config,
5015 GST_BUFFER_POOL_OPTION_VIDEO_META);
5016 }
5017
5018 gst_buffer_pool_config_set_allocator (config, allocator, ¶ms);
5019 gst_buffer_pool_config_set_params (config, caps, size, own_min, 0);
5020
5021 GST_DEBUG_OBJECT (obj->dbg_obj, "setting own pool config to %"
5022 GST_PTR_FORMAT, config);
5023
5024   /* Our pool often needs to adjust the values */
5025 if (!gst_buffer_pool_set_config (obj->pool, config)) {
5026 config = gst_buffer_pool_get_config (obj->pool);
5027
5028 GST_DEBUG_OBJECT (obj->dbg_obj, "own pool config changed to %"
5029 GST_PTR_FORMAT, config);
5030
5031 /* our pool will adjust the maximum buffer, which we are fine with */
5032 if (!gst_buffer_pool_set_config (obj->pool, config))
5033 goto config_failed;
5034 }
5035
5036 /* Now configure the other pool if different */
5037 if (obj->pool != pool)
5038 other_pool = pool;
5039
5040 if (other_pool) {
5041 config = gst_buffer_pool_get_config (other_pool);
5042 gst_buffer_pool_config_set_allocator (config, allocator, ¶ms);
5043 gst_buffer_pool_config_set_params (config, caps, size, min, max);
5044
5045 GST_DEBUG_OBJECT (obj->dbg_obj, "setting other pool config to %"
5046 GST_PTR_FORMAT, config);
5047
5048 /* if downstream supports video metadata, add this to the pool config */
5049 if (has_video_meta) {
5050 GST_DEBUG_OBJECT (obj->dbg_obj, "activate Video Meta");
5051 gst_buffer_pool_config_add_option (config,
5052 GST_BUFFER_POOL_OPTION_VIDEO_META);
5053 }
5054
5055 if (!gst_buffer_pool_set_config (other_pool, config)) {
5056 config = gst_buffer_pool_get_config (other_pool);
5057
5058 if (!gst_buffer_pool_config_validate_params (config, caps, size, min,
5059 max)) {
5060 gst_structure_free (config);
5061 goto config_failed;
5062 }
5063
5064 if (!gst_buffer_pool_set_config (other_pool, config))
5065 goto config_failed;
5066 }
5067 }
5068
5069 if (pool) {
5070 /* For simplicity, simply read back the active configuration, so our base
5071 * class gets the right information */
5072 config = gst_buffer_pool_get_config (pool);
5073 gst_buffer_pool_config_get_params (config, NULL, &size, &min, &max);
5074 gst_structure_free (config);
5075 }
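/* The size/min/max reported in the allocation query below therefore reflect
 * what the pool actually accepted, not what we initially asked for. */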
5076
5077 if (update)
5078 gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
5079 else
5080 gst_query_add_allocation_pool (query, pool, size, min, max);
5081
5082 if (allocator)
5083 gst_object_unref (allocator);
5084
5085 if (pool)
5086 gst_object_unref (pool);
5087
5088 return TRUE;
5089
5090 pool_failed:
5091 {
5092 /* setup_pool already sent the error */
5093 goto cleanup;
5094 }
5095 config_failed:
5096 {
5097 GST_ELEMENT_ERROR (obj->element, RESOURCE, SETTINGS,
5098 (_("Failed to configure internal buffer pool.")), (NULL));
5099 goto cleanup;
5100 }
5101 no_size:
5102 {
5103 GST_ELEMENT_ERROR (obj->element, RESOURCE, SETTINGS,
5104 (_("Video device did not suggest any buffer size.")), (NULL));
5105 goto cleanup;
5106 }
5107 cleanup:
5108 {
5109 if (allocator)
5110 gst_object_unref (allocator);
5111
5112 if (pool)
5113 gst_object_unref (pool);
5114 return FALSE;
5115 }
5116 no_downstream_pool:
5117 {
5118 GST_ELEMENT_ERROR (obj->element, RESOURCE, SETTINGS,
5119 (_("No downstream pool to import from.")),
5120 ("When importing DMABUF or USERPTR, we need a pool to import from"));
5121 return FALSE;
5122 }
5123 }
5124
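/* gst_v4l2_object_propose_allocation:
 *
 * Answers an upstream ALLOCATION query: optionally offers our own, still
 * inactive MMAP/DMABUF pool when a pool was requested, and always advertises
 * GstVideoMeta support together with size/min/max hints. A minimal sketch of
 * how a sink element might hook this up, with MySink purely hypothetical:
 *
 *   static gboolean
 *   my_sink_propose_allocation (GstBaseSink * sink, GstQuery * query)
 *   {
 *     MySink *self = MY_SINK (sink);
 *
 *     return gst_v4l2_object_propose_allocation (self->v4l2object, query);
 *   }
 */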
5125 gboolean
5126 gst_v4l2_object_propose_allocation (GstV4l2Object * obj, GstQuery * query)
5127 {
5128 GstBufferPool *pool = NULL;
5129 /* we need at least 2 buffers to operate */
5130 guint size, min, max;
5131 GstCaps *caps;
5132 gboolean need_pool;
5133
5134 /* Set default allocation parameters */
5135 size = obj->info.size;
5136 min = GST_V4L2_MIN_BUFFERS (obj);
5137 max = VIDEO_MAX_FRAME;
5138
5139 gst_query_parse_allocation (query, &caps, &need_pool);
5140
5141 if (caps == NULL)
5142 goto no_caps;
5143
5144 switch (obj->mode) {
5145 case GST_V4L2_IO_MMAP:
5146 case GST_V4L2_IO_DMABUF:
5147 if (need_pool && obj->pool) {
5148 if (!gst_buffer_pool_is_active (obj->pool))
5149 pool = gst_object_ref (obj->pool);
5150 }
5151 break;
5152 default:
5153 break;
5154 }
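/* Only the MMAP and DMABUF io-modes can offer our internal pool (and only
 * while it is not active yet); for the other modes the buffers come from
 * elsewhere, so we fall through with pool == NULL and merely report the
 * size/min/max hints below. */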
5155
5156 if (pool != NULL) {
5157 GstCaps *pcaps;
5158 GstStructure *config;
5159
5160 /* we had a pool, check caps */
5161 config = gst_buffer_pool_get_config (pool);
5162 gst_buffer_pool_config_get_params (config, &pcaps, NULL, NULL, NULL);
5163
5164 GST_DEBUG_OBJECT (obj->dbg_obj,
5165 "we had a pool with caps %" GST_PTR_FORMAT, pcaps);
5166 if (!gst_caps_is_equal (caps, pcaps)) {
5167 gst_structure_free (config);
5168 gst_object_unref (pool);
5169 goto different_caps;
5170 }
5171 gst_structure_free (config);
5172 }
5173 gst_v4l2_get_driver_min_buffers (obj);
5174
5175 min = MAX (obj->min_buffers, GST_V4L2_MIN_BUFFERS (obj));
5176
5177 gst_query_add_allocation_pool (query, pool, size, min, max);
5178
5179 /* we also support various metadata */
5180 gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
5181
5182 if (pool)
5183 gst_object_unref (pool);
5184
5185 return TRUE;
5186
5187 /* ERRORS */
5188 no_caps:
5189 {
5190 GST_DEBUG_OBJECT (obj->dbg_obj, "no caps specified");
5191 return FALSE;
5192 }
5193 different_caps:
5194 {
5195 /* different caps, we can't use this pool */
5196 GST_DEBUG_OBJECT (obj->dbg_obj, "pool has different caps");
5197 return FALSE;
5198 }
5199 }
5200
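/* gst_v4l2_object_try_import:
 *
 * Checks whether @buffer looks importable into the driver: the io-mode must
 * be USERPTR or DMABUF_IMPORT, the layout described by its GstVideoMeta (if
 * any) must match what the driver expects, the number of GstMemory blocks
 * must be 1 or match the number of V4L2 planes, and for DMABUF_IMPORT every
 * memory must be DMABuf. The kernel driver still has the final word when the
 * buffer is actually queued. A caller would typically fall back to copying,
 * sketched here with a hypothetical helper:
 *
 *   if (!gst_v4l2_object_try_import (obj, inbuf))
 *     inbuf = my_copy_into_internal_pool (obj, inbuf);
 */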
5201 gboolean
5202 gst_v4l2_object_try_import (GstV4l2Object * obj, GstBuffer * buffer)
5203 {
5204 GstVideoMeta *vmeta;
5205 guint n_mem = gst_buffer_n_memory (buffer);
5206
5207 /* only import if requested */
5208 switch (obj->mode) {
5209 case GST_V4L2_IO_USERPTR:
5210 case GST_V4L2_IO_DMABUF_IMPORT:
5211 break;
5212 default:
5213 GST_DEBUG_OBJECT (obj->dbg_obj,
5214 "The io-mode does not allow importing buffers");
5215 return FALSE;
5216 }
5217
5218 vmeta = gst_buffer_get_video_meta (buffer);
5219 if (!vmeta && obj->need_video_meta) {
5220 GST_DEBUG_OBJECT (obj->dbg_obj, "Downstream buffer uses standard "
5221 "stride/offset while the driver does not.");
5222 return FALSE;
5223 }
5224
5225 /* we need matching strides/offsets and size */
5226 if (vmeta) {
5227 guint plane_height[GST_VIDEO_MAX_PLANES] = { 0, };
5228
5229 gst_video_meta_get_plane_height (vmeta, plane_height);
5230
5231 if (!gst_v4l2_object_match_buffer_layout (obj, vmeta->n_planes,
5232 vmeta->offset, vmeta->stride, gst_buffer_get_size (buffer),
5233 plane_height[0]))
5234 return FALSE;
5235 }
5236
5237 /* we can always import a single-memory buffer, but otherwise the buffer
5238 * must have the same number of memory objects as there are V4L2 planes. */
5239 if (n_mem != 1 && n_mem != obj->n_v4l2_planes) {
5240 GST_DEBUG_OBJECT (obj->dbg_obj, "Can only import buffers with %i memory "
5241 "blocks, this buffer contains %u", obj->n_v4l2_planes, n_mem);
5242 return FALSE;
5243 }
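/* For example, if the driver exposes 2 v4l2 planes (say NV12M), a downstream
 * buffer carried in either one contiguous GstMemory or two memories (Y and
 * UV) passes this check, while a buffer split into three memories would be
 * rejected here (the format names are only illustrative). */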
5244
5245 /* For DMABuf import we of course need DMABuf memory */
5246 if (obj->mode == GST_V4L2_IO_DMABUF_IMPORT) {
5247 guint i;
5248
5249 for (i = 0; i < n_mem; i++) {
5250 GstMemory *mem = gst_buffer_peek_memory (buffer, i);
5251
5252 if (!gst_is_dmabuf_memory (mem)) {
5253 GST_DEBUG_OBJECT (obj->dbg_obj, "Cannot import non-DMABuf memory.");
5254 return FALSE;
5255 }
5256 }
5257 }
5258
5259 /* for the remaining checks, only the kernel driver can tell */
5260 return TRUE;
5261 }
5262