1 /* SPDX-License-Identifier: (GPL-2.0-only OR BSD-3-Clause) */
2 /*
3  * V4L2 C helper header providing wrappers to simplify access to the various
4  * v4l2 functions.
5  *
6  * Copyright 2014-2016 Cisco Systems, Inc. and/or its affiliates. All rights reserved.
7  */
8 
9 #ifndef _V4L_HELPERS_H_
10 #define _V4L_HELPERS_H_
11 
12 #include <linux/v4l2-subdev.h>
13 #include <linux/videodev2.h>
14 #include <string.h>
15 #include <stdlib.h>
16 #include <stdio.h>
17 #include <stdarg.h>
18 #include <time.h>
19 #include <unistd.h>
20 #include <sys/ioctl.h>
21 #include <fcntl.h>
22 #include <sys/mman.h>
23 #include <errno.h>
24 
25 #ifdef __cplusplus
26 extern "C" {
27 #endif /* __cplusplus */
28 
29 struct v4l_fd {
30 	int fd;
31 	struct v4l2_capability cap;
32 	char devname[128];
33 	__u32 type;
34 	__u32 caps;
35 	unsigned int trace;
36 	bool direct;
37 	bool have_query_ext_ctrl;
38 	bool have_ext_ctrls;
39 	bool have_next_ctrl;
40 	bool have_selection;
41 	bool is_subdev;
42 	bool is_media;
43 	bool have_streams;
44 	bool ival_uses_which;
45 
46 	int (*open)(struct v4l_fd *f, const char *file, int oflag, ...);
47 	int (*close)(struct v4l_fd *f);
48 	int (*ioctl)(struct v4l_fd *f, unsigned long cmd, ...);
49 	ssize_t (*read)(struct v4l_fd *f, void *buffer, size_t n);
50 	ssize_t (*write)(struct v4l_fd *f, const void *buffer, size_t n);
51 	void *(*mmap)(void *addr, size_t length, int prot, int flags,
52 		      struct v4l_fd *f, off_t offset);
53 	int (*munmap)(struct v4l_fd *f, void *addr, size_t length);
54 };
55 
56 #ifdef __LIBV4L2_H
57 
58 static inline int v4l_wrap_open(struct v4l_fd *f, const char *file, int oflag, ...)
59 {
60  	return f->direct ? open(file, oflag) : v4l2_open(file, oflag);
61 }
62 
63 static inline int v4l_wrap_close(struct v4l_fd *f)
64 {
65 	int ret = f->direct ? close(f->fd) : v4l2_close(f->fd);
66 
67 	f->fd = -1;
68 	return ret;
69 }
70 
71 static inline ssize_t v4l_wrap_read(struct v4l_fd *f, void *buffer, size_t n)
72 {
73 	return f->direct ? read(f->fd, buffer, n) : v4l2_read(f->fd, buffer, n);
74 }
75 
76 static inline ssize_t v4l_wrap_write(struct v4l_fd *f, const void *buffer, size_t n)
77 {
78 	return f->direct ? write(f->fd, buffer, n) : v4l2_write(f->fd, buffer, n);
79 }
80 
81 static inline int v4l_wrap_ioctl(struct v4l_fd *f, unsigned long cmd, ...)
82 {
83 	void *arg;
84 	va_list ap;
85 
86 	va_start(ap, cmd);
87 	arg = va_arg(ap, void *);
88 	va_end(ap);
89 	return f->direct ? ioctl(f->fd, cmd, arg) : v4l2_ioctl(f->fd, cmd, arg);
90 }
91 
92 static inline void *v4l_wrap_mmap(void *start, size_t length, int prot, int flags,
93 		struct v4l_fd *f, off_t offset)
94 {
95  	return f->direct ? mmap(start, length, prot, flags, f->fd, offset) :
96 		v4l2_mmap(start, length, prot, flags, f->fd, offset);
97 }
98 
99 static inline int v4l_wrap_munmap(struct v4l_fd *f, void *start, size_t length)
100 {
101  	return f->direct ? munmap(start, length) : v4l2_munmap(start, length);
102 }
103 
104 static inline bool v4l_fd_g_direct(const struct v4l_fd *f)
105 {
106 	return f->direct;
107 }
108 
109 static inline void v4l_fd_s_direct(struct v4l_fd *f, bool direct)
110 {
111 	if (!f->is_subdev && !f->is_media)
112 		f->direct = direct;
113 }
114 
115 #else
116 
117 static inline int v4l_wrap_open(struct v4l_fd *f, const char *file, int oflag, ...)
118 {
119  	return open(file, oflag);
120 }
121 
122 static inline int v4l_wrap_close(struct v4l_fd *f)
123 {
124 	int ret = close(f->fd);
125 
126 	f->fd = -1;
127 	return ret;
128 }
129 
130 static inline ssize_t v4l_wrap_read(struct v4l_fd *f, void *buffer, size_t n)
131 {
132 	return read(f->fd, buffer, n);
133 }
134 
135 static inline ssize_t v4l_wrap_write(struct v4l_fd *f, const void *buffer, size_t n)
136 {
137 	return write(f->fd, buffer, n);
138 }
139 
140 static inline int v4l_wrap_ioctl(struct v4l_fd *f, unsigned long cmd, ...)
141 {
142 	void *arg;
143 	va_list ap;
144 
145 	va_start(ap, cmd);
146 	arg = va_arg(ap, void *);
147 	va_end(ap);
148 	return ioctl(f->fd, cmd, arg);
149 }
150 
151 static inline void *v4l_wrap_mmap(void *start, size_t length, int prot, int flags,
152 		struct v4l_fd *f, off_t offset)
153 {
154  	return mmap(start, length, prot, flags, f->fd, offset);
155 }
156 
157 static inline int v4l_wrap_munmap(struct v4l_fd *f, void *start, size_t length)
158 {
159  	return munmap(start, length);
160 }
161 
162 static inline bool v4l_fd_g_direct(const struct v4l_fd *f)
163 {
164 	return true;
165 }
166 
167 static inline void v4l_fd_s_direct(struct v4l_fd *f, bool direct)
168 {
169 }
170 
171 #endif
172 
173 static inline void v4l_fd_init(struct v4l_fd *f)
174 {
175 	memset(f, 0, sizeof(*f));
176 	f->fd = -1;
177 	f->is_subdev = false;
178 	f->is_media = false;
179 	f->open = v4l_wrap_open;
180 	f->close = v4l_wrap_close;
181 	f->ioctl = v4l_wrap_ioctl;
182 	f->read = v4l_wrap_read;
183 	f->write = v4l_wrap_write;
184 	f->mmap = v4l_wrap_mmap;
185 	f->munmap = v4l_wrap_munmap;
186 }
187 
188 static inline bool v4l_fd_is_subdev(const struct v4l_fd *f)
189 {
190 	return f->is_subdev;
191 }
192 
193 static inline bool v4l_fd_is_media(const struct v4l_fd *f)
194 {
195 	return f->is_media;
196 }
197 
198 static inline bool v4l_fd_is_v4l2(const struct v4l_fd *f)
199 {
200 	return !f->is_subdev && !f->is_media;
201 }
202 
203 static inline unsigned int v4l_fd_g_trace(const struct v4l_fd *f)
204 {
205 	return f->trace;
206 }
207 
208 static inline void v4l_fd_s_trace(struct v4l_fd *f, unsigned int trace)
209 {
210 	f->trace = trace;
211 }
212 
213 static inline int v4l_named_ioctl(struct v4l_fd *f,
214 		const char *cmd_name, unsigned long cmd, void *arg)
215 {
216 	int retval;
217 	int e;
218 
219 	retval = f->ioctl(f, cmd, arg);
220 	e = retval == 0 ? 0 : errno;
221 	if (f->trace >= (e ? 1 : 2))
222 		fprintf(stderr, "\t\t%s returned %d (%s)\n",
223 				cmd_name, retval, strerror(e));
224 	return retval == -1 ? e : (retval ? -1 : 0);
225 }
226 
227 #define v4l_ioctl(f, cmd, arg) v4l_named_ioctl(f, #cmd, cmd, arg)
228 
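/*
 * Usage note with a small sketch (the variable names below are illustrative
 * assumptions): unlike a raw ioctl(), v4l_ioctl() returns 0 on success and
 * the errno value on failure, so the result can be compared against error
 * codes directly:
 *
 *	struct v4l2_capability cap;
 *	int err = v4l_ioctl(&fd, VIDIOC_QUERYCAP, &cap);
 *
 *	if (err == ENOTTY)
 *		fprintf(stderr, "not a V4L2 device node\n");
 *	else if (err)
 *		fprintf(stderr, "VIDIOC_QUERYCAP: %s\n", strerror(err));
 *
 * Here fd is assumed to be an already opened struct v4l_fd. With
 * v4l_fd_s_trace(&fd, 1) failing ioctls are logged to stderr, and with
 * trace level 2 every wrapped ioctl is logged.
 */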
229 static inline void *v4l_mmap(struct v4l_fd *f, size_t length, off_t offset)
230 {
231 	return f->mmap(NULL, length, PROT_READ | PROT_WRITE, MAP_SHARED, f, offset);
232 }
233 
234 static inline int v4l_munmap(struct v4l_fd *f, void *start, size_t length)
235 {
236 	return f->munmap(f, start, length);
237 }
238 
239 static inline ssize_t v4l_read(struct v4l_fd *f, void *buffer, size_t n)
240 {
241 	return f->read(f, buffer, n);
242 }
243 
244 static inline ssize_t v4l_write(struct v4l_fd *f, const void *buffer, size_t n)
245 {
246 	return f->write(f, buffer, n);
247 }
248 
249 static inline int v4l_close(struct v4l_fd *f)
250 {
251 	int res = f->close(f);
252 
253 	f->caps = f->type = 0;
254 	f->fd = -1;
255 	return res;
256 }
257 
258 static inline int v4l_querycap(struct v4l_fd *f, struct v4l2_capability *cap)
259 {
260 	return v4l_ioctl(f, VIDIOC_QUERYCAP, cap);
261 }
262 
263 static inline __u32 v4l_capability_g_caps(const struct v4l2_capability *cap)
264 {
265 	return (cap->capabilities & V4L2_CAP_DEVICE_CAPS) ?
266 			cap->device_caps : cap->capabilities;
267 }
268 
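/*
 * Sketch: device_caps describes what this particular device node can do,
 * while capabilities covers the physical device as a whole, so
 * v4l_capability_g_caps() prefers device_caps whenever the driver sets
 * V4L2_CAP_DEVICE_CAPS. Typical use (fd is an assumed, opened struct v4l_fd):
 *
 *	struct v4l2_capability cap;
 *
 *	if (!v4l_querycap(&fd, &cap) &&
 *	    (v4l_capability_g_caps(&cap) & V4L2_CAP_VIDEO_CAPTURE))
 *		printf("%s is a video capture node\n", cap.card);
 */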
269 static inline __u32 v4l_g_type(const struct v4l_fd *f)
270 {
271 	return f->type;
272 }
273 
274 static inline void v4l_s_type(struct v4l_fd *f, __u32 type)
275 {
276 	f->type = type;
277 }
278 
279 static inline __u32 v4l_g_selection_type(const struct v4l_fd *f)
280 {
281 	if (f->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
282 		return V4L2_BUF_TYPE_VIDEO_CAPTURE;
283 	if (f->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
284 		return V4L2_BUF_TYPE_VIDEO_OUTPUT;
285 	return f->type;
286 }
287 
288 static inline __u32 v4l_g_caps(const struct v4l_fd *f)
289 {
290 	return f->caps;
291 }
292 
293 static inline bool v4l_has_vid_cap(const struct v4l_fd *f)
294 {
295 	return v4l_g_caps(f) & (V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_VIDEO_CAPTURE_MPLANE |
296 				V4L2_CAP_VIDEO_M2M | V4L2_CAP_VIDEO_M2M_MPLANE);
297 }
298 
299 static inline bool v4l_has_vid_out(const struct v4l_fd *f)
300 {
301 	return v4l_g_caps(f) & (V4L2_CAP_VIDEO_OUTPUT | V4L2_CAP_VIDEO_OUTPUT_MPLANE |
302 				V4L2_CAP_VIDEO_M2M | V4L2_CAP_VIDEO_M2M_MPLANE);
303 }
304 
305 static inline bool v4l_has_vid_m2m(const struct v4l_fd *f)
306 {
307 	return v4l_g_caps(f) & (V4L2_CAP_VIDEO_M2M | V4L2_CAP_VIDEO_M2M_MPLANE);
308 }
309 
310 static inline bool v4l_has_vid_mplane(const struct v4l_fd *f)
311 {
312 	return v4l_g_caps(f) & (V4L2_CAP_VIDEO_CAPTURE_MPLANE |
313 				V4L2_CAP_VIDEO_OUTPUT_MPLANE |
314 				V4L2_CAP_VIDEO_M2M_MPLANE);
315 }
316 
317 static inline bool v4l_has_overlay_cap(const struct v4l_fd *f)
318 {
319 	return v4l_g_caps(f) & V4L2_CAP_VIDEO_OVERLAY;
320 }
321 
322 static inline bool v4l_has_overlay_out(const struct v4l_fd *f)
323 {
324 	return v4l_g_caps(f) & V4L2_CAP_VIDEO_OUTPUT_OVERLAY;
325 }
326 
327 static inline bool v4l_has_raw_vbi_cap(const struct v4l_fd *f)
328 {
329 	return v4l_g_caps(f) & V4L2_CAP_VBI_CAPTURE;
330 }
331 
332 static inline bool v4l_has_sliced_vbi_cap(const struct v4l_fd *f)
333 {
334 	return v4l_g_caps(f) & V4L2_CAP_SLICED_VBI_CAPTURE;
335 }
336 
337 static inline bool v4l_has_vbi_cap(const struct v4l_fd *f)
338 {
339 	return v4l_has_raw_vbi_cap(f) || v4l_has_sliced_vbi_cap(f);
340 }
341 
342 static inline bool v4l_has_raw_vbi_out(const struct v4l_fd *f)
343 {
344 	return v4l_g_caps(f) & V4L2_CAP_VBI_OUTPUT;
345 }
346 
347 static inline bool v4l_has_sliced_vbi_out(const struct v4l_fd *f)
348 {
349 	return v4l_g_caps(f) & V4L2_CAP_SLICED_VBI_OUTPUT;
350 }
351 
352 static inline bool v4l_has_vbi_out(const struct v4l_fd *f)
353 {
354 	return v4l_has_raw_vbi_out(f) || v4l_has_sliced_vbi_out(f);
355 }
356 
357 static inline bool v4l_has_vbi(const struct v4l_fd *f)
358 {
359 	return v4l_has_vbi_cap(f) || v4l_has_vbi_out(f);
360 }
361 
362 static inline bool v4l_has_radio_rx(const struct v4l_fd *f)
363 {
364 	return (v4l_g_caps(f) & V4L2_CAP_RADIO) &&
365 	       (v4l_g_caps(f) & V4L2_CAP_TUNER);
366 }
367 
368 static inline bool v4l_has_radio_tx(const struct v4l_fd *f)
369 {
370 	return v4l_g_caps(f) & V4L2_CAP_MODULATOR;
371 }
372 
373 static inline bool v4l_has_rds_cap(const struct v4l_fd *f)
374 {
375 	return v4l_g_caps(f) & V4L2_CAP_RDS_CAPTURE;
376 }
377 
378 static inline bool v4l_has_rds_out(const struct v4l_fd *f)
379 {
380 	return v4l_g_caps(f) & V4L2_CAP_RDS_OUTPUT;
381 }
382 
383 static inline bool v4l_has_sdr_cap(const struct v4l_fd *f)
384 {
385 	return v4l_g_caps(f) & V4L2_CAP_SDR_CAPTURE;
386 }
387 
388 static inline bool v4l_has_sdr_out(const struct v4l_fd *f)
389 {
390 	return v4l_g_caps(f) & V4L2_CAP_SDR_OUTPUT;
391 }
392 
393 static inline bool v4l_has_meta_cap(const struct v4l_fd *f)
394 {
395 	return v4l_g_caps(f) & V4L2_CAP_META_CAPTURE;
396 }
397 
398 static inline bool v4l_has_meta_out(const struct v4l_fd *f)
399 {
400 	return v4l_g_caps(f) & V4L2_CAP_META_OUTPUT;
401 }
402 
403 static inline bool v4l_has_touch(const struct v4l_fd *f)
404 {
405 	return v4l_g_caps(f) & V4L2_CAP_TOUCH;
406 }
407 
408 static inline bool v4l_has_hwseek(const struct v4l_fd *f)
409 {
410 	return v4l_g_caps(f) & V4L2_CAP_HW_FREQ_SEEK;
411 }
412 
413 static inline bool v4l_has_rw(const struct v4l_fd *f)
414 {
415 	return v4l_g_caps(f) & V4L2_CAP_READWRITE;
416 }
417 
418 static inline bool v4l_has_streaming(const struct v4l_fd *f)
419 {
420 	return v4l_g_caps(f) & V4L2_CAP_STREAMING;
421 }
422 
423 static inline bool v4l_has_ext_pix_format(const struct v4l_fd *f)
424 {
425 	return v4l_g_caps(f) & V4L2_CAP_EXT_PIX_FORMAT;
426 }
427 
428 static inline __u32 v4l_determine_type(const struct v4l_fd *f)
429 {
430 	if (v4l_has_vid_mplane(f))
431 		return v4l_has_vid_cap(f) ? V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE :
432 					    V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
433 	if (v4l_has_vid_cap(f))
434 		return V4L2_BUF_TYPE_VIDEO_CAPTURE;
435 	if (v4l_has_vid_out(f))
436 		return V4L2_BUF_TYPE_VIDEO_OUTPUT;
437 	if (v4l_has_raw_vbi_cap(f))
438 		return V4L2_BUF_TYPE_VBI_CAPTURE;
439 	if (v4l_has_sliced_vbi_cap(f))
440 		return V4L2_BUF_TYPE_SLICED_VBI_CAPTURE;
441 	if (v4l_has_raw_vbi_out(f))
442 		return V4L2_BUF_TYPE_VBI_OUTPUT;
443 	if (v4l_has_sliced_vbi_out(f))
444 		return V4L2_BUF_TYPE_SLICED_VBI_OUTPUT;
445 	if (v4l_has_sdr_cap(f))
446 		return V4L2_BUF_TYPE_SDR_CAPTURE;
447 	if (v4l_has_sdr_out(f))
448 		return V4L2_BUF_TYPE_SDR_OUTPUT;
449 	if (v4l_has_meta_cap(f))
450 		return V4L2_BUF_TYPE_META_CAPTURE;
451 	if (v4l_has_meta_out(f))
452 		return V4L2_BUF_TYPE_META_OUTPUT;
453 
454 	return 0;
455 }
456 
457 static inline int v4l_s_fd(struct v4l_fd *f, int fd, const char *devname, bool direct)
458 {
459 	struct v4l2_query_ext_ctrl qec;
460 	struct v4l2_ext_controls ec;
461 	struct v4l2_queryctrl qc;
462 	struct v4l2_selection sel;
463 
464 	if (f->fd >= 0)
465 		f->close(f);
466 
467 	f->fd = fd;
468 	f->direct = direct;
469 	if (fd < 0)
470 		return fd;
471 
472 	memset(&qec, 0, sizeof(qec));
473 	qec.id = V4L2_CTRL_FLAG_NEXT_CTRL | V4L2_CTRL_FLAG_NEXT_COMPOUND;
474 	memset(&ec, 0, sizeof(ec));
475 	memset(&qc, 0, sizeof(qc));
476 	qc.id = V4L2_CTRL_FLAG_NEXT_CTRL;
477 	memset(&sel, 0, sizeof(sel));
478 
479 	if (f->devname != devname)
480 		strncpy(f->devname, devname, sizeof(f->devname));
481 	f->devname[sizeof(f->devname) - 1] = '\0';
482 
483 	memset(&f->cap, 0, sizeof(f->cap));
484 	if (v4l_querycap(f, &f->cap)) {
485 		v4l_close(f);
486 		return -1;
487 	}
488 	f->is_subdev = false;
489 	f->is_media = false;
490 	f->caps = v4l_capability_g_caps(&f->cap);
491 	f->type = v4l_determine_type(f);
492 
493 	f->have_query_ext_ctrl = v4l_ioctl(f, VIDIOC_QUERY_EXT_CTRL, &qec) == 0;
494 	f->have_ext_ctrls = v4l_ioctl(f, VIDIOC_TRY_EXT_CTRLS, &ec) == 0;
495 	f->have_next_ctrl = v4l_ioctl(f, VIDIOC_QUERYCTRL, &qc) == 0;
496 	sel.type = v4l_g_selection_type(f);
497 	sel.target = sel.type == V4L2_BUF_TYPE_VIDEO_CAPTURE ?
498 			V4L2_SEL_TGT_CROP : V4L2_SEL_TGT_COMPOSE;
499 	f->have_selection = v4l_ioctl(f, VIDIOC_G_SELECTION, &sel) != ENOTTY;
500 
501 	return f->fd;
502 }
503 
504 static inline int v4l_open(struct v4l_fd *f, const char *devname, bool non_blocking)
505 {
506 	int fd = f->open(f, devname, O_RDWR | (non_blocking ? O_NONBLOCK : 0));
507 
508 	return v4l_s_fd(f, fd, devname, f->direct);
509 }
510 
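/*
 * Example: opening a video device. A minimal sketch; the path /dev/video0
 * and the surrounding error handling are illustrative assumptions:
 *
 *	struct v4l_fd fd;
 *
 *	v4l_fd_init(&fd);
 *	if (v4l_open(&fd, "/dev/video0", false) < 0) {
 *		perror("/dev/video0");
 *		return -1;
 *	}
 *	if (!v4l_has_vid_cap(&fd))
 *		fprintf(stderr, "not a capture device\n");
 *	...
 *	v4l_close(&fd);
 *
 * v4l_open() fills in fd.cap and fd.caps and picks a default buffer type via
 * v4l_determine_type(), so the v4l_has_*() helpers above can be used right
 * away.
 */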
511 static inline int v4l_subdev_s_fd(struct v4l_fd *f, int fd, const char *devname)
512 {
513 	struct v4l2_subdev_client_capability clientcap = {};
514 	struct v4l2_subdev_capability subdevcap = {};
515 	bool subdev_streams;
516 	bool client_streams;
517 	int ret;
518 
519 	if (f->fd >= 0)
520 		f->close(f);
521 
522 	f->fd = fd;
523 	f->direct = true;
524 	if (fd < 0)
525 		return fd;
526 
527 	if (f->devname != devname)
528 		strncpy(f->devname, devname, sizeof(f->devname));
529 	f->devname[sizeof(f->devname) - 1] = '\0';
530 
531 	memset(&f->cap, 0, sizeof(f->cap));
532 	f->is_subdev = true;
533 	f->is_media = false;
534 	f->type = 0;
535 	f->have_query_ext_ctrl = false;
536 	f->have_ext_ctrls = false;
537 	f->have_next_ctrl = false;
538 	f->have_selection = false;
539 
540 	ret = ioctl(f->fd, VIDIOC_SUBDEV_QUERYCAP, &subdevcap);
541 	subdev_streams = !ret && (subdevcap.capabilities & V4L2_SUBDEV_CAP_STREAMS);
542 
543 	clientcap.capabilities = V4L2_SUBDEV_CLIENT_CAP_STREAMS |
544 				 V4L2_SUBDEV_CLIENT_CAP_INTERVAL_USES_WHICH;
545 
546 	ret = ioctl(f->fd, VIDIOC_SUBDEV_S_CLIENT_CAP, &clientcap);
547 	client_streams = !ret && (clientcap.capabilities & V4L2_SUBDEV_CLIENT_CAP_STREAMS);
548 	f->ival_uses_which = !ret && (clientcap.capabilities & V4L2_SUBDEV_CLIENT_CAP_INTERVAL_USES_WHICH);
549 
550 	f->have_streams = subdev_streams && client_streams;
551 
552 	return f->fd;
553 }
554 
555 static inline int v4l_subdev_open(struct v4l_fd *f, const char *devname, bool non_blocking)
556 {
557 	int fd = f->open(f, devname, O_RDWR | (non_blocking ? O_NONBLOCK : 0));
558 
559 	return v4l_subdev_s_fd(f, fd, devname);
560 }
561 
562 static inline int v4l_media_s_fd(struct v4l_fd *f, int fd, const char *devname)
563 {
564 	if (f->fd >= 0)
565 		f->close(f);
566 
567 	f->fd = fd;
568 	f->direct = true;
569 	if (fd < 0)
570 		return fd;
571 
572 	if (f->devname != devname)
573 		strncpy(f->devname, devname, sizeof(f->devname));
574 	f->devname[sizeof(f->devname) - 1] = '\0';
575 
576 	memset(&f->cap, 0, sizeof(f->cap));
577 	f->is_subdev = false;
578 	f->is_media = true;
579 	f->type = 0;
580 	f->have_query_ext_ctrl = false;
581 	f->have_ext_ctrls = false;
582 	f->have_next_ctrl = false;
583 	f->have_selection = false;
584 
585 	return f->fd;
586 }
587 
588 static inline int v4l_media_open(struct v4l_fd *f, const char *devname, bool non_blocking)
589 {
590 	int fd = f->open(f, devname, O_RDWR | (non_blocking ? O_NONBLOCK : 0));
591 
592 	return v4l_media_s_fd(f, fd, devname);
593 }
594 
595 static inline int v4l_reopen(struct v4l_fd *f, bool non_blocking)
596 {
597 	f->close(f);
598 	if (f->is_subdev)
599 		return v4l_subdev_open(f, f->devname, non_blocking);
600 	if (f->is_media)
601 		return v4l_media_open(f, f->devname, non_blocking);
602 	return v4l_open(f, f->devname, non_blocking);
603 }
604 
605 static inline void v4l_format_init(struct v4l2_format *fmt, unsigned type)
606 {
607 	memset(fmt, 0, sizeof(*fmt));
608 	fmt->type = type;
609 	if (fmt->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
610 	    fmt->type == V4L2_BUF_TYPE_VIDEO_OUTPUT)
611 		fmt->fmt.pix.priv = V4L2_PIX_FMT_PRIV_MAGIC;
612 }
613 
614 static inline void v4l_format_s_width(struct v4l2_format *fmt, __u32 width)
615 {
616 	switch (fmt->type) {
617 	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
618 	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
619 		fmt->fmt.pix.width = width;
620 		break;
621 	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
622 	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
623 		fmt->fmt.pix_mp.width = width;
624 		break;
625 	case V4L2_BUF_TYPE_VIDEO_OVERLAY:
626 	case V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY:
627 		fmt->fmt.win.w.width = width;
628 		break;
629 	}
630 }
631 
632 static inline __u32 v4l_format_g_width(const struct v4l2_format *fmt)
633 {
634 	switch (fmt->type) {
635 	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
636 	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
637 		return fmt->fmt.pix.width;
638 	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
639 	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
640 		return fmt->fmt.pix_mp.width;
641 	case V4L2_BUF_TYPE_VIDEO_OVERLAY:
642 	case V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY:
643 		return fmt->fmt.win.w.width;
644 	default:
645 		return 0;
646 	}
647 }
648 
649 static inline void v4l_format_s_height(struct v4l2_format *fmt, __u32 height)
650 {
651 	switch (fmt->type) {
652 	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
653 	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
654 		fmt->fmt.pix.height = height;
655 		break;
656 	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
657 	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
658 		fmt->fmt.pix_mp.height = height;
659 		break;
660 	case V4L2_BUF_TYPE_VIDEO_OVERLAY:
661 	case V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY:
662 		fmt->fmt.win.w.height = height;
663 		break;
664 	}
665 }
666 
667 static inline __u32 v4l_format_g_height(const struct v4l2_format *fmt)
668 {
669 	switch (fmt->type) {
670 	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
671 	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
672 		return fmt->fmt.pix.height;
673 	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
674 	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
675 		return fmt->fmt.pix_mp.height;
676 	case V4L2_BUF_TYPE_VIDEO_OVERLAY:
677 	case V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY:
678 		return fmt->fmt.win.w.height;
679 	default:
680 		return 0;
681 	}
682 }
683 
684 static inline void v4l_format_s_pixelformat(struct v4l2_format *fmt, __u32 pixelformat)
685 {
686 	switch (fmt->type) {
687 	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
688 	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
689 		fmt->fmt.pix.pixelformat = pixelformat;
690 		break;
691 	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
692 	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
693 		fmt->fmt.pix_mp.pixelformat = pixelformat;
694 		break;
695 	case V4L2_BUF_TYPE_SDR_CAPTURE:
696 	case V4L2_BUF_TYPE_SDR_OUTPUT:
697 		fmt->fmt.sdr.pixelformat = pixelformat;
698 		break;
699 	case V4L2_BUF_TYPE_VBI_CAPTURE:
700 	case V4L2_BUF_TYPE_VBI_OUTPUT:
701 		fmt->fmt.vbi.sample_format = pixelformat;
702 		break;
703 	case V4L2_BUF_TYPE_META_CAPTURE:
704 	case V4L2_BUF_TYPE_META_OUTPUT:
705 		fmt->fmt.meta.dataformat = pixelformat;
706 		break;
707 	}
708 }
709 
710 static inline __u32 v4l_format_g_pixelformat(const struct v4l2_format *fmt)
711 {
712 	switch (fmt->type) {
713 	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
714 	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
715 		return fmt->fmt.pix.pixelformat;
716 	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
717 	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
718 		return fmt->fmt.pix_mp.pixelformat;
719 	case V4L2_BUF_TYPE_SDR_CAPTURE:
720 	case V4L2_BUF_TYPE_SDR_OUTPUT:
721 		return fmt->fmt.sdr.pixelformat;
722 	case V4L2_BUF_TYPE_VBI_CAPTURE:
723 	case V4L2_BUF_TYPE_VBI_OUTPUT:
724 		return fmt->fmt.vbi.sample_format;
725 	case V4L2_BUF_TYPE_META_CAPTURE:
726 	case V4L2_BUF_TYPE_META_OUTPUT:
727 		return fmt->fmt.meta.dataformat;
728 	default:
729 		return 0;
730 	}
731 }
732 
733 static inline void v4l_format_s_field(struct v4l2_format *fmt, unsigned field)
734 {
735 	switch (fmt->type) {
736 	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
737 	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
738 		fmt->fmt.pix.field = field;
739 		break;
740 	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
741 	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
742 		fmt->fmt.pix_mp.field = field;
743 		break;
744 	case V4L2_BUF_TYPE_VIDEO_OVERLAY:
745 	case V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY:
746 		fmt->fmt.win.field = field;
747 		break;
748 	}
749 }
750 
751 static inline unsigned v4l_format_g_field(const struct v4l2_format *fmt)
752 {
753 	switch (fmt->type) {
754 	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
755 	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
756 		return fmt->fmt.pix.field;
757 	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
758 	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
759 		return fmt->fmt.pix_mp.field;
760 	case V4L2_BUF_TYPE_VIDEO_OVERLAY:
761 	case V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY:
762 		return fmt->fmt.win.field;
763 	default:
764 		return V4L2_FIELD_NONE;
765 	}
766 }
767 
768 static inline unsigned v4l_format_g_first_field(const struct v4l2_format *fmt,
769 						v4l2_std_id std)
770 {
771 	unsigned field = v4l_format_g_field(fmt);
772 
773 	if (field != V4L2_FIELD_ALTERNATE)
774 		return field;
775 	if (std & V4L2_STD_525_60)
776 		return V4L2_FIELD_BOTTOM;
777 	return V4L2_FIELD_TOP;
778 }
779 
780 static inline unsigned v4l_format_g_flds_per_frm(const struct v4l2_format *fmt)
781 {
782 	unsigned field = v4l_format_g_field(fmt);
783 
784 	if (field == V4L2_FIELD_ALTERNATE ||
785 	    field == V4L2_FIELD_TOP || field == V4L2_FIELD_BOTTOM)
786 		return 2;
787 	return 1;
788 }
789 
790 static inline void v4l_format_s_frame_height(struct v4l2_format *fmt, __u32 height)
791 {
792 	if (V4L2_FIELD_HAS_T_OR_B(v4l_format_g_field(fmt)))
793 		height /= 2;
794 	v4l_format_s_height(fmt, height);
795 }
796 
797 static inline __u32 v4l_format_g_frame_height(const struct v4l2_format *fmt)
798 {
799 	__u32 height = v4l_format_g_height(fmt);
800 
801 	if (V4L2_FIELD_HAS_T_OR_B(v4l_format_g_field(fmt)))
802 		return height * 2;
803 	return height;
804 }
805 
806 static inline void v4l_format_s_colorspace(struct v4l2_format *fmt,
807 					       unsigned colorspace)
808 {
809 	switch (fmt->type) {
810 	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
811 	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
812 		fmt->fmt.pix.colorspace = colorspace;
813 		break;
814 	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
815 	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
816 		fmt->fmt.pix_mp.colorspace = colorspace;
817 		break;
818 	}
819 }
820 
821 static inline unsigned
822 v4l_format_g_colorspace(const struct v4l2_format *fmt)
823 {
824 	switch (fmt->type) {
825 	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
826 	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
827 		return fmt->fmt.pix.colorspace;
828 	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
829 	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
830 		return fmt->fmt.pix_mp.colorspace;
831 	default:
832 		return 0;
833 	}
834 }
835 
836 static inline void v4l_format_s_xfer_func(struct v4l2_format *fmt,
837 					       unsigned xfer_func)
838 {
839 	switch (fmt->type) {
840 	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
841 	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
842 		fmt->fmt.pix.xfer_func = xfer_func;
843 		break;
844 	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
845 	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
846 		fmt->fmt.pix_mp.xfer_func = xfer_func;
847 		break;
848 	}
849 }
850 
851 static inline unsigned
852 v4l_format_g_xfer_func(const struct v4l2_format *fmt)
853 {
854 	switch (fmt->type) {
855 	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
856 	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
857 		return fmt->fmt.pix.xfer_func;
858 	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
859 	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
860 		return fmt->fmt.pix_mp.xfer_func;
861 	default:
862 		return 0;
863 	}
864 }
865 
866 static inline void v4l_format_s_ycbcr_enc(struct v4l2_format *fmt,
867 					       unsigned ycbcr_enc)
868 {
869 	switch (fmt->type) {
870 	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
871 	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
872 		fmt->fmt.pix.ycbcr_enc = ycbcr_enc;
873 		break;
874 	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
875 	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
876 		fmt->fmt.pix_mp.ycbcr_enc = ycbcr_enc;
877 		break;
878 	}
879 }
880 
881 static inline unsigned
882 v4l_format_g_ycbcr_enc(const struct v4l2_format *fmt)
883 {
884 	switch (fmt->type) {
885 	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
886 	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
887 		return fmt->fmt.pix.ycbcr_enc;
888 	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
889 	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
890 		return fmt->fmt.pix_mp.ycbcr_enc;
891 	default:
892 		return 0;
893 	}
894 }
895 
896 static inline unsigned
897 v4l_format_g_hsv_enc(const struct v4l2_format *fmt)
898 {
899 	unsigned hsv_enc = v4l_format_g_ycbcr_enc(fmt);
900 	if (hsv_enc < V4L2_HSV_ENC_180)
901 		return V4L2_HSV_ENC_180;
902 
903 	return hsv_enc;
904 }
905 
906 static inline void v4l_format_s_quantization(struct v4l2_format *fmt,
907 					       unsigned quantization)
908 {
909 	switch (fmt->type) {
910 	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
911 	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
912 		fmt->fmt.pix.quantization = quantization;
913 		break;
914 	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
915 	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
916 		fmt->fmt.pix_mp.quantization = quantization;
917 		break;
918 	}
919 }
920 
921 static inline unsigned
922 v4l_format_g_quantization(const struct v4l2_format *fmt)
923 {
924 	switch (fmt->type) {
925 	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
926 	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
927 		return fmt->fmt.pix.quantization;
928 	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
929 	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
930 		return fmt->fmt.pix_mp.quantization;
931 	default:
932 		return 0;
933 	}
934 }
935 
936 static inline void v4l_format_s_flags(struct v4l2_format *fmt,
937 					       unsigned flags)
938 {
939 	switch (fmt->type) {
940 	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
941 	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
942 		fmt->fmt.pix.flags = flags;
943 		break;
944 	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
945 	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
946 		fmt->fmt.pix_mp.flags = flags;
947 		break;
948 	}
949 }
950 
951 static inline unsigned
952 v4l_format_g_flags(const struct v4l2_format *fmt)
953 {
954 	switch (fmt->type) {
955 	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
956 	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
957 		return fmt->fmt.pix.flags;
958 	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
959 	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
960 		return fmt->fmt.pix_mp.flags;
961 	default:
962 		return 0;
963 	}
964 }
965 
966 static inline void v4l_format_s_num_planes(struct v4l2_format *fmt, __u8 num_planes)
967 {
968 	switch (fmt->type) {
969 	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
970 	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
971 		fmt->fmt.pix_mp.num_planes = num_planes;
972 		break;
973 	}
974 }
975 
976 static inline __u8
977 v4l_format_g_num_planes(const struct v4l2_format *fmt)
978 {
979 	switch (fmt->type) {
980 	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
981 	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
982 		return fmt->fmt.pix_mp.num_planes;
983 	default:
984 		return 1;
985 	}
986 }
987 
988 static inline void v4l_format_s_bytesperline(struct v4l2_format *fmt,
989 					     unsigned plane, __u32 bytesperline)
990 {
991 	switch (fmt->type) {
992 	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
993 	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
994 		if (plane == 0)
995 			fmt->fmt.pix.bytesperline = bytesperline;
996 		break;
997 	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
998 	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
999 		fmt->fmt.pix_mp.plane_fmt[plane].bytesperline = bytesperline;
1000 		break;
1001 	case V4L2_BUF_TYPE_VBI_CAPTURE:
1002 	case V4L2_BUF_TYPE_VBI_OUTPUT:
1003 		/* This assumes V4L2_PIX_FMT_GREY which is always the case */
1004 		if (plane == 0)
1005 			fmt->fmt.vbi.samples_per_line = bytesperline;
1006 		break;
1007 	}
1008 }
1009 
1010 static inline __u32
1011 v4l_format_g_bytesperline(const struct v4l2_format *fmt, unsigned plane)
1012 {
1013 	switch (fmt->type) {
1014 	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
1015 	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
1016 		return plane ? 0 : fmt->fmt.pix.bytesperline;
1017 	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
1018 	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
1019 		return fmt->fmt.pix_mp.plane_fmt[plane].bytesperline;
1020 	case V4L2_BUF_TYPE_VBI_CAPTURE:
1021 	case V4L2_BUF_TYPE_VBI_OUTPUT:
1022 		/* This assumes V4L2_PIX_FMT_GREY which is always the case */
1023 		return plane ? 0 : fmt->fmt.vbi.samples_per_line;
1024 	default:
1025 		return 0;
1026 	}
1027 }
1028 
1029 static inline void v4l_format_s_sizeimage(struct v4l2_format *fmt,
1030 					  unsigned plane, __u32 sizeimage)
1031 {
1032 	switch (fmt->type) {
1033 	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
1034 	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
1035 		if (plane == 0)
1036 			fmt->fmt.pix.sizeimage = sizeimage;
1037 		break;
1038 	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
1039 	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
1040 		fmt->fmt.pix_mp.plane_fmt[plane].sizeimage = sizeimage;
1041 		break;
1042 	case V4L2_BUF_TYPE_SLICED_VBI_CAPTURE:
1043 	case V4L2_BUF_TYPE_SLICED_VBI_OUTPUT:
1044 		if (plane == 0)
1045 			fmt->fmt.sliced.io_size = sizeimage;
1046 		break;
1047 	case V4L2_BUF_TYPE_SDR_CAPTURE:
1048 	case V4L2_BUF_TYPE_SDR_OUTPUT:
1049 		if (plane == 0)
1050 			fmt->fmt.sdr.buffersize = sizeimage;
1051 		break;
1052 	case V4L2_BUF_TYPE_META_CAPTURE:
1053 	case V4L2_BUF_TYPE_META_OUTPUT:
1054 		if (plane == 0)
1055 			fmt->fmt.meta.buffersize = sizeimage;
1056 		break;
1057 	}
1058 }
1059 
1060 static inline __u32
1061 v4l_format_g_sizeimage(const struct v4l2_format *fmt, unsigned plane)
1062 {
1063 	switch (fmt->type) {
1064 	case V4L2_BUF_TYPE_VIDEO_CAPTURE:
1065 	case V4L2_BUF_TYPE_VIDEO_OUTPUT:
1066 		return plane ? 0 : fmt->fmt.pix.sizeimage;
1067 	case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
1068 	case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
1069 		return fmt->fmt.pix_mp.plane_fmt[plane].sizeimage;
1070 	case V4L2_BUF_TYPE_VBI_CAPTURE:
1071 	case V4L2_BUF_TYPE_VBI_OUTPUT:
1072 		/* This assumes V4L2_PIX_FMT_GREY which is always the case */
1073 		return plane ? 0 : fmt->fmt.vbi.samples_per_line *
1074 			(fmt->fmt.vbi.count[0] + fmt->fmt.vbi.count[1]);
1075 	case V4L2_BUF_TYPE_SLICED_VBI_CAPTURE:
1076 	case V4L2_BUF_TYPE_SLICED_VBI_OUTPUT:
1077 		return plane ? 0 : fmt->fmt.sliced.io_size;
1078 	case V4L2_BUF_TYPE_SDR_CAPTURE:
1079 	case V4L2_BUF_TYPE_SDR_OUTPUT:
1080 		return plane ? 0 : fmt->fmt.sdr.buffersize;
1081 	case V4L2_BUF_TYPE_META_CAPTURE:
1082 	case V4L2_BUF_TYPE_META_OUTPUT:
1083 		return plane ? 0 : fmt->fmt.meta.buffersize;
1084 	default:
1085 		return 0;
1086 	}
1087 }
1088 
1089 static inline int v4l_g_fmt(struct v4l_fd *f, struct v4l2_format *fmt, unsigned type)
1090 {
1091 	v4l_format_init(fmt, type ? type : f->type);
1092 	return v4l_ioctl(f, VIDIOC_G_FMT, fmt);
1093 }
1094 
1095 static inline int v4l_try_fmt(struct v4l_fd *f, struct v4l2_format *fmt, bool zero_bpl)
1096 {
1097 	/*
1098 	 * Some drivers allow applications to set bytesperline to a larger value.
1099 	 * In most cases you just want the driver to fill in the bytesperline field
1100 	 * and so you have to zero bytesperline first.
1101 	 */
1102 	if (zero_bpl) {
1103 		__u8 p;
1104 
1105 		for (p = 0; p < v4l_format_g_num_planes(fmt); p++)
1106 			v4l_format_s_bytesperline(fmt, p, 0);
1107 	}
1108 	return v4l_ioctl(f, VIDIOC_TRY_FMT, fmt);
1109 }
1110 
1111 static inline int v4l_s_fmt(struct v4l_fd *f, struct v4l2_format *fmt, bool zero_bpl)
1112 {
1113 	/*
1114 	 * Some drivers allow applications to set bytesperline to a larger value.
1115 	 * In most cases you just want the driver to fill in the bytesperline field
1116 	 * and so you have to zero bytesperline first.
1117 	 */
1118 	if (zero_bpl) {
1119 		__u8 p;
1120 
1121 		for (p = 0; p < v4l_format_g_num_planes(fmt); p++)
1122 			v4l_format_s_bytesperline(fmt, p, 0);
1123 	}
1124 	return v4l_ioctl(f, VIDIOC_S_FMT, fmt);
1125 }
1126 
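/*
 * Example: format negotiation. A sketch only; the 1280x720 YUYV values and
 * the fd variable are illustrative assumptions. Get the current format,
 * adjust it, then let the driver align it with VIDIOC_S_FMT:
 *
 *	struct v4l2_format fmt;
 *
 *	if (v4l_g_fmt(&fd, &fmt, 0))		(type 0 means: use fd.type)
 *		return -1;
 *	v4l_format_s_width(&fmt, 1280);
 *	v4l_format_s_height(&fmt, 720);
 *	v4l_format_s_pixelformat(&fmt, V4L2_PIX_FMT_YUYV);
 *	if (v4l_s_fmt(&fd, &fmt, true))		(true: zero bytesperline first)
 *		return -1;
 *	printf("got %ux%u, %u bytes per image\n",
 *	       v4l_format_g_width(&fmt), v4l_format_g_height(&fmt),
 *	       v4l_format_g_sizeimage(&fmt, 0));
 */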
1127 struct v4l_buffer {
1128 	struct v4l2_plane planes[VIDEO_MAX_PLANES];
1129 	struct v4l2_buffer buf;
1130 };
1131 
1132 static inline void v4l_buffer_init(struct v4l_buffer *buf,
1133 		unsigned type, unsigned memory, unsigned index)
1134 {
1135 	memset(buf, 0, sizeof(*buf));
1136 	buf->buf.type = type;
1137 	buf->buf.memory = memory;
1138 	buf->buf.index = index;
1139 	if (V4L2_TYPE_IS_MULTIPLANAR(type)) {
1140 		buf->buf.m.planes = buf->planes;
1141 		buf->buf.length = VIDEO_MAX_PLANES;
1142 	}
1143 }
1144 
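/*
 * Sketch: struct v4l_buffer bundles a v4l2_buffer with a plane array, so the
 * same code works for single-planar and multiplanar queues. For multiplanar
 * types v4l_buffer_init() points buf.buf.m.planes at the embedded planes[]
 * array, so no separate allocation is needed. fd below is an assumed,
 * already opened struct v4l_fd:
 *
 *	struct v4l_buffer buf;
 *
 *	v4l_buffer_init(&buf, fd.type, V4L2_MEMORY_MMAP, 0);
 *	if (!v4l_ioctl(&fd, VIDIOC_QUERYBUF, &buf.buf))
 *		printf("buffer 0 queried, flags 0x%x\n", buf.buf.flags);
 *
 * The helpers further down (v4l_buffer_querybuf() etc.) wrap this pattern.
 */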
1145 static inline bool v4l_type_is_planar(unsigned type)
1146 {
1147        return V4L2_TYPE_IS_MULTIPLANAR(type);
1148 }
1149 
1150 static inline bool v4l_type_is_output(unsigned type)
1151 {
1152        return V4L2_TYPE_IS_OUTPUT(type);
1153 }
1154 
1155 static inline bool v4l_type_is_capture(unsigned type)
1156 {
1157        return V4L2_TYPE_IS_CAPTURE(type);
1158 }
1159 
1160 static inline bool v4l_type_is_video(unsigned type)
1161 {
1162        switch (type) {
1163        case V4L2_BUF_TYPE_VIDEO_CAPTURE:
1164        case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
1165        case V4L2_BUF_TYPE_VIDEO_OUTPUT:
1166        case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
1167                return true;
1168        default:
1169                return false;
1170        }
1171 }
1172 
1173 static inline bool v4l_type_is_raw_vbi(unsigned type)
1174 {
1175        return type == V4L2_BUF_TYPE_VBI_CAPTURE ||
1176               type == V4L2_BUF_TYPE_VBI_OUTPUT;
1177 }
1178 
1179 static inline bool v4l_type_is_sliced_vbi(unsigned type)
1180 {
1181        return type == V4L2_BUF_TYPE_SLICED_VBI_CAPTURE ||
1182               type == V4L2_BUF_TYPE_SLICED_VBI_OUTPUT;
1183 }
1184 
1185 static inline bool v4l_type_is_vbi(unsigned type)
1186 {
1187        return v4l_type_is_raw_vbi(type) || v4l_type_is_sliced_vbi(type);
1188 }
1189 
1190 static inline bool v4l_type_is_overlay(unsigned type)
1191 {
1192        return type == V4L2_BUF_TYPE_VIDEO_OVERLAY ||
1193               type == V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY;
1194 }
1195 
1196 static inline bool v4l_type_is_sdr(unsigned type)
1197 {
1198        return type == V4L2_BUF_TYPE_SDR_CAPTURE ||
1199 	      type == V4L2_BUF_TYPE_SDR_OUTPUT;
1200 }
1201 
1202 static inline bool v4l_type_is_meta(unsigned type)
1203 {
1204 	return type == V4L2_BUF_TYPE_META_CAPTURE ||
1205 	       type == V4L2_BUF_TYPE_META_OUTPUT;
1206 }
1207 
1208 static inline unsigned v4l_type_invert(unsigned type)
1209 {
1210 	if (v4l_type_is_planar(type))
1211 		return v4l_type_is_output(type) ?
1212 			V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE :
1213 			V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1214 	return v4l_type_is_output(type) ?
1215 		V4L2_BUF_TYPE_VIDEO_CAPTURE :
1216 		V4L2_BUF_TYPE_VIDEO_OUTPUT;
1217 }
1218 
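/*
 * Sketch: for memory-to-memory devices v4l_type_invert() gives the opposite
 * queue type, which is convenient when both sides of the device have to be
 * configured. fd, out_fmt and cap_fmt are assumed variables:
 *
 *	unsigned out_type = fd.type;
 *	unsigned cap_type = v4l_type_invert(out_type);
 *
 *	v4l_g_fmt(&fd, &out_fmt, out_type);
 *	v4l_g_fmt(&fd, &cap_fmt, cap_type);
 */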
1219 static inline unsigned v4l_buffer_g_num_planes(const struct v4l_buffer *buf)
1220 {
1221 	if (v4l_type_is_planar(buf->buf.type))
1222 		return buf->buf.length;
1223 	return 1;
1224 }
1225 
1226 static inline __u32 v4l_buffer_g_index(const struct v4l_buffer *buf)
1227 {
1228 	return buf->buf.index;
1229 }
1230 
1231 static inline void v4l_buffer_s_index(struct v4l_buffer *buf, __u32 index)
1232 {
1233 	buf->buf.index = index;
1234 }
1235 
1236 static inline __s32 v4l_buffer_g_request_fd(const struct v4l_buffer *buf)
1237 {
1238 	return buf->buf.request_fd;
1239 }
1240 
1241 static inline void v4l_buffer_s_request_fd(struct v4l_buffer *buf, __s32 request_fd)
1242 {
1243 	buf->buf.request_fd = request_fd;
1244 }
1245 
1246 
1247 static inline unsigned v4l_buffer_g_type(const struct v4l_buffer *buf)
1248 {
1249 	return buf->buf.type;
1250 }
1251 
1252 static inline unsigned v4l_buffer_g_memory(const struct v4l_buffer *buf)
1253 {
1254 	return buf->buf.memory;
1255 }
1256 
1257 static inline __u32 v4l_buffer_g_flags(const struct v4l_buffer *buf)
1258 {
1259 	return buf->buf.flags;
1260 }
1261 
1262 static inline void v4l_buffer_s_flags(struct v4l_buffer *buf, __u32 flags)
1263 {
1264 	buf->buf.flags = flags;
1265 }
1266 
1267 static inline void v4l_buffer_or_flags(struct v4l_buffer *buf, __u32 flags)
1268 {
1269 	buf->buf.flags |= flags;
1270 }
1271 
1272 static inline unsigned v4l_buffer_g_field(const struct v4l_buffer *buf)
1273 {
1274 	return buf->buf.field;
1275 }
1276 
1277 static inline void v4l_buffer_s_field(struct v4l_buffer *buf, unsigned field)
1278 {
1279 	buf->buf.field = field;
1280 }
1281 
1282 static inline __u32 v4l_buffer_g_sequence(const struct v4l_buffer *buf)
1283 {
1284 	return buf->buf.sequence;
1285 }
1286 
1287 static inline const struct timeval *v4l_buffer_g_timestamp(const struct v4l_buffer *buf)
1288 {
1289 	return &buf->buf.timestamp;
1290 }
1291 
1292 static inline void v4l_buffer_s_timestamp(struct v4l_buffer *buf, const struct timeval *tv)
1293 {
1294 	buf->buf.timestamp = *tv;
1295 }
1296 
1297 static inline void v4l_buffer_s_timestamp_ts(struct v4l_buffer *buf, const struct timespec *ts)
1298 {
1299 	buf->buf.timestamp.tv_sec = ts->tv_sec;
1300 	buf->buf.timestamp.tv_usec = ts->tv_nsec / 1000;
1301 }
1302 
1303 static inline void v4l_buffer_s_timestamp_clock(struct v4l_buffer *buf)
1304 {
1305 	struct timespec ts;
1306 
1307 	clock_gettime(CLOCK_MONOTONIC, &ts);
1308 	v4l_buffer_s_timestamp_ts(buf, &ts);
1309 }
1310 
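/*
 * Sketch: for output streams the application may fill in the timestamp
 * before queuing a buffer. v4l_buffer_s_timestamp_clock() samples
 * CLOCK_MONOTONIC, which matches V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC. buf and
 * fd are assumed to be an initialized struct v4l_buffer and an opened
 * struct v4l_fd:
 *
 *	v4l_buffer_s_timestamp_clock(&buf);
 *	v4l_buffer_qbuf(&fd, &buf);		(defined further down)
 */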
1311 static inline const struct v4l2_timecode *v4l_buffer_g_timecode(const struct v4l_buffer *buf)
1312 {
1313 	return &buf->buf.timecode;
1314 }
1315 
1316 static inline void v4l_buffer_s_timecode(struct v4l_buffer *buf, const struct v4l2_timecode *tc)
1317 {
1318 	buf->buf.timecode = *tc;
1319 }
1320 
1321 static inline __u32 v4l_buffer_g_timestamp_type(const struct v4l_buffer *buf)
1322 {
1323 	return buf->buf.flags & V4L2_BUF_FLAG_TIMESTAMP_MASK;
1324 }
1325 
1326 static inline bool v4l_buffer_is_copy(const struct v4l_buffer *buf)
1327 {
1328 	return v4l_buffer_g_timestamp_type(buf) == V4L2_BUF_FLAG_TIMESTAMP_COPY;
1329 }
1330 
1331 static inline __u32 v4l_buffer_g_timestamp_src(const struct v4l_buffer *buf)
1332 {
1333 	return buf->buf.flags & V4L2_BUF_FLAG_TSTAMP_SRC_MASK;
1334 }
1335 
1336 static inline void v4l_buffer_s_timestamp_src(struct v4l_buffer *buf, __u32 src)
1337 {
1338 	buf->buf.flags &= ~V4L2_BUF_FLAG_TSTAMP_SRC_MASK;
1339 	buf->buf.flags |= src & V4L2_BUF_FLAG_TSTAMP_SRC_MASK;
1340 }
1341 
1342 static inline unsigned v4l_buffer_g_length(const struct v4l_buffer *buf, unsigned plane)
1343 {
1344 	if (v4l_type_is_planar(buf->buf.type))
1345 		return buf->planes[plane].length;
1346 	return plane ? 0 : buf->buf.length;
1347 }
1348 
1349 static inline void v4l_buffer_s_length(struct v4l_buffer *buf, unsigned plane, unsigned length)
1350 {
1351 	if (v4l_type_is_planar(buf->buf.type))
1352 		buf->planes[plane].length = length;
1353 	else if (plane == 0)
1354 		buf->buf.length = length;
1355 }
1356 
1357 static inline unsigned v4l_buffer_g_bytesused(const struct v4l_buffer *buf, unsigned plane)
1358 {
1359 	if (v4l_type_is_planar(buf->buf.type))
1360 		return buf->planes[plane].bytesused;
1361 	return plane ? 0 : buf->buf.bytesused;
1362 }
1363 
1364 static inline void v4l_buffer_s_bytesused(struct v4l_buffer *buf, unsigned plane, __u32 bytesused)
1365 {
1366 	if (v4l_type_is_planar(buf->buf.type))
1367 		buf->planes[plane].bytesused = bytesused;
1368 	else if (plane == 0)
1369 		buf->buf.bytesused = bytesused;
1370 }
1371 
1372 static inline unsigned v4l_buffer_g_data_offset(const struct v4l_buffer *buf, unsigned plane)
1373 {
1374 	if (v4l_type_is_planar(buf->buf.type))
1375 		return buf->planes[plane].data_offset;
1376 	return 0;
1377 }
1378 
1379 static inline void v4l_buffer_s_data_offset(struct v4l_buffer *buf, unsigned plane, __u32 data_offset)
1380 {
1381 	if (v4l_type_is_planar(buf->buf.type))
1382 		buf->planes[plane].data_offset = data_offset;
1383 }
1384 
1385 static inline __u32 v4l_buffer_g_mem_offset(const struct v4l_buffer *buf, unsigned plane)
1386 {
1387 	if (v4l_type_is_planar(buf->buf.type))
1388 		return buf->planes[plane].m.mem_offset;
1389 	return plane ? 0 : buf->buf.m.offset;
1390 }
1391 
1392 static inline void v4l_buffer_s_userptr(struct v4l_buffer *buf, unsigned plane, void *userptr)
1393 {
1394 	if (v4l_type_is_planar(buf->buf.type))
1395 		buf->planes[plane].m.userptr = (unsigned long)userptr;
1396 	else if (plane == 0)
1397 		buf->buf.m.userptr = (unsigned long)userptr;
1398 }
1399 
1400 static inline void *v4l_buffer_g_userptr(const struct v4l_buffer *buf, unsigned plane)
1401 {
1402 	if (v4l_type_is_planar(buf->buf.type))
1403 		return (void *)buf->planes[plane].m.userptr;
1404 	return plane ? NULL : (void *)buf->buf.m.userptr;
1405 }
1406 
1407 static inline void v4l_buffer_s_fd(struct v4l_buffer *buf, unsigned plane, int fd)
1408 {
1409 	if (v4l_type_is_planar(buf->buf.type))
1410 		buf->planes[plane].m.fd = fd;
1411 	else if (plane == 0)
1412 		buf->buf.m.fd = fd;
1413 }
1414 
1415 static inline int v4l_buffer_g_fd(const struct v4l_buffer *buf, unsigned plane)
1416 {
1417 	if (v4l_type_is_planar(buf->buf.type))
1418 		return buf->planes[plane].m.fd;
1419 	return plane ? -1 : buf->buf.m.fd;
1420 }
1421 
1422 static inline int v4l_buffer_prepare_buf(struct v4l_fd *f, struct v4l_buffer *buf)
1423 {
1424 	return v4l_ioctl(f, VIDIOC_PREPARE_BUF, &buf->buf);
1425 }
1426 
1427 static inline int v4l_buffer_qbuf(struct v4l_fd *f, struct v4l_buffer *buf)
1428 {
1429 	return v4l_ioctl(f, VIDIOC_QBUF, &buf->buf);
1430 }
1431 
1432 static inline int v4l_buffer_dqbuf(struct v4l_fd *f, struct v4l_buffer *buf)
1433 {
1434 	return v4l_ioctl(f, VIDIOC_DQBUF, &buf->buf);
1435 }
1436 
1437 static inline int v4l_buffer_querybuf(struct v4l_fd *f, struct v4l_buffer *buf, unsigned index)
1438 {
1439 	v4l_buffer_s_index(buf, index);
1440 	return v4l_ioctl(f, VIDIOC_QUERYBUF, &buf->buf);
1441 }
1442 
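/*
 * Example: a simple capture dequeue/requeue loop. A sketch that assumes
 * streaming was already started with VIDIOC_STREAMON, all buffers are
 * queued, and process() and q (a struct v4l_queue, see below) exist only
 * for illustration:
 *
 *	struct v4l_buffer buf;
 *
 *	for (;;) {
 *		v4l_buffer_init(&buf, fd.type, V4L2_MEMORY_MMAP, 0);
 *		if (v4l_buffer_dqbuf(&fd, &buf))
 *			break;
 *		process(v4l_queue_g_dataptr(&q, v4l_buffer_g_index(&buf), 0),
 *			v4l_buffer_g_bytesused(&buf, 0));
 *		if (v4l_buffer_qbuf(&fd, &buf))
 *			break;
 *	}
 */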
1443 struct v4l_queue_buf_info {
1444 	__u32 mem_offsets[VIDEO_MAX_PLANES];
1445 	void *mmappings[VIDEO_MAX_PLANES];
1446 	unsigned long userptrs[VIDEO_MAX_PLANES];
1447 	int fds[VIDEO_MAX_PLANES];
1448 };
1449 
1450 struct v4l_queue {
1451 	unsigned type;
1452 	unsigned memory;
1453 	unsigned buffers;
1454 	unsigned mappings;
1455 	unsigned num_planes;
1456 	unsigned capabilities;
1457 	unsigned max_num_buffers;
1458 
1459 	__u32 lengths[VIDEO_MAX_PLANES];
1460 	struct v4l_queue_buf_info _bufs_info[VIDEO_MAX_FRAME];
1461 	struct v4l_queue_buf_info *bufs_info;
1462 };
1463 
1464 static inline void v4l_queue_init(struct v4l_queue *q,
1465 		unsigned type, unsigned memory)
1466 {
1467 	unsigned i, p;
1468 
1469 	memset(q, 0, sizeof(*q));
1470 	q->type = type;
1471 	q->memory = memory;
1472 	q->max_num_buffers = VIDEO_MAX_FRAME;
1473 	q->bufs_info = q->_bufs_info;
1474 
1475 	for (i = 0; i < VIDEO_MAX_FRAME; i++)
1476 		for (p = 0; p < VIDEO_MAX_PLANES; p++)
1477 			q->bufs_info[i].fds[p] = -1;
1478 }
1479 
1480 static inline int v4l_queue_alloc_bufs_info(struct v4l_queue *q)
1481 {
1482 	struct v4l_queue_buf_info *bi;
1483 	unsigned i, p;
1484 
1485 	if (q->max_num_buffers <= VIDEO_MAX_FRAME)
1486 		return 0;
1487 	bi = (struct v4l_queue_buf_info *)calloc(q->max_num_buffers, sizeof(*bi));
1488 	if (!bi)
1489 		return -ENOMEM;
1490 	for (i = 0; i < VIDEO_MAX_FRAME; i++)
1491 		bi[i] = q->bufs_info[i];
1492 	for (i = VIDEO_MAX_FRAME; i < q->max_num_buffers; i++)
1493 		for (p = 0; p < VIDEO_MAX_PLANES; p++)
1494 			bi[i].fds[p] = -1;
1495 	q->bufs_info = bi;
1496 	return 0;
1497 }
1498 
1499 static inline void v4l_queue_free_bufs_info(struct v4l_queue *q)
1500 {
1501 	if (q->bufs_info != q->_bufs_info)
1502 		free(q->bufs_info);
1503 	q->bufs_info = q->_bufs_info;
1504 }
1505 
1506 static inline unsigned v4l_queue_g_type(const struct v4l_queue *q) { return q->type; }
1507 static inline unsigned v4l_queue_g_memory(const struct v4l_queue *q) { return q->memory; }
1508 static inline unsigned v4l_queue_g_buffers(const struct v4l_queue *q) { return q->buffers; }
1509 static inline unsigned v4l_queue_g_max_num_buffers(const struct v4l_queue *q) { return q->max_num_buffers; }
1510 static inline unsigned v4l_queue_g_mappings(const struct v4l_queue *q) { return q->mappings; }
1511 static inline unsigned v4l_queue_g_num_planes(const struct v4l_queue *q) { return q->num_planes; }
1512 static inline unsigned v4l_queue_g_capabilities(const struct v4l_queue *q) { return q->capabilities; }
1513 
1514 static inline __u32 v4l_queue_g_length(const struct v4l_queue *q, unsigned plane)
1515 {
1516 	return q->lengths[plane];
1517 }
1518 
1519 static inline __u32 v4l_queue_g_mem_offset(const struct v4l_queue *q, unsigned index, unsigned plane)
1520 {
1521 	return q->bufs_info[index].mem_offsets[plane];
1522 }
1523 
1524 static inline void v4l_queue_s_mmapping(struct v4l_queue *q, unsigned index, unsigned plane, void *m)
1525 {
1526 	q->bufs_info[index].mmappings[plane] = m;
1527 }
1528 
1529 static inline void *v4l_queue_g_mmapping(const struct v4l_queue *q, unsigned index, unsigned plane)
1530 {
1531 	if (index >= v4l_queue_g_mappings(q) || plane >= v4l_queue_g_num_planes(q))
1532 		return NULL;
1533 	return q->bufs_info[index].mmappings[plane];
1534 }
1535 
1536 static inline void v4l_queue_s_userptr(struct v4l_queue *q, unsigned index, unsigned plane, void *m)
1537 {
1538 	q->bufs_info[index].userptrs[plane] = (unsigned long)m;
1539 }
1540 
1541 static inline void *v4l_queue_g_userptr(const struct v4l_queue *q, unsigned index, unsigned plane)
1542 {
1543 	if (index >= v4l_queue_g_buffers(q) || plane >= v4l_queue_g_num_planes(q))
1544 		return NULL;
1545 	return (void *)q->bufs_info[index].userptrs[plane];
1546 }
1547 
1548 static inline void v4l_queue_s_fd(struct v4l_queue *q, unsigned index, unsigned plane, int fd)
1549 {
1550 	q->bufs_info[index].fds[plane] = fd;
1551 }
1552 
1553 static inline int v4l_queue_g_fd(const struct v4l_queue *q, unsigned index, unsigned plane)
1554 {
1555 	return q->bufs_info[index].fds[plane];
1556 }
1557 
1558 static inline void *v4l_queue_g_dataptr(const struct v4l_queue *q, unsigned index, unsigned plane)
1559 {
1560 	if (q->memory == V4L2_MEMORY_USERPTR)
1561 		return v4l_queue_g_userptr(q, index, plane);
1562 	return v4l_queue_g_mmapping(q, index, plane);
1563 }
1564 
1565 static inline int v4l_queue_remove_bufs(struct v4l_fd *f, struct v4l_queue *q, unsigned index, unsigned count)
1566 {
1567 	struct v4l2_remove_buffers removebufs;
1568 	int ret;
1569 
1570 	memset(&removebufs, 0, sizeof(removebufs));
1571 	removebufs.type = q->type;
1572 	removebufs.index = index;
1573 	removebufs.count = count;
1574 
1575 	ret = v4l_ioctl(f, VIDIOC_REMOVE_BUFS, &removebufs);
1576 	if (!ret)
1577 		q->buffers -= removebufs.count;
1578 
1579 	return ret;
1580 }
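
/*
 * Illustrative usage sketch (not part of the original header): drop the last
 * two buffers of a queue. Drivers or kernels without VIDIOC_REMOVE_BUFS
 * support are expected to fail this call with ENOTTY.
 *
 *	unsigned buffers = v4l_queue_g_buffers(q);
 *
 *	if (buffers >= 2)
 *		v4l_queue_remove_bufs(f, q, buffers - 2, 2);
 */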

static inline int v4l_queue_querybufs(struct v4l_fd *f, struct v4l_queue *q, unsigned from, unsigned count)
{
	unsigned b, p, max = from + count;
	int ret;

	for (b = from; b < max; b++) {
		struct v4l_buffer buf;

		v4l_buffer_init(&buf, v4l_queue_g_type(q), v4l_queue_g_memory(q), b);
		ret = v4l_ioctl(f, VIDIOC_QUERYBUF, &buf.buf);
		if (ret)
			return ret;
		if (b == 0) {
			q->num_planes = v4l_buffer_g_num_planes(&buf);
			for (p = 0; p < v4l_queue_g_num_planes(q); p++)
				q->lengths[p] = v4l_buffer_g_length(&buf, p);
		}
		if (q->memory == V4L2_MEMORY_MMAP)
			for (p = 0; p < q->num_planes; p++)
				q->bufs_info[b].mem_offsets[p] = v4l_buffer_g_mem_offset(&buf, p);
	}
	return 0;
}

static inline int v4l_queue_reqbufs(struct v4l_fd *f,
		struct v4l_queue *q, unsigned count, unsigned int flags = 0)
{
	struct v4l2_create_buffers createbufs;
	struct v4l2_requestbuffers reqbufs;
	int ret;

	reqbufs.type = q->type;
	reqbufs.memory = q->memory;
	reqbufs.count = count;
	reqbufs.flags = flags;
	/*
	 * Problem: if REQBUFS returns an error, did it free any old
	 * buffers or not?
	 */
	ret = v4l_ioctl(f, VIDIOC_REQBUFS, &reqbufs);
	if (ret)
		return ret;
	q->buffers = reqbufs.count;
	q->capabilities = reqbufs.capabilities;

	if (q->buffers) {
		memset(&createbufs, 0, sizeof(createbufs));
		createbufs.format.type = q->type;
		createbufs.memory = q->memory;
		if (!v4l_ioctl(f, VIDIOC_CREATE_BUFS, &createbufs)) {
			q->capabilities = createbufs.capabilities;
			if (q->bufs_info == q->_bufs_info &&
			    (q->capabilities & V4L2_BUF_CAP_SUPPORTS_MAX_NUM_BUFFERS)) {
				q->max_num_buffers = createbufs.max_num_buffers;
				v4l_queue_alloc_bufs_info(q);
			}
		}
	}
	return v4l_queue_querybufs(f, q, 0, reqbufs.count);
}
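
/*
 * Illustrative usage sketch (not part of the original header): request four
 * MMAP buffers for a video capture queue, map them and start streaming.
 * v4l_queue_init() is assumed to be declared earlier in this header; error
 * handling and the actual capture loop are omitted.
 *
 *	struct v4l_queue q;
 *
 *	v4l_queue_init(&q, V4L2_BUF_TYPE_VIDEO_CAPTURE, V4L2_MEMORY_MMAP);
 *	if (!v4l_queue_reqbufs(f, &q, 4, 0) &&
 *	    !v4l_queue_obtain_bufs(f, &q, 0))
 *		v4l_ioctl(f, VIDIOC_STREAMON, &q.type);
 */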

static inline bool v4l_queue_has_create_bufs(struct v4l_fd *f, const struct v4l_queue *q)
{
	struct v4l2_create_buffers createbufs;

	memset(&createbufs, 0, sizeof(createbufs));
	createbufs.format.type = q->type;
	createbufs.memory = q->memory;
	return v4l_ioctl(f, VIDIOC_CREATE_BUFS, &createbufs) == 0;
}

static inline int v4l_queue_create_bufs(struct v4l_fd *f,
		struct v4l_queue *q, unsigned count,
		const struct v4l2_format *fmt, unsigned int flags = 0)
{
	struct v4l2_create_buffers createbufs;
	int ret;

	createbufs.format.type = q->type;
	createbufs.memory = q->memory;
	createbufs.count = count;
	createbufs.flags = flags;
	if (fmt) {
		createbufs.format = *fmt;
	} else {
		ret = v4l_g_fmt(f, &createbufs.format, q->type);
		if (ret)
			return ret;
	}
	memset(createbufs.reserved, 0, sizeof(createbufs.reserved));
	ret = v4l_ioctl(f, VIDIOC_CREATE_BUFS, &createbufs);
	if (ret)
		return ret;
	if (q->buffers == 0) {
		q->capabilities = createbufs.capabilities;
		if (q->bufs_info == q->_bufs_info &&
		    (q->capabilities & V4L2_BUF_CAP_SUPPORTS_MAX_NUM_BUFFERS)) {
			q->max_num_buffers = createbufs.max_num_buffers;
			v4l_queue_alloc_bufs_info(q);
		}
	}
	q->buffers += createbufs.count;
	return v4l_queue_querybufs(f, q, createbufs.index, createbufs.count);
}
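
/*
 * Illustrative usage sketch (not part of the original header): add two extra
 * buffers to an already set-up queue, using the currently active format
 * (fmt == NULL), provided the driver implements VIDIOC_CREATE_BUFS.
 *
 *	if (v4l_queue_has_create_bufs(f, q))
 *		v4l_queue_create_bufs(f, q, 2, NULL, 0);
 */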

static inline int v4l_queue_mmap_bufs(struct v4l_fd *f,
		struct v4l_queue *q, unsigned from)
{
	unsigned b, p;

	if (q->memory != V4L2_MEMORY_MMAP && q->memory != V4L2_MEMORY_DMABUF)
		return 0;

	for (b = from; b < v4l_queue_g_buffers(q); b++) {
		for (p = 0; p < v4l_queue_g_num_planes(q); p++) {
			void *m = MAP_FAILED;

			if (q->memory == V4L2_MEMORY_MMAP)
				m = v4l_mmap(f, v4l_queue_g_length(q, p), v4l_queue_g_mem_offset(q, b, p));
			else if (q->memory == V4L2_MEMORY_DMABUF)
				m = mmap(NULL, v4l_queue_g_length(q, p),
						PROT_READ | PROT_WRITE, MAP_SHARED,
						v4l_queue_g_fd(q, b, p), 0);

			if (m == MAP_FAILED)
				return errno;
			v4l_queue_s_mmapping(q, b, p, m);
		}
	}
	q->mappings = b;
	return 0;
}

static inline int v4l_queue_munmap_bufs(struct v4l_fd *f, struct v4l_queue *q,
					unsigned from)
{
	unsigned b, p;
	int ret = 0;

	if (q->memory != V4L2_MEMORY_MMAP && q->memory != V4L2_MEMORY_DMABUF)
		return 0;

	for (b = from; b < v4l_queue_g_mappings(q); b++) {
		for (p = 0; p < v4l_queue_g_num_planes(q); p++) {
			void *m = v4l_queue_g_mmapping(q, b, p);

			if (m == NULL)
				continue;

			if (q->memory == V4L2_MEMORY_MMAP)
				ret = v4l_munmap(f, m, v4l_queue_g_length(q, p));
			else if (q->memory == V4L2_MEMORY_DMABUF)
				ret = munmap(m, v4l_queue_g_length(q, p)) ? errno : 0;
			if (ret)
				return ret;
			v4l_queue_s_mmapping(q, b, p, NULL);
		}
	}
	q->mappings = from;
	return 0;
}

static inline int v4l_queue_alloc_bufs(struct v4l_fd *f,
		struct v4l_queue *q, unsigned from)
{
	unsigned b, p;

	if (q->memory != V4L2_MEMORY_USERPTR)
		return 0;
	for (b = from; b < v4l_queue_g_buffers(q); b++) {
		for (p = 0; p < v4l_queue_g_num_planes(q); p++) {
			void *m = malloc(v4l_queue_g_length(q, p));

			if (m == NULL)
				return errno;
			v4l_queue_s_userptr(q, b, p, m);
		}
	}
	return 0;
}

static inline int v4l_queue_free_bufs(struct v4l_queue *q, unsigned from)
{
	unsigned b, p;

	if (q->memory != V4L2_MEMORY_USERPTR)
		return 0;
	for (b = from; b < v4l_queue_g_buffers(q); b++) {
		for (p = 0; p < v4l_queue_g_num_planes(q); p++) {
			free(v4l_queue_g_userptr(q, b, p));
			v4l_queue_s_userptr(q, b, p, NULL);
		}
	}
	return 0;
}

static inline int v4l_queue_obtain_bufs(struct v4l_fd *f,
		struct v4l_queue *q, unsigned from)
{
	if (q->memory == V4L2_MEMORY_USERPTR)
		return v4l_queue_alloc_bufs(f, q, from);
	return v4l_queue_mmap_bufs(f, q, from);
}

static inline int v4l_queue_release_bufs(struct v4l_fd *f, struct v4l_queue *q,
					 unsigned from)
{
	if (q->memory == V4L2_MEMORY_USERPTR)
		return v4l_queue_free_bufs(q, from);
	return v4l_queue_munmap_bufs(f, q, from);
}


static inline bool v4l_queue_has_expbuf(struct v4l_fd *f)
{
	struct v4l2_exportbuffer expbuf;

	memset(&expbuf, 0, sizeof(expbuf));
	return v4l_ioctl(f, VIDIOC_EXPBUF, &expbuf) != ENOTTY;
}

static inline int v4l_queue_export_bufs(struct v4l_fd *f, struct v4l_queue *q,
					unsigned exp_type)
{
	struct v4l2_exportbuffer expbuf;
	unsigned b, p;
	int ret = 0;

	expbuf.type = exp_type ? : f->type;
	expbuf.flags = O_RDWR;
	memset(expbuf.reserved, 0, sizeof(expbuf.reserved));
	for (b = 0; b < v4l_queue_g_buffers(q); b++) {
		expbuf.index = b;
		for (p = 0; p < v4l_queue_g_num_planes(q); p++) {
			expbuf.plane = p;
			ret = v4l_ioctl(f, VIDIOC_EXPBUF, &expbuf);
			if (ret)
				return ret;
			v4l_queue_s_fd(q, b, p, expbuf.fd);
		}
	}
	return 0;
}
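
/*
 * Illustrative usage sketch (not part of the original header): export the
 * buffers of an MMAP queue as DMABUF file descriptors, e.g. to hand them to
 * another device. The fds are stored per buffer/plane in the queue and can
 * later be closed with v4l_queue_close_exported_fds(); share_dmabuf_fd() is
 * a hypothetical consumer.
 *
 *	if (v4l_queue_has_expbuf(f) && !v4l_queue_export_bufs(f, q, 0))
 *		share_dmabuf_fd(v4l_queue_g_fd(q, 0, 0));
 */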

static inline void v4l_queue_close_exported_fds(struct v4l_queue *q)
{
	unsigned b, p;

	if (q->memory != V4L2_MEMORY_MMAP)
		return;

	for (b = 0; b < v4l_queue_g_buffers(q); b++) {
		for (p = 0; p < v4l_queue_g_num_planes(q); p++) {
			int fd = v4l_queue_g_fd(q, b, p);

			if (fd != -1) {
				close(fd);
				v4l_queue_s_fd(q, b, p, -1);
			}
		}
	}
}

static inline void v4l_queue_free(struct v4l_fd *f, struct v4l_queue *q)
{
	v4l_ioctl(f, VIDIOC_STREAMOFF, &q->type);
	v4l_queue_release_bufs(f, q, 0);
	v4l_queue_close_exported_fds(q);
	v4l_queue_reqbufs(f, q, 0, 0);
	v4l_queue_free_bufs_info(q);
}

static inline void v4l_queue_buffer_update(const struct v4l_queue *q,
					   struct v4l_buffer *buf, unsigned index)
{
	unsigned p;

	if (v4l_type_is_planar(q->type)) {
		buf->buf.length = v4l_queue_g_num_planes(q);
		buf->buf.m.planes = buf->planes;
	}
	switch (q->memory) {
	case V4L2_MEMORY_USERPTR:
		for (p = 0; p < v4l_queue_g_num_planes(q); p++) {
			v4l_buffer_s_userptr(buf, p, v4l_queue_g_userptr(q, index, p));
			v4l_buffer_s_length(buf, p, v4l_queue_g_length(q, p));
		}
		break;
	case V4L2_MEMORY_DMABUF:
		for (p = 0; p < v4l_queue_g_num_planes(q); p++)
			v4l_buffer_s_fd(buf, p, v4l_queue_g_fd(q, index, p));
		break;
	default:
		break;
	}
}

static inline void v4l_queue_buffer_init(const struct v4l_queue *q, struct v4l_buffer *buf, unsigned index)
{
	v4l_buffer_init(buf, v4l_queue_g_type(q), v4l_queue_g_memory(q), index);
	v4l_queue_buffer_update(q, buf, index);
}
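
/*
 * Illustrative usage sketch (not part of the original header): queue all
 * buffers of a prepared queue before streaming on. v4l_queue_buffer_init()
 * fills in the userptr/dmabuf and plane information that VIDIOC_QBUF expects.
 *
 *	unsigned b;
 *
 *	for (b = 0; b < v4l_queue_g_buffers(q); b++) {
 *		struct v4l_buffer buf;
 *
 *		v4l_queue_buffer_init(q, &buf, b);
 *		if (v4l_ioctl(f, VIDIOC_QBUF, &buf.buf))
 *			break;
 *	}
 */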

static inline int v4l_query_ext_ctrl(v4l_fd *f, struct v4l2_query_ext_ctrl *qec,
		bool next_ctrl, bool next_compound)
{
	struct v4l2_queryctrl qc;
	int ret;

	if (next_compound && !f->have_query_ext_ctrl) {
		if (!next_ctrl)
			return -EINVAL;
		next_compound = false;
	}
	if (next_compound)
		qec->id |= V4L2_CTRL_FLAG_NEXT_COMPOUND;
	if (next_ctrl) {
		if (f->have_next_ctrl)
			qec->id |= V4L2_CTRL_FLAG_NEXT_CTRL;
		else
			qec->id = qec->id ? qec->id + 1 : V4L2_CID_BASE;
	}
	if (f->have_query_ext_ctrl)
		return v4l_ioctl(f, VIDIOC_QUERY_EXT_CTRL, qec);

	for (;;) {
		if (qec->id == V4L2_CID_LASTP1 && next_ctrl)
			qec->id = V4L2_CID_PRIVATE_BASE;
		qc.id = qec->id;
		ret = v4l_ioctl(f, VIDIOC_QUERYCTRL, &qc);
		if (!ret)
			break;
		if (ret != EINVAL)
			return ret;
		if (!next_ctrl || f->have_next_ctrl)
			return ret;
		if (qec->id >= V4L2_CID_PRIVATE_BASE)
			return ret;
		qec->id++;
	}
	qec->id = qc.id;
	qec->type = qc.type;
	memcpy(qec->name, qc.name, sizeof(qec->name));
	qec->minimum = qc.minimum;
	if (qc.type == V4L2_CTRL_TYPE_BITMASK) {
		qec->maximum = (__u32)qc.maximum;
		qec->default_value = (__u32)qc.default_value;
	} else {
		qec->maximum = qc.maximum;
		qec->default_value = qc.default_value;
	}
	qec->step = qc.step;
	qec->flags = qc.flags;
	qec->elems = 1;
	qec->nr_of_dims = 0;
	memset(qec->dims, 0, sizeof(qec->dims));
	switch (qec->type) {
	case V4L2_CTRL_TYPE_INTEGER64:
		qec->elem_size = sizeof(__s64);
		qec->minimum = 0x8000000000000000ULL;
		qec->maximum = 0x7fffffffffffffffULL;
		qec->step = 1;
		break;
	case V4L2_CTRL_TYPE_STRING:
		qec->elem_size = qc.maximum + 1;
		qec->flags |= V4L2_CTRL_FLAG_HAS_PAYLOAD;
		break;
	default:
		qec->elem_size = sizeof(__s32);
		break;
	}
	memset(qec->reserved, 0, sizeof(qec->reserved));
	return 0;
}
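
/*
 * Illustrative usage sketch (not part of the original header): enumerate all
 * controls of a device, using the VIDIOC_QUERYCTRL fallback above on drivers
 * without VIDIOC_QUERY_EXT_CTRL. Disabled controls are skipped.
 *
 *	struct v4l2_query_ext_ctrl qec;
 *
 *	memset(&qec, 0, sizeof(qec));
 *	while (!v4l_query_ext_ctrl(f, &qec, true, true)) {
 *		if (!(qec.flags & V4L2_CTRL_FLAG_DISABLED))
 *			printf("%s\n", qec.name);
 *	}
 */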

static inline int v4l_g_ext_ctrls(v4l_fd *f, struct v4l2_ext_controls *ec)
{
	unsigned i;

	if (f->have_ext_ctrls)
		return v4l_ioctl(f, VIDIOC_G_EXT_CTRLS, ec);
	if (ec->count == 0)
		return 0;
	for (i = 0; i < ec->count; i++) {
		struct v4l2_control c = { ec->controls[i].id, 0 };
		int ret = v4l_ioctl(f, VIDIOC_G_CTRL, &c);

		if (ret) {
			ec->error_idx = i;
			return ret;
		}
		ec->controls[i].value = c.value;
	}
	return 0;
}

static inline int v4l_s_ext_ctrls(v4l_fd *f, struct v4l2_ext_controls *ec)
{
	unsigned i;

	if (f->have_ext_ctrls)
		return v4l_ioctl(f, VIDIOC_S_EXT_CTRLS, ec);
	if (ec->count == 0)
		return 0;
	for (i = 0; i < ec->count; i++) {
		struct v4l2_control c = { ec->controls[i].id, ec->controls[i].value };
		int ret = v4l_ioctl(f, VIDIOC_S_CTRL, &c);

		if (ret) {
			ec->error_idx = i;
			return ret;
		}
	}
	return 0;
}

static inline int v4l_try_ext_ctrls(v4l_fd *f, struct v4l2_ext_controls *ec)
{
	unsigned i;

	if (f->have_ext_ctrls)
		return v4l_ioctl(f, VIDIOC_TRY_EXT_CTRLS, ec);
	if (ec->count == 0)
		return 0;
	for (i = 0; i < ec->count; i++) {
		struct v4l2_queryctrl qc;
		int ret;

		memset(&qc, 0, sizeof(qc));
		qc.id = ec->controls[i].id;
		ret = v4l_ioctl(f, VIDIOC_QUERYCTRL, &qc);

		if (ret || qc.type == V4L2_CTRL_TYPE_STRING ||
			   qc.type == V4L2_CTRL_TYPE_INTEGER64) {
			ec->error_idx = i;
			return ret ? ret : EINVAL;
		}
	}
	return 0;
}
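
/*
 * Illustrative usage sketch (not part of the original header): set a single
 * control (brightness, arbitrary value) through the extended control API.
 * On drivers without ext ctrl support this falls back to VIDIOC_S_CTRL as
 * implemented above.
 *
 *	struct v4l2_ext_control ctrl;
 *	struct v4l2_ext_controls ctrls;
 *
 *	memset(&ctrl, 0, sizeof(ctrl));
 *	memset(&ctrls, 0, sizeof(ctrls));
 *	ctrl.id = V4L2_CID_BRIGHTNESS;
 *	ctrl.value = 128;
 *	ctrls.which = V4L2_CTRL_WHICH_CUR_VAL;
 *	ctrls.count = 1;
 *	ctrls.controls = &ctrl;
 *	if (v4l_s_ext_ctrls(f, &ctrls))
 *		fprintf(stderr, "s_ctrl failed at index %u\n", ctrls.error_idx);
 */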

static inline int v4l_g_selection(v4l_fd *f, struct v4l2_selection *sel)
{
	struct v4l2_cropcap cc;
	struct v4l2_crop crop;
	int ret;

	if (f->have_selection)
		return v4l_ioctl(f, VIDIOC_G_SELECTION, sel);
	crop.type = sel->type;
	cc.type = sel->type;
	ret = v4l_ioctl(f, VIDIOC_CROPCAP, &cc);
	if (ret)
		return ret;
	ret = v4l_ioctl(f, VIDIOC_G_CROP, &crop);
	if (ret)
		return ret;
	if (sel->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
		switch (sel->target) {
		case V4L2_SEL_TGT_CROP:
			sel->r = crop.c;
			return 0;
		case V4L2_SEL_TGT_CROP_DEFAULT:
			sel->r = cc.defrect;
			return 0;
		case V4L2_SEL_TGT_CROP_BOUNDS:
			sel->r = cc.bounds;
			return 0;
		default:
			return EINVAL;
		}
	}
	switch (sel->target) {
	case V4L2_SEL_TGT_COMPOSE:
		sel->r = crop.c;
		return 0;
	case V4L2_SEL_TGT_COMPOSE_DEFAULT:
		sel->r = cc.defrect;
		return 0;
	case V4L2_SEL_TGT_COMPOSE_BOUNDS:
		sel->r = cc.bounds;
		return 0;
	default:
		return EINVAL;
	}
}

static inline int v4l_s_selection(v4l_fd *f, struct v4l2_selection *sel)
{
	struct v4l2_crop crop;
	int ret;

	if (f->have_selection)
		return v4l_ioctl(f, VIDIOC_S_SELECTION, sel);
	crop.type = sel->type;
	ret = v4l_ioctl(f, VIDIOC_G_CROP, &crop);
	if (ret)
		return ret;
	if (sel->type == V4L2_BUF_TYPE_VIDEO_CAPTURE &&
	    sel->target == V4L2_SEL_TGT_CROP) {
		crop.c = sel->r;
		return v4l_ioctl(f, VIDIOC_S_CROP, &crop);
	}
	if (sel->type == V4L2_BUF_TYPE_VIDEO_OUTPUT &&
	    sel->target == V4L2_SEL_TGT_COMPOSE) {
		crop.c = sel->r;
		return v4l_ioctl(f, VIDIOC_S_CROP, &crop);
	}
	return EINVAL;
}
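
/*
 * Illustrative usage sketch (not part of the original header): read the
 * active capture crop rectangle; on drivers without VIDIOC_G_SELECTION this
 * transparently uses the VIDIOC_CROPCAP/VIDIOC_G_CROP fallback above.
 *
 *	struct v4l2_selection sel;
 *
 *	memset(&sel, 0, sizeof(sel));
 *	sel.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
 *	sel.target = V4L2_SEL_TGT_CROP;
 *	if (!v4l_g_selection(f, &sel))
 *		printf("crop: %ux%u@%d,%d\n", sel.r.width, sel.r.height,
 *		       sel.r.left, sel.r.top);
 */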

static inline void v4l_frame_selection(struct v4l2_selection *sel, bool to_frame)
{
	switch (sel->target) {
	case V4L2_SEL_TGT_CROP:
	case V4L2_SEL_TGT_CROP_DEFAULT:
	case V4L2_SEL_TGT_CROP_BOUNDS:
		if (V4L2_TYPE_IS_CAPTURE(sel->type))
			return;
		break;
	case V4L2_SEL_TGT_COMPOSE:
	case V4L2_SEL_TGT_COMPOSE_DEFAULT:
	case V4L2_SEL_TGT_COMPOSE_BOUNDS:
	case V4L2_SEL_TGT_COMPOSE_PADDED:
		if (V4L2_TYPE_IS_OUTPUT(sel->type))
			return;
		break;
	default:
		return;
	}
	if (to_frame) {
		sel->r.top *= 2;
		sel->r.height *= 2;
	} else {
		sel->r.top /= 2;
		sel->r.height /= 2;
	}
}

static inline int v4l_g_frame_selection(v4l_fd *f, struct v4l2_selection *sel, __u32 field)
{
	int ret = v4l_g_selection(f, sel);

	if (V4L2_FIELD_HAS_T_OR_B(field))
		v4l_frame_selection(sel, true);
	return ret;
}

static inline int v4l_s_frame_selection(v4l_fd *f, struct v4l2_selection *sel, __u32 field)
{
	int ret;

	if (V4L2_FIELD_HAS_T_OR_B(field))
		v4l_frame_selection(sel, false);
	ret = v4l_s_selection(f, sel);
	if (V4L2_FIELD_HAS_T_OR_B(field))
		v4l_frame_selection(sel, true);
	return ret;
}
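
/*
 * Illustrative usage sketch (not part of the original header): read the
 * compose rectangle of an interlaced capture stream in frame coordinates.
 * For single-field formats (V4L2_FIELD_TOP/BOTTOM/ALTERNATE) the helpers
 * above double top/height so callers can always work in full-frame units;
 * 'field' is assumed to come from the currently set capture format.
 *
 *	struct v4l2_selection sel;
 *
 *	memset(&sel, 0, sizeof(sel));
 *	sel.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
 *	sel.target = V4L2_SEL_TGT_COMPOSE;
 *	v4l_g_frame_selection(f, &sel, field);
 */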

#ifdef __cplusplus
}
#endif /* __cplusplus */

#endif