Lines Matching full:devices

35    capture devices.
90 static struct v4l2_dev_info devices[V4L2_MAX_DEVICES] = { variable
100 if (devices[index].convert_mmap_buf != MAP_FAILED) { in v4l2_ensure_convert_mmap_buf()
104 devices[index].convert_mmap_buf_size = in v4l2_ensure_convert_mmap_buf()
105 devices[index].convert_mmap_frame_size * devices[index].no_frames; in v4l2_ensure_convert_mmap_buf()
107 devices[index].convert_mmap_buf = (void *)SYS_MMAP(NULL, in v4l2_ensure_convert_mmap_buf()
108 devices[index].convert_mmap_buf_size, in v4l2_ensure_convert_mmap_buf()
113 if (devices[index].convert_mmap_buf == MAP_FAILED) { in v4l2_ensure_convert_mmap_buf()
114 devices[index].convert_mmap_buf_size = 0; in v4l2_ensure_convert_mmap_buf()
132 req.count = (devices[index].no_frames) ? devices[index].no_frames : in v4l2_request_read_buffers()
133 devices[index].nreadbuffers; in v4l2_request_read_buffers()
136 result = devices[index].dev_ops->ioctl(devices[index].dev_ops_priv, in v4l2_request_read_buffers()
137 devices[index].fd, VIDIOC_REQBUFS, &req); in v4l2_request_read_buffers()
146 if (!devices[index].no_frames && req.count) in v4l2_request_read_buffers()
147 devices[index].flags |= V4L2_BUFFERS_REQUESTED_BY_READ; in v4l2_request_read_buffers()
149 devices[index].no_frames = MIN(req.count, V4L2_MAX_NO_FRAMES); in v4l2_request_read_buffers()
157 if (!(devices[index].flags & V4L2_BUFFERS_REQUESTED_BY_READ) || in v4l2_unrequest_read_buffers()
158 devices[index].no_frames == 0) in v4l2_unrequest_read_buffers()
166 if (devices[index].dev_ops->ioctl(devices[index].dev_ops_priv, in v4l2_unrequest_read_buffers()
167 devices[index].fd, VIDIOC_REQBUFS, &req) < 0) in v4l2_unrequest_read_buffers()
170 devices[index].no_frames = MIN(req.count, V4L2_MAX_NO_FRAMES); in v4l2_unrequest_read_buffers()
171 if (devices[index].no_frames == 0) in v4l2_unrequest_read_buffers()
172 devices[index].flags &= ~V4L2_BUFFERS_REQUESTED_BY_READ; in v4l2_unrequest_read_buffers()
181 for (i = 0; i < devices[index].no_frames; i++) { in v4l2_map_buffers()
182 if (devices[index].frame_pointers[i] != MAP_FAILED) in v4l2_map_buffers()
189 result = devices[index].dev_ops->ioctl( in v4l2_map_buffers()
190 devices[index].dev_ops_priv, in v4l2_map_buffers()
191 devices[index].fd, VIDIOC_QUERYBUF, &buf); in v4l2_map_buffers()
200 devices[index].frame_pointers[i] = (void *)SYS_MMAP(NULL, in v4l2_map_buffers()
201 (size_t)buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, devices[index].fd, in v4l2_map_buffers()
203 if (devices[index].frame_pointers[i] == MAP_FAILED) { in v4l2_map_buffers()
212 devices[index].frame_pointers[i]); in v4l2_map_buffers()
214 devices[index].frame_sizes[i] = buf.length; in v4l2_map_buffers()
225 for (i = 0; i < devices[index].no_frames; i++) { in v4l2_unmap_buffers()
226 if (devices[index].frame_pointers[i] != MAP_FAILED) { in v4l2_unmap_buffers()
227 SYS_MUNMAP(devices[index].frame_pointers[i], in v4l2_unmap_buffers()
228 devices[index].frame_sizes[i]); in v4l2_unmap_buffers()
229 devices[index].frame_pointers[i] = MAP_FAILED; in v4l2_unmap_buffers()
240 if (!(devices[index].flags & V4L2_STREAMON)) { in v4l2_streamon()
241 result = devices[index].dev_ops->ioctl( in v4l2_streamon()
242 devices[index].dev_ops_priv, in v4l2_streamon()
243 devices[index].fd, VIDIOC_STREAMON, &type); in v4l2_streamon()
251 devices[index].flags |= V4L2_STREAMON; in v4l2_streamon()
252 devices[index].first_frame = V4L2_IGNORE_FIRST_FRAME_ERRORS; in v4l2_streamon()
263 if (devices[index].flags & V4L2_STREAMON) { in v4l2_streamoff()
264 result = devices[index].dev_ops->ioctl( in v4l2_streamoff()
265 devices[index].dev_ops_priv, in v4l2_streamoff()
266 devices[index].fd, VIDIOC_STREAMOFF, &type); in v4l2_streamoff()
274 devices[index].flags &= ~V4L2_STREAMON; in v4l2_streamoff()
277 devices[index].frame_queued = 0; in v4l2_streamoff()
288 if (devices[index].frame_queued & (1 << buffer_index)) in v4l2_queue_read_buffer()
295 result = devices[index].dev_ops->ioctl(devices[index].dev_ops_priv, in v4l2_queue_read_buffer()
296 devices[index].fd, VIDIOC_QBUF, &buf); in v4l2_queue_read_buffer()
305 devices[index].frame_queued |= 1 << buffer_index; in v4l2_queue_read_buffer()
321 frame_info_gen = devices[index].frame_info_generation; in v4l2_dequeue_and_convert()
322 pthread_mutex_unlock(&devices[index].stream_lock); in v4l2_dequeue_and_convert()
323 result = devices[index].dev_ops->ioctl( in v4l2_dequeue_and_convert()
324 devices[index].dev_ops_priv, in v4l2_dequeue_and_convert()
325 devices[index].fd, VIDIOC_DQBUF, buf); in v4l2_dequeue_and_convert()
326 pthread_mutex_lock(&devices[index].stream_lock); in v4l2_dequeue_and_convert()
337 devices[index].frame_queued &= ~(1 << buf->index); in v4l2_dequeue_and_convert()
339 if (frame_info_gen != devices[index].frame_info_generation) { in v4l2_dequeue_and_convert()
344 result = v4lconvert_convert(devices[index].convert, in v4l2_dequeue_and_convert()
345 &devices[index].src_fmt, &devices[index].dest_fmt, in v4l2_dequeue_and_convert()
346 devices[index].frame_pointers[buf->index], in v4l2_dequeue_and_convert()
347 buf->bytesused, dest ? dest : (devices[index].convert_mmap_buf + in v4l2_dequeue_and_convert()
348 buf->index * devices[index].convert_mmap_frame_size), in v4l2_dequeue_and_convert()
351 if (devices[index].first_frame) { in v4l2_dequeue_and_convert()
357 devices[index].first_frame--; in v4l2_dequeue_and_convert()
365 v4lconvert_get_error_message(devices[index].convert)); in v4l2_dequeue_and_convert()
368 v4lconvert_get_error_message(devices[index].convert)); in v4l2_dequeue_and_convert()
384 max_tries, v4lconvert_get_error_message(devices[index].convert)); in v4l2_dequeue_and_convert()
391 result = devices[index].dest_fmt.fmt.pix.sizeimage; in v4l2_dequeue_and_convert()
403 buf_size = devices[index].dest_fmt.fmt.pix.sizeimage; in v4l2_read_and_convert()
405 if (devices[index].readbuf_size < buf_size) { in v4l2_read_and_convert()
408 new_buf = realloc(devices[index].readbuf, buf_size); in v4l2_read_and_convert()
412 devices[index].readbuf = new_buf; in v4l2_read_and_convert()
413 devices[index].readbuf_size = buf_size; in v4l2_read_and_convert()
417 result = devices[index].dev_ops->read( in v4l2_read_and_convert()
418 devices[index].dev_ops_priv, in v4l2_read_and_convert()
419 devices[index].fd, devices[index].readbuf, in v4l2_read_and_convert()
431 result = v4lconvert_convert(devices[index].convert, in v4l2_read_and_convert()
432 &devices[index].src_fmt, &devices[index].dest_fmt, in v4l2_read_and_convert()
433 devices[index].readbuf, result, dest, dest_size); in v4l2_read_and_convert()
435 if (devices[index].first_frame) { in v4l2_read_and_convert()
441 devices[index].first_frame--; in v4l2_read_and_convert()
449 v4lconvert_get_error_message(devices[index].convert)); in v4l2_read_and_convert()
452 v4lconvert_get_error_message(devices[index].convert)); in v4l2_read_and_convert()
461 max_tries, v4lconvert_get_error_message(devices[index].convert)); in v4l2_read_and_convert()
468 result = devices[index].dest_fmt.fmt.pix.sizeimage; in v4l2_read_and_convert()
480 for (i = 0; i < devices[index].no_frames; i++) { in v4l2_queue_read_buffers()
482 if (devices[index].frame_pointers[i] != MAP_FAILED) { in v4l2_queue_read_buffers()
502 if ((devices[index].flags & V4L2_STREAMON) || devices[index].frame_queued) { in v4l2_activate_read_stream()
515 devices[index].flags |= V4L2_STREAM_CONTROLLED_BY_READ; in v4l2_activate_read_stream()
534 devices[index].flags &= ~V4L2_STREAM_CONTROLLED_BY_READ; in v4l2_deactivate_read_stream()
541 if (devices[index].convert == NULL) in v4l2_needs_conversion()
544 return v4lconvert_needs_conversion(devices[index].convert, in v4l2_needs_conversion()
545 &devices[index].src_fmt, &devices[index].dest_fmt); in v4l2_needs_conversion()
554 if (buf->index >= devices[index].no_frames) in v4l2_set_conversion_buf_params()
558 buf->length = devices[index].convert_mmap_frame_size; in v4l2_set_conversion_buf_params()
559 if (devices[index].frame_map_count[buf->index]) in v4l2_set_conversion_buf_params()
573 for (i = 0; i < devices[index].no_frames; i++) { in v4l2_buffers_mapped()
578 if (devices[index].dev_ops->ioctl( in v4l2_buffers_mapped()
579 devices[index].dev_ops_priv, in v4l2_buffers_mapped()
580 devices[index].fd, VIDIOC_QUERYBUF, in v4l2_buffers_mapped()
593 for (i = 0; i < devices[index].no_frames; i++) in v4l2_buffers_mapped()
594 if (devices[index].frame_map_count[i]) in v4l2_buffers_mapped()
598 if (i != devices[index].no_frames) in v4l2_buffers_mapped()
601 return i != devices[index].no_frames; in v4l2_buffers_mapped()
606 if ((devices[index].flags & V4L2_SUPPORTS_TIMEPERFRAME) && in v4l2_update_fps()
611 devices[index].fps = fps; in v4l2_update_fps()
613 devices[index].fps = 0; in v4l2_update_fps()
727 /* So we have a v4l2 capture device, register it in our devices array */ in v4l2_fd_open()
730 if (devices[index].fd == -1) { in v4l2_fd_open()
731 devices[index].fd = fd; in v4l2_fd_open()
732 devices[index].plugin_library = plugin_library; in v4l2_fd_open()
733 devices[index].dev_ops_priv = dev_ops_priv; in v4l2_fd_open()
734 devices[index].dev_ops = dev_ops; in v4l2_fd_open()
741 V4L2_LOG_ERR("attempting to open more than %d video devices\n", in v4l2_fd_open()
748 devices[index].flags = v4l2_flags; in v4l2_fd_open()
750 devices[index].flags |= V4L2_SUPPORTS_READ; in v4l2_fd_open()
752 devices[index].flags |= V4L2_USE_READ_FOR_READ; in v4l2_fd_open()
755 devices[index].first_frame = V4L2_IGNORE_FIRST_FRAME_ERRORS; in v4l2_fd_open()
759 devices[index].flags |= V4L2_SUPPORTS_TIMEPERFRAME; in v4l2_fd_open()
760 devices[index].open_count = 1; in v4l2_fd_open()
761 devices[index].page_size = page_size; in v4l2_fd_open()
762 devices[index].src_fmt = fmt; in v4l2_fd_open()
763 devices[index].dest_fmt = fmt; in v4l2_fd_open()
764 v4l2_set_src_and_dest_format(index, &devices[index].src_fmt, in v4l2_fd_open()
765 &devices[index].dest_fmt); in v4l2_fd_open()
767 pthread_mutex_init(&devices[index].stream_lock, NULL); in v4l2_fd_open()
769 devices[index].no_frames = 0; in v4l2_fd_open()
770 devices[index].nreadbuffers = V4L2_DEFAULT_NREADBUFFERS; in v4l2_fd_open()
771 devices[index].convert = convert; in v4l2_fd_open()
772 devices[index].convert_mmap_buf = MAP_FAILED; in v4l2_fd_open()
773 devices[index].convert_mmap_buf_size = 0; in v4l2_fd_open()
775 devices[index].frame_pointers[i] = MAP_FAILED; in v4l2_fd_open()
776 devices[index].frame_map_count[i] = 0; in v4l2_fd_open()
778 devices[index].frame_queued = 0; in v4l2_fd_open()
779 devices[index].readbuf = NULL; in v4l2_fd_open()
780 devices[index].readbuf_size = 0; in v4l2_fd_open()
788 if (devices[index].convert) in v4l2_fd_open()
789 v4lconvert_set_fps(devices[index].convert, V4L2_DEFAULT_FPS); in v4l2_fd_open()
807 if (devices[index].fd == fd) in v4l2_get_index()
827 pthread_mutex_lock(&devices[index].stream_lock); in v4l2_close()
828 devices[index].open_count--; in v4l2_close()
829 result = devices[index].open_count != 0; in v4l2_close()
830 pthread_mutex_unlock(&devices[index].stream_lock); in v4l2_close()
835 v4l2_plugin_cleanup(devices[index].plugin_library, in v4l2_close()
836 devices[index].dev_ops_priv, in v4l2_close()
837 devices[index].dev_ops); in v4l2_close()
841 if (devices[index].convert_mmap_buf != MAP_FAILED) { in v4l2_close()
843 if (!devices[index].gone) in v4l2_close()
846 SYS_MUNMAP(devices[index].convert_mmap_buf, in v4l2_close()
847 devices[index].convert_mmap_buf_size); in v4l2_close()
849 devices[index].convert_mmap_buf = MAP_FAILED; in v4l2_close()
850 devices[index].convert_mmap_buf_size = 0; in v4l2_close()
852 v4lconvert_destroy(devices[index].convert); in v4l2_close()
853 free(devices[index].readbuf); in v4l2_close()
854 devices[index].readbuf = NULL; in v4l2_close()
855 devices[index].readbuf_size = 0; in v4l2_close()
860 devices[index].fd = -1; in v4l2_close()
880 devices[index].open_count++; in v4l2_dup()
887 devices[index].frame_info_generation++; in v4l2_check_buffer_change_ok()
892 (!(devices[index].flags & V4L2_STREAM_CONTROLLED_BY_READ) && in v4l2_check_buffer_change_ok()
893 ((devices[index].flags & V4L2_STREAMON) || in v4l2_check_buffer_change_ok()
894 devices[index].frame_queued))) { in v4l2_check_buffer_change_ok()
903 SYS_MUNMAP(devices[index].convert_mmap_buf, in v4l2_check_buffer_change_ok()
904 devices[index].convert_mmap_buf_size); in v4l2_check_buffer_change_ok()
905 devices[index].convert_mmap_buf = MAP_FAILED; in v4l2_check_buffer_change_ok()
906 devices[index].convert_mmap_buf_size = 0; in v4l2_check_buffer_change_ok()
908 if (devices[index].flags & V4L2_STREAM_CONTROLLED_BY_READ) { in v4l2_check_buffer_change_ok()
968 devices[index].src_fmt = *src_fmt; in v4l2_set_src_and_dest_format()
969 devices[index].dest_fmt = *dest_fmt; in v4l2_set_src_and_dest_format()
971 devices[index].convert_mmap_frame_size = in v4l2_set_src_and_dest_format()
972 (((dest_fmt->fmt.pix.sizeimage + devices[index].page_size - 1) in v4l2_set_src_and_dest_format()
973 / devices[index].page_size) * devices[index].page_size); in v4l2_set_src_and_dest_format()
992 result = v4lconvert_try_format(devices[index].convert, in v4l2_s_fmt()
1016 result = devices[index].dev_ops->ioctl(devices[index].dev_ops_priv, in v4l2_s_fmt()
1017 devices[index].fd, in v4l2_s_fmt()
1023 *dest_fmt = devices[index].dest_fmt; in v4l2_s_fmt()
1040 if (devices[index].flags & V4L2_SUPPORTS_TIMEPERFRAME) { in v4l2_s_fmt()
1044 if (devices[index].dev_ops->ioctl(devices[index].dev_ops_priv, in v4l2_s_fmt()
1045 devices[index].fd, in v4l2_s_fmt()
1074 if (devices[index].convert == NULL) in v4l2_ioctl()
1136 if (devices[index].flags & V4L2_SUPPORTS_TIMEPERFRAME) in v4l2_ioctl()
1150 result = devices[index].dev_ops->ioctl( in v4l2_ioctl()
1151 devices[index].dev_ops_priv, in v4l2_ioctl()
1161 pthread_mutex_lock(&devices[index].stream_lock); in v4l2_ioctl()
1166 if (!(devices[index].flags & V4L2_STREAM_TOUCHED) && in v4l2_ioctl()
1167 v4lconvert_supported_dst_fmt_only(devices[index].convert) && in v4l2_ioctl()
1169 devices[index].dest_fmt.fmt.pix.pixelformat)) { in v4l2_ioctl()
1170 struct v4l2_format fmt = devices[index].dest_fmt; in v4l2_ioctl()
1177 devices[index].flags |= V4L2_STREAM_TOUCHED; in v4l2_ioctl()
1182 result = v4lconvert_vidioc_queryctrl(devices[index].convert, arg); in v4l2_ioctl()
1186 result = v4lconvert_vidioc_g_ctrl(devices[index].convert, arg); in v4l2_ioctl()
1190 result = v4lconvert_vidioc_s_ctrl(devices[index].convert, arg); in v4l2_ioctl()
1194 result = v4lconvert_vidioc_g_ext_ctrls(devices[index].convert, arg); in v4l2_ioctl()
1198 result = v4lconvert_vidioc_try_ext_ctrls(devices[index].convert, arg); in v4l2_ioctl()
1202 result = v4lconvert_vidioc_s_ext_ctrls(devices[index].convert, arg); in v4l2_ioctl()
1208 result = devices[index].dev_ops->ioctl( in v4l2_ioctl()
1209 devices[index].dev_ops_priv, in v4l2_ioctl()
1220 result = v4lconvert_enum_fmt(devices[index].convert, arg); in v4l2_ioctl()
1224 result = v4lconvert_enum_framesizes(devices[index].convert, arg); in v4l2_ioctl()
1228 result = v4lconvert_enum_frameintervals(devices[index].convert, arg); in v4l2_ioctl()
1231 v4lconvert_get_error_message(devices[index].convert)); in v4l2_ioctl()
1235 result = v4lconvert_try_format(devices[index].convert, in v4l2_ioctl()
1246 *fmt = devices[index].dest_fmt; in v4l2_ioctl()
1256 devices[index].dest_fmt.fmt.pix.pixelformat; in v4l2_ioctl()
1258 result = devices[index].dev_ops->ioctl( in v4l2_ioctl()
1259 devices[index].dev_ops_priv, in v4l2_ioctl()
1266 result = devices[index].dev_ops->ioctl( in v4l2_ioctl()
1267 devices[index].dev_ops_priv, in v4l2_ioctl()
1276 if (v4l2_pix_fmt_compat(&devices[index].src_fmt, &src_fmt)) { in v4l2_ioctl()
1278 &devices[index].dest_fmt); in v4l2_ioctl()
1283 devices[index].src_fmt = src_fmt; in v4l2_ioctl()
1284 devices[index].dest_fmt = src_fmt; in v4l2_ioctl()
1285 v4l2_set_src_and_dest_format(index, &devices[index].src_fmt, in v4l2_ioctl()
1286 &devices[index].dest_fmt); in v4l2_ioctl()
1317 result = devices[index].dev_ops->ioctl( in v4l2_ioctl()
1318 devices[index].dev_ops_priv, in v4l2_ioctl()
1324 devices[index].no_frames = MIN(req->count, V4L2_MAX_NO_FRAMES); in v4l2_ioctl()
1325 devices[index].flags &= ~V4L2_BUFFERS_REQUESTED_BY_READ; in v4l2_ioctl()
1332 if (devices[index].flags & V4L2_STREAM_CONTROLLED_BY_READ) { in v4l2_ioctl()
1340 result = devices[index].dev_ops->ioctl( in v4l2_ioctl()
1341 devices[index].dev_ops_priv, in v4l2_ioctl()
1351 if (devices[index].flags & V4L2_STREAM_CONTROLLED_BY_READ) { in v4l2_ioctl()
1364 result = devices[index].dev_ops->ioctl( in v4l2_ioctl()
1365 devices[index].dev_ops_priv, in v4l2_ioctl()
1375 if (devices[index].flags & V4L2_STREAM_CONTROLLED_BY_READ) { in v4l2_ioctl()
1382 pthread_mutex_unlock(&devices[index].stream_lock); in v4l2_ioctl()
1383 result = devices[index].dev_ops->ioctl( in v4l2_ioctl()
1384 devices[index].dev_ops_priv, in v4l2_ioctl()
1386 pthread_mutex_lock(&devices[index].stream_lock); in v4l2_ioctl()
1403 devices[index].convert_mmap_frame_size); in v4l2_ioctl()
1415 if (devices[index].flags & V4L2_STREAM_CONTROLLED_BY_READ) { in v4l2_ioctl()
1432 if ((devices[index].flags & V4L2_SUPPORTS_TIMEPERFRAME) && in v4l2_ioctl()
1440 result = devices[index].dev_ops->ioctl( in v4l2_ioctl()
1441 devices[index].dev_ops_priv, in v4l2_ioctl()
1451 result = devices[index].dev_ops->ioctl( in v4l2_ioctl()
1452 devices[index].dev_ops_priv, in v4l2_ioctl()
1458 pthread_mutex_unlock(&devices[index].stream_lock); in v4l2_ioctl()
1471 struct v4l2_format dest_fmt = devices[index].dest_fmt; in v4l2_adjust_src_fmt_to_fps()
1472 struct v4l2_format orig_src_fmt = devices[index].src_fmt; in v4l2_adjust_src_fmt_to_fps()
1473 struct v4l2_format orig_dest_fmt = devices[index].dest_fmt; in v4l2_adjust_src_fmt_to_fps()
1476 if (fps == devices[index].fps) in v4l2_adjust_src_fmt_to_fps()
1482 v4lconvert_set_fps(devices[index].convert, fps); in v4l2_adjust_src_fmt_to_fps()
1483 r = v4lconvert_try_format(devices[index].convert, &dest_fmt, &src_fmt); in v4l2_adjust_src_fmt_to_fps()
1484 v4lconvert_set_fps(devices[index].convert, V4L2_DEFAULT_FPS); in v4l2_adjust_src_fmt_to_fps()
1493 if (devices[index].dev_ops->ioctl(devices[index].dev_ops_priv, in v4l2_adjust_src_fmt_to_fps()
1494 devices[index].fd, VIDIOC_S_FMT, &src_fmt)) in v4l2_adjust_src_fmt_to_fps()
1521 if (devices[index].dev_ops->ioctl(devices[index].dev_ops_priv, in v4l2_adjust_src_fmt_to_fps()
1522 devices[index].fd, VIDIOC_S_FMT, &src_fmt)) { in v4l2_adjust_src_fmt_to_fps()
1543 if (!devices[index].dev_ops->read) { in v4l2_read()
1548 pthread_mutex_lock(&devices[index].stream_lock); in v4l2_read()
1552 if (devices[index].convert == NULL || in v4l2_read()
1553 ((devices[index].flags & V4L2_SUPPORTS_READ) && in v4l2_read()
1555 result = devices[index].dev_ops->read( in v4l2_read()
1556 devices[index].dev_ops_priv, in v4l2_read()
1569 if (!(devices[index].flags & V4L2_STREAM_CONTROLLED_BY_READ) && in v4l2_read()
1570 !(devices[index].flags & V4L2_USE_READ_FOR_READ)) { in v4l2_read()
1574 devices[index].flags |= V4L2_USE_READ_FOR_READ; in v4l2_read()
1576 devices[index].first_frame = V4L2_IGNORE_FIRST_FRAME_ERRORS; in v4l2_read()
1580 if (devices[index].flags & V4L2_USE_READ_FOR_READ) { in v4l2_read()
1595 pthread_mutex_unlock(&devices[index].stream_lock); in v4l2_read()
1608 if (!devices[index].dev_ops->write) { in v4l2_write()
1613 return devices[index].dev_ops->write( in v4l2_write()
1614 devices[index].dev_ops_priv, fd, buffer, n); in v4l2_write()
1628 start || length != devices[index].convert_mmap_frame_size || in v4l2_mmap()
1642 pthread_mutex_lock(&devices[index].stream_lock); in v4l2_mmap()
1645 if (buffer_index >= devices[index].no_frames || in v4l2_mmap()
1659 devices[index].frame_map_count[buffer_index]++; in v4l2_mmap()
1661 result = devices[index].convert_mmap_buf + in v4l2_mmap()
1662 buffer_index * devices[index].convert_mmap_frame_size; in v4l2_mmap()
1668 pthread_mutex_unlock(&devices[index].stream_lock); in v4l2_mmap()
1682 if (devices[index].fd != -1 && in v4l2_munmap()
1683 devices[index].convert_mmap_buf != MAP_FAILED && in v4l2_munmap()
1684 length == devices[index].convert_mmap_frame_size && in v4l2_munmap()
1685 start >= devices[index].convert_mmap_buf && in v4l2_munmap()
1686 (start - devices[index].convert_mmap_buf) % length == 0) in v4l2_munmap()
1692 pthread_mutex_lock(&devices[index].stream_lock); in v4l2_munmap()
1694 buffer_index = (start - devices[index].convert_mmap_buf) / length; in v4l2_munmap()
1697 if (devices[index].convert_mmap_buf != MAP_FAILED && in v4l2_munmap()
1698 length == devices[index].convert_mmap_frame_size && in v4l2_munmap()
1699 start >= devices[index].convert_mmap_buf && in v4l2_munmap()
1700 (start - devices[index].convert_mmap_buf) % length == 0 && in v4l2_munmap()
1701 buffer_index < devices[index].no_frames) { in v4l2_munmap()
1702 if (devices[index].frame_map_count[buffer_index] > 0) in v4l2_munmap()
1703 devices[index].frame_map_count[buffer_index]--; in v4l2_munmap()
1707 pthread_mutex_unlock(&devices[index].stream_lock); in v4l2_munmap()
1729 if (index == -1 || devices[index].convert == NULL) { in v4l2_set_control()
1735 result = v4lconvert_vidioc_queryctrl(devices[index].convert, &qctrl); in v4l2_set_control()
1747 result = v4lconvert_vidioc_s_ctrl(devices[index].convert, &ctrl); in v4l2_set_control()
1759 if (index == -1 || devices[index].convert == NULL) { in v4l2_get_control()
1765 if (v4lconvert_vidioc_queryctrl(devices[index].convert, &qctrl)) in v4l2_get_control()
1773 if (v4lconvert_vidioc_g_ctrl(devices[index].convert, &ctrl)) in v4l2_get_control()