1 // Copyright 2024 The ChromiumOS Authors
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 use std::collections::BTreeMap;
6 use std::fmt::Debug;
7 use std::marker::PhantomData;
8 use std::os::fd::AsRawFd;
9 use std::path::PathBuf;
10 use std::sync::Arc;
11
12 use nix::sys::stat::fstat;
13 use thiserror::Error;
14 use v4l2r::bindings::v4l2_streamparm;
15 use v4l2r::controls::codec::VideoBitrate;
16 use v4l2r::controls::codec::VideoBitrateMode;
17 use v4l2r::controls::codec::VideoConstantQuality;
18 use v4l2r::controls::codec::VideoForceKeyFrame;
19 use v4l2r::controls::codec::VideoHeaderMode;
20 use v4l2r::controls::ExtControlTrait;
21 use v4l2r::controls::SafeExtControl;
22 use v4l2r::device::poller::DeviceEvent;
23 use v4l2r::device::poller::PollError;
24 use v4l2r::device::poller::Poller;
25 use v4l2r::device::queue::direction::Capture;
26 use v4l2r::device::queue::direction::Output;
27 use v4l2r::device::queue::dqbuf::DqBuffer;
28 use v4l2r::device::queue::qbuf::QBuffer;
29 use v4l2r::device::queue::BuffersAllocated;
30 use v4l2r::device::queue::CreateQueueError;
31 use v4l2r::device::queue::GetFreeBufferError;
32 use v4l2r::device::queue::GetFreeCaptureBuffer;
33 use v4l2r::device::queue::GetFreeOutputBuffer;
34 use v4l2r::device::queue::OutputQueueable;
35 use v4l2r::device::queue::OutputQueueableProvider;
36 use v4l2r::device::queue::Queue;
37 use v4l2r::device::queue::RequestBuffersError;
38 use v4l2r::device::AllocatedQueue;
39 use v4l2r::device::Device;
40 use v4l2r::device::DeviceConfig;
41 use v4l2r::device::Stream;
42 use v4l2r::device::TryDequeue;
43 use v4l2r::ioctl;
44 use v4l2r::ioctl::BufferFlags;
45 use v4l2r::ioctl::EncoderCommand;
46 use v4l2r::ioctl::StreamOnError;
47 use v4l2r::ioctl::V4l2BufferFromError;
48 use v4l2r::memory::BufferHandles;
49 use v4l2r::memory::DmaBufHandle;
50 use v4l2r::memory::MmapHandle;
51 use v4l2r::memory::PlaneHandle;
52 use v4l2r::memory::PrimitiveBufferHandles;
53 use v4l2r::memory::UserPtrHandle;
54 use v4l2r::nix::errno::Errno;
55 use v4l2r::nix::sys::time::TimeVal;
56 use v4l2r::Format;
57 use v4l2r::PixelFormat;
58 use v4l2r::QueueDirection;
59 use v4l2r::QueueType;
60
61 use crate::encoder::stateful::BackendOutput;
62 use crate::encoder::stateful::BackendRequest;
63 use crate::encoder::stateful::BackendRequestId;
64 use crate::encoder::stateful::StatefulBackendError;
65 use crate::encoder::stateful::StatefulBackendResult;
66 use crate::encoder::stateful::StatefulVideoEncoderBackend;
67 use crate::encoder::CodedBitstreamBuffer;
68 use crate::encoder::EncodeError;
69 use crate::encoder::FrameMetadata;
70 use crate::encoder::RateControl;
71 use crate::encoder::Tunings;
72 use crate::utils::DmabufFrame;
73 use crate::utils::UserPtrFrame;
74 use crate::video_frame::V4l2VideoFrame;
75 use crate::video_frame::VideoFrame;
76 use crate::Fourcc;
77 use crate::FrameLayout;
78 use crate::Resolution;
79
/// Errors for device features or configurations this backend does not support.
#[derive(Debug, Error)]
pub enum UnsupportedError {
    /// The requested visible size exceeds the coded size; upscaling is not supported.
    #[error("frame upscaling")]
    FrameUpscaling,

    /// A dequeued CAPTURE buffer did not carry the `TIMESTAMP_COPY` flag, which this
    /// backend relies on to match encoded output with its source frame.
    #[error("buffer lacking TIMESTAMP_COPY flag")]
    NoTimestampCopyFlag,

    /// The requested codec profile is not supported.
    #[error("unsupported profile")]
    Profile,
}
91
92 #[derive(Debug, Error)]
93 pub enum InitializationError {
94 #[error(transparent)]
95 Unsupported(UnsupportedError),
96
97 #[error("failed to create a CAPTURE queue: {0:?}")]
98 CaptureQueueCreate(CreateQueueError),
99
100 #[error("failed to create a OUTPUT queue: {0:?}")]
101 OutputQueueCreate(CreateQueueError),
102
103 #[error("failed to set format for CAPTURE: {0:?}")]
104 SetFormatCapture(ioctl::SFmtError),
105
106 #[error("failed to set format for OUTPUT: {0:?}")]
107 SetFormatOutput(ioctl::SFmtError),
108
109 #[error("failed to request CAPTURE buffers: {0:?}")]
110 RequestBufferCatpure(RequestBuffersError),
111
112 #[error("failed to request OUTPUT buffers: {0:?}")]
113 RequestBufferOutput(RequestBuffersError),
114
115 #[error("failed to stream on CAPTURE: {0:?}")]
116 StreamOnCapture(StreamOnError),
117
118 #[error("failed to stream on OUTPUT: {0:?}")]
119 StreamOnOutput(StreamOnError),
120
121 #[error(transparent)]
122 EncoderStart(#[from] ioctl::EncoderCmdError),
123
124 #[error(transparent)]
125 CreatePoller(v4l2r::nix::Error),
126
127 #[error(transparent)]
128 SetSelection(ioctl::SSelectionError),
129
130 #[error(transparent)]
131 Contro(#[from] ControlError),
132 }
133
134 #[derive(Debug, Error)]
135 pub struct ControlError {
136 which: &'static str,
137 error: Errno,
138 }
139
140 impl std::fmt::Display for ControlError {
fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result141 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
142 f.write_fmt(format_args!("failed to set '{}': {:?}", self.which, self.error))
143 }
144 }
145
/// Errors that can occur while the backend is running (queueing, dequeueing, draining).
#[derive(Debug, Error)]
pub enum BackendError {
    /// The device reported behavior the backend does not support.
    #[error(transparent)]
    Unsupported(UnsupportedError),

    #[error(transparent)]
    GetFreeBufferError(#[from] GetFreeBufferError),

    /// Queueing a CAPTURE (bitstream) buffer via [`CaptureBuffers::queue`] failed.
    #[error(transparent)]
    QueueBitstreamBuffer(anyhow::Error),

    /// Exporting a dequeued CAPTURE buffer via [`CaptureBuffers::export`] failed.
    #[error(transparent)]
    MapBitstreamBuffer(anyhow::Error),

    /// Queueing an input frame via [`OutputBufferHandle::queue`] failed.
    #[error(transparent)]
    QueueFrameHandleError(anyhow::Error),

    #[error(transparent)]
    DequeueBuffer(#[from] ioctl::DqBufError<V4l2BufferFromError>),

    // NOTE(review): this variant is also returned when a dequeued buffer's timestamp has
    // no matching in-flight request (see `dequeue_capture`), not only on mapping failures.
    #[error("failed to map capture buffer: {0:?}")]
    FailedToMapCapture(Timestamp),

    /// Sending an encoder STOP/START command during drain failed.
    #[error(transparent)]
    DrainCommand(#[from] ioctl::EncoderCmdError),

    #[error(transparent)]
    Poll(#[from] PollError),

    #[error(transparent)]
    GetFormat(#[from] ioctl::GFmtError),

    #[error(transparent)]
    Control(#[from] ControlError),
}

/// Convenience alias for results produced by the V4L2 backend.
pub type BackendResult<T> = std::result::Result<T, BackendError>;
183
184 impl From<BackendError> for StatefulBackendError {
from(value: BackendError) -> Self185 fn from(value: BackendError) -> Self {
186 StatefulBackendError::Other(anyhow::anyhow!(value))
187 }
188 }
189
190 impl From<BackendError> for EncodeError {
from(value: BackendError) -> Self191 fn from(value: BackendError) -> Self {
192 EncodeError::StatefulBackendError(value.into())
193 }
194 }
195
/// Frame timestamp helper struct
///
/// Holds the frame timestamp as a single microsecond count; used as the key to match
/// queued OUTPUT frames with their encoded CAPTURE buffers.
#[repr(transparent)]
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct Timestamp(pub u64);

impl From<v4l2r::bindings::timeval> for Timestamp {
    fn from(value: v4l2r::bindings::timeval) -> Self {
        // Collapse (seconds, microseconds) into total microseconds. Wrapping arithmetic
        // avoids a panic on overflow; negative results are clamped to zero before the
        // cast to u64.
        let timestamp = value.tv_sec.wrapping_mul(1_000_000);
        let timestamp = timestamp.wrapping_add(value.tv_usec);
        Timestamp(timestamp.max(0) as u64)
    }
}

impl From<&Timestamp> for TimeVal {
    fn from(value: &Timestamp) -> Self {
        // Split total microseconds back into (seconds, microseconds), clamped so the
        // casts to i64 cannot produce negative values.
        let tv_sec = (value.0 / 1_000_000).min(i64::MAX as u64);
        let tv_usec = (value.0 % 1_000_000).min(i64::MAX as u64);
        Self::new(tv_sec as i64, tv_usec as i64)
    }
}
216
/// Alias for the queueable OUTPUT buffer type produced by an allocated OUTPUT queue.
pub type OutputBuffer<'a, P> =
    <Queue<Output, BuffersAllocated<P>> as OutputQueueableProvider<'a, P>>::Queueable;

/// Encoder input frame handle, that can be queued to OUTPUT queue.
pub trait OutputBufferHandle {
    /// Buffer handle type used when queueing this frame to the OUTPUT queue.
    type PrimitiveBufferHandles: PrimitiveBufferHandles;

    /// Consumes the handle and queues it on the given free OUTPUT buffer.
    fn queue(self, buffer: OutputBuffer<'_, Self::PrimitiveBufferHandles>) -> anyhow::Result<()>;
}
226
/// Marker trait for frame types whose planes are always queued with their full plane
/// length as `bytesused` (enables the blanket [`OutputBufferHandle`] implementation).
pub trait AlwaysEntireBufferUsed {}

impl AlwaysEntireBufferUsed for UserPtrFrame {}

impl AlwaysEntireBufferUsed for DmabufFrame {}

impl<V: VideoFrame> AlwaysEntireBufferUsed for V4l2VideoFrame<V> {}
234
235 impl<T> OutputBufferHandle for T
236 where
237 T: PrimitiveBufferHandles + AlwaysEntireBufferUsed,
238 {
239 type PrimitiveBufferHandles = Self;
240
queue(self, buffer: OutputBuffer<'_, Self>) -> anyhow::Result<()>241 fn queue(self, buffer: OutputBuffer<'_, Self>) -> anyhow::Result<()> {
242 let mut bytes_used = Vec::new();
243 for i in 0..self.len() {
244 let mut plane = v4l2r::bindings::v4l2_plane::default();
245 self.fill_v4l2_plane(i, &mut plane);
246 bytes_used.push(plane.length as usize);
247 }
248
249 log::trace!("Queueing buffer bytes_used={bytes_used:?}");
250 buffer.queue_with_handles(self, &bytes_used).unwrap();
251 Ok(())
252 }
253 }
254
impl BufferHandles for UserPtrFrame {
    type SupportedMemoryType = v4l2r::memory::MemoryType;

    /// Fills `plane` with the USERPTR address, data offset and length for plane `index`.
    fn fill_v4l2_plane(&self, index: usize, plane: &mut v4l2r::bindings::v4l2_plane) {
        let plane_layout = &self.layout.planes[index];

        plane.m.userptr = self.buffers[plane_layout.buffer_index] as _;
        plane.data_offset = plane_layout.offset as _;
        // NOTE(review): the length is set to the size of the entire memory layout, not a
        // per-plane size — confirm this is intended for multi-planar layouts.
        plane.length = self.mem_layout.size() as _;
    }

    /// Number of planes described by the frame's layout.
    fn len(&self) -> usize {
        self.layout.planes.len()
    }
}
270
impl PrimitiveBufferHandles for UserPtrFrame {
    // The zero-sized array type only selects USERPTR as the handle kind; actual plane
    // addresses are written directly in `fill_v4l2_plane`.
    type HandleType = UserPtrHandle<[u8; 0]>;
    const MEMORY_TYPE: Self::SupportedMemoryType = v4l2r::memory::MemoryType::UserPtr;
}

// SAFETY: Access to the frame is read only
unsafe impl Send for UserPtrFrame {}

// SAFETY: Access to the frame is read only
unsafe impl Sync for UserPtrFrame {}
281
impl BufferHandles for DmabufFrame {
    type SupportedMemoryType = v4l2r::memory::MemoryType;

    /// Fills `plane` with the DMABUF fd, data offset and length for plane `index`.
    fn fill_v4l2_plane(&self, index: usize, plane: &mut v4l2r::bindings::v4l2_plane) {
        let plane_layout = &self.layout.planes[index];
        let fd = &self.fds[plane_layout.buffer_index];

        plane.m.fd = fd.as_raw_fd();
        plane.data_offset = plane_layout.offset as u32;
        // The plane length is taken from the dmabuf's size via fstat; on failure fall
        // back to 0 (warned below) and let the driver decide.
        plane.length = fstat(fd.as_raw_fd()).map(|stat| stat.st_size as u32).unwrap_or(0);

        if plane.length == 0 {
            log::warn!("Failed to fstat proper plane size index={index}");
        }
    }

    /// Number of planes described by the frame's layout.
    fn len(&self) -> usize {
        self.layout.planes.len()
    }
}

impl PrimitiveBufferHandles for DmabufFrame {
    type HandleType = DmaBufHandle<std::fs::File>;
    const MEMORY_TYPE: Self::SupportedMemoryType = v4l2r::memory::MemoryType::DmaBuf;
}
307
/// Encoder's codec-specific trait enabling setting codec specific tunings
pub trait EncoderCodec {
    /// Sets [`Tunings`] for the [`v4l2r::device::Device`]
    fn apply_tunings(device: &Device, tunings: &Tunings) -> Result<(), ControlError>;
}
313
/// Trait responsible for CAPTURE buffers of the encoder's [`V4L2Backend`]. Enables custom logic
/// of CAPTURE specific for device/client use case. Useful especially when MMAP buffer type is not
/// supported for CAPTURE queue. In such scenario the client may choose to implement this trait
/// and use own logic for allocating DMABUF or USERPTR.
pub trait CaptureBuffers {
    /// [`PlaneHandle`] that is going to be used for CAPTURE buffers.
    type PlaneHandle: PlaneHandle;

    /// Queues the buffer with [`CaptureBuffers::PlaneHandle`]s and returns true,
    /// otherwise if the buffer may not be queued returns false.
    fn queue(
        &mut self,
        buffer: QBuffer<
            Capture,
            Vec<Self::PlaneHandle>,
            Vec<Self::PlaneHandle>,
            &Queue<Capture, BuffersAllocated<Vec<Self::PlaneHandle>>>,
        >,
    ) -> anyhow::Result<bool>;

    /// Maps the buffer and returns its contents in form of [`Vec<u8>`]
    fn export(&self, buffer: DqBuffer<Capture, Vec<Self::PlaneHandle>>) -> anyhow::Result<Vec<u8>>;
}
337
338 /// [`CaptureBuffers`] implementation for MMAP memory type
339 pub struct MmapingCapture;
340
341 impl CaptureBuffers for MmapingCapture {
342 type PlaneHandle = MmapHandle;
343
queue( &mut self, buffer: QBuffer< Capture, Vec<Self::PlaneHandle>, Vec<Self::PlaneHandle>, &Queue<Capture, BuffersAllocated<Vec<Self::PlaneHandle>>>, >, ) -> anyhow::Result<bool>344 fn queue(
345 &mut self,
346 buffer: QBuffer<
347 Capture,
348 Vec<Self::PlaneHandle>,
349 Vec<Self::PlaneHandle>,
350 &Queue<Capture, BuffersAllocated<Vec<Self::PlaneHandle>>>,
351 >,
352 ) -> anyhow::Result<bool> {
353 buffer.queue()?;
354 Ok(true)
355 }
356
export(&self, buffer: DqBuffer<Capture, Vec<Self::PlaneHandle>>) -> anyhow::Result<Vec<u8>>357 fn export(&self, buffer: DqBuffer<Capture, Vec<Self::PlaneHandle>>) -> anyhow::Result<Vec<u8>> {
358 let timestamp = Timestamp::from(buffer.data.timestamp());
359 let Some(mapping) = buffer.get_plane_mapping(0) else {
360 log::error!("CAPTURE: Failed to map buffer timestamp={timestamp:?}");
361 return Err(BackendError::FailedToMapCapture(timestamp).into());
362 };
363
364 let bytesused = *buffer.data.get_first_plane().bytesused as usize;
365
366 Ok(Vec::from(&mapping.data[..bytesused]))
367 }
368 }
369
/// V4L2 stateful encoder implementation
///
/// Generic over the input frame handle type, the CAPTURE buffer strategy and a codec
/// marker type; the `Self: EncoderCodec` bound supplies codec-specific tunings.
pub struct V4L2Backend<Handle, CaptureBufferz, Codec>
where
    Handle: OutputBufferHandle,
    CaptureBufferz: CaptureBuffers,
    Self: EncoderCodec,
{
    /// V4L2 encoder device
    device: Arc<Device>,

    /// OUTPUT_MPLANE V4L2 queue
    output_queue: Queue<Output, BuffersAllocated<Handle::PrimitiveBufferHandles>>,

    /// CAPTURE_MPLANE V4L2 queue
    capture_queue: Queue<Capture, BuffersAllocated<Vec<CaptureBufferz::PlaneHandle>>>,

    /// [`CaptureBuffers`] implementation
    capture_buffers: CaptureBufferz,

    /// Buffers that are currently processed by the encoder device, keyed by timestamp
    currently_processed: BTreeMap<Timestamp, (BackendRequestId, FrameMetadata)>,

    /// Currently set [`Tunings`], used to detect tunings changes
    current_tunings: Tunings,

    /// Device poller for implementing [`StatefulVideoEncoderBackend::sync`]
    poller: Poller,

    _phantom: PhantomData<(Handle, Codec)>,
}
400
401 impl<Handle, CaptureBufferz, Codec> V4L2Backend<Handle, CaptureBufferz, Codec>
402 where
403 Handle: OutputBufferHandle,
404 CaptureBufferz: CaptureBuffers,
405 Self: EncoderCodec,
406 {
    /// Checks if the device has the given control and sets it to desired value if it's different
    ///
    /// Controls the device does not support (ioctl returns EINVAL) are silently skipped;
    /// other ioctl failures are reported as [`ControlError`].
    pub(crate) fn apply_ctrl<C>(
        device: &Device,
        name: &'static str,
        value: C,
    ) -> Result<(), ControlError>
    where
        C: ExtControlTrait<PAYLOAD = i32> + Into<i32>,
    {
        // Read the current value first to detect unsupported controls.
        let mut current = SafeExtControl::<C>::from_value(0);

        log::trace!("Trying to set control {name}");
        match ioctl::g_ext_ctrls(device, ioctl::CtrlWhich::Current, &mut current) {
            Ok(()) => (),
            // EINVAL means the control does not exist on this device; treated as success.
            Err(ioctl::ExtControlError {
                error_idx: _,
                error: ioctl::ExtControlErrorType::IoctlError(Errno::EINVAL),
            }) => {
                log::debug!("Setting/getting {name} control is not supported for this device");
                return Ok(());
            }
            Err(ioctl::ExtControlError {
                error_idx: _,
                error: ioctl::ExtControlErrorType::IoctlError(error),
            }) => {
                log::error!("Getting {name} control returned {:?}", error.desc());
                return Err(ControlError { which: name, error });
            }
        };

        let desired: i32 = value.into();
        // NOTE(review): this only logs when the value already matches and still issues
        // the set below — confirm whether an early `return Ok(())` was intended.
        if current.value() == desired {
            log::debug!("Control {name} already has desired value");
        }

        let mut value = SafeExtControl::<C>::from_value(desired);

        match ioctl::s_ext_ctrls(device, ioctl::CtrlWhich::Current, &mut value) {
            Ok(()) => (),
            Err(ioctl::ExtControlError {
                error_idx: _,
                error: ioctl::ExtControlErrorType::IoctlError(Errno::EINVAL),
            }) => {
                log::debug!("Setting/getting {name} control is not supported for this device");
                return Ok(());
            }
            Err(ioctl::ExtControlError {
                error_idx: _,
                error: ioctl::ExtControlErrorType::IoctlError(error),
            }) => return Err(ControlError { which: name, error }),
        };

        // Read back what the driver actually accepted.
        let value = value.value();

        if value != desired {
            // TODO: raise error?
            log::warn!("Failed to set desired {name} (to: {desired}, is: {value})",);
        } else {
            log::trace!("Control {name} set correctly to {value}");
        }

        Ok(())
    }
470
    /// Sets the frame rate using S_PARM ioctl for the queue type on the device.
    ///
    /// A `framerate` of 0 is encoded as a 0/1 time-per-frame — NOTE(review): presumably
    /// meaning "unspecified"; confirm the driver interpretation.
    pub(crate) fn apply_parm(device: &Device, queue_type: QueueType, framerate: u32) {
        let mut parm = v4l2_streamparm { type_: queue_type as u32, ..Default::default() };

        // Time per frame is the inverse of the frame rate (1/fps).
        let (num, denum) = if framerate != 0 { (1, framerate) } else { (0, 1) };

        // `parm.parm` is a union; fill the member matching the queue direction.
        if matches!(queue_type, v4l2r::QueueType::VideoOutputMplane) {
            parm.parm.output.capability = 0;
            parm.parm.output.outputmode = 0;
            parm.parm.output.timeperframe.numerator = num;
            parm.parm.output.timeperframe.denominator = denum;
        } else {
            parm.parm.capture.capability = 0;
            parm.parm.capture.timeperframe.numerator = num;
            parm.parm.capture.timeperframe.denominator = denum;
        }

        // Failures are only logged; the frame rate is not treated as fatal here.
        match v4l2r::ioctl::s_parm::<_, v4l2_streamparm>(device, parm) {
            Ok(parm) => match QueueType::n(parm.type_).as_ref().map(QueueType::direction) {
                // SAFETY: The type is set to output
                Some(QueueDirection::Output) => unsafe {
                    log::debug!(
                        "OUTPUT: Time per frame set to {}/{}",
                        parm.parm.output.timeperframe.numerator,
                        parm.parm.output.timeperframe.denominator,
                    );
                },
                // SAFETY: The type is set to capture
                Some(QueueDirection::Capture) => unsafe {
                    log::debug!(
                        "CAPTURE: Time per frame set to {}/{}",
                        parm.parm.capture.timeperframe.numerator,
                        parm.parm.capture.timeperframe.denominator,
                    );
                },
                _ => {}
            },
            Err(errno) => log::warn!("{:?}: Failed to set parm: {errno:?}", queue_type.direction()),
        }
    }
511
    /// Sets the rate mode and bitrate params on the device.
    fn apply_rate_control(
        device: &Device,
        framerate: u32,
        rate_control: &RateControl,
    ) -> Result<(), ControlError> {
        Self::apply_parm(device, QueueType::VideoOutputMplane, framerate);
        // NOTE(review): the CAPTURE side frame rate is hard-coded to 1000 — confirm this
        // is intentional (e.g. a driver quirk) rather than a placeholder.
        Self::apply_parm(device, QueueType::VideoCaptureMplane, 1000);

        // Map the requested rate control mode onto the V4L2 bitrate mode control.
        Self::apply_ctrl(
            device,
            "bitrate mode",
            match rate_control {
                RateControl::ConstantBitrate(_) => VideoBitrateMode::ConstantBitrate,
                RateControl::ConstantQuality(_) => VideoBitrateMode::ConstantQuality,
            },
        )?;

        // Apply a target bitrate when the rate control mode provides one.
        if let Some(bitrate) = rate_control.bitrate_target() {
            Self::apply_ctrl(device, "bitrate", VideoBitrate(bitrate as i32))?;
        }

        if let RateControl::ConstantQuality(qp) = rate_control {
            Self::apply_ctrl(device, "constant quality", VideoConstantQuality(*qp as i32))?;
        }

        Ok(())
    }
540
    /// Sets the crop.
    ///
    /// Applies a crop rectangle of `visible_size` at origin (0, 0) on the OUTPUT queue;
    /// if the driver adjusts the rectangle, that is only logged, not treated as an error.
    pub fn apply_selection(
        device: &Device,
        visible_size: Resolution,
    ) -> Result<(), ioctl::SSelectionError> {
        let rect =
            v4l2r::Rect { left: 0, top: 0, width: visible_size.width, height: visible_size.height };

        log::trace!(
            "Trying to apply to selection to (left: {}, top: {}, width: {}, height: {})",
            rect.left,
            rect.top,
            rect.width,
            rect.height
        );

        // The driver may adjust the rectangle; the returned value is what was applied.
        let rect = ioctl::s_selection::<_, v4l2r::Rect>(
            device,
            ioctl::SelectionType::Output,
            ioctl::SelectionTarget::Crop,
            rect,
            ioctl::SelectionFlags::empty(),
        )?;

        if rect.left == 0
            && rect.top == 0
            && rect.width == visible_size.width
            && rect.height == visible_size.height
        {
            log::trace!("Selection set successfully");
        } else {
            log::warn!(
                "Driver set selection to (left: {}, top: {}, width: {}, height: {})",
                rect.left,
                rect.top,
                rect.width,
                rect.height
            );
        }

        Ok(())
    }
583
584 /// Creates and sets up the backend instance using the given configuration
create( device: Arc<Device>, capture_buffers: CaptureBufferz, fourcc: Fourcc, coded_size: Resolution, visible_size: Resolution, capture_pixfmt: v4l2r::PixelFormat, tunings: Tunings, ) -> Result<Self, InitializationError>585 pub fn create(
586 device: Arc<Device>,
587 capture_buffers: CaptureBufferz,
588 fourcc: Fourcc,
589 coded_size: Resolution,
590 visible_size: Resolution,
591 capture_pixfmt: v4l2r::PixelFormat,
592 tunings: Tunings,
593 ) -> Result<Self, InitializationError> {
594 let mut capture_queue = Queue::get_capture_mplane_queue(device.clone())
595 .map_err(InitializationError::CaptureQueueCreate)?;
596
597 let mut output_queue = Queue::get_output_mplane_queue(device.clone())
598 .map_err(InitializationError::OutputQueueCreate)?;
599
600 // Coded buffer size multiplier. It's inteded to give head room for the encoder.
601 const CODED_SIZE_MUL: u32 = 2;
602
603 // Default coded buffer size if bitrate control is not used.
604 const DEFAULT_CODED_SIZE: u32 = 1_500_000;
605
606 let coded_buffer_size = tunings
607 .rate_control
608 .bitrate_target()
609 .map(|e| e as u32 * CODED_SIZE_MUL)
610 .unwrap_or(DEFAULT_CODED_SIZE);
611
612 let capture_format = Format {
613 width: coded_size.width,
614 height: coded_size.height,
615 pixelformat: capture_pixfmt,
616 plane_fmt: vec![v4l2r::PlaneLayout { sizeimage: coded_buffer_size, bytesperline: 0 }],
617 };
618
619 let capture_format = capture_queue
620 .set_format(capture_format)
621 .map_err(InitializationError::SetFormatCapture)?;
622
623 // TODO: Map single planar formats to mutli planar format if single planar is not
624 // supported.
625 let output_pixfmt: PixelFormat = fourcc.0.into();
626
627 let output_format = Format {
628 width: coded_size.width,
629 height: coded_size.height,
630 pixelformat: output_pixfmt,
631 // Let the driver pick
632 plane_fmt: vec![],
633 };
634
635 let output_format =
636 output_queue.set_format(output_format).map_err(InitializationError::SetFormatOutput)?;
637
638 log::debug!("CAPTURE queue format = {capture_format:#?}");
639 log::debug!("OUTPUT queue format = {output_format:#?}");
640
641 Self::apply_rate_control(&device, tunings.framerate, &tunings.rate_control)?;
642 Self::apply_tunings(&device, &tunings)?;
643
644 Self::apply_ctrl(&device, "header mode", VideoHeaderMode::JoinedWith1stFrame)?;
645
646 if visible_size.width > output_format.width || visible_size.height > output_format.height {
647 return Err(InitializationError::Unsupported(UnsupportedError::FrameUpscaling));
648 } else if visible_size.width != output_format.width
649 || visible_size.height != output_format.height
650 {
651 log::info!("The frame visible size is not aligned to coded size, applying selection");
652 if let Err(err) = Self::apply_selection(&device, visible_size) {
653 log::error!("Failed to set selection: {err:?}");
654 }
655 }
656
657 log::debug!("CAPTURE: Requesting buffers");
658 let capture_queue = capture_queue
659 .request_buffers::<_>(16)
660 .map_err(InitializationError::RequestBufferOutput)?;
661
662 log::debug!("OUTPUT: Requesting buffers");
663 let output_queue = output_queue
664 .request_buffers::<Handle::PrimitiveBufferHandles>(16)
665 .map_err(InitializationError::RequestBufferOutput)?;
666
667 log::debug!("CAPTURE: Invoking stream on");
668 capture_queue.stream_on().map_err(InitializationError::StreamOnCapture)?;
669
670 log::debug!("OUTPUT: Invoking stream on");
671 output_queue.stream_on().map_err(InitializationError::StreamOnOutput)?;
672
673 log::debug!("Sending start command to encoder");
674 ioctl::encoder_cmd::<_, ()>(&device, &EncoderCommand::Start)
675 .map_err(InitializationError::EncoderStart)?;
676
677 let mut poller = Poller::new(device.clone()).map_err(InitializationError::CreatePoller)?;
678
679 poller
680 .enable_event(DeviceEvent::CaptureReady)
681 .map_err(InitializationError::CreatePoller)?;
682
683 Ok(Self {
684 device,
685 output_queue,
686 capture_queue,
687 capture_buffers,
688 currently_processed: Default::default(),
689 current_tunings: tunings,
690 poller,
691 _phantom: Default::default(),
692 })
693 }
694
output_format<T: TryFrom<v4l2r::bindings::v4l2_format>>(&self) -> BackendResult<T>695 pub fn output_format<T: TryFrom<v4l2r::bindings::v4l2_format>>(&self) -> BackendResult<T> {
696 Ok(self.output_queue.get_format()?)
697 }
698
poll_device(&mut self) -> BackendResult<()>699 fn poll_device(&mut self) -> BackendResult<()> {
700 self.poller.poll(None)?;
701
702 Ok(())
703 }
704
705 /// Attempts to queue all free CAPTURE buffer for filling with encoded bitstream
queue_capture(&mut self) -> BackendResult<()>706 fn queue_capture(&mut self) -> BackendResult<()> {
707 while self.capture_queue.num_free_buffers() != 0 {
708 let buffer = self.capture_queue.try_get_free_buffer()?;
709 let buffer_index = buffer.index();
710
711 let queued =
712 self.capture_buffers.queue(buffer).map_err(BackendError::QueueBitstreamBuffer)?;
713
714 if !queued {
715 log::warn!("CAPTURE: Capture buffer was queued. Will retry later");
716 break;
717 }
718
719 log::trace!("CAPTURE: Queued new buffer index={}", buffer_index);
720 }
721
722 Ok(())
723 }
724
    /// Tries to dequeue a CAPTURE buffer and transforms the buffer contents into [`BackendOutput`]
    ///
    /// Returns `Ok(None)` when nothing is queued, the device reports NotReady/EOS, or the
    /// dequeued buffer carries no data.
    fn dequeue_capture(&mut self) -> BackendResult<Option<BackendOutput>> {
        if self.capture_queue.num_queued_buffers() == 0 {
            // Don't dequeue if there is nothing to dequeue
            log::warn!("Polled while no buffer was queued on CAPTURE queue");
            return Ok(None);
        }

        let buffer = match self.capture_queue.try_dequeue() {
            Ok(buffer) => buffer,
            // NotReady/EOS are expected conditions, not errors.
            Err(ioctl::DqBufError::IoctlError(
                err @ ioctl::DqBufIoctlError::NotReady | err @ ioctl::DqBufIoctlError::Eos,
            )) => {
                log::trace!("Dequeue result: {err:?}");
                return Ok(None);
            }
            Err(err) => return Err(err.into()),
        };

        let timestamp = Timestamp::from(buffer.data.timestamp());
        log::debug!(
            "CAPTRUE: Dequeued buffer index={} timestamp={:?} is_last={} bytesused={}, flags={:?}",
            buffer.data.index(),
            timestamp,
            buffer.data.is_last(),
            *buffer.data.get_first_plane().bytesused,
            buffer.data.flags(),
        );

        if *buffer.data.get_first_plane().bytesused == 0 {
            // Don't warn about empty lasty buffer
            if !buffer.data.is_last() {
                log::warn!("CAPTURE: Dequeued empty buffer. Skipping it.");
            }
            return Ok(None);
        }

        // The timestamp is the key matching encoded output to its source frame, so the
        // driver must have copied it from the OUTPUT buffer (TIMESTAMP_COPY).
        if !buffer.data.flags().intersects(BufferFlags::TIMESTAMP_COPY) {
            log::error!("CAPTURE: Buffer does not have TIMESTAMP_COPY flag");
            return Err(BackendError::Unsupported(UnsupportedError::NoTimestampCopyFlag));
        }

        // NOTE(review): `FailedToMapCapture` is reused here for a timestamp-lookup miss;
        // a dedicated error variant would be clearer.
        let Some((request_id, meta)) = self.currently_processed.remove(&timestamp) else {
            log::error!("CAPTURE: Failed to find buffer timestamp={timestamp:?}");
            return Err(BackendError::FailedToMapCapture(timestamp));
        };

        let bitstream =
            self.capture_buffers.export(buffer).map_err(BackendError::MapBitstreamBuffer)?;

        let output =
            BackendOutput { request_id, buffer: CodedBitstreamBuffer::new(meta, bitstream) };

        Ok(Some(output))
    }
780
    /// Dequeues all processed OUTPUT buffers and drops them
    fn drain_output_queue(&mut self) -> BackendResult<()> {
        // Don't dequeue if there is nothing to dequeue
        while self.output_queue.num_queued_buffers() != 0 {
            match self.output_queue.try_dequeue() {
                Ok(buffer) => {
                    log::debug!(
                        "OUTPUT: Dequeued buffer index={} timestamp={:?}",
                        buffer.data.index(),
                        Timestamp::from(buffer.data.timestamp())
                    );
                    // Drop the finished buffer
                    drop(buffer);
                }
                // Nothing more to reclaim right now.
                Err(ioctl::DqBufError::IoctlError(ioctl::DqBufIoctlError::NotReady)) => break,
                // NOTE(review): EOS is ignored and the loop retries; confirm this cannot
                // spin if the driver keeps reporting EOS while buffers remain queued.
                Err(ioctl::DqBufError::IoctlError(ioctl::DqBufIoctlError::Eos)) => {}
                Err(err) => return Err(err.into()),
            }
        }

        Ok(())
    }
803
    /// Takes the [`BackendRequest`] and queues it to OUTPUT queue
    fn handle_request(&mut self, request: BackendRequest<Handle>) -> BackendResult<()> {
        // Re-apply rate control and codec tunings only when they changed.
        if self.current_tunings != request.tunings {
            log::debug!("Changing tunings to {:#?}", request.tunings);
            Self::apply_rate_control(
                &self.device,
                request.tunings.framerate,
                &request.tunings.rate_control,
            )?;
            Self::apply_tunings(&self.device, &request.tunings)?;
            self.current_tunings = request.tunings;
        }

        let buffer = self.output_queue.try_get_free_buffer()?;

        // The timestamp ties the queued frame to the encoded bitstream dequeued later.
        let timestamp = Timestamp(request.meta.timestamp);
        let buffer = buffer.set_timestamp(TimeVal::from(&timestamp));

        let index = buffer.index();

        // Request a keyframe for this frame before queueing it.
        if request.meta.force_keyframe {
            let mut force = SafeExtControl::<VideoForceKeyFrame>::from_value(1);
            ioctl::s_ext_ctrls(&self.device, ioctl::CtrlWhich::Current, &mut force).map_err(
                |error| ControlError { which: "force keyframe", error: error.error.into() },
            )?;
        }

        request.handle.queue(buffer).map_err(BackendError::QueueFrameHandleError)?;

        log::debug!("OUTPUT: Queued buffer index={} timestamp={:?}", index, timestamp);

        // TODO: Use RequestId for this?
        self.currently_processed.insert(timestamp, (request.request_id, request.meta));

        Ok(())
    }
840
    /// Performs the essential processing ie. queues and dequeues the buffers from CAPTURE and
    /// OUTPUT queue.
    fn handle_buffers(&mut self) -> BackendResult<()> {
        // Keep CAPTURE supplied with empty buffers and reclaim finished OUTPUT buffers.
        self.queue_capture()?;
        self.drain_output_queue()?;

        log::debug!(
            "Queue status: OUTPUT(free: {}, queued: {}) CAPTURE(free: {}, queued: {})",
            self.output_queue.num_free_buffers(),
            self.output_queue.num_queued_buffers(),
            self.capture_queue.num_free_buffers(),
            self.capture_queue.num_queued_buffers(),
        );

        Ok(())
    }
857 }
858
859 impl<Handle, CaptureBufferz, Codec> StatefulVideoEncoderBackend<Handle>
860 for V4L2Backend<Handle, CaptureBufferz, Codec>
861 where
862 Handle: OutputBufferHandle,
863 CaptureBufferz: CaptureBuffers,
864 Self: EncoderCodec,
865 {
consume_request( &mut self, request: &mut Option<BackendRequest<Handle>>, ) -> StatefulBackendResult<()>866 fn consume_request(
867 &mut self,
868 request: &mut Option<BackendRequest<Handle>>,
869 ) -> StatefulBackendResult<()> {
870 self.handle_buffers()?;
871
872 if self.output_queue.num_free_buffers() == 0 {
873 return Ok(());
874 }
875
876 let Some(request) = request.take() else {
877 log::error!("StatefulEncoder passed an empty request");
878 return Err(StatefulBackendError::InvalidInternalState);
879 };
880
881 self.handle_request(request)?;
882
883 Ok(())
884 }
885
sync(&mut self) -> StatefulBackendResult<()>886 fn sync(&mut self) -> StatefulBackendResult<()> {
887 self.poll_device()?;
888 Ok(())
889 }
890
poll(&mut self) -> StatefulBackendResult<Option<BackendOutput>>891 fn poll(&mut self) -> StatefulBackendResult<Option<BackendOutput>> {
892 Ok(self.dequeue_capture()?)
893 }
894
drain(&mut self) -> StatefulBackendResult<Vec<BackendOutput>>895 fn drain(&mut self) -> StatefulBackendResult<Vec<BackendOutput>> {
896 if self.currently_processed.is_empty() {
897 log::info!("Skipping drain sequence, nothing to drain.");
898 return Ok(Vec::new());
899 }
900
901 log::debug!(
902 "Sending stop command to encoder. Currently processing count: {}",
903 self.currently_processed.len()
904 );
905
906 ioctl::encoder_cmd::<_, ()>(&self.device, &EncoderCommand::Stop(false))
907 .map_err(BackendError::DrainCommand)?;
908
909 let mut drained_output = Vec::new();
910 while !self.currently_processed.is_empty() {
911 self.poll_device()?;
912 self.handle_buffers()?;
913
914 if let Some(output) = self.dequeue_capture()? {
915 drained_output.push(output);
916 }
917 }
918
919 // Dequeue is_last=true buffer
920 if let Some(output) = self.dequeue_capture()? {
921 drained_output.push(output);
922 }
923
924 log::debug!("Sending start command to encoder");
925 ioctl::encoder_cmd::<_, ()>(&self.device, &EncoderCommand::Start)
926 .map_err(BackendError::DrainCommand)?;
927
928 log::debug!("Drain finished");
929 Ok(drained_output)
930 }
931 }
932
find_device_with_capture(pixfmt: v4l2r::PixelFormat) -> Option<PathBuf>933 pub fn find_device_with_capture(pixfmt: v4l2r::PixelFormat) -> Option<PathBuf> {
934 const MAX_DEVICE_NO: usize = 128;
935 for dev_no in 0..MAX_DEVICE_NO {
936 let device_path = PathBuf::from(format!("/dev/video{dev_no}"));
937 let Ok(device) = Device::open(&device_path, DeviceConfig::new()) else {
938 continue;
939 };
940
941 let device = Arc::new(device);
942
943 let Ok(queue) = Queue::get_capture_mplane_queue(device) else {
944 continue;
945 };
946
947 for fmt in queue.format_iter() {
948 if fmt.pixelformat == pixfmt {
949 return Some(device_path);
950 }
951 }
952 }
953
954 None
955 }
956
v4l2_format_to_frame_layout(format: &v4l2r::Format) -> FrameLayout957 pub fn v4l2_format_to_frame_layout(format: &v4l2r::Format) -> FrameLayout {
958 let mut layout = FrameLayout {
959 format: (Fourcc::from(format.pixelformat.to_u32()), 0),
960 size: Resolution { width: format.width, height: format.height },
961 planes: format
962 .plane_fmt
963 .iter()
964 .map(|plane| crate::PlaneLayout {
965 buffer_index: 0,
966 offset: 0,
967 stride: plane.bytesperline as usize,
968 })
969 .collect(),
970 };
971
972 // Patch FrameLayout
973 match &format.pixelformat.to_fourcc() {
974 b"NM12" if layout.planes.len() == 2 => {
975 layout.planes[1].buffer_index = 1;
976 }
977 b"NV12" if layout.planes.len() == 1 => {}
978 _ => panic!("Unknown format"),
979 };
980
981 layout
982 }
983
984 #[cfg(test)]
985 pub(crate) mod tests {
986 use std::os::fd::AsFd;
987 use std::os::fd::BorrowedFd;
988 use std::os::fd::OwnedFd;
989 use std::path::Path;
990 use std::path::PathBuf;
991
992 use anyhow::Context;
993 use v4l2r::device::queue::CaptureQueueable;
994 use v4l2r::device::DeviceConfig;
995 use v4l2r::memory::DmaBufSource;
996
997 use super::*;
998
999 use crate::backend::v4l2::encoder::CaptureBuffers;
1000 use crate::encoder::simple_encode_loop;
1001 use crate::encoder::stateful::StatefulEncoder;
1002 use crate::encoder::tests::fill_test_frame_nm12;
1003 use crate::encoder::tests::fill_test_frame_nv12;
1004 use crate::encoder::tests::get_test_frame_t;
1005
1006 /// A simple wrapper for a GBM device node.
1007 pub struct GbmDevice(std::fs::File);
1008
1009 impl AsFd for GbmDevice {
as_fd(&self) -> BorrowedFd<'_>1010 fn as_fd(&self) -> BorrowedFd<'_> {
1011 self.0.as_fd()
1012 }
1013 }
1014
1015 impl drm::Device for GbmDevice {}
1016
1017 /// Simple helper methods for opening a `Card`.
1018 impl GbmDevice {
open<P: AsRef<Path>>(path: P) -> std::io::Result<Self>1019 pub fn open<P: AsRef<Path>>(path: P) -> std::io::Result<Self> {
1020 std::fs::OpenOptions::new().read(true).write(true).open(path).map(GbmDevice)
1021 }
1022 }
1023
    /// An encoded-bitstream CAPTURE buffer backed by a GBM buffer object
    /// exported as a dmabuf.
    pub struct BoCaptureBuffer {
        // Underlying buffer object; also used to CPU-map the buffer when the
        // bitstream is exported.
        bo: gbm::BufferObject<()>,
        // Dmabuf fd exported from plane 0 of `bo`.
        fd: OwnedFd,
        // Buffer size in bytes, reported through `DmaBufSource::len`.
        len: u64,
    }

    impl AsRawFd for BoCaptureBuffer {
        fn as_raw_fd(&self) -> std::os::unix::prelude::RawFd {
            self.fd.as_raw_fd()
        }
    }

    impl AsFd for BoCaptureBuffer {
        fn as_fd(&self) -> BorrowedFd<'_> {
            self.fd.as_fd()
        }
    }

    impl DmaBufSource for BoCaptureBuffer {
        fn len(&self) -> u64 {
            self.len
        }
    }

    impl std::fmt::Debug for BoCaptureBuffer {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            f.debug_struct("BoCaptureBuffer").finish()
        }
    }

    // SAFETY: NOTE(review): `gbm::BufferObject` is presumably not `Sync`/`Send`
    // on its own; these impls assume a buffer is only ever accessed from one
    // thread at a time — TODO confirm against the gbm crate.
    unsafe impl Sync for BoCaptureBuffer {}

    // SAFETY: see the note on the `Sync` impl above.
    unsafe impl Send for BoCaptureBuffer {}
1057
    // SAFETY: copied from gbm.h
    // Raw flag bit: buffer contents will be read by the CPU often. Not exposed
    // by the gbm crate, hence the hard-coded bit position.
    pub const GBM_BO_USE_SW_READ_OFTEN: gbm::BufferObjectFlags =
        unsafe { gbm::BufferObjectFlags::from_bits_truncate(1 << 9) };

    // SAFETY: copied from gbm.h
    // Raw flag bit: buffer will be used by a hardware video encoder. Not
    // exposed by the gbm crate, hence the hard-coded bit position.
    pub const GBM_BO_USE_HW_VIDEO_ENCODER: gbm::BufferObjectFlags =
        unsafe { gbm::BufferObjectFlags::from_bits_truncate(1 << 14) };

    /// [`CaptureBuffers`] implementation that allocates a fresh GBM buffer
    /// object for every CAPTURE buffer it queues.
    pub struct BoPoolAllocator {
        // GBM device the buffer objects are allocated from.
        gbm: Arc<gbm::Device<GbmDevice>>,
    }

    impl BoPoolAllocator {
        pub fn new(gbm: Arc<gbm::Device<GbmDevice>>) -> Self {
            Self { gbm }
        }
    }
1075
    impl CaptureBuffers for BoPoolAllocator {
        type PlaneHandle = DmaBufHandle<BoCaptureBuffer>;

        /// Allocates a fresh 2 MiB GBM buffer object and queues it on the
        /// CAPTURE queue as a single dmabuf plane.
        fn queue(
            &mut self,
            buffer: QBuffer<
                Capture,
                Vec<Self::PlaneHandle>,
                Vec<Self::PlaneHandle>,
                &Queue<Capture, BuffersAllocated<Vec<Self::PlaneHandle>>>,
            >,
        ) -> anyhow::Result<bool> {
            // Fixed 2 MiB bitstream buffer — assumed large enough for one
            // encoded frame; TODO confirm for high resolutions/bitrates.
            let len = 2 * 1024 * 1024;

            log::trace!("Allocating new bo");
            // A len x 1 R8 buffer serves as an opaque byte buffer for the
            // encoded bitstream.
            let bo = self
                .gbm
                .create_buffer_object::<()>(
                    len as u32,
                    1,
                    gbm::Format::R8,
                    GBM_BO_USE_HW_VIDEO_ENCODER | GBM_BO_USE_SW_READ_OFTEN,
                )
                .context("gbm_bo_create")?;

            let fd = bo.fd_for_plane(0).unwrap();
            let handle = BoCaptureBuffer { bo, fd, len };

            buffer
                .queue_with_handles(vec![DmaBufHandle(handle)])
                .context("queue bo as dmabuf handle")?;

            Ok(true)
        }

        /// Copies the encoded bitstream out of a dequeued CAPTURE buffer by
        /// CPU-mapping its backing buffer object.
        fn export(
            &self,
            mut buffer: DqBuffer<Capture, Vec<Self::PlaneHandle>>,
        ) -> anyhow::Result<Vec<u8>> {
            let timestamp = Timestamp::from(buffer.data.timestamp());

            let Some(mut handle) = buffer.take_handles() else {
                log::error!("CAPTURE: Failed to map buffer timestamp={timestamp:?}");
                return Err(BackendError::FailedToMapCapture(timestamp).into());
            };

            let Some(handle) = handle.pop() else {
                log::error!("CAPTURE: Failed to map buffer timestamp={timestamp:?}");
                return Err(BackendError::FailedToMapCapture(timestamp).into());
            };

            // Only copy out the bytes the encoder actually produced.
            let bytesused = *buffer.data.get_first_plane().bytesused;

            let mut content = Vec::with_capacity(bytesused as usize);

            // Double `?`: the gbm `map` call appears to return a nested
            // Result (mapping failure and in-mapping failure) — TODO confirm
            // against the gbm crate's `BufferObject::map` signature.
            handle.0.bo.map(&self.gbm, 0, 0, bytesused, 1, |mapped| {
                content.extend(mapped.buffer());
            })??;

            Ok(content)
        }
    }
1138
    /// A procedurally generated MMAP test frame (see `TestMmapFrameGenerator`).
    pub struct TestMmapFrame {
        // Timestamp and layout for this frame.
        meta: FrameMetadata,
        // Frame-count parameter forwarded to `get_test_frame_t` to derive the
        // test pattern's time value.
        frame_count: u64,
    }

    impl OutputBufferHandle for TestMmapFrame {
        type PrimitiveBufferHandles = Vec<MmapHandle>;

        /// Fills the OUTPUT buffer's MMAP plane(s) with a procedural test
        /// pattern and queues the buffer.
        ///
        /// Supports two-planar NM12 and single-planar NV12; any other format
        /// yields an error.
        fn queue(
            self,
            buffer: OutputBuffer<'_, Self::PrimitiveBufferHandles>,
        ) -> anyhow::Result<()> {
            if self.meta.layout.format == (Fourcc::from(b"NM12"), 0) {
                // NM12: luma and chroma live in separate plane mappings.
                let mut y_plane = buffer.get_plane_mapping(0).unwrap();
                let mut uv_plane = buffer.get_plane_mapping(1).unwrap();

                fill_test_frame_nm12(
                    self.meta.layout.size.width as usize,
                    self.meta.layout.size.height as usize,
                    [self.meta.layout.planes[0].stride, self.meta.layout.planes[1].stride],
                    get_test_frame_t(self.meta.timestamp, self.frame_count),
                    y_plane.as_mut(),
                    uv_plane.as_mut(),
                );

                buffer.queue(&[y_plane.len(), uv_plane.len()])?;
            } else if self.meta.layout.format == (Fourcc::from(b"NV12"), 0) {
                // NV12: single mapping; the chroma plane is placed at
                // stride * height after the start and shares the luma stride.
                let mut plane = buffer.get_plane_mapping(0).unwrap();

                let strides =
                    [self.meta.layout.planes[0].stride, self.meta.layout.planes[0].stride];
                let offsets = [
                    self.meta.layout.planes[0].offset,
                    self.meta.layout.planes[0].stride * self.meta.layout.size.height as usize,
                ];

                fill_test_frame_nv12(
                    self.meta.layout.size.width as usize,
                    self.meta.layout.size.height as usize,
                    strides,
                    offsets,
                    get_test_frame_t(self.meta.timestamp, self.frame_count),
                    plane.as_mut(),
                );

                buffer.queue(&[plane.len()])?;
            } else {
                return Err(anyhow::anyhow!("unsupported format"));
            }

            Ok(())
        }
    }
1192
1193 /// Helper struct. Procedurally generate NV12 or NM12 frames for test purposes.
1194 pub struct TestMmapFrameGenerator {
1195 counter: u64,
1196 max_count: u64,
1197 frame_layout: FrameLayout,
1198 }
1199
1200 impl TestMmapFrameGenerator {
new(max_count: u64, frame_layout: FrameLayout) -> Self1201 pub fn new(max_count: u64, frame_layout: FrameLayout) -> Self {
1202 Self { counter: 0, max_count, frame_layout }
1203 }
1204 }
1205
1206 impl Iterator for TestMmapFrameGenerator {
1207 type Item = (FrameMetadata, TestMmapFrame);
1208
next(&mut self) -> Option<Self::Item>1209 fn next(&mut self) -> Option<Self::Item> {
1210 if self.counter > self.max_count {
1211 return None;
1212 }
1213
1214 self.counter += 1;
1215
1216 let meta = FrameMetadata {
1217 timestamp: self.counter,
1218 layout: self.frame_layout.clone(),
1219 force_keyframe: false,
1220 };
1221
1222 let handle = TestMmapFrame { meta: meta.clone(), frame_count: self.max_count };
1223
1224 Some((meta, handle))
1225 }
1226 }
1227
perform_v4l2_encoder_mmap_test<Codec>( frame_count: u64, mut encoder: StatefulEncoder< TestMmapFrame, V4L2Backend<TestMmapFrame, MmapingCapture, Codec>, >, coded_consumer: impl FnMut(CodedBitstreamBuffer), ) where V4L2Backend<TestMmapFrame, MmapingCapture, Codec>: EncoderCodec,1228 pub fn perform_v4l2_encoder_mmap_test<Codec>(
1229 frame_count: u64,
1230 mut encoder: StatefulEncoder<
1231 TestMmapFrame,
1232 V4L2Backend<TestMmapFrame, MmapingCapture, Codec>,
1233 >,
1234 coded_consumer: impl FnMut(CodedBitstreamBuffer),
1235 ) where
1236 V4L2Backend<TestMmapFrame, MmapingCapture, Codec>: EncoderCodec,
1237 {
1238 let format: v4l2r::Format = encoder.backend().output_format().unwrap();
1239 let layout = v4l2_format_to_frame_layout(&format);
1240 let mut frame_producer = TestMmapFrameGenerator::new(frame_count, layout);
1241
1242 simple_encode_loop(&mut encoder, &mut frame_producer, coded_consumer).expect("encode loop");
1243 }
1244
    /// Helper struct. Procedurally generate NV12 or NM12 frames for test purposes.
    pub struct TestDmabufFrameGenerator {
        // Number of frames generated so far; doubles as the next timestamp.
        counter: u64,
        // Generation stops once `counter` exceeds this value.
        max_count: u64,
        // Allocation size of each generated buffer object.
        coded_size: Resolution,
        // Visible size reported in the generated frame layout.
        visible_size: Resolution,
        // GBM device used to allocate the buffer objects.
        gbm: Arc<gbm::Device<GbmDevice>>,
    }

    impl TestDmabufFrameGenerator {
        /// Creates a generator yielding `max_count + 1` frames of the given
        /// coded/visible sizes, allocated from `gbm`.
        pub fn new(
            max_count: u64,
            coded_size: Resolution,
            visible_size: Resolution,
            gbm: Arc<gbm::Device<GbmDevice>>,
        ) -> Self {
            Self { counter: 0, max_count, coded_size, visible_size, gbm }
        }
    }
1264
1265 impl Iterator for TestDmabufFrameGenerator {
1266 type Item = (FrameMetadata, DmabufFrame);
1267
next(&mut self) -> Option<Self::Item>1268 fn next(&mut self) -> Option<Self::Item> {
1269 if self.counter > self.max_count {
1270 return None;
1271 }
1272
1273 self.counter += 1;
1274
1275 let bo = self
1276 .gbm
1277 .create_buffer_object::<()>(
1278 self.coded_size.width,
1279 self.coded_size.height,
1280 gbm::Format::Nv12,
1281 GBM_BO_USE_HW_VIDEO_ENCODER,
1282 )
1283 .expect("create bo");
1284
1285 let plane_count = bo.plane_count().unwrap() as i32;
1286 let fourcc = bo.format().unwrap();
1287
1288 if plane_count > 2 {
1289 // NOTE: NV12 should be at most 2 plane.
1290 panic!("Unsupported plane count for bo");
1291 }
1292
1293 let mut fds: Vec<OwnedFd> = Vec::new();
1294 let mut inodes: Vec<u64> = Vec::new();
1295 let mut planes = Vec::new();
1296
1297 for plane in 0..(bo.plane_count().unwrap() as i32) {
1298 let fd = bo.fd_for_plane(plane).unwrap();
1299 let stat = fstat(fd.as_raw_fd()).unwrap();
1300 let offset = bo.offset(plane as _).unwrap() as usize;
1301 let stride = bo.stride_for_plane(plane as _).unwrap() as usize;
1302 let buffer_index;
1303
1304 // Deduplicate fds
1305 if let Some((index, _)) =
1306 inodes.iter().enumerate().find(|(_, s)| **s == stat.st_ino)
1307 {
1308 buffer_index = index;
1309 } else {
1310 buffer_index = fds.len();
1311 fds.push(fd);
1312 inodes.push(stat.st_ino);
1313 }
1314
1315 planes.push(crate::PlaneLayout { buffer_index, offset, stride })
1316 }
1317
1318 let layout = FrameLayout {
1319 format: (Fourcc::from(fourcc as u32), 0),
1320 size: self.visible_size,
1321 planes,
1322 };
1323 dbg!(&layout);
1324
1325 let meta = FrameMetadata {
1326 timestamp: self.counter,
1327 layout: layout.clone(),
1328 force_keyframe: false,
1329 };
1330
1331 let frame = DmabufFrame { fds, layout };
1332
1333 Some((meta, frame))
1334 }
1335 }
1336
perform_v4l2_encoder_dmabuf_test<Codec>( coded_size: Resolution, visible_size: Resolution, frame_count: u64, gbm: Arc<gbm::Device<GbmDevice>>, mut encoder: StatefulEncoder<DmabufFrame, V4L2Backend<DmabufFrame, BoPoolAllocator, Codec>>, coded_consumer: impl FnMut(CodedBitstreamBuffer), ) where V4L2Backend<DmabufFrame, BoPoolAllocator, Codec>: EncoderCodec,1337 pub fn perform_v4l2_encoder_dmabuf_test<Codec>(
1338 coded_size: Resolution,
1339 visible_size: Resolution,
1340 frame_count: u64,
1341 gbm: Arc<gbm::Device<GbmDevice>>,
1342 mut encoder: StatefulEncoder<DmabufFrame, V4L2Backend<DmabufFrame, BoPoolAllocator, Codec>>,
1343 coded_consumer: impl FnMut(CodedBitstreamBuffer),
1344 ) where
1345 V4L2Backend<DmabufFrame, BoPoolAllocator, Codec>: EncoderCodec,
1346 {
1347 let format: v4l2r::Format = encoder.backend().output_format().unwrap();
1348
1349 let mut frame_producer =
1350 TestDmabufFrameGenerator::new(frame_count, coded_size, visible_size, gbm).map(
1351 |(meta, mut frame)| {
1352 if frame.layout.format.0 == Fourcc::from(b"NV12")
1353 && frame.layout.planes.len() == 2
1354 && format.pixelformat == PixelFormat::from_fourcc(b"NV12")
1355 && format.plane_fmt.len() == 1
1356 {
1357 // Remove last NV12 plane when GBM advertises 2 plaens and V4L2 expects a
1358 // single frame.
1359 frame.layout.planes.pop();
1360 }
1361
1362 (meta, frame)
1363 },
1364 );
1365
1366 simple_encode_loop(&mut encoder, &mut frame_producer, coded_consumer).expect("encode loop");
1367 }
1368 }
1369