• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
// Copyright 2024 The ChromiumOS Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
4 
5 use std::ops::Deref;
6 use std::os::fd::BorrowedFd;
7 
8 use v4l2r::bindings;
9 use v4l2r::ioctl::BufferCapabilities;
10 use v4l2r::ioctl::BufferField;
11 use v4l2r::ioctl::BufferFlags;
12 use v4l2r::ioctl::DecoderCmd;
13 use v4l2r::ioctl::EventType;
14 use v4l2r::ioctl::SelectionTarget;
15 use v4l2r::ioctl::SelectionType;
16 use v4l2r::ioctl::SrcChanges;
17 use v4l2r::ioctl::V4l2Buffer;
18 use v4l2r::ioctl::V4l2MplaneFormat;
19 use v4l2r::ioctl::V4l2PlanesWithBacking;
20 use v4l2r::ioctl::V4l2PlanesWithBackingMut;
21 use v4l2r::memory::MemoryType;
22 use v4l2r::Colorspace;
23 use v4l2r::Quantization;
24 use v4l2r::QueueClass;
25 use v4l2r::QueueDirection;
26 use v4l2r::QueueType;
27 use v4l2r::XferFunc;
28 use v4l2r::YCbCrEncoding;
29 
30 use crate::io::ReadFromDescriptorChain;
31 use crate::io::WriteToDescriptorChain;
32 use crate::ioctl::virtio_media_dispatch_ioctl;
33 use crate::ioctl::IoctlResult;
34 use crate::ioctl::VirtioMediaIoctlHandler;
35 use crate::mmap::MmapMappingManager;
36 use crate::DequeueBufferEvent;
37 use crate::SessionEvent;
38 use crate::SgEntry;
39 use crate::V4l2Event;
40 use crate::V4l2Ioctl;
41 use crate::VirtioMediaDevice;
42 use crate::VirtioMediaDeviceSession;
43 use crate::VirtioMediaEventQueue;
44 use crate::VirtioMediaHostMemoryMapper;
45 use crate::VIRTIO_MEDIA_MMAP_FLAG_RW;
46 
/// Backing MMAP memory for a [`VideoDecoderBuffer`].
pub trait VideoDecoderBufferBacking {
    /// Creates the backing storage for the buffer of `queue` with id `index`.
    ///
    /// `sizes` contains the size in bytes of each plane of the buffer.
    fn new(queue: QueueType, index: u32, sizes: &[usize]) -> IoctlResult<Self>
    where
        Self: Sized;

    /// Returns a file descriptor for plane `plane_idx`, or `None` if the backing cannot provide
    /// one (e.g. `plane_idx` is out of range, or the storage is not fd-backed).
    fn fd_for_plane(&self, plane_idx: usize) -> Option<BorrowedFd>;
}
55 
/// A V4L2 buffer of a decoder session, together with its backend-specific storage.
pub struct VideoDecoderBuffer<S: VideoDecoderBufferBacking> {
    /// V4L2 state of the buffer (flags, timestamp, planes), as reported to the guest.
    v4l2_buffer: V4l2Buffer,

    /// Backend-specific storage.
    pub backing: S,
}
62 
impl<S: VideoDecoderBufferBacking> VideoDecoderBuffer<S> {
    /// Creates a buffer for `queue` with id `index` and per-plane sizes `sizes`, along with its
    /// backend storage.
    ///
    /// `mmap_offset` is the offset the guest will use to map the first plane of the buffer.
    fn new(
        queue: QueueType,
        index: u32,
        sizes: &[usize],
        // TODO: need as many offsets as there are planes.
        mmap_offset: u32,
    ) -> IoctlResult<Self> {
        let backing = S::new(queue, index, sizes)?;

        let mut v4l2_buffer = V4l2Buffer::new(queue, index, MemoryType::Mmap);
        if let V4l2PlanesWithBackingMut::Mmap(mut planes) =
            v4l2_buffer.planes_with_backing_iter_mut()
        {
            // SAFETY: every buffer has at least one plane.
            let mut plane = planes.next().unwrap();
            plane.set_mem_offset(mmap_offset);
            // NOTE(review): only the first plane is initialized, and this panics if `sizes` is
            // empty — single-plane formats are assumed until the TODO above is addressed.
            *plane.length = sizes[0] as u32;
        } else {
            // SAFETY: we have just set the buffer type to MMAP. Reaching this point means a bug in
            // the code.
            panic!()
        }

        v4l2_buffer.set_flags(BufferFlags::TIMESTAMP_MONOTONIC);
        v4l2_buffer.set_field(BufferField::None);

        Ok(Self {
            v4l2_buffer,
            backing,
        })
    }

    /// Returns the index of this buffer within its queue.
    pub fn index(&self) -> u32 {
        self.v4l2_buffer.index()
    }

    /// Returns the timestamp currently set on this buffer.
    pub fn timestamp(&self) -> bindings::timeval {
        self.v4l2_buffer.timestamp()
    }
}
104 
/// Events reported by the [`VideoDecoderBackendSession::next_event`] method.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum VideoDecoderBackendEvent {
    /// Sent whenever the format of the stream has changed. The new format can be read using
    /// [`VideoDecoderBackendSession::current_format`].
    StreamFormatChanged,
    /// Sent whenever an `OUTPUT` buffer is done processing and can be reused.
    InputBufferDone(u32),
    /// Sent whenever a decoded frame is ready on the `CAPTURE` queue.
    FrameCompleted {
        /// Index of the `CAPTURE` buffer that contains the decoded frame.
        buffer_id: u32,
        /// Timestamp of the frame, as passed to [`VideoDecoderBackendSession::decode`].
        timestamp: bindings::timeval,
        /// Number of bytes used in each plane of the buffer.
        bytes_used: Vec<u32>,
        /// Whether this is the last frame of the stream (carries `V4L2_BUF_FLAG_LAST`).
        is_last: bool,
    },
}
121 
/// Description of the current stream parameters, as parsed from the input.
#[derive(Clone)]
pub struct StreamParams {
    /// Minimum number of output buffers necessary to decode the stream.
    pub min_output_buffers: u32,
    /// Coded size of the stream.
    pub coded_size: (u32, u32),
    /// Visible rectangle containing the part of the frame to display.
    pub visible_rect: v4l2r::Rect,
}
132 
/// Trait for a video decoding session.
pub trait VideoDecoderBackendSession {
    /// Type of the backing storage used for buffers of this session.
    type BufferStorage: VideoDecoderBufferBacking;

    /// Decode the encoded stream in `input`, of length `bytes_used`, which corresponds to
    /// OUTPUT buffer `index`.
    ///
    /// `timestamp` is the timestamp of the frame, to be reported in any frame produced from this
    /// call.
    fn decode(
        &mut self,
        input: &Self::BufferStorage,
        index: u32,
        timestamp: bindings::timeval,
        bytes_used: u32,
    ) -> IoctlResult<()>;

    /// Use `backing` as the backing storage for output buffer `index`.
    fn use_as_output(&mut self, index: u32, backing: &mut Self::BufferStorage) -> IoctlResult<()>;

    /// Start draining the decoder pipeline for all buffers still in it.
    ///
    /// The backend will report a frame with the `V4L2_BUF_FLAG_LAST` once the drain
    /// process is completed.
    fn drain(&mut self) -> IoctlResult<()>;

    /// Remove any output buffer that has been previously added using
    /// [`Self::use_as_output`].
    fn clear_output_buffers(&mut self) -> IoctlResult<()>;

    /// Returns the next pending event if there is one, or `None` if there aren't any.
    fn next_event(&mut self) -> Option<VideoDecoderBackendEvent>;

    /// Returns the current format set for the given `direction`, in a form suitable as a reply to
    /// `VIDIOC_G_FMT`.
    fn current_format(&self, direction: QueueDirection) -> V4l2MplaneFormat;

    /// Returns the stream parameters as read from the input.
    fn stream_params(&self) -> StreamParams;

    /// Called whenever the decoder device has allocated buffers for a given queue.
    ///
    /// This can be useful for some backends that need to know how many buffers they will work
    /// with. The default implementation does nothing, which should be suitable for backends that
    /// don't care.
    fn buffers_allocated(&mut self, _direction: QueueDirection, _num_buffers: u32) {}

    /// Returns a file descriptor that signals `POLLIN` whenever an event is pending and can be
    /// read using [`Self::next_event`], or `None` if the backend does not support this.
    fn poll_fd(&self) -> Option<BorrowedFd> {
        None
    }

    /// Optional hook called whenever the streaming state of a queue changes. Some backends may
    /// need this information to operate properly.
    fn streaming_state(&mut self, _direction: QueueDirection, _streaming: bool) {}

    /// Optional hook called by the decoder to signal it has processed a pausing event
    /// sent by the backend.
    ///
    /// Pausing events are currently limited to [`VideoDecoderBackendEvent::StreamFormatChanged`].
    /// Whenever the resolution changes, the backend must stop processing until the decoder has
    /// adapted its conditions for decoding to resume (e.g. CAPTURE buffers of the proper size and
    /// format have been allocated).
    fn resume(&mut self) {}
}
198 
/// State of a session.
///
/// The session is only `Running` (i.e. actively decoding) when both the `OUTPUT` (input) and
/// `CAPTURE` (output) queues are streaming.
#[derive(Debug)]
enum VideoDecoderStreamingState {
    /// Initial state, and state after a `STOP` command or a successful drain. Contains the
    /// state of both streaming queues.
    Stopped {
        input_streaming: bool,
        output_streaming: bool,
    },
    /// State when both queues are streaming.
    Running,
    /// State when a `PAUSE` command has been received. Both queues are streaming in this state.
    Paused,
}

impl Default for VideoDecoderStreamingState {
    /// The initial state: stopped, with neither queue streaming.
    fn default() -> Self {
        Self::Stopped {
            input_streaming: false,
            output_streaming: false,
        }
    }
}

impl VideoDecoderStreamingState {
    /// Records that `STREAMON` was applied to the `OUTPUT` (input) queue.
    ///
    /// Transitions to `Running` if the `CAPTURE` queue was already streaming.
    fn input_streamon(&mut self) {
        match self {
            Self::Stopped {
                ref mut input_streaming,
                output_streaming,
            } if !(*input_streaming) => {
                *input_streaming = true;
                // If we switch to a state where both queues are streaming, then the device is
                // running.
                if *output_streaming {
                    *self = Self::Running;
                }
            }
            // Input queue already streaming: nothing to do.
            Self::Stopped { .. } | Self::Running | Self::Paused => (),
        }
    }

    /// Records that `STREAMOFF` was applied to the `OUTPUT` (input) queue.
    ///
    /// Leaving `Running` or `Paused` this way means the `CAPTURE` queue keeps streaming alone.
    fn input_streamoff(&mut self) {
        match self {
            Self::Stopped {
                ref mut input_streaming,
                ..
            } => *input_streaming = false,
            Self::Running | Self::Paused => {
                *self = Self::Stopped {
                    input_streaming: false,
                    output_streaming: true,
                }
            }
        }
    }

    /// Records that `STREAMON` was applied to the `CAPTURE` (output) queue.
    ///
    /// Transitions to `Running` if the `OUTPUT` queue was already streaming.
    fn output_streamon(&mut self) {
        match self {
            Self::Stopped {
                input_streaming,
                ref mut output_streaming,
            } if !(*output_streaming) => {
                *output_streaming = true;
                // If we switch to a state where both queues are streaming, then the device is
                // running.
                if *input_streaming {
                    *self = Self::Running;
                }
            }
            // Output queue already streaming: nothing to do.
            Self::Stopped { .. } | Self::Running | Self::Paused => (),
        }
    }

    /// Records that `STREAMOFF` was applied to the `CAPTURE` (output) queue.
    ///
    /// Leaving `Running` or `Paused` this way means the `OUTPUT` queue keeps streaming alone.
    fn output_streamoff(&mut self) {
        match self {
            Self::Stopped {
                ref mut output_streaming,
                ..
            } => *output_streaming = false,
            Self::Running | Self::Paused => {
                *self = Self::Stopped {
                    input_streaming: true,
                    output_streaming: false,
                }
            }
        }
    }

    /// Returns `true` if the `CAPTURE` queue is streaming while the session is not paused.
    ///
    /// NOTE(review): `Paused` is not matched here even though both queues stream in that state;
    /// this is used to decide whether pending CAPTURE buffers may be sent to the backend, which
    /// presumably must not happen while paused — confirm intent.
    //
    // Changed `&mut self` to `&self`: this method only reads the state, and `&mut` receivers
    // auto-reborrow so existing call sites are unaffected.
    fn is_output_streaming(&self) -> bool {
        matches!(
            self,
            Self::Running
                | Self::Stopped {
                    output_streaming: true,
                    ..
                }
        )
    }
}
299 
/// Management of the crop rectangle.
///
/// There are two ways this parameter can be set:
///
/// * Manually by the client, by calling `VIDIOC_S_SELECTION` with `V4L2_SEL_TGT_COMPOSE`. This has
///   an effect only before the first resolution change event is emitted, and is the only way to
///   properly set the crop rectangle for codecs/hardware that don't support DRC detection.
///
/// * From the information contained in the stream, signaled via a
///   [`VideoDecoderBackendEvent::StreamFormatChanged`] event. Once this event has been emitted, the
///   crop rectangle is fixed and determined by the stream.
enum CropRectangle {
    /// Crop rectangle has not been determined from the stream yet and can be set by the client.
    Settable(v4l2r::Rect),
    /// Crop rectangle has been determined from the stream and cannot be modified.
    FromStream(v4l2r::Rect),
}
317 
318 impl Deref for CropRectangle {
319     type Target = v4l2r::Rect;
320 
deref(&self) -> &Self::Target321     fn deref(&self) -> &Self::Target {
322         match self {
323             CropRectangle::Settable(r) => r,
324             CropRectangle::FromStream(r) => r,
325         }
326     }
327 }
328 
/// Struct containing validated colorspace information for a format.
#[derive(Debug, Clone, Copy)]
struct V4l2FormatColorspace {
    /// Colorspace, written to `v4l2_pix_format_mplane::colorspace`.
    colorspace: Colorspace,
    /// Transfer function, written to `v4l2_pix_format_mplane::xfer_func`.
    xfer_func: XferFunc,
    /// YCbCr encoding, written to the `ycbcr_enc` union member of `v4l2_pix_format_mplane`.
    ycbcr_enc: YCbCrEncoding,
    /// Quantization range, written to `v4l2_pix_format_mplane::quantization`.
    quantization: Quantization,
}
337 
impl Default for V4l2FormatColorspace {
    /// Default colorspace information: Rec.709 with limited quantization range.
    fn default() -> Self {
        Self {
            colorspace: Colorspace::Rec709,
            xfer_func: XferFunc::None,
            ycbcr_enc: YCbCrEncoding::E709,
            quantization: Quantization::LimRange,
        }
    }
}
348 
impl V4l2FormatColorspace {
    /// Apply the colorspace information of this object to `pix_mp`.
    fn apply(self, pix_mp: &mut bindings::v4l2_pix_format_mplane) {
        pix_mp.colorspace = self.colorspace as u32;
        // `ycbcr_enc` lives in an anonymous union generated by bindgen, hence the wrapper struct.
        pix_mp.__bindgen_anon_1 = bindings::v4l2_pix_format_mplane__bindgen_ty_1 {
            ycbcr_enc: self.ycbcr_enc as u8,
        };
        pix_mp.quantization = self.quantization as u8;
        pix_mp.xfer_func = self.xfer_func as u8;
    }
}
360 
/// A single decoding session of a [`VideoDecoder`] device.
pub struct VideoDecoderSession<S: VideoDecoderBackendSession> {
    /// Identifier of the session, provided at creation time.
    id: u32,

    /// Streaming state of the session's two queues.
    state: VideoDecoderStreamingState,

    /// Buffers of the `OUTPUT` (encoded input) queue.
    input_buffers: Vec<VideoDecoderBuffer<S::BufferStorage>>,
    /// Buffers of the `CAPTURE` (decoded output) queue.
    output_buffers: Vec<VideoDecoderBuffer<S::BufferStorage>>,
    /// Indices of CAPTURE buffers that are queued but not sent to the backend yet because the
    /// decoder is not running.
    pending_output_buffers: Vec<u32>,

    /// Counter used to fill the `sequence` field of dequeued CAPTURE buffers.
    sequence_cpt: u32,

    /// Whether the input source change event has been subscribed to by the driver. If `true` then
    /// the device will emit resolution change events.
    src_change_subscribed: bool,
    /// Whether the EOS event has been subscribed to by the driver. If `true` then the device will
    /// emit EOS events.
    eos_subscribed: bool,

    /// Current crop rectangle, either set by the client or imposed by the stream.
    crop_rectangle: CropRectangle,

    /// Current colorspace information of the format.
    colorspace: V4l2FormatColorspace,

    /// Adapter-specific data.
    backend_session: S,
}
389 
impl<S: VideoDecoderBackendSession> VirtioMediaDeviceSession for VideoDecoderSession<S> {
    /// Delegates polling directly to the backend session.
    fn poll_fd(&self) -> Option<BorrowedFd> {
        self.backend_session.poll_fd()
    }
}
395 
impl<S: VideoDecoderBackendSession> VideoDecoderSession<S> {
    /// Returns the current format for `direction`.
    ///
    /// This is essentially like calling the backend's corresponding
    /// [`VideoDecoderBackendSession::current_format`] method, but also applies the colorspace
    /// information potentially set by the user.
    fn current_format(&self, direction: QueueDirection) -> V4l2MplaneFormat {
        let format = self.backend_session.current_format(direction);

        let mut pix_mp =
            *<V4l2MplaneFormat as AsRef<bindings::v4l2_pix_format_mplane>>::as_ref(&format);

        self.colorspace.apply(&mut pix_mp);

        V4l2MplaneFormat::from((direction, pix_mp))
    }

    /// Returns the reply for decoder command `cmd` if it is valid in the current session state,
    /// or an error, without performing any state change.
    fn try_decoder_cmd(&self, cmd: DecoderCmd) -> IoctlResult<DecoderCmd> {
        match cmd {
            DecoderCmd::Stop { .. } => Ok(DecoderCmd::stop()),
            DecoderCmd::Start { .. } => Ok(DecoderCmd::start()),
            DecoderCmd::Pause { .. } => {
                match &self.state {
                    // The V4L2 documentation says this should return `EPERM`, but v4l2-compliance
                    // requires `EINVAL`...
                    VideoDecoderStreamingState::Stopped { .. } => Err(libc::EINVAL),
                    VideoDecoderStreamingState::Running | VideoDecoderStreamingState::Paused => {
                        Ok(DecoderCmd::pause())
                    }
                }
            }
            DecoderCmd::Resume => {
                match &self.state {
                    // The V4L2 documentation says this should return `EPERM`, but v4l2-compliance
                    // requires `EINVAL`...
                    VideoDecoderStreamingState::Stopped { .. } => Err(libc::EINVAL),
                    VideoDecoderStreamingState::Paused | VideoDecoderStreamingState::Running => {
                        Ok(DecoderCmd::resume())
                    }
                }
            }
        }
    }

    /// Send all the output buffers that are pending to the backend, if the decoder is running.
    ///
    /// In the adapter backend, if we receive buffers this means both queues are streaming - IOW we
    /// can queue them as soon as the condition is good.
    ///
    /// In the decoder device, we need to keep them until both queues are streaming. Same applies
    /// to input buffers BTW.
    fn try_send_pending_output_buffers(&mut self) {
        if !self.state.is_output_streaming() {
            return;
        }

        for i in self.pending_output_buffers.drain(..) {
            // NOTE(review): both unwraps panic if a pending index is stale or the backend call
            // fails — the invariant that pending indices are always valid should be confirmed.
            let buffer = self.output_buffers.get_mut(i as usize).unwrap();
            self.backend_session
                .use_as_output(buffer.index(), &mut buffer.backing)
                .unwrap();
        }
    }
}
460 
/// Trait for actual implementations of video decoding, to be used with [`VideoDecoder`].
///
/// [`VideoDecoder`] takes care of (mostly) abstracting V4L2 away ; implementors of this trait are
/// the ones that provide the actual video decoding service.
pub trait VideoDecoderBackend: Sized {
    /// Type of the per-session state created by this backend.
    type Session: VideoDecoderBackendSession;

    /// Create a new session with the provided `id`.
    fn new_session(&mut self, id: u32) -> IoctlResult<Self::Session>;
    /// Close and destroy `session`.
    fn close_session(&mut self, session: Self::Session);

    /// Returns the format at `index` for the given queue `direction`, or None if `index` is out of
    /// bounds.
    fn enum_formats(
        &self,
        session: &VideoDecoderSession<Self::Session>,
        direction: QueueDirection,
        index: u32,
    ) -> Option<bindings::v4l2_fmtdesc>;
    /// Returns the supported frame sizes for `pixel_format`, or None if the format is not
    /// supported.
    fn frame_sizes(&self, pixel_format: u32) -> Option<bindings::v4l2_frmsize_stepwise>;

    /// Adjust `format` to make it applicable to the queue with the given `direction` for the
    /// current `session`.
    ///
    /// This method doesn't fail, implementations must return the closest acceptable format that
    /// can be applied unchanged with [`Self::apply_format`].
    fn adjust_format(
        &self,
        session: &Self::Session,
        direction: QueueDirection,
        format: V4l2MplaneFormat,
    ) -> V4l2MplaneFormat;

    /// Applies `format` to the queue of the given `direction`. The format is adjusted if needed.
    fn apply_format(
        &self,
        session: &mut Self::Session,
        direction: QueueDirection,
        format: &V4l2MplaneFormat,
    );
}
504 
/// V4L2 stateful video decoder device, dispatching the actual decoding work to a
/// [`VideoDecoderBackend`].
pub struct VideoDecoder<
    D: VideoDecoderBackend,
    Q: VirtioMediaEventQueue,
    HM: VirtioMediaHostMemoryMapper,
> {
    /// Provider of the actual decoding service.
    backend: D,
    /// Queue used to send V4L2 events and dequeued buffers to the guest.
    event_queue: Q,
    /// Manager of host-side mappings for MMAP buffers.
    host_mapper: MmapMappingManager<HM>,
}
514 
impl<B, Q, HM> VideoDecoder<B, Q, HM>
where
    B: VideoDecoderBackend,
    Q: VirtioMediaEventQueue,
    HM: VirtioMediaHostMemoryMapper,
{
    /// Creates a new decoder device using `backend` for decoding, sending events to the guest
    /// through `event_queue`, and managing MMAP mappings with `host_mapper`.
    pub fn new(backend: B, event_queue: Q, host_mapper: HM) -> Self {
        Self {
            backend,
            event_queue,
            host_mapper: MmapMappingManager::from(host_mapper),
        }
    }

    /// Validate `format` for `queue` and return the adjusted format.
    ///
    /// Returns `EINVAL` if `queue` is not a multiplanar video queue. The user-provided colorspace
    /// information is preserved across the backend's adjustment for `OUTPUT` queues.
    fn try_format(
        &self,
        session: &VideoDecoderSession<B::Session>,
        queue: QueueType,
        format: bindings::v4l2_format,
    ) -> IoctlResult<V4l2MplaneFormat> {
        if queue.class() != QueueClass::VideoMplane {
            return Err(libc::EINVAL);
        }

        // SAFETY: safe because we have just confirmed the queue type is mplane.
        let pix_mp = unsafe { format.fmt.pix_mp };

        // Process the colorspace now so we can restore it after applying the backend adjustment.
        let colorspace = if queue.direction() == QueueDirection::Output {
            V4l2FormatColorspace {
                colorspace: Colorspace::n(pix_mp.colorspace)
                    .unwrap_or(session.colorspace.colorspace),
                xfer_func: XferFunc::n(pix_mp.xfer_func as u32)
                    .unwrap_or(session.colorspace.xfer_func),
                // SAFETY(review): reads a `u8` member of a bindgen-generated C union; presumably
                // every bit pattern is a valid `u8` so the read cannot be invalid — TODO confirm
                // and finish this safety comment.
                ycbcr_enc: YCbCrEncoding::n(unsafe { pix_mp.__bindgen_anon_1.ycbcr_enc as u32 })
                    .unwrap_or(session.colorspace.ycbcr_enc),
                quantization: Quantization::n(pix_mp.quantization as u32)
                    .unwrap_or(session.colorspace.quantization),
            }
        } else {
            session.colorspace
        };

        let format = V4l2MplaneFormat::from((queue.direction(), pix_mp));

        let format =
            self.backend
                .adjust_format(&session.backend_session, queue.direction(), format);

        let mut pix_mp =
            *<V4l2MplaneFormat as AsRef<bindings::v4l2_pix_format_mplane>>::as_ref(&format);

        colorspace.apply(&mut pix_mp);

        Ok(V4l2MplaneFormat::from((queue.direction(), pix_mp)))
    }
}
574 
impl<B, Q, HM, Reader, Writer> VirtioMediaDevice<Reader, Writer> for VideoDecoder<B, Q, HM>
where
    B: VideoDecoderBackend,
    Q: VirtioMediaEventQueue,
    HM: VirtioMediaHostMemoryMapper,
    Reader: ReadFromDescriptorChain,
    Writer: WriteToDescriptorChain,
{
    type Session = <Self as VirtioMediaIoctlHandler>::Session;

    /// Creates the device-side state for a new session identified by `session_id`.
    fn new_session(&mut self, session_id: u32) -> Result<Self::Session, i32> {
        let backend_session = self.backend.new_session(session_id)?;

        Ok(VideoDecoderSession {
            id: session_id,
            backend_session,
            state: Default::default(),
            input_buffers: Default::default(),
            output_buffers: Default::default(),
            pending_output_buffers: Default::default(),
            sequence_cpt: 0,
            src_change_subscribed: false,
            eos_subscribed: false,
            // Empty until the client sets it or the stream determines it.
            crop_rectangle: CropRectangle::Settable(v4l2r::Rect::new(0, 0, 0, 0)),
            colorspace: Default::default(),
        })
    }

    /// Destroys `session`, unregistering all its MMAP buffers from the host mapper.
    fn close_session(&mut self, session: Self::Session) {
        // Unregister all MMAP buffers.
        for buffer in session
            .input_buffers
            .iter()
            .chain(session.output_buffers.iter())
        {
            if let V4l2PlanesWithBacking::Mmap(planes) =
                buffer.v4l2_buffer.planes_with_backing_iter()
            {
                for plane in planes {
                    self.host_mapper.unregister_buffer(plane.mem_offset());
                }
            }
        }
    }

    /// Dispatches `ioctl` to the matching [`VirtioMediaIoctlHandler`] method of this device.
    fn do_ioctl(
        &mut self,
        session: &mut Self::Session,
        ioctl: V4l2Ioctl,
        reader: &mut Reader,
        writer: &mut Writer,
    ) -> std::io::Result<()> {
        virtio_media_dispatch_ioctl(self, session, ioctl, reader, writer)
    }

    /// Maps the MMAP plane registered at `offset` into the guest, read-only unless
    /// `VIRTIO_MEDIA_MMAP_FLAG_RW` is set in `flags`.
    fn do_mmap(
        &mut self,
        session: &mut Self::Session,
        flags: u32,
        offset: u32,
    ) -> Result<(u64, u64), i32> {
        // Search for a MMAP plane with the right offset.
        // TODO: O(n), not critical but not great either.
        let (buffer, plane_idx) = session
            .input_buffers
            .iter()
            .chain(session.output_buffers.iter())
            .filter_map(|b| {
                if let V4l2PlanesWithBacking::Mmap(planes) =
                    b.v4l2_buffer.planes_with_backing_iter()
                {
                    // Pair each plane (with its index) with its owning buffer.
                    Some(std::iter::repeat(b).zip(planes.enumerate()))
                } else {
                    None
                }
            })
            .flatten()
            .find(|(_, (_, p))| p.mem_offset() == offset)
            .map(|(b, (i, _))| (b, i))
            .ok_or(libc::EINVAL)?;
        let rw = (flags & VIRTIO_MEDIA_MMAP_FLAG_RW) != 0;

        // NOTE(review): assumes every MMAP plane can provide a backing fd; panics otherwise —
        // confirm this invariant holds for all backends.
        let fd = buffer.backing.fd_for_plane(plane_idx).unwrap();

        self.host_mapper
            .create_mapping(offset, fd, rw)
            .map_err(|e| {
                log::error!(
                    "failed to map MMAP buffer at offset 0x{:x}: {:#}",
                    offset,
                    e
                );
                libc::EINVAL
            })
    }

    /// Removes the host mapping previously created at `guest_addr`.
    fn do_munmap(&mut self, guest_addr: u64) -> Result<(), i32> {
        self.host_mapper
            .remove_mapping(guest_addr)
            .map(|_| ())
            .map_err(|_| libc::EINVAL)
    }

    /// Processes one pending backend event, if any, and forwards its consequences (dequeued
    /// buffers, V4L2 events) to the guest through the event queue.
    fn process_events(&mut self, session: &mut Self::Session) -> Result<(), i32> {
        let has_event = if let Some(event) = session.backend_session.next_event() {
            match event {
                // An OUTPUT buffer is done processing: dequeue it back to the guest.
                VideoDecoderBackendEvent::InputBufferDone(id) => {
                    let Some(buffer) = session.input_buffers.get_mut(id as usize) else {
                        log::error!("no matching OUTPUT buffer with id {} to process event", id);
                        return Ok(());
                    };

                    buffer.v4l2_buffer.clear_flags(BufferFlags::QUEUED);

                    self.event_queue
                        .send_event(V4l2Event::DequeueBuffer(DequeueBufferEvent::new(
                            session.id,
                            buffer.v4l2_buffer.clone(),
                        )));
                }
                // Stream resolution (or other format parameter) changed.
                VideoDecoderBackendEvent::StreamFormatChanged => {
                    let stream_params = session.backend_session.stream_params();

                    // The crop rectangle is now determined by the stream and cannot be changed.
                    session.crop_rectangle = CropRectangle::FromStream(stream_params.visible_rect);

                    if session.src_change_subscribed {
                        self.event_queue
                            .send_event(V4l2Event::Event(SessionEvent::new(
                                session.id,
                                bindings::v4l2_event {
                                    type_: bindings::V4L2_EVENT_SOURCE_CHANGE,
                                    u: bindings::v4l2_event__bindgen_ty_1 {
                                        src_change: bindings::v4l2_event_src_change {
                                            changes: SrcChanges::RESOLUTION.bits(),
                                        },
                                    },
                                    // TODO: fill pending, sequence, and timestamp.
                                    ..Default::default()
                                },
                            )))
                    }
                }
                // A decoded frame is ready: fill its CAPTURE buffer's metadata and dequeue it.
                VideoDecoderBackendEvent::FrameCompleted {
                    buffer_id,
                    timestamp,
                    bytes_used,
                    is_last,
                } => {
                    let Some(buffer) = session.output_buffers.get_mut(buffer_id as usize) else {
                        log::error!(
                            "no matching CAPTURE buffer with id {} to process event",
                            buffer_id
                        );
                        return Ok(());
                    };

                    buffer.v4l2_buffer.clear_flags(BufferFlags::QUEUED);
                    buffer.v4l2_buffer.set_flags(BufferFlags::TIMESTAMP_COPY);
                    if is_last {
                        buffer.v4l2_buffer.set_flags(BufferFlags::LAST);
                    }
                    buffer.v4l2_buffer.set_sequence(session.sequence_cpt);
                    session.sequence_cpt += 1;
                    buffer.v4l2_buffer.set_timestamp(timestamp);
                    // NOTE(review): only the first plane's `bytesused` is filled; multi-planar
                    // formats would need every plane set — confirm single-plane assumption.
                    let first_plane = buffer.v4l2_buffer.get_first_plane_mut();
                    *first_plane.bytesused = bytes_used.first().copied().unwrap_or(0);
                    self.event_queue
                        .send_event(V4l2Event::DequeueBuffer(DequeueBufferEvent::new(
                            session.id,
                            buffer.v4l2_buffer.clone(),
                        )));

                    if is_last && session.eos_subscribed {
                        self.event_queue
                            .send_event(V4l2Event::Event(SessionEvent::new(
                                session.id,
                                bindings::v4l2_event {
                                    type_: bindings::V4L2_EVENT_EOS,
                                    ..Default::default()
                                },
                            )))
                    }
                }
            }
            true
        } else {
            false
        };

        if !has_event {
            log::warn!("process_events called but no event was pending");
        }

        Ok(())
    }
}
772 
773 impl<B, Q, HM> VirtioMediaIoctlHandler for VideoDecoder<B, Q, HM>
774 where
775     B: VideoDecoderBackend,
776     Q: VirtioMediaEventQueue,
777     HM: VirtioMediaHostMemoryMapper,
778 {
779     type Session = VideoDecoderSession<B::Session>;
780 
enum_fmt( &mut self, session: &Self::Session, queue: QueueType, index: u32, ) -> IoctlResult<bindings::v4l2_fmtdesc>781     fn enum_fmt(
782         &mut self,
783         session: &Self::Session,
784         queue: QueueType,
785         index: u32,
786     ) -> IoctlResult<bindings::v4l2_fmtdesc> {
787         match queue {
788             QueueType::VideoOutputMplane | QueueType::VideoCaptureMplane => {
789                 self.backend.enum_formats(session, queue.direction(), index)
790             }
791             _ => None,
792         }
793         .ok_or(libc::EINVAL)
794     }
795 
enum_framesizes( &mut self, _session: &Self::Session, index: u32, pixel_format: u32, ) -> IoctlResult<bindings::v4l2_frmsizeenum>796     fn enum_framesizes(
797         &mut self,
798         _session: &Self::Session,
799         index: u32,
800         pixel_format: u32,
801     ) -> IoctlResult<bindings::v4l2_frmsizeenum> {
802         // We only support step-wise frame sizes.
803         if index != 0 {
804             return Err(libc::EINVAL);
805         }
806 
807         Ok(bindings::v4l2_frmsizeenum {
808             index: 0,
809             pixel_format,
810             type_: bindings::v4l2_frmsizetypes_V4L2_FRMSIZE_TYPE_STEPWISE,
811             __bindgen_anon_1: bindings::v4l2_frmsizeenum__bindgen_ty_1 {
812                 stepwise: self.backend.frame_sizes(pixel_format).ok_or(libc::EINVAL)?,
813             },
814             ..Default::default()
815         })
816     }
817 
g_fmt( &mut self, session: &Self::Session, queue: QueueType, ) -> IoctlResult<bindings::v4l2_format>818     fn g_fmt(
819         &mut self,
820         session: &Self::Session,
821         queue: QueueType,
822     ) -> IoctlResult<bindings::v4l2_format> {
823         if !matches!(
824             queue,
825             QueueType::VideoOutputMplane | QueueType::VideoCaptureMplane,
826         ) {
827             return Err(libc::EINVAL);
828         }
829 
830         let format = session.current_format(queue.direction());
831         let v4l2_format: &bindings::v4l2_format = format.as_ref();
832         Ok(*v4l2_format)
833     }
834 
try_fmt( &mut self, session: &Self::Session, queue: QueueType, format: bindings::v4l2_format, ) -> IoctlResult<bindings::v4l2_format>835     fn try_fmt(
836         &mut self,
837         session: &Self::Session,
838         queue: QueueType,
839         format: bindings::v4l2_format,
840     ) -> IoctlResult<bindings::v4l2_format> {
841         let format = self.try_format(session, queue, format)?;
842 
843         let v4l2_format: &bindings::v4l2_format = format.as_ref();
844         Ok(*v4l2_format)
845     }
846 
s_fmt( &mut self, session: &mut Self::Session, queue: QueueType, format: bindings::v4l2_format, ) -> IoctlResult<bindings::v4l2_format>847     fn s_fmt(
848         &mut self,
849         session: &mut Self::Session,
850         queue: QueueType,
851         format: bindings::v4l2_format,
852     ) -> IoctlResult<bindings::v4l2_format> {
853         let format = self.try_format(session, queue, format)?;
854 
855         self.backend
856             .apply_format(&mut session.backend_session, queue.direction(), &format);
857 
858         //  Setting the colorspace information on the `OUTPUT` queue sets it for both queues.
859         if queue.direction() == QueueDirection::Output {
860             session.colorspace.colorspace = format.colorspace();
861             session.colorspace.xfer_func = format.xfer_func();
862             session.colorspace.ycbcr_enc = format.ycbcr_enc();
863             session.colorspace.quantization = format.quantization();
864         }
865 
866         // If the crop rectangle is still settable, adjust it to the size of the new format.
867         if let CropRectangle::Settable(rect) = &mut session.crop_rectangle {
868             let (width, height) = format.size();
869             *rect = v4l2r::Rect::new(0, 0, width, height);
870         }
871 
872         let v4l2_format: &bindings::v4l2_format = format.as_ref();
873         Ok(*v4l2_format)
874     }
875 
    /// Handles `VIDIOC_REQBUFS`: resizes the buffer set of `queue` to `count` MMAP buffers.
    ///
    /// Shrinking the set unregisters the dropped buffers from the host mapper; growing it
    /// registers and creates new buffers sized after the first plane of the current format.
    /// Only `MemoryType::Mmap` is supported; any other memory type returns `EINVAL`.
    fn reqbufs(
        &mut self,
        session: &mut Self::Session,
        queue: QueueType,
        memory: MemoryType,
        count: u32,
    ) -> IoctlResult<bindings::v4l2_requestbuffers> {
        // Guest-visible buffers are always backed by host MMAP memory.
        if memory != MemoryType::Mmap {
            return Err(libc::EINVAL);
        }
        // TODO: fail if streaming?

        // Select the buffer list matching the requested queue.
        let (buffers, count) = match queue {
            QueueType::VideoOutputMplane => (&mut session.input_buffers, count),
            QueueType::VideoCaptureMplane => (
                &mut session.output_buffers,
                // TODO: no no, we need to reallocate all the buffers if the queue parameters have
                // changed... especially if the new format won't fit into the old buffers!
                // count.max(session.backend_session.stream_params().min_output_buffers),
                count,
            ),
            _ => return Err(libc::EINVAL),
        };

        if (count as usize) < buffers.len() {
            // Shrinking: release the host MMAP registration of every buffer we drop.
            for buffer in &buffers[count as usize..] {
                if let V4l2PlanesWithBacking::Mmap(planes) =
                    buffer.v4l2_buffer.planes_with_backing_iter()
                {
                    for plane in planes {
                        self.host_mapper.unregister_buffer(plane.mem_offset());
                    }
                }
            }
            buffers.truncate(count as usize);
        } else {
            // Growing: size new buffers after the first plane of the current format.
            let sizeimage = session
                .backend_session
                .current_format(queue.direction())
                .planes()
                .first()
                .ok_or(libc::EINVAL)?
                .sizeimage;
            let new_buffers = (buffers.len()..count as usize)
                .map(|i| {
                    // Register with the host mapper first to obtain the buffer's MMAP offset.
                    let mmap_offset = self
                        .host_mapper
                        .register_buffer(None, sizeimage)
                        .map_err(|_| libc::EINVAL)?;

                    VideoDecoderBuffer::new(
                        queue,
                        i as u32,
                        // TODO: only single-planar formats supported.
                        &[sizeimage as usize],
                        mmap_offset,
                    )
                    .inspect_err(|_| {
                        // On failure, undo the registration of this buffer so the offset is
                        // not leaked.
                        // TODO: no, we need to unregister all the buffers and restore the
                        // previous state?
                        self.host_mapper.unregister_buffer(mmap_offset);
                    })
                })
                .collect::<IoctlResult<Vec<_>>>()?;
            buffers.extend(new_buffers);
        }

        // Let the backend adapt to the new number of buffers on this queue.
        session
            .backend_session
            .buffers_allocated(queue.direction(), count);

        Ok(bindings::v4l2_requestbuffers {
            count,
            type_: queue as u32,
            memory: memory as u32,
            capabilities: (BufferCapabilities::SUPPORTS_MMAP
                | BufferCapabilities::SUPPORTS_ORPHANED_BUFS)
                .bits(),
            flags: 0,
            reserved: Default::default(),
        })
    }
958 
querybuf( &mut self, session: &Self::Session, queue: QueueType, index: u32, ) -> IoctlResult<V4l2Buffer>959     fn querybuf(
960         &mut self,
961         session: &Self::Session,
962         queue: QueueType,
963         index: u32,
964     ) -> IoctlResult<V4l2Buffer> {
965         let buffers = match queue {
966             QueueType::VideoOutputMplane => &session.input_buffers,
967             QueueType::VideoCaptureMplane => &session.output_buffers,
968             _ => return Err(libc::EINVAL),
969         };
970         let buffer = buffers.get(index as usize).ok_or(libc::EINVAL)?;
971 
972         Ok(buffer.v4l2_buffer.clone())
973     }
974 
subscribe_event( &mut self, session: &mut Self::Session, event: v4l2r::ioctl::EventType, _flags: v4l2r::ioctl::SubscribeEventFlags, ) -> IoctlResult<()>975     fn subscribe_event(
976         &mut self,
977         session: &mut Self::Session,
978         event: v4l2r::ioctl::EventType,
979         _flags: v4l2r::ioctl::SubscribeEventFlags,
980     ) -> IoctlResult<()> {
981         match event {
982             EventType::SourceChange(0) => {
983                 session.src_change_subscribed = true;
984                 Ok(())
985             }
986             EventType::Eos => {
987                 session.eos_subscribed = true;
988                 Ok(())
989             }
990             _ => Err(libc::EINVAL),
991         }
992     }
993 
994     // TODO: parse the event and use an enum value to signal ALL or single event?
unsubscribe_event( &mut self, session: &mut Self::Session, event: bindings::v4l2_event_subscription, ) -> IoctlResult<()>995     fn unsubscribe_event(
996         &mut self,
997         session: &mut Self::Session,
998         event: bindings::v4l2_event_subscription,
999     ) -> IoctlResult<()> {
1000         let mut valid = false;
1001 
1002         if event.type_ == 0 || matches!(EventType::try_from(&event), Ok(EventType::SourceChange(0)))
1003         {
1004             session.src_change_subscribed = false;
1005             valid = true;
1006         }
1007         if event.type_ == 0 || matches!(EventType::try_from(&event), Ok(EventType::Eos)) {
1008             session.eos_subscribed = false;
1009             valid = true;
1010         }
1011 
1012         if valid {
1013             Ok(())
1014         } else {
1015             Err(libc::EINVAL)
1016         }
1017     }
1018 
streamon(&mut self, session: &mut Self::Session, queue: QueueType) -> IoctlResult<()>1019     fn streamon(&mut self, session: &mut Self::Session, queue: QueueType) -> IoctlResult<()> {
1020         let buffers = match queue {
1021             QueueType::VideoOutputMplane => &session.input_buffers,
1022             QueueType::VideoCaptureMplane => &session.output_buffers,
1023             _ => return Err(libc::EINVAL),
1024         };
1025 
1026         let already_running = matches!(session.state, VideoDecoderStreamingState::Running);
1027 
1028         // Cannot stream if no buffers allocated.
1029         if buffers.is_empty() {
1030             return Err(libc::EINVAL);
1031         }
1032 
1033         match queue.direction() {
1034             QueueDirection::Output => session.state.input_streamon(),
1035             QueueDirection::Capture => session.state.output_streamon(),
1036         }
1037 
1038         session
1039             .backend_session
1040             .streaming_state(queue.direction(), true);
1041 
1042         if !already_running && matches!(session.state, VideoDecoderStreamingState::Running) {
1043             // TODO: start queueing pending buffers?
1044         }
1045 
1046         session.try_send_pending_output_buffers();
1047 
1048         Ok(())
1049     }
1050 
streamoff(&mut self, session: &mut Self::Session, queue: QueueType) -> IoctlResult<()>1051     fn streamoff(&mut self, session: &mut Self::Session, queue: QueueType) -> IoctlResult<()> {
1052         let buffers = match queue.direction() {
1053             QueueDirection::Output => {
1054                 // TODO: something to do on the backend?
1055                 session.state.input_streamoff();
1056 
1057                 &mut session.input_buffers
1058             }
1059             QueueDirection::Capture => {
1060                 session.backend_session.clear_output_buffers()?;
1061                 session.state.output_streamoff();
1062                 session.pending_output_buffers.clear();
1063 
1064                 &mut session.output_buffers
1065             }
1066         };
1067 
1068         for buffer in buffers {
1069             buffer.v4l2_buffer.clear_flags(BufferFlags::QUEUED);
1070         }
1071 
1072         session
1073             .backend_session
1074             .streaming_state(queue.direction(), false);
1075 
1076         Ok(())
1077     }
1078 
g_selection( &mut self, session: &Self::Session, sel_type: SelectionType, sel_target: SelectionTarget, ) -> IoctlResult<bindings::v4l2_rect>1079     fn g_selection(
1080         &mut self,
1081         session: &Self::Session,
1082         sel_type: SelectionType,
1083         sel_target: SelectionTarget,
1084     ) -> IoctlResult<bindings::v4l2_rect> {
1085         match (sel_type, sel_target) {
1086             // Coded resolution of the stream.
1087             (SelectionType::Capture, SelectionTarget::CropBounds) => {
1088                 let coded_size = session.backend_session.stream_params().coded_size;
1089                 Ok(v4l2r::Rect::new(0, 0, coded_size.0, coded_size.1).into())
1090             }
1091             // Visible area of CAPTURE buffers.
1092             (
1093                 SelectionType::Capture,
1094                 SelectionTarget::Crop
1095                 | SelectionTarget::CropDefault
1096                 | SelectionTarget::ComposeDefault
1097                 | SelectionTarget::ComposeBounds
1098                 | SelectionTarget::Compose,
1099             ) => {
1100                 //Ok(session.backend_session.stream_params().visible_rect.into())
1101                 Ok((*session.crop_rectangle).into())
1102             }
1103             _ => Err(libc::EINVAL),
1104         }
1105     }
1106 
s_selection( &mut self, session: &mut Self::Session, sel_type: SelectionType, sel_target: SelectionTarget, mut sel_rect: bindings::v4l2_rect, _sel_flags: v4l2r::ioctl::SelectionFlags, ) -> IoctlResult<bindings::v4l2_rect>1107     fn s_selection(
1108         &mut self,
1109         session: &mut Self::Session,
1110         sel_type: SelectionType,
1111         sel_target: SelectionTarget,
1112         mut sel_rect: bindings::v4l2_rect,
1113         _sel_flags: v4l2r::ioctl::SelectionFlags,
1114     ) -> IoctlResult<bindings::v4l2_rect> {
1115         if !matches!(
1116             (sel_type, sel_target),
1117             (SelectionType::Capture, SelectionTarget::Compose)
1118         ) {
1119             return Err(libc::EINVAL);
1120         }
1121 
1122         // If the crop rectangle is still settable, allow its modification within the bounds of the
1123         // coded resolution.
1124         if let CropRectangle::Settable(rect) = &mut session.crop_rectangle {
1125             let coded_size = session
1126                 .backend_session
1127                 .current_format(QueueDirection::Capture)
1128                 .size();
1129             sel_rect.left = std::cmp::max(0, sel_rect.left);
1130             sel_rect.top = std::cmp::max(0, sel_rect.top);
1131             sel_rect.width = std::cmp::min(coded_size.0, sel_rect.width - sel_rect.left as u32);
1132             sel_rect.height = std::cmp::min(coded_size.0, sel_rect.height - sel_rect.top as u32);
1133 
1134             *rect = sel_rect.into();
1135         }
1136 
1137         self.g_selection(session, sel_type, sel_target)
1138     }
1139 
qbuf( &mut self, session: &mut Self::Session, buffer: V4l2Buffer, _guest_regions: Vec<Vec<SgEntry>>, ) -> IoctlResult<V4l2Buffer>1140     fn qbuf(
1141         &mut self,
1142         session: &mut Self::Session,
1143         buffer: V4l2Buffer,
1144         _guest_regions: Vec<Vec<SgEntry>>,
1145     ) -> IoctlResult<V4l2Buffer> {
1146         let buffers = match buffer.queue() {
1147             QueueType::VideoOutputMplane => &mut session.input_buffers,
1148             QueueType::VideoCaptureMplane => &mut session.output_buffers,
1149             _ => return Err(libc::EINVAL),
1150         };
1151         let host_buffer = buffers
1152             .get_mut(buffer.index() as usize)
1153             .ok_or(libc::EINVAL)?;
1154 
1155         // Check that the buffer's memory type corresponds to the one requested during allocation.
1156         if buffer.memory() != host_buffer.v4l2_buffer.memory() {
1157             return Err(libc::EINVAL);
1158         }
1159 
1160         match buffer.queue().direction() {
1161             QueueDirection::Output => {
1162                 // Update buffer state
1163                 let v4l2_buffer = &mut host_buffer.v4l2_buffer;
1164                 v4l2_buffer.set_field(BufferField::None);
1165                 v4l2_buffer.set_timestamp(buffer.timestamp());
1166                 let first_plane = buffer.get_first_plane();
1167                 *v4l2_buffer.get_first_plane_mut().bytesused = *first_plane.bytesused;
1168                 let host_first_plane = v4l2_buffer.get_first_plane_mut();
1169                 *host_first_plane.length = *first_plane.length;
1170                 *host_first_plane.bytesused = *first_plane.bytesused;
1171                 if let Some(data_offset) = host_first_plane.data_offset {
1172                     *data_offset = first_plane.data_offset.copied().unwrap_or(0);
1173                 }
1174 
1175                 let bytes_used = {
1176                     let first_plane = host_buffer.v4l2_buffer.get_first_plane();
1177                     // V4L2's spec mentions that if `bytes_used == 0` then the whole buffer is considered to be
1178                     // used.
1179                     if *first_plane.bytesused == 0 {
1180                         *first_plane.length
1181                     } else {
1182                         *first_plane.bytesused
1183                     }
1184                 };
1185 
1186                 session.backend_session.decode(
1187                     &host_buffer.backing,
1188                     host_buffer.index(),
1189                     host_buffer.timestamp(),
1190                     bytes_used,
1191                 )?;
1192 
1193                 host_buffer.v4l2_buffer.add_flags(BufferFlags::QUEUED);
1194 
1195                 Ok(host_buffer.v4l2_buffer.clone())
1196             }
1197             QueueDirection::Capture => {
1198                 // Update buffer state
1199                 let v4l2_buffer = &mut host_buffer.v4l2_buffer;
1200                 v4l2_buffer.add_flags(BufferFlags::QUEUED);
1201                 v4l2_buffer.clear_flags(BufferFlags::LAST);
1202                 let host_first_plane = v4l2_buffer.get_first_plane_mut();
1203                 let first_plane = buffer.get_first_plane();
1204                 *host_first_plane.length = *first_plane.length;
1205                 *host_first_plane.bytesused = *first_plane.bytesused;
1206                 if let Some(data_offset) = host_first_plane.data_offset {
1207                     *data_offset = first_plane.data_offset.copied().unwrap_or(0);
1208                 }
1209 
1210                 let res = v4l2_buffer.clone();
1211 
1212                 session.pending_output_buffers.push(buffer.index());
1213                 session.try_send_pending_output_buffers();
1214 
1215                 Ok(res)
1216             }
1217         }
1218     }
1219 
try_decoder_cmd( &mut self, session: &Self::Session, cmd: bindings::v4l2_decoder_cmd, ) -> IoctlResult<bindings::v4l2_decoder_cmd>1220     fn try_decoder_cmd(
1221         &mut self,
1222         session: &Self::Session,
1223         cmd: bindings::v4l2_decoder_cmd,
1224     ) -> IoctlResult<bindings::v4l2_decoder_cmd> {
1225         let cmd = DecoderCmd::try_from(cmd).map_err(|_| libc::EINVAL)?;
1226         session.try_decoder_cmd(cmd).map(Into::into)
1227     }
1228 
    /// Handles `VIDIOC_DECODER_CMD`: validates `cmd` against the session state, then
    /// applies it.
    ///
    /// `Stop` triggers the drain sequence, `Start` restarts a fully-stopped decoder,
    /// `Pause`/`Resume` toggle between the running and paused states; invalid
    /// transitions are no-ops.
    fn decoder_cmd(
        &mut self,
        session: &mut Self::Session,
        cmd: bindings::v4l2_decoder_cmd,
    ) -> IoctlResult<bindings::v4l2_decoder_cmd> {
        let cmd = DecoderCmd::try_from(cmd).map_err(|_| libc::EINVAL)?;
        // Validation goes through the same path as `try_decoder_cmd` so both agree.
        let cmd = session.try_decoder_cmd(cmd)?;

        // The command is valid, apply it.
        match cmd {
            DecoderCmd::Stop { .. } => {
                // Switch to stopped state if we aren't already there.
                if !matches!(session.state, VideoDecoderStreamingState::Stopped { .. }) {
                    session.state = VideoDecoderStreamingState::Stopped {
                        input_streaming: true,
                        output_streaming: true,
                    };

                    // Start the `DRAIN` sequence.
                    session.backend_session.drain()?;
                }
            }
            DecoderCmd::Start { .. } => {
                // Restart the decoder if we were in the stopped state with both queues streaming.
                if let VideoDecoderStreamingState::Stopped {
                    input_streaming,
                    output_streaming,
                } = &session.state
                {
                    if *input_streaming && *output_streaming {
                        session.state = VideoDecoderStreamingState::Running;
                        session
                            .backend_session
                            .streaming_state(QueueDirection::Capture, true);
                    }
                    // CAPTURE buffers queued while stopped can now be sent to the backend.
                    session.try_send_pending_output_buffers();
                }
            }
            DecoderCmd::Pause { .. } => {
                // Pausing is only meaningful while running; otherwise it is a no-op.
                if matches!(session.state, VideoDecoderStreamingState::Running) {
                    session.state = VideoDecoderStreamingState::Paused;
                }
            }
            DecoderCmd::Resume => {
                // Resuming is only meaningful while paused; otherwise it is a no-op.
                if matches!(session.state, VideoDecoderStreamingState::Paused) {
                    session.state = VideoDecoderStreamingState::Running;
                }
            }
        }

        Ok(cmd.into())
    }
1281 }
1282