// Copyright 2020 The ChromiumOS Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

//! This module implements the interface that actual decoder devices need to
//! implement in order to provide video decoding capability to the guest.

use base::AsRawDescriptor;

use crate::virtio::video::decoder::Capability;
use crate::virtio::video::error::VideoError;
use crate::virtio::video::error::VideoResult;
use crate::virtio::video::format::Format;
use crate::virtio::video::format::Rect;
use crate::virtio::video::resource::GuestResource;
use crate::virtio::video::resource::GuestResourceHandle;

#[cfg(feature = "ffmpeg")]
pub mod ffmpeg;

#[cfg(feature = "vaapi")]
pub mod vaapi;
#[cfg(feature = "libvda")]
pub mod vda;

/// Contains the device's state for one playback session, i.e. one stream.
pub trait DecoderSession {
    /// Specify how many output buffers will be used for this session and which format they will
    /// carry. This method must be called after a `ProvidePictureBuffers` event is emitted, and
    /// before the first call to `use_output_buffer()`.
    fn set_output_parameters(&mut self, buffer_count: usize, format: Format) -> VideoResult<()>;

    /// Decode the compressed stream contained in [`offset`..`offset`+`bytes_used`] of the shared
    /// memory in the input `resource`.
    ///
    /// `resource_id` is the ID of the input resource. It will be signaled using the
    /// `NotifyEndOfBitstreamBuffer` event once the input resource is not used anymore.
    ///
    /// `timestamp` is a timestamp that will be copied into the frames decoded from that input
    /// stream. The unit is arbitrary and defined by the input stream; the device does not
    /// interpret it.
    ///
    /// The device takes ownership of `resource` and is responsible for closing it once it is not
    /// used anymore.
    ///
    /// The device will emit a `NotifyEndOfBitstreamBuffer` event with the `resource_id` value after
    /// the input buffer has been entirely processed.
    ///
    /// The device will emit a `PictureReady` event with the `timestamp` value for each picture
    /// produced from that input buffer.
    fn decode(
        &mut self,
        resource_id: u32,
        timestamp: u64,
        resource: GuestResourceHandle,
        offset: u32,
        bytes_used: u32,
    ) -> VideoResult<()>;

    /// Flush the decoder device, i.e. finish processing all queued decode requests and emit frames
    /// for them.
    ///
    /// The device will emit a `FlushCompleted` event once the flush is done.
    fn flush(&mut self) -> VideoResult<()>;

    /// Reset the decoder device, i.e. cancel all pending decoding requests.
    ///
    /// The device will emit a `ResetCompleted` event once the reset is done.
    fn reset(&mut self) -> VideoResult<()>;

    /// Immediately release all buffers passed using `use_output_buffer()` and
    /// `reuse_output_buffer()`.
    fn clear_output_buffers(&mut self) -> VideoResult<()>;

    /// Returns the event pipe on which the availability of events will be signaled. Note that the
    /// returned value is borrowed and only valid as long as the session is alive.
    fn event_pipe(&self) -> &dyn AsRawDescriptor;

    /// Ask the device to use `resource` to store decoded frames according to its layout.
    /// `picture_buffer_id` is the ID of the picture that will be reproduced in `PictureReady`
    /// events using this buffer.
    ///
    /// The device takes ownership of `resource` and is responsible for closing it once the buffer
    /// is not used anymore (either when the session is closed, or a new set of buffers is provided
    /// for the session).
    ///
    /// The device will emit a `PictureReady` event with the `picture_buffer_id` field set to the
    /// same value as the argument of the same name when a frame has been decoded into that buffer.
    fn use_output_buffer(
        &mut self,
        picture_buffer_id: i32,
        resource: GuestResource,
    ) -> VideoResult<()>;

    /// Ask the device to reuse an output buffer previously passed to
    /// `use_output_buffer` and that has previously been returned to the decoder
    /// in a `PictureReady` event.
    ///
    /// The device will emit a `PictureReady` event with the `picture_buffer_id`
    /// field set to the same value as the argument of the same name when a
    /// frame has been decoded into that buffer.
    fn reuse_output_buffer(&mut self, picture_buffer_id: i32) -> VideoResult<()>;

    /// Blocking call to read a single event from the event pipe.
    fn read_event(&mut self) -> VideoResult<DecoderEvent>;
}
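
// A minimal sketch of the expected call sequence for a session, assuming some
// backend implementing this trait and eliding error handling. Everything below
// is illustrative, not part of this module's API:
//
//     let mut session = backend.new_session(Format::H264)?;
//     session.decode(resource_id, timestamp, input_handle, 0, bytes_used)?;
//     // The first decoded data triggers a resolution change event telling us
//     // which output buffers to provide.
//     if let DecoderEvent::ProvidePictureBuffers { min_num_buffers, .. } =
//         session.read_event()?
//     {
//         session.set_output_parameters(min_num_buffers as usize, Format::NV12)?;
//         // Register each output buffer with `use_output_buffer()`.
//     }
//     // Submit the remaining input, recycling buffers returned by
//     // `PictureReady` events with `reuse_output_buffer()`, then:
//     session.flush()?;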

pub trait DecoderBackend {
    type Session: DecoderSession;

    /// Return the decoding capabilities for this backend instance.
    fn get_capabilities(&self) -> Capability;

    /// Create a new decoding session for the passed `format`.
    fn new_session(&mut self, format: Format) -> VideoResult<Self::Session>;
}
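
// Illustrative only: a client would typically check the capabilities returned
// by `get_capabilities()` for the desired input format before calling
// `new_session()`, then follow the session flow sketched above.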

#[derive(Debug)]
pub enum DecoderEvent {
    /// Emitted when the device knows the buffer format it will need to decode frames, and how many
    /// buffers it will need. The decoder is supposed to call `set_output_parameters()` to confirm
    /// the pixel format and actual number of buffers used, and provide buffers of the requested
    /// dimensions using `use_output_buffer()`.
    ProvidePictureBuffers {
        min_num_buffers: u32,
        width: i32,
        height: i32,
        visible_rect: Rect,
    },
    /// Emitted when the decoder is done decoding a picture. `picture_buffer_id`
    /// corresponds to the argument of the same name passed to `use_output_buffer()`
    /// or `reuse_output_buffer()`. `timestamp` corresponds to the argument of the
    /// same name passed to `decode()` and can be used to match decoded frames
    /// to the input buffer they were produced from.
    PictureReady {
        picture_buffer_id: i32,
        timestamp: u64,
        visible_rect: Rect,
    },
    /// Emitted when an input buffer passed to `decode()` is not used by the
    /// device anymore and can be reused by the decoder. The parameter corresponds
    /// to the `resource_id` argument passed to `decode()`.
    NotifyEndOfBitstreamBuffer(u32),
    /// Emitted when a decoding error has occurred.
    NotifyError(VideoError),
    /// Emitted after `flush()` has been called to signal that the flush is completed.
    FlushCompleted(VideoResult<()>),
    /// Emitted after `reset()` has been called to signal that the reset is completed.
    ResetCompleted(VideoResult<()>),
}
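
// A sketch of how a client might consume these events (illustrative; the tests
// below exercise the full flow):
//
//     match session.read_event()? {
//         DecoderEvent::PictureReady { picture_buffer_id, timestamp, .. } => {
//             // The frame for input `timestamp` is ready; once consumed, the
//             // buffer can be recycled.
//             session.reuse_output_buffer(picture_buffer_id)?;
//         }
//         DecoderEvent::NotifyEndOfBitstreamBuffer(resource_id) => {
//             // The input resource may now be reused or dropped.
//         }
//         DecoderEvent::NotifyError(error) => return Err(error),
//         _ => (),
//     }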

#[cfg(test)]
/// Shared functions that can be used to test individual backends.
mod tests {
    use std::time::Duration;

    use base::FromRawDescriptor;
    use base::MappedRegion;
    use base::MemoryMappingBuilder;
    use base::SafeDescriptor;
    use base::SharedMemory;
    use base::WaitContext;

    use super::*;
    use crate::virtio::video::format::FramePlane;
    use crate::virtio::video::resource::GuestMemArea;
    use crate::virtio::video::resource::GuestMemHandle;
    use crate::virtio::video::resource::VirtioObjectHandle;

    // Test video stream and its properties.
    const H264_STREAM: &[u8] = include_bytes!("test-25fps.h264");
    const H264_STREAM_WIDTH: i32 = 320;
    const H264_STREAM_HEIGHT: i32 = 240;
    const H264_STREAM_NUM_FRAMES: usize = 250;
    const H264_STREAM_CRCS: &str = include_str!("test-25fps.crc");

    /// Splits an H.264 Annex B stream into chunks that are all guaranteed to contain a full frame
    /// worth of data.
    ///
    /// This is a pretty naive implementation that is only guaranteed to work with our test stream.
    /// We are not using `AVCodecParser` because it seems to modify the decoding context, which
    /// would result in testing conditions that diverge more from our real use case where parsing
    /// has already been done.
    struct H264NalIterator<'a> {
        stream: &'a [u8],
        pos: usize,
    }

    impl<'a> H264NalIterator<'a> {
        fn new(stream: &'a [u8]) -> Self {
            Self { stream, pos: 0 }
        }

        /// Returns the position of the start of the next frame in the stream.
        fn next_frame_pos(&self) -> Option<usize> {
            const H264_START_CODE: [u8; 4] = [0x0, 0x0, 0x0, 0x1];
            // Search from one byte past the current position so the start code we are currently
            // sitting on is not matched again.
            self.stream[self.pos + 1..]
                .windows(H264_START_CODE.len())
                .position(|window| window == H264_START_CODE)
                .map(|pos| self.pos + pos + 1)
        }

        /// Returns whether `slice` contains frame data, i.e. a header where the NAL unit type is
        /// 0x1 or 0x5.
        fn contains_frame(slice: &[u8]) -> bool {
            // The NAL unit type is the low 5 bits of the byte following the start code: 0x1 is a
            // non-IDR slice, 0x5 an IDR slice.
            slice[4..].windows(4).any(|window| {
                window[0..3] == [0x0, 0x0, 0x1]
                    && (window[3] & 0x1f == 0x5 || window[3] & 0x1f == 0x1)
            })
        }
    }

    impl<'a> Iterator for H264NalIterator<'a> {
        type Item = &'a [u8];

        fn next(&mut self) -> Option<Self::Item> {
            match self.pos {
                cur_pos if cur_pos == self.stream.len() => None,
                cur_pos => loop {
                    self.pos = self.next_frame_pos().unwrap_or(self.stream.len());
                    let slice = &self.stream[cur_pos..self.pos];

                    // Keep advancing as long as we don't have frame data in our slice.
                    if Self::contains_frame(slice) || self.pos == self.stream.len() {
                        return Some(slice);
                    }
                },
            }
        }
    }
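
    // Illustrative: each item yielded by the iterator is one frame's worth of
    // bitstream, suitable for a single `decode()` call:
    //
    //     for (input_id, slice) in H264NalIterator::new(H264_STREAM).enumerate() {
    //         // Copy `slice` into an input resource and submit it.
    //     }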

    // Build a virtio object handle from a linear memory area. This is useful to emulate the
    // scenario where we are decoding from or into virtio objects.
    #[allow(dead_code)]
    pub fn build_object_handle(mem: &SharedMemory) -> GuestResourceHandle {
        GuestResourceHandle::VirtioObject(VirtioObjectHandle {
            // Safe because we are taking ownership of a just-duplicated FD.
            desc: unsafe {
                SafeDescriptor::from_raw_descriptor(base::clone_descriptor(mem).unwrap())
            },
            modifier: 0,
        })
    }

    // Build a guest memory handle from a linear memory area. This is useful to emulate the
    // scenario where we are decoding from or into guest memory.
    #[allow(dead_code)]
    pub fn build_guest_mem_handle(mem: &SharedMemory) -> GuestResourceHandle {
        GuestResourceHandle::GuestPages(GuestMemHandle {
            // Safe because we are taking ownership of a just-duplicated FD.
            desc: unsafe {
                SafeDescriptor::from_raw_descriptor(base::clone_descriptor(mem).unwrap())
            },
            mem_areas: vec![GuestMemArea {
                offset: 0,
                length: mem.size() as usize,
            }],
        })
    }
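
    // Illustrative: a backend's own tests would pair these builders with
    // `decode_h264_generic` below, e.g. (assuming a hypothetical `MyBackend`
    // implementing `DecoderBackend`):
    //
    //     decode_h264_generic(&mut MyBackend::new(), build_guest_mem_handle, build_object_handle);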

    /// Full decoding test of an H.264 video, checking that the flow of events is happening as
    /// expected.
    pub fn decode_h264_generic<D, I, O>(
        decoder: &mut D,
        input_resource_builder: I,
        output_resource_builder: O,
    ) where
        D: DecoderBackend,
        I: Fn(&SharedMemory) -> GuestResourceHandle,
        O: Fn(&SharedMemory) -> GuestResourceHandle,
    {
        const NUM_OUTPUT_BUFFERS: usize = 4;
        const INPUT_BUF_SIZE: usize = 0x4000;
        const OUTPUT_BUFFER_SIZE: usize =
            (H264_STREAM_WIDTH * (H264_STREAM_HEIGHT + H264_STREAM_HEIGHT / 2)) as usize;
        let mut session = decoder
            .new_session(Format::H264)
            .expect("failed to create H264 decoding session.");
        let wait_ctx = WaitContext::new().expect("Failed to create wait context");
        wait_ctx
            .add(session.event_pipe(), 0u8)
            .expect("Failed to add event pipe to wait context");
        // Output buffers suitable for receiving NV12 frames for our stream.
        let output_buffers = (0..NUM_OUTPUT_BUFFERS)
            .map(|i| {
                SharedMemory::new(
                    format!("video-output-buffer-{}", i),
                    OUTPUT_BUFFER_SIZE as u64,
                )
                .unwrap()
            })
            .collect::<Vec<_>>();
        let input_shm = SharedMemory::new("video-input-buffer", INPUT_BUF_SIZE as u64).unwrap();
        let input_mapping = MemoryMappingBuilder::new(input_shm.size() as usize)
            .from_shared_memory(&input_shm)
            .build()
            .unwrap();

        let mut decoded_frames_count = 0usize;
        let mut expected_frames_crcs = H264_STREAM_CRCS.lines();

        let mut on_frame_decoded =
            |session: &mut D::Session, picture_buffer_id: i32, visible_rect: Rect| {
                assert_eq!(
                    visible_rect,
                    Rect {
                        left: 0,
                        top: 0,
                        right: H264_STREAM_WIDTH,
                        bottom: H264_STREAM_HEIGHT,
                    }
                );

                // Verify that the CRC of the decoded frame matches the expected one.
                let mapping = MemoryMappingBuilder::new(OUTPUT_BUFFER_SIZE)
                    .from_shared_memory(&output_buffers[picture_buffer_id as usize])
                    .build()
                    .unwrap();
                let mut frame_data = vec![0u8; mapping.size()];
                assert_eq!(
                    mapping.read_slice(&mut frame_data, 0).unwrap(),
                    mapping.size()
                );

                let mut hasher = crc32fast::Hasher::new();
                hasher.update(&frame_data);
                let frame_crc = hasher.finalize();
                assert_eq!(
                    format!("{:08x}", frame_crc),
                    expected_frames_crcs
                        .next()
                        .expect("No CRC for decoded frame")
                );

                // We can recycle the frame now.
                session.reuse_output_buffer(picture_buffer_id).unwrap();
                decoded_frames_count += 1;
            };

        // Simple value by which we will multiply the frame number to obtain a fake timestamp.
        const TIMESTAMP_FOR_INPUT_ID_FACTOR: u64 = 1_000_000;
        for (input_id, slice) in H264NalIterator::new(H264_STREAM).enumerate() {
            let buffer_handle = input_resource_builder(&input_shm);
            input_mapping
                .write_slice(slice, 0)
                .expect("Failed to write stream data into input buffer.");
            session
                .decode(
                    input_id as u32,
                    input_id as u64 * TIMESTAMP_FOR_INPUT_ID_FACTOR,
                    buffer_handle,
                    0,
                    slice.len() as u32,
                )
                .expect("Call to decode() failed.");

            assert!(
                matches!(session.read_event().unwrap(), DecoderEvent::NotifyEndOfBitstreamBuffer(index) if index == input_id as u32)
            );

            // After sending the first buffer we should get the initial resolution change event and
            // can provide the frames to decode into.
            if input_id == 0 {
                let event = session.read_event().unwrap();
                assert!(matches!(
                    event,
                    DecoderEvent::ProvidePictureBuffers {
                        width: H264_STREAM_WIDTH,
                        height: H264_STREAM_HEIGHT,
                        visible_rect: Rect {
                            left: 0,
                            top: 0,
                            right: H264_STREAM_WIDTH,
                            bottom: H264_STREAM_HEIGHT,
                        },
                        ..
                    }
                ));

                let out_format = Format::NV12;

                session
                    .set_output_parameters(NUM_OUTPUT_BUFFERS, out_format)
                    .unwrap();

                // Pass the buffers we will decode into.
                for (picture_buffer_id, buffer) in output_buffers.iter().enumerate() {
                    session
                        .use_output_buffer(
                            picture_buffer_id as i32,
                            GuestResource {
                                handle: output_resource_builder(buffer),
                                planes: vec![
                                    FramePlane {
                                        offset: 0,
                                        stride: H264_STREAM_WIDTH as usize,
                                        size: (H264_STREAM_WIDTH * H264_STREAM_HEIGHT) as usize,
                                    },
                                    FramePlane {
                                        offset: (H264_STREAM_WIDTH * H264_STREAM_HEIGHT) as usize,
                                        stride: H264_STREAM_WIDTH as usize,
                                        size: (H264_STREAM_WIDTH * H264_STREAM_HEIGHT) as usize,
                                    },
                                ],
                                width: H264_STREAM_WIDTH as _,
                                height: H264_STREAM_HEIGHT as _,
                                format: out_format,
                            },
                        )
                        .unwrap();
                }
            }

            // If we have remaining events, they must be decoded frames. Get them and recycle them.
            while wait_ctx.wait_timeout(Duration::ZERO).unwrap().len() > 0 {
                match session.read_event().unwrap() {
                    DecoderEvent::PictureReady {
                        picture_buffer_id,
                        visible_rect,
                        ..
                    } => on_frame_decoded(&mut session, picture_buffer_id, visible_rect),
                    e => panic!("Unexpected event: {:?}", e),
                }
            }
        }

        session.flush().unwrap();

        // Keep getting frames until the final event, which should be `FlushCompleted`.
        let mut received_flush_completed = false;
        while wait_ctx.wait_timeout(Duration::ZERO).unwrap().len() > 0 {
            match session.read_event().unwrap() {
                DecoderEvent::PictureReady {
                    picture_buffer_id,
                    visible_rect,
                    ..
                } => on_frame_decoded(&mut session, picture_buffer_id, visible_rect),
                DecoderEvent::FlushCompleted(Ok(())) => {
                    received_flush_completed = true;
                    break;
                }
                e => panic!("Unexpected event: {:?}", e),
            }
        }

        // Confirm that we got the `FlushCompleted` event.
        assert!(received_flush_completed);

        // We should have read all the events for that session.
        assert_eq!(wait_ctx.wait_timeout(Duration::ZERO).unwrap().len(), 0);

        // We should not be expecting any more frames.
        assert_eq!(expected_frames_crcs.next(), None);

        // Check that we decoded the expected number of frames.
        assert_eq!(decoded_frames_count, H264_STREAM_NUM_FRAMES);
    }
}