// Copyright 2020 The ChromiumOS Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

//! This module implements the interface that actual decoder devices need to
//! implement in order to provide video decoding capability to the guest.

use base::AsRawDescriptor;

use crate::virtio::video::decoder::Capability;
use crate::virtio::video::error::VideoError;
use crate::virtio::video::error::VideoResult;
use crate::virtio::video::format::Format;
use crate::virtio::video::format::Rect;
use crate::virtio::video::resource::GuestResource;
use crate::virtio::video::resource::GuestResourceHandle;

#[cfg(feature = "ffmpeg")]
pub mod ffmpeg;

#[cfg(feature = "vaapi")]
pub mod vaapi;
#[cfg(feature = "libvda")]
pub mod vda;

/// Contains the device's state for one playback session, i.e. one stream.
pub trait DecoderSession {
    /// Tell how many output buffers will be used for this session and which format they will carry.
    /// This method must be called after a `ProvidePictureBuffers` event is emitted, and before the
    /// first call to `use_output_buffer()`.
    fn set_output_parameters(&mut self, buffer_count: usize, format: Format) -> VideoResult<()>;

    /// Decode the compressed stream contained in [`offset`..`offset`+`bytes_used`] of the shared
    /// memory in the input `resource`.
    ///
    /// `resource_id` is the ID of the input resource. It will be signaled using a
    /// `NotifyEndOfBitstreamBuffer` event once the input resource is not used anymore.
    ///
    /// `timestamp` is a timestamp that will be copied into the frames decoded from that input
    /// stream. Units are effectively free and provided by the input stream.
    ///
    /// The device takes ownership of `resource` and is responsible for closing it once it is not
    /// used anymore.
    ///
    /// The device will emit a `NotifyEndOfBitstreamBuffer` event with the `resource_id` value after
    /// the input buffer has been entirely processed.
    ///
    /// The device will emit a `PictureReady` event with the `timestamp` value for each picture
    /// produced from that input buffer.
    fn decode(
        &mut self,
        resource_id: u32,
        timestamp: u64,
        resource: GuestResourceHandle,
        offset: u32,
        bytes_used: u32,
    ) -> VideoResult<()>;

    /// Flush the decoder device, i.e. finish processing all queued decode requests and emit frames
    /// for them.
    ///
    /// The device will emit a `FlushCompleted` event once the flush is done.
    fn flush(&mut self) -> VideoResult<()>;

    /// Reset the decoder device, i.e. cancel all pending decoding requests.
    ///
    /// The device will emit a `ResetCompleted` event once the reset is done.
    fn reset(&mut self) -> VideoResult<()>;

    /// Immediately release all buffers passed using `use_output_buffer()` and
    /// `reuse_output_buffer()`.
    fn clear_output_buffers(&mut self) -> VideoResult<()>;

    /// Returns the event pipe on which the availability of events will be signaled. Note that the
    /// returned value is borrowed and only valid as long as the session is alive.
    fn event_pipe(&self) -> &dyn AsRawDescriptor;

    /// Ask the device to use `resource` to store decoded frames according to its layout.
    /// `picture_buffer_id` is the ID of the picture that will be reproduced in `PictureReady`
    /// events using this buffer.
    ///
    /// The device takes ownership of `resource` and is responsible for closing it once the buffer
    /// is not used anymore (either when the session is closed, or a new set of buffers is provided
    /// for the session).
    ///
    /// The device will emit a `PictureReady` event with the `picture_buffer_id` field set to the
    /// same value as the argument of the same name when a frame has been decoded into that buffer.
    fn use_output_buffer(
        &mut self,
        picture_buffer_id: i32,
        resource: GuestResource,
    ) -> VideoResult<()>;

    /// Ask the device to reuse an output buffer previously passed to
    /// `use_output_buffer` and that has previously been returned to the decoder
    /// in a `PictureReady` event.
    ///
    /// The device will emit a `PictureReady` event with the `picture_buffer_id`
    /// field set to the same value as the argument of the same name when a
    /// frame has been decoded into that buffer.
    fn reuse_output_buffer(&mut self, picture_buffer_id: i32) -> VideoResult<()>;

    /// Blocking call to read a single event from the event pipe.
    fn read_event(&mut self) -> VideoResult<DecoderEvent>;
}
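
// The following is an illustrative sketch, not part of the device interface:
// a hypothetical helper showing the call sequence documented above. It
// submits one input buffer and blocks until the device signals that the input
// resource has been consumed. Error handling is deliberately simplistic.
#[allow(dead_code)]
fn decode_one_input<S: DecoderSession>(
    session: &mut S,
    resource_id: u32,
    timestamp: u64,
    resource: GuestResourceHandle,
    bytes_used: u32,
) -> VideoResult<()> {
    session.decode(resource_id, timestamp, resource, 0, bytes_used)?;
    loop {
        match session.read_event()? {
            // The device is done with our input resource.
            DecoderEvent::NotifyEndOfBitstreamBuffer(id) if id == resource_id => return Ok(()),
            // Recycle any frame decoded in the meantime; a real client would
            // consume its content first.
            DecoderEvent::PictureReady {
                picture_buffer_id, ..
            } => session.reuse_output_buffer(picture_buffer_id)?,
            DecoderEvent::NotifyError(e) => return Err(e),
            // Other events (e.g. a resolution change) are ignored in this sketch.
            _ => (),
        }
    }
}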

pub trait DecoderBackend {
    type Session: DecoderSession;

    /// Return the decoding capabilities for this backend instance.
    fn get_capabilities(&self) -> Capability;

    /// Create a new decoding session for the passed `format`.
    fn new_session(&mut self, format: Format) -> VideoResult<Self::Session>;
}
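
// Illustrative sketch (hypothetical helper, not part of the interface):
// opening a decoding session from a backend. `Format::H264` mirrors the tests
// below; a real client would inspect the reported capabilities before
// settling on an input format.
#[allow(dead_code)]
fn open_h264_session<B: DecoderBackend>(backend: &mut B) -> VideoResult<B::Session> {
    let _capabilities = backend.get_capabilities();
    backend.new_session(Format::H264)
}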

#[derive(Debug)]
pub enum DecoderEvent {
    /// Emitted when the device knows the buffer format it will need to decode frames, and how many
    /// buffers it will need. The decoder is supposed to call `set_output_parameters()` to confirm
    /// the pixel format and actual number of buffers used, and provide buffers of the requested
    /// dimensions using `use_output_buffer()`.
    ProvidePictureBuffers {
        min_num_buffers: u32,
        width: i32,
        height: i32,
        visible_rect: Rect,
    },
    /// Emitted when the decoder is done decoding a picture. `picture_buffer_id`
    /// corresponds to the argument of the same name passed to `use_output_buffer()`
    /// or `reuse_output_buffer()`. `timestamp` corresponds to the argument of
    /// the same name passed to `decode()` and can be used to match decoded frames
    /// to the input buffer they were produced from.
    PictureReady {
        picture_buffer_id: i32,
        timestamp: u64,
        visible_rect: Rect,
    },
    /// Emitted when an input buffer passed to `decode()` is not used by the
    /// device anymore and can be reused by the decoder. The parameter corresponds
    /// to the `resource_id` argument passed to `decode()`.
    NotifyEndOfBitstreamBuffer(u32),
    /// Emitted when a decoding error has occurred.
    NotifyError(VideoError),
    /// Emitted after `flush()` has been called to signal that the flush is completed.
    FlushCompleted(VideoResult<()>),
    /// Emitted after `reset()` has been called to signal that the reset is completed.
    ResetCompleted(VideoResult<()>),
}
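
// Illustrative sketch (hypothetical helper, not part of the interface): one
// way a client might dispatch over `DecoderEvent`. The two completion events
// carry a result, while the others carry data the client is expected to
// react to.
#[allow(dead_code)]
fn describe_event(event: &DecoderEvent) -> String {
    match event {
        DecoderEvent::ProvidePictureBuffers {
            min_num_buffers,
            width,
            height,
            ..
        } => format!(
            "device needs at least {} output buffers of {}x{}",
            min_num_buffers, width, height
        ),
        DecoderEvent::PictureReady {
            picture_buffer_id,
            timestamp,
            ..
        } => format!(
            "frame with timestamp {} decoded into buffer {}",
            timestamp, picture_buffer_id
        ),
        DecoderEvent::NotifyEndOfBitstreamBuffer(resource_id) => {
            format!("input resource {} can be reused", resource_id)
        }
        DecoderEvent::NotifyError(error) => format!("decoding error: {:?}", error),
        DecoderEvent::FlushCompleted(result) => format!("flush completed: {:?}", result),
        DecoderEvent::ResetCompleted(result) => format!("reset completed: {:?}", result),
    }
}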

#[cfg(test)]
/// Shared functions that can be used to test individual backends.
mod tests {
    use std::time::Duration;

    use base::MappedRegion;
    use base::MemoryMappingBuilder;
    use base::SharedMemory;
    use base::WaitContext;

    use super::*;
    use crate::virtio::video::format::FramePlane;
    use crate::virtio::video::resource::GuestMemArea;
    use crate::virtio::video::resource::GuestMemHandle;
    use crate::virtio::video::resource::VirtioObjectHandle;

    // Test video stream and its properties.
    const H264_STREAM: &[u8] = include_bytes!("test-25fps.h264");
    const H264_STREAM_WIDTH: i32 = 320;
    const H264_STREAM_HEIGHT: i32 = 240;
    const H264_STREAM_NUM_FRAMES: usize = 250;
    const H264_STREAM_CRCS: &str = include_str!("test-25fps.crc");

    /// Splits a H.264 annex B stream into chunks that are all guaranteed to contain a full frame
    /// worth of data.
    ///
    /// This is a pretty naive implementation that is only guaranteed to work with our test stream.
    /// We are not using `AVCodecParser` because it seems to modify the decoding context, which
    /// would result in testing conditions that diverge more from our real use case where parsing
    /// has already been done.
    struct H264NalIterator<'a> {
        stream: &'a [u8],
        pos: usize,
    }

    impl<'a> H264NalIterator<'a> {
        fn new(stream: &'a [u8]) -> Self {
            Self { stream, pos: 0 }
        }

        /// Returns the position of the start of the next frame in the stream.
        fn next_frame_pos(&self) -> Option<usize> {
            const H264_START_CODE: [u8; 4] = [0x0, 0x0, 0x0, 0x1];
            self.stream[self.pos + 1..]
                .windows(H264_START_CODE.len())
                .position(|window| window == H264_START_CODE)
                .map(|pos| self.pos + pos + 1)
        }

        /// Returns whether `slice` contains frame data, i.e. a header where the NAL unit type is
        /// 0x1 or 0x5.
        fn contains_frame(slice: &[u8]) -> bool {
            slice[4..].windows(4).any(|window| {
                window[0..3] == [0x0, 0x0, 0x1]
                    && (window[3] & 0x1f == 0x5 || window[3] & 0x1f == 0x1)
            })
        }
    }

    impl<'a> Iterator for H264NalIterator<'a> {
        type Item = &'a [u8];

        fn next(&mut self) -> Option<Self::Item> {
            match self.pos {
                cur_pos if cur_pos == self.stream.len() => None,
                cur_pos => loop {
                    self.pos = self.next_frame_pos().unwrap_or(self.stream.len());
                    let slice = &self.stream[cur_pos..self.pos];

                    // Keep advancing as long as we don't have frame data in our slice.
                    if Self::contains_frame(slice) || self.pos == self.stream.len() {
                        return Some(slice);
                    }
                },
            }
        }
    }
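
    // Illustrative sketch (hypothetical helper): how the iterator is meant to
    // be consumed. Each yielded slice carries one frame's worth of data (frame
    // NALs plus any parameter sets preceding them), so for the test stream the
    // chunk count is expected to match `H264_STREAM_NUM_FRAMES`.
    #[allow(dead_code)]
    fn count_frame_chunks(stream: &[u8]) -> usize {
        H264NalIterator::new(stream).count()
    }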

    // Build a virtio object handle from a linear memory area. This is useful to emulate the
    // scenario where we are decoding from or into virtio objects.
    #[allow(dead_code)]
    pub fn build_object_handle(mem: &SharedMemory) -> GuestResourceHandle {
        GuestResourceHandle::VirtioObject(VirtioObjectHandle {
            desc: base::clone_descriptor(mem).unwrap(),
            modifier: 0,
        })
    }

    // Build a guest memory handle from a linear memory area. This is useful to emulate the
    // scenario where we are decoding from or into guest memory.
    #[allow(dead_code)]
    pub fn build_guest_mem_handle(mem: &SharedMemory) -> GuestResourceHandle {
        GuestResourceHandle::GuestPages(GuestMemHandle {
            desc: base::clone_descriptor(mem).unwrap(),
            mem_areas: vec![GuestMemArea {
                offset: 0,
                length: mem.size() as usize,
            }],
        })
    }

    /// Full decoding test of a H.264 video, checking that the flow of events is happening as
    /// expected.
    pub fn decode_h264_generic<D, I, O>(
        decoder: &mut D,
        input_resource_builder: I,
        output_resource_builder: O,
    ) where
        D: DecoderBackend,
        I: Fn(&SharedMemory) -> GuestResourceHandle,
        O: Fn(&SharedMemory) -> GuestResourceHandle,
    {
        const NUM_OUTPUT_BUFFERS: usize = 4;
        const INPUT_BUF_SIZE: usize = 0x4000;
        const OUTPUT_BUFFER_SIZE: usize =
            (H264_STREAM_WIDTH * (H264_STREAM_HEIGHT + H264_STREAM_HEIGHT / 2)) as usize;
        let mut session = decoder
            .new_session(Format::H264)
            .expect("failed to create H264 decoding session.");
        let wait_ctx = WaitContext::new().expect("Failed to create wait context");
        wait_ctx
            .add(session.event_pipe(), 0u8)
            .expect("Failed to add event pipe to wait context");
        // Output buffers suitable for receiving NV12 frames for our stream.
        let output_buffers = (0..NUM_OUTPUT_BUFFERS)
            .map(|i| {
                SharedMemory::new(
                    format!("video-output-buffer-{}", i),
                    OUTPUT_BUFFER_SIZE as u64,
                )
                .unwrap()
            })
            .collect::<Vec<_>>();
        let input_shm = SharedMemory::new("video-input-buffer", INPUT_BUF_SIZE as u64).unwrap();
        let input_mapping = MemoryMappingBuilder::new(input_shm.size() as usize)
            .from_shared_memory(&input_shm)
            .build()
            .unwrap();

        let mut decoded_frames_count = 0usize;
        let mut expected_frames_crcs = H264_STREAM_CRCS.lines();

        let mut on_frame_decoded =
            |session: &mut D::Session, picture_buffer_id: i32, visible_rect: Rect| {
                assert_eq!(
                    visible_rect,
                    Rect {
                        left: 0,
                        top: 0,
                        right: H264_STREAM_WIDTH,
                        bottom: H264_STREAM_HEIGHT,
                    }
                );

                // Verify that the CRC of the decoded frame matches the expected one.
                let mapping = MemoryMappingBuilder::new(OUTPUT_BUFFER_SIZE)
                    .from_shared_memory(&output_buffers[picture_buffer_id as usize])
                    .build()
                    .unwrap();
                let mut frame_data = vec![0u8; mapping.size()];
                assert_eq!(
                    mapping.read_slice(&mut frame_data, 0).unwrap(),
                    mapping.size()
                );

                let mut hasher = crc32fast::Hasher::new();
                hasher.update(&frame_data);
                let frame_crc = hasher.finalize();
                assert_eq!(
                    format!("{:08x}", frame_crc),
                    expected_frames_crcs
                        .next()
                        .expect("No CRC for decoded frame")
                );

                // We can recycle the frame now.
                session.reuse_output_buffer(picture_buffer_id).unwrap();
                decoded_frames_count += 1;
            };

        // Simple value by which we will multiply the frame number to obtain a fake timestamp.
        const TIMESTAMP_FOR_INPUT_ID_FACTOR: u64 = 1_000_000;
        for (input_id, slice) in H264NalIterator::new(H264_STREAM).enumerate() {
            let buffer_handle = input_resource_builder(&input_shm);
            input_mapping
                .write_slice(slice, 0)
                .expect("Failed to write stream data into input buffer.");
            session
                .decode(
                    input_id as u32,
                    input_id as u64 * TIMESTAMP_FOR_INPUT_ID_FACTOR,
                    buffer_handle,
                    0,
                    slice.len() as u32,
                )
                .expect("Call to decode() failed.");

            // Get all the events resulting from this submission.
            let mut events = Vec::new();
            while !wait_ctx.wait_timeout(Duration::ZERO).unwrap().is_empty() {
                events.push(session.read_event().unwrap());
            }

            // Our bitstream buffer should have been returned.
            let event_idx = events
                .iter()
                .position(|event| {
                    let input_id = input_id as u32;
                    matches!(event, DecoderEvent::NotifyEndOfBitstreamBuffer(index) if *index == input_id)
                })
                .unwrap();
            events.remove(event_idx);

            // After sending the first buffer we should get the initial resolution change event and
            // can provide the frames to decode into.
            if input_id == 0 {
                let event_idx = events
                    .iter()
                    .position(|event| {
                        matches!(
                            event,
                            DecoderEvent::ProvidePictureBuffers {
                                width: H264_STREAM_WIDTH,
                                height: H264_STREAM_HEIGHT,
                                visible_rect: Rect {
                                    left: 0,
                                    top: 0,
                                    right: H264_STREAM_WIDTH,
                                    bottom: H264_STREAM_HEIGHT,
                                },
                                ..
                            }
                        )
                    })
                    .unwrap();
                events.remove(event_idx);

                let out_format = Format::NV12;

                session
                    .set_output_parameters(NUM_OUTPUT_BUFFERS, out_format)
                    .unwrap();

                // Pass the buffers we will decode into.
                for (picture_buffer_id, buffer) in output_buffers.iter().enumerate() {
                    session
                        .use_output_buffer(
                            picture_buffer_id as i32,
                            GuestResource {
                                handle: output_resource_builder(buffer),
                                planes: vec![
                                    FramePlane {
                                        offset: 0,
                                        stride: H264_STREAM_WIDTH as usize,
                                        size: (H264_STREAM_WIDTH * H264_STREAM_HEIGHT) as usize,
                                    },
                                    FramePlane {
                                        offset: (H264_STREAM_WIDTH * H264_STREAM_HEIGHT) as usize,
                                        stride: H264_STREAM_WIDTH as usize,
                                        size: (H264_STREAM_WIDTH * H264_STREAM_HEIGHT) as usize,
                                    },
                                ],
                                width: H264_STREAM_WIDTH as _,
                                height: H264_STREAM_HEIGHT as _,
                                format: out_format,
                                guest_cpu_mappable: false,
                            },
                        )
                        .unwrap();
                }
            }

            // If we have remaining events, they must be decoded frames. Get them and recycle them.
            for event in events {
                match event {
                    DecoderEvent::PictureReady {
                        picture_buffer_id,
                        visible_rect,
                        ..
                    } => on_frame_decoded(&mut session, picture_buffer_id, visible_rect),
                    e => panic!("Unexpected event: {:?}", e),
                }
            }
        }

        session.flush().unwrap();

        // Keep getting frames until the final event, which should be `FlushCompleted`.
        let mut received_flush_completed = false;
        while !wait_ctx.wait_timeout(Duration::ZERO).unwrap().is_empty() {
            match session.read_event().unwrap() {
                DecoderEvent::PictureReady {
                    picture_buffer_id,
                    visible_rect,
                    ..
                } => on_frame_decoded(&mut session, picture_buffer_id, visible_rect),
                DecoderEvent::FlushCompleted(Ok(())) => {
                    received_flush_completed = true;
                    break;
                }
                e => panic!("Unexpected event: {:?}", e),
            }
        }

        // Confirm that we got the FlushCompleted event.
        assert!(received_flush_completed);

        // We should have read all the events for that session.
        assert_eq!(wait_ctx.wait_timeout(Duration::ZERO).unwrap().len(), 0);

        // We should not be expecting any more frames.
        assert_eq!(expected_frames_crcs.next(), None);

        // Check that we decoded the expected number of frames.
        assert_eq!(decoded_frames_count, H264_STREAM_NUM_FRAMES);
    }
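
    // Usage sketch: a backend's test module is expected to call the generic
    // test above with its decoder and any mix of the resource builders. The
    // backend type below is an assumed example name, not a real symbol:
    //
    //   let mut decoder = SomeBackendDecoder::new();
    //   decode_h264_generic(&mut decoder, build_guest_mem_handle, build_object_handle);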
}