• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2022 The ChromiumOS Authors
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #![deny(missing_docs)]
6 
7 use std::collections::btree_map::Entry;
8 use std::collections::BTreeMap;
9 use std::collections::VecDeque;
10 use std::rc::Rc;
11 
12 use anyhow::anyhow;
13 use anyhow::Result;
14 use base::MappedRegion;
15 use base::MemoryMappingArena;
16 use cros_codecs::decoders::BlockingMode;
17 use cros_codecs::decoders::DynDecodedHandle;
18 use cros_codecs::decoders::VideoDecoder;
19 use cros_codecs::DecodedFormat;
20 use libva::Display;
21 
22 use crate::virtio::video::decoder::Capability;
23 use crate::virtio::video::decoder::DecoderBackend;
24 use crate::virtio::video::decoder::DecoderEvent;
25 use crate::virtio::video::decoder::DecoderSession;
26 use crate::virtio::video::error::VideoError;
27 use crate::virtio::video::error::VideoResult;
28 use crate::virtio::video::format::Format;
29 use crate::virtio::video::format::FormatDesc;
30 use crate::virtio::video::format::FormatRange;
31 use crate::virtio::video::format::FrameFormat;
32 use crate::virtio::video::format::Level;
33 use crate::virtio::video::format::Profile;
34 use crate::virtio::video::format::Rect;
35 use crate::virtio::video::resource::BufferHandle;
36 use crate::virtio::video::resource::GuestResource;
37 use crate::virtio::video::resource::GuestResourceHandle;
38 use crate::virtio::video::utils::EventQueue;
39 use crate::virtio::video::utils::OutputQueue;
40 
/// Represents a buffer we have not yet sent to the accelerator.
struct PendingJob {
    /// ID of the input resource; echoed back to the client in
    /// `NotifyEndOfBitstreamBuffer` once the codec has consumed the data.
    resource_id: u32,
    /// Timestamp associated with the compressed data in this buffer.
    timestamp: u64,
    /// Guest resource backing the compressed input data.
    resource: GuestResourceHandle,
    /// Offset into `resource` at which the compressed data starts.
    offset: u32,
    /// Number of bytes of compressed data, starting at `offset`.
    bytes_used: u32,
}
49 
/// A set of params returned when a dynamic resolution change is found in the
/// bitstream.
pub struct DrcParams {
    /// The minimum amount of buffers needed to decode the stream.
    min_num_buffers: usize,
    /// The stream's new width.
    width: u32,
    /// The stream's new height.
    height: u32,
    /// The visible resolution, i.e. the rectangle of the coded frame that is
    /// actually meant to be displayed.
    visible_rect: Rect,
}
62 
63 impl TryFrom<DecodedFormat> for Format {
64     type Error = anyhow::Error;
65 
try_from(value: DecodedFormat) -> Result<Self, Self::Error>66     fn try_from(value: DecodedFormat) -> Result<Self, Self::Error> {
67         match value {
68             DecodedFormat::NV12 => Ok(Format::NV12),
69             DecodedFormat::I420 => Err(anyhow!("Unsupported format")),
70         }
71     }
72 }
73 
74 impl TryFrom<Format> for DecodedFormat {
75     type Error = anyhow::Error;
76 
try_from(value: Format) -> Result<Self, Self::Error>77     fn try_from(value: Format) -> Result<Self, Self::Error> {
78         match value {
79             Format::NV12 => Ok(DecodedFormat::NV12),
80             _ => Err(anyhow!("Unsupported format")),
81         }
82     }
83 }
84 
impl TryFrom<libva::VAProfile::Type> for Profile {
    type Error = anyhow::Error;

    /// Maps a VA-API profile constant onto the corresponding virtio-video
    /// `Profile`, failing for profiles virtio-video does not expose.
    fn try_from(value: libva::VAProfile::Type) -> Result<Self, Self::Error> {
        match value {
            // H.264 profiles.
            libva::VAProfile::VAProfileH264Baseline => Ok(Self::H264Baseline),
            libva::VAProfile::VAProfileH264Main => Ok(Self::H264Main),
            libva::VAProfile::VAProfileH264High => Ok(Self::H264High),
            libva::VAProfile::VAProfileH264StereoHigh => Ok(Self::H264StereoHigh),
            libva::VAProfile::VAProfileH264MultiviewHigh => Ok(Self::H264MultiviewHigh),
            // HEVC profiles.
            libva::VAProfile::VAProfileHEVCMain => Ok(Self::HevcMain),
            libva::VAProfile::VAProfileHEVCMain10 => Ok(Self::HevcMain10),
            // VA exposes a single profile for all of VP8; map it to profile 0.
            libva::VAProfile::VAProfileVP8Version0_3 => Ok(Self::VP8Profile0),
            // VP9 profiles.
            libva::VAProfile::VAProfileVP9Profile0 => Ok(Self::VP9Profile0),
            libva::VAProfile::VAProfileVP9Profile1 => Ok(Self::VP9Profile1),
            libva::VAProfile::VAProfileVP9Profile2 => Ok(Self::VP9Profile2),
            libva::VAProfile::VAProfileVP9Profile3 => Ok(Self::VP9Profile3),
            _ => Err(anyhow!(
                "Conversion failed for unexpected VAProfile: {}",
                value
            )),
        }
    }
}
109 
/// The state for the output queue containing the buffers that will receive the
/// decoded data.
enum OutputQueueState {
    /// Waiting for the client to call `set_output_buffer_count`.
    AwaitingBufferCount,
    /// Codec is capable of decoding frames.
    Decoding {
        /// The output queue which indirectly contains the output buffers given by crosvm
        output_queue: OutputQueue,
    },
    /// Dynamic Resolution Change - we can still accept buffers in the old
    /// format, but are waiting for new parameters before doing any decoding.
    Drc,
}
124 
125 impl OutputQueueState {
output_queue_mut(&mut self) -> Result<&mut OutputQueue>126     fn output_queue_mut(&mut self) -> Result<&mut OutputQueue> {
127         match self {
128             OutputQueueState::Decoding { output_queue } => Ok(output_queue),
129             _ => Err(anyhow!("Invalid state")),
130         }
131     }
132 }
133 
/// A safe decoder abstraction over libva for a single vaContext
pub struct VaapiDecoder {
    /// The capabilities for the decoder, as advertised to the virtio-video
    /// guest driver.
    caps: Capability,
}
139 
// The VA capabilities for the coded side
struct CodedCap {
    // The VA profile these capabilities were queried for.
    profile: libva::VAProfile::Type,
    // Maximum coded picture width reported by the driver for `profile`.
    max_width: u32,
    // Maximum coded picture height reported by the driver for `profile`.
    max_height: u32,
}
146 
// The VA capabilities for the raw side
struct RawCap {
    // FourCC code of the raw pixel format.
    fourcc: u32,
    // Minimum supported surface width for this format.
    min_width: u32,
    // Minimum supported surface height for this format.
    min_height: u32,
    // Maximum supported surface width for this format.
    max_width: u32,
    // Maximum supported surface height for this format.
    max_height: u32,
}
155 
impl VaapiDecoder {
    // Query the capabilities for the coded format
    fn get_coded_cap(
        display: &libva::Display,
        profile: libva::VAProfile::Type,
    ) -> Result<CodedCap> {
        // Zero-initialized attributes; the driver fills in the values.
        let mut attrs = vec![
            libva::VAConfigAttrib {
                type_: libva::VAConfigAttribType::VAConfigAttribMaxPictureWidth,
                value: 0,
            },
            libva::VAConfigAttrib {
                type_: libva::VAConfigAttribType::VAConfigAttribMaxPictureHeight,
                value: 0,
            },
        ];

        display.get_config_attributes(profile, libva::VAEntrypoint::VAEntrypointVLD, &mut attrs)?;

        // Fall back to 1x1 if the driver reports no maximum.
        let mut max_width = 1u32;
        let mut max_height = 1u32;

        for attr in &attrs {
            // Skip attributes the driver does not support.
            if attr.value == libva::constants::VA_ATTRIB_NOT_SUPPORTED {
                continue;
            }

            match attr.type_ {
                libva::VAConfigAttribType::VAConfigAttribMaxPictureWidth => max_width = attr.value,
                libva::VAConfigAttribType::VAConfigAttribMaxPictureHeight => {
                    max_height = attr.value
                }

                // Only the two attribute types queried above can come back.
                _ => panic!("Unexpected VAConfigAttribType {}", attr.type_),
            }
        }

        Ok(CodedCap {
            profile,
            max_width,
            max_height,
        })
    }

    // Query the capabilities for the raw format
    fn get_raw_caps(display: Rc<libva::Display>, coded_cap: &CodedCap) -> Result<Vec<RawCap>> {
        let mut raw_caps = Vec::new();

        let mut config = display.create_config(
            vec![],
            coded_cap.profile,
            libva::VAEntrypoint::VAEntrypointVLD,
        )?;

        // One RawCap is built per pixel format the config supports.
        let fourccs = config.query_surface_attributes_by_type(
            libva::VASurfaceAttribType::VASurfaceAttribPixelFormat,
        )?;

        for fourcc in fourccs {
            let fourcc = match fourcc {
                libva::GenericValue::Integer(i) => i as u32,
                other => panic!("Unexpected VAGenericValue {:?}", other),
            };

            // Minimum dimensions default to 1 when the driver does not
            // report them.
            let min_width = config.query_surface_attributes_by_type(
                libva::VASurfaceAttribType::VASurfaceAttribMinWidth,
            )?;

            let min_width = match min_width.get(0) {
                Some(libva::GenericValue::Integer(i)) => *i as u32,
                Some(other) => panic!("Unexpected VAGenericValue {:?}", other),
                None => 1,
            };

            let min_height = config.query_surface_attributes_by_type(
                libva::VASurfaceAttribType::VASurfaceAttribMinHeight,
            )?;
            let min_height = match min_height.get(0) {
                Some(libva::GenericValue::Integer(i)) => *i as u32,
                Some(other) => panic!("Unexpected VAGenericValue {:?}", other),
                None => 1,
            };

            // Maximum dimensions default to the coded maximums when the
            // driver does not report a surface-specific limit.
            let max_width = config.query_surface_attributes_by_type(
                libva::VASurfaceAttribType::VASurfaceAttribMaxWidth,
            )?;
            let max_width = match max_width.get(0) {
                Some(libva::GenericValue::Integer(i)) => *i as u32,
                Some(other) => panic!("Unexpected VAGenericValue {:?}", other),
                None => coded_cap.max_width,
            };

            let max_height = config.query_surface_attributes_by_type(
                libva::VASurfaceAttribType::VASurfaceAttribMaxHeight,
            )?;
            let max_height = match max_height.get(0) {
                Some(libva::GenericValue::Integer(i)) => *i as u32,
                Some(other) => panic!("Unexpected VAGenericValue {:?}", other),
                None => coded_cap.max_height,
            };

            raw_caps.push(RawCap {
                fourcc,
                min_width,
                min_height,
                max_width,
                max_height,
            });
        }

        Ok(raw_caps)
    }

    /// Creates a new instance of the Vaapi decoder.
    pub fn new() -> Result<Self> {
        let display = libva::Display::open().ok_or_else(|| anyhow!("failed to open VA display"))?;

        let va_profiles = display.query_config_profiles()?;

        // NOTE(review): `profiles` is pushed to for VP8 below but never read
        // afterwards — looks like dead code; confirm before removing.
        let mut profiles = Vec::new();
        let mut in_fmts = Vec::new();
        let mut out_fmts = Vec::new();
        let mut profiles_map: BTreeMap<Format, Vec<Profile>> = Default::default();

        // VA has no API for querying the levels supported by the driver.
        // vaQueryProcessingRate is close, but not quite a solution here
        // for all codecs.
        let levels: BTreeMap<Format, Vec<Level>> = Default::default();

        for va_profile in va_profiles {
            let entrypoints = display.query_config_entrypoints(va_profile)?;
            if !entrypoints
                .iter()
                .any(|e| *e == libva::VAEntrypoint::VAEntrypointVLD)
            {
                // All formats we are aiming to support require
                // VAEntrypointVLD.
                continue;
            }
            // Manually push all VP8 profiles, since VA exposes only a single
            // VP8 profile for all of these
            if va_profile == libva::VAProfile::VAProfileVP8Version0_3 {
                profiles.push(Profile::VP8Profile0);
                profiles.push(Profile::VP8Profile1);
                profiles.push(Profile::VP8Profile2);
                profiles.push(Profile::VP8Profile3);
            }

            let profile = match Profile::try_from(va_profile) {
                Ok(p) => p,
                // Skip if we cannot convert to a valid virtio format
                Err(_) => continue,
            };

            let coded_cap = VaapiDecoder::get_coded_cap(display.as_ref(), va_profile)?;
            let raw_caps = VaapiDecoder::get_raw_caps(Rc::clone(&display), &coded_cap)?;

            let coded_frame_fmt = FrameFormat {
                width: FormatRange {
                    min: 1,
                    max: coded_cap.max_width,
                    step: 1,
                },

                height: FormatRange {
                    min: 1,
                    max: coded_cap.max_height,
                    step: 1,
                },

                bitrates: Default::default(),
            };

            let coded_format = profile.to_format();
            match profiles_map.entry(coded_format) {
                Entry::Vacant(e) => {
                    e.insert(vec![profile]);
                }
                Entry::Occupied(mut ps) => {
                    ps.get_mut().push(profile);
                }
            }

            // Count the output formats contributed by this profile so that
            // the input format's `mask` below can reference their indices.
            let mut n_out = 0;
            for raw_cap in raw_caps {
                if raw_cap.fourcc != libva::constants::VA_FOURCC_NV12 {
                    // Apparently only NV12 is currently supported by virtio video
                    continue;
                }

                let raw_frame_fmt = FrameFormat {
                    width: FormatRange {
                        min: raw_cap.min_width,
                        max: raw_cap.max_width,
                        step: 1,
                    },

                    height: FormatRange {
                        min: raw_cap.min_height,
                        max: raw_cap.max_height,
                        step: 1,
                    },

                    bitrates: Default::default(),
                };

                out_fmts.push(FormatDesc {
                    mask: 0,
                    format: Format::NV12,
                    frame_formats: vec![raw_frame_fmt],
                    plane_align: 1,
                });

                n_out += 1;
            }

            // Build a bitmask selecting the `n_out` entries just appended at
            // the tail of `out_fmts`.
            let mask = !(u64::MAX << n_out) << (out_fmts.len() - n_out);

            if mask != 0 {
                in_fmts.push(FormatDesc {
                    mask,
                    format: coded_format,
                    frame_formats: vec![coded_frame_fmt],
                    plane_align: 1,
                });
            }
        }

        Ok(Self {
            caps: Capability::new(in_fmts, out_fmts, profiles_map, levels),
        })
    }
}
389 
/// A resolution in pixels.
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub struct Resolution {
    /// Width in pixels.
    width: u32,
    /// Height in pixels.
    height: u32,
}
395 
/// Types from which a [`BufferHandle`] can be borrowed, so they can be
/// memory-mapped through a single code path in [`BufferMapping`].
trait AsBufferHandle {
    /// The concrete handle type exposed by this resource.
    type BufferHandle: BufferHandle;
    /// Returns the buffer handle backing this resource.
    fn as_buffer_handle(&self) -> &Self::BufferHandle;
}
400 
// A guest resource yields the handle it carries.
impl AsBufferHandle for &mut GuestResource {
    type BufferHandle = GuestResourceHandle;

    fn as_buffer_handle(&self) -> &Self::BufferHandle {
        &self.handle
    }
}
408 
// A bare handle is trivially its own buffer handle.
impl AsBufferHandle for GuestResourceHandle {
    type BufferHandle = Self;

    fn as_buffer_handle(&self) -> &Self::BufferHandle {
        self
    }
}
416 
/// A convenience type implementing persistent slice access for BufferHandles.
struct BufferMapping<T: AsBufferHandle> {
    #[allow(dead_code)]
    /// The underlying resource. Must be kept so as not to drop the BufferHandle
    resource: T,
    /// The mapping that backs the underlying slices returned by AsRef and AsMut
    mapping: MemoryMappingArena,
}
425 
426 impl<T: AsBufferHandle> BufferMapping<T> {
427     /// Creates a new BufferMap
new(resource: T, offset: usize, size: usize) -> Result<Self>428     pub fn new(resource: T, offset: usize, size: usize) -> Result<Self> {
429         let mapping = resource.as_buffer_handle().get_mapping(offset, size)?;
430 
431         Ok(Self { resource, mapping })
432     }
433 }
434 
impl<T: AsBufferHandle> AsRef<[u8]> for BufferMapping<T> {
    /// Returns the whole mapping as an immutable byte slice.
    fn as_ref(&self) -> &[u8] {
        let mapping = &self.mapping;
        // Safe because the mapping is linear and we own it, so it will not be unmapped during
        // the lifetime of this slice.
        unsafe { std::slice::from_raw_parts(mapping.as_ptr(), mapping.size()) }
    }
}
443 
impl<T: AsBufferHandle> AsMut<[u8]> for BufferMapping<T> {
    /// Returns the whole mapping as a mutable byte slice.
    // NOTE(review): this relies on `MappedRegion::as_ptr` returning a `*mut u8`
    // and on `&mut self` guaranteeing exclusive access — confirm against the
    // `base` crate's MappedRegion contract.
    fn as_mut(&mut self) -> &mut [u8] {
        let mapping = &self.mapping;
        // Safe because the mapping is linear and we own it, so it will not be unmapped during
        // the lifetime of this slice.
        unsafe { std::slice::from_raw_parts_mut(mapping.as_ptr(), mapping.size()) }
    }
}
452 
/// A decoder session for the libva backend
pub struct VaapiDecoderSession {
    /// The implementation for the codec specific logic.
    codec: Box<dyn VideoDecoder>,
    /// The state for the output queue. Updated when `set_output_buffer_count`
    /// is called or when we detect a dynamic resolution change.
    output_queue_state: OutputQueueState,
    /// Queue containing decoded pictures, kept sorted by display order and
    /// drained into the client's output buffers.
    ready_queue: VecDeque<Box<dyn DynDecodedHandle>>,
    /// Queue containing the buffers we have not yet submitted to the codec.
    submit_queue: VecDeque<PendingJob>,
    /// The event queue we can use to signal new events.
    event_queue: EventQueue<DecoderEvent>,
    /// Whether the decoder is currently flushing.
    flushing: bool,
    /// The last value for "display_order" we have managed to output.
    last_display_order: u64,
}
471 
472 impl VaapiDecoderSession {
change_resolution(&mut self, new_params: DrcParams) -> Result<()>473     fn change_resolution(&mut self, new_params: DrcParams) -> Result<()> {
474         // Ask the client for new buffers.
475         self.event_queue
476             .queue_event(DecoderEvent::ProvidePictureBuffers {
477                 min_num_buffers: u32::try_from(new_params.min_num_buffers)?,
478                 width: new_params.width as i32,
479                 height: new_params.height as i32,
480                 visible_rect: new_params.visible_rect,
481             })?;
482 
483         // Drop our output queue and wait for the new number of output buffers.
484         self.output_queue_state = match &self.output_queue_state {
485             // If this is part of the initialization step, then do not switch states.
486             OutputQueueState::AwaitingBufferCount => OutputQueueState::AwaitingBufferCount,
487             OutputQueueState::Decoding { .. } => OutputQueueState::Drc,
488             _ => return Err(anyhow!("Invalid state during DRC.")),
489         };
490 
491         Ok(())
492     }
493 
494     /// Copy raw decoded data from `image` into the output buffer
output_picture(&mut self, decoded_frame: &dyn DynDecodedHandle) -> Result<bool>495     pub fn output_picture(&mut self, decoded_frame: &dyn DynDecodedHandle) -> Result<bool> {
496         let output_queue = self.output_queue_state.output_queue_mut()?;
497 
498         // Output buffer to be used.
499         let (picture_buffer_id, output_buffer) = match output_queue.try_get_ready_buffer() {
500             Some(ready_buffer) => ready_buffer,
501             None => {
502                 return Ok(false);
503             }
504         };
505 
506         let display_resolution = decoded_frame.display_resolution();
507 
508         let mut picture = decoded_frame.dyn_picture_mut();
509         let mut backend_handle = picture.dyn_mappable_handle_mut();
510         let buffer_size = backend_handle.image_size();
511 
512         // Get a mapping from the start of the buffer to the size of the
513         // underlying decoded data in the Image.
514         let mut output_map = BufferMapping::new(output_buffer, 0, buffer_size)?;
515 
516         let output_bytes = output_map.as_mut();
517 
518         backend_handle.read(output_bytes)?;
519 
520         drop(backend_handle);
521         drop(picture);
522 
523         let timestamp = decoded_frame.timestamp();
524         let picture_buffer_id = picture_buffer_id as i32;
525 
526         // Say that we are done decoding this picture.
527         self.event_queue
528             .queue_event(DecoderEvent::PictureReady {
529                 picture_buffer_id,
530                 timestamp,
531                 visible_rect: Rect {
532                     left: 0,
533                     top: 0,
534                     right: display_resolution.width as i32,
535                     bottom: display_resolution.height as i32,
536                 },
537             })
538             .map_err(|e| {
539                 VideoError::BackendFailure(anyhow!("Can't queue the PictureReady event {}", e))
540             })?;
541 
542         Ok(true)
543     }
544 
drain_ready_queue(&mut self) -> Result<()>545     fn drain_ready_queue(&mut self) -> Result<()> {
546         // Do not do anything if we haven't been given buffers yet.
547         if !matches!(self.output_queue_state, OutputQueueState::Decoding { .. }) {
548             return Ok(());
549         }
550 
551         while let Some(mut decoded_frame) = self.ready_queue.pop_front() {
552             let display_order = decoded_frame.display_order().expect(
553                 "A frame should have its display order set before being returned from the decoder.",
554             );
555 
556             // We are receiving frames as-is from the decoder, which means there
557             // may be gaps if the decoder returns frames out of order.
558             // We simply wait in this case, as the decoder will eventually
559             // produce more frames that makes the gap not exist anymore.
560             //
561             // On the other hand, we take care to not stall. We compromise by
562             // emitting frames out of order instead. This should not really
563             // happen in production and a warn is left so we can think about
564             // bumping the number of resources allocated by the backends.
565             let gap = display_order != 0 && display_order != self.last_display_order + 1;
566 
567             let stall = if let Some(left) = self.codec.num_resources_left() {
568                 left == 0
569             } else {
570                 false
571             };
572 
573             if gap && !stall {
574                 self.ready_queue.push_front(decoded_frame);
575                 break;
576             } else if gap && stall {
577                 self.ready_queue.push_front(decoded_frame);
578 
579                 // Try polling the decoder for all pending jobs.
580                 let handles = self.codec.poll(BlockingMode::Blocking)?;
581                 self.ready_queue.extend(handles);
582 
583                 self.ready_queue
584                     .make_contiguous()
585                     .sort_by_key(|h| h.display_order());
586 
587                 decoded_frame = self.ready_queue.pop_front().unwrap();
588 
589                 // See whether we *still* have a gap
590                 let display_order = decoded_frame.display_order().expect(
591                 "A frame should have its display order set before being returned from the decoder."
592                 );
593 
594                 let gap = display_order != 0 && display_order != self.last_display_order + 1;
595 
596                 if gap {
597                     // If the stall is not due to a missing frame, then this may
598                     // signal that we are not allocating enough resources.
599                     base::warn!("Outputting out of order to avoid stalling.");
600                     base::warn!(
601                         "Expected {}, got {}",
602                         self.last_display_order + 1,
603                         display_order
604                     );
605                     base::warn!("Either a dropped frame, or not enough resources for the codec.");
606                     base::warn!(
607                         "Increasing the number of allocated resources can possibly fix this."
608                     );
609                 }
610             }
611 
612             let outputted = self.output_picture(decoded_frame.as_ref())?;
613             if !outputted {
614                 self.ready_queue.push_front(decoded_frame);
615                 break;
616             }
617 
618             self.last_display_order = display_order;
619         }
620 
621         Ok(())
622     }
623 
try_emit_flush_completed(&mut self) -> Result<()>624     fn try_emit_flush_completed(&mut self) -> Result<()> {
625         let num_ready_remaining = self.ready_queue.len();
626         let num_submit_remaining = self.submit_queue.len();
627 
628         if num_ready_remaining == 0 && num_submit_remaining == 0 {
629             self.flushing = false;
630 
631             let event_queue = &mut self.event_queue;
632 
633             event_queue
634                 .queue_event(DecoderEvent::FlushCompleted(Ok(())))
635                 .map_err(|e| anyhow!("Can't queue the PictureReady event {}", e))
636         } else {
637             Ok(())
638         }
639     }
640 
decode_one_job(&mut self, job: PendingJob) -> VideoResult<()>641     fn decode_one_job(&mut self, job: PendingJob) -> VideoResult<()> {
642         let PendingJob {
643             resource_id,
644             timestamp,
645             resource,
646             offset,
647             bytes_used,
648         } = job;
649 
650         let bitstream_map = BufferMapping::new(
651             resource,
652             offset.try_into().unwrap(),
653             bytes_used.try_into().unwrap(),
654         )
655         .map_err(|e| VideoError::BackendFailure(anyhow!(e)))?;
656 
657         let frames = self.codec.decode(timestamp, bitstream_map.as_ref());
658 
659         // We are always done with the input buffer after `self.codec.decode()`.
660         self.event_queue
661             .queue_event(DecoderEvent::NotifyEndOfBitstreamBuffer(resource_id))
662             .map_err(|e| {
663                 VideoError::BackendFailure(anyhow!(
664                     "Can't queue the NotifyEndOfBitstream event {}",
665                     e
666                 ))
667             })?;
668 
669         match frames {
670             Ok(frames) => {
671                 if self.codec.negotiation_possible() {
672                     let resolution = self.codec.coded_resolution().unwrap();
673 
674                     let drc_params = DrcParams {
675                         min_num_buffers: self.codec.num_resources_total(),
676                         width: resolution.width,
677                         height: resolution.height,
678                         visible_rect: Rect {
679                             left: 0,
680                             top: 0,
681                             right: resolution.width as i32,
682                             bottom: resolution.height as i32,
683                         },
684                     };
685 
686                     self.change_resolution(drc_params)
687                         .map_err(VideoError::BackendFailure)?;
688                 }
689 
690                 for decoded_frame in frames {
691                     self.ready_queue.push_back(decoded_frame);
692                 }
693 
694                 self.ready_queue
695                     .make_contiguous()
696                     .sort_by_key(|h| h.display_order());
697 
698                 self.drain_ready_queue()
699                     .map_err(VideoError::BackendFailure)?;
700 
701                 Ok(())
702             }
703 
704             Err(e) => {
705                 let event_queue = &mut self.event_queue;
706 
707                 event_queue
708                     .queue_event(DecoderEvent::NotifyError(VideoError::BackendFailure(
709                         anyhow!("Decoding buffer {} failed", resource_id),
710                     )))
711                     .map_err(|e| {
712                         VideoError::BackendFailure(anyhow!(
713                             "Can't queue the NotifyError event {}",
714                             e
715                         ))
716                     })?;
717 
718                 Err(VideoError::BackendFailure(anyhow!(e)))
719             }
720         }
721     }
722 
drain_submit_queue(&mut self) -> VideoResult<()>723     fn drain_submit_queue(&mut self) -> VideoResult<()> {
724         while let Some(queued_buffer) = self.submit_queue.pop_front() {
725             match self.codec.num_resources_left() {
726                 Some(left) if left == 0 => {
727                     self.submit_queue.push_front(queued_buffer);
728                     break;
729                 }
730 
731                 _ => self.decode_one_job(queued_buffer)?,
732             }
733         }
734 
735         Ok(())
736     }
737 
try_make_progress(&mut self) -> VideoResult<()>738     fn try_make_progress(&mut self) -> VideoResult<()> {
739         // Note that the ready queue must be drained first to avoid deadlock.
740         // This is because draining the submit queue will fail if the ready
741         // queue is full enough, since this prevents the deallocation of the
742         // VASurfaces embedded in the handles stored in the ready queue.
743         // This means that no progress gets done.
744         self.drain_ready_queue()
745             .map_err(VideoError::BackendFailure)?;
746         self.drain_submit_queue()?;
747 
748         Ok(())
749     }
750 }
751 
752 impl DecoderSession for VaapiDecoderSession {
set_output_parameters(&mut self, buffer_count: usize, _: Format) -> VideoResult<()>753     fn set_output_parameters(&mut self, buffer_count: usize, _: Format) -> VideoResult<()> {
754         let output_queue_state = &mut self.output_queue_state;
755 
756         // This logic can still be improved, in particular it needs better
757         // support at the virtio-video protocol level.
758         //
759         // We must ensure that set_output_parameters is only called after we are
760         // sure that we have processed some stream metadata, which currently is
761         // not the case. In particular, the {SET|GET}_PARAMS logic currently
762         // takes place *before* we had a chance to parse any stream metadata at
763         // all.
764         //
765         // This can lead to a situation where we accept a format (say, NV12),
766         // but then discover we are unable to decode it after processing some
767         // buffers (because the stream indicates that the bit depth is 10, for
768         // example). Note that there is no way to reject said stream as of right
769         // now unless we hardcode NV12 in cros-codecs itself.
770         //
771         // Nevertheless, the support is already in place in cros-codecs: the
772         // decoders will queue buffers until they read some metadata. At this
773         // point, it will allow for the negotiation of the decoded format until
774         // a new call to decode() is made. At the crosvm level, we can use this
775         // window of time to try different decoded formats with .try_format().
776         //
777         // For now, we accept the default format chosen by cros-codecs instead.
778         // In practice, this means NV12 if it the stream can be decoded into
779         // NV12 and if the hardware can do so.
780 
781         match output_queue_state {
782             OutputQueueState::AwaitingBufferCount | OutputQueueState::Drc => {
783                 // Accept the default format chosen by cros-codecs instead.
784                 //
785                 // if let Some(backend_format) = self.backend.backend().format() {
786                 //     let backend_format = Format::try_from(backend_format);
787 
788                 //     let format_matches = match backend_format {
789                 //         Ok(backend_format) => backend_format != format,
790                 //         Err(_) => false,
791                 //     };
792 
793                 //     if !format_matches {
794                 //         let format =
795                 //             DecodedFormat::try_from(format).map_err(VideoError::BackendFailure)?;
796 
797                 //         self.backend.backend().try_format(format).map_err(|e| {
798                 //             VideoError::BackendFailure(anyhow!(
799                 //                 "Failed to set the codec backend format: {}",
800                 //                 e
801                 //             ))
802                 //         })?;
803                 //     }
804                 // }
805 
806                 *output_queue_state = OutputQueueState::Decoding {
807                     output_queue: OutputQueue::new(buffer_count),
808                 };
809 
810                 Ok(())
811             }
812             OutputQueueState::Decoding { .. } => {
813                 // Covers the slightly awkward ffmpeg v4l2 stateful
814                 // implementation for the capture queue setup.
815                 //
816                 // ffmpeg will queue a single OUTPUT buffer and immediately
817                 // follow up with a VIDIOC_G_FMT call on the CAPTURE queue.
818                 // This leads to a race condition, because it takes some
819                 // appreciable time for the real resolution to propagate back to
820                 // the guest as the virtio machinery processes and delivers the
821                 // event.
822                 //
823                 // In the event that VIDIOC_G_FMT(capture) returns the default
824                 // format, ffmpeg allocates buffers of the default resolution
825                 // (640x480) only to immediately reallocate as soon as it
826                 // processes the SRC_CH v4l2 event. Otherwise (if the resolution
827                 // has propagated in time), this path will not be taken during
828                 // the initialization.
829                 //
830                 // This leads to the following workflow in the virtio video
831                 // worker:
832                 // RESOURCE_QUEUE -> QUEUE_CLEAR -> RESOURCE_QUEUE
833                 //
834                 // Failing to accept this (as we previously did), leaves us
835                 // with bad state and completely breaks the decoding process. We
836                 // should replace the queue even if this is not 100% according
837                 // to spec.
838                 //
839                 // On the other hand, this branch still exists to highlight the
840                 // fact that we should assert that we have emitted a buffer with
841                 // the LAST flag when support for buffer flags is implemented in
842                 // a future CL. If a buffer with the LAST flag hasn't been
843                 // emitted, it's technically a mistake to be here because we
844                 // still have buffers of the old resolution to deliver.
845                 *output_queue_state = OutputQueueState::Decoding {
846                     output_queue: OutputQueue::new(buffer_count),
847                 };
848 
849                 // TODO: check whether we have emitted a buffer with the LAST
850                 // flag before returning.
851                 Ok(())
852             }
853         }
854     }
855 
decode( &mut self, resource_id: u32, timestamp: u64, resource: GuestResourceHandle, offset: u32, bytes_used: u32, ) -> VideoResult<()>856     fn decode(
857         &mut self,
858         resource_id: u32,
859         timestamp: u64,
860         resource: GuestResourceHandle,
861         offset: u32,
862         bytes_used: u32,
863     ) -> VideoResult<()> {
864         let job = PendingJob {
865             resource_id,
866             timestamp,
867             resource,
868             offset,
869             bytes_used,
870         };
871 
872         self.submit_queue.push_back(job);
873         self.try_make_progress()?;
874 
875         Ok(())
876     }
877 
flush(&mut self) -> VideoResult<()>878     fn flush(&mut self) -> VideoResult<()> {
879         self.flushing = true;
880 
881         self.try_make_progress()?;
882 
883         if self.submit_queue.len() != 0 {
884             return Ok(());
885         }
886 
887         // Retrieve ready frames from the codec, if any.
888         let pics = self
889             .codec
890             .flush()
891             .map_err(|e| VideoError::BackendFailure(anyhow!(e)))?;
892 
893         self.ready_queue.extend(pics);
894         self.ready_queue
895             .make_contiguous()
896             .sort_by_key(|h| h.display_order());
897 
898         self.drain_ready_queue()
899             .map_err(VideoError::BackendFailure)?;
900 
901         self.try_emit_flush_completed()
902             .map_err(VideoError::BackendFailure)
903     }
904 
reset(&mut self) -> VideoResult<()>905     fn reset(&mut self) -> VideoResult<()> {
906         // Drop the queued output buffers.
907         self.clear_output_buffers()?;
908 
909         self.submit_queue.clear();
910         self.ready_queue.clear();
911         self.codec
912             .flush()
913             .map_err(|e| VideoError::BackendFailure(anyhow!("Flushing the codec failed {}", e)))?;
914 
915         self.event_queue
916             .queue_event(DecoderEvent::ResetCompleted(Ok(())))
917             .map_err(|e| {
918                 VideoError::BackendFailure(anyhow!("Can't queue the ResetCompleted event {}", e))
919             })?;
920 
921         Ok(())
922     }
923 
clear_output_buffers(&mut self) -> VideoResult<()>924     fn clear_output_buffers(&mut self) -> VideoResult<()> {
925         // Cancel any ongoing flush.
926         self.flushing = false;
927 
928         // Drop all output buffers we currently hold.
929         if let OutputQueueState::Decoding { output_queue } = &mut self.output_queue_state {
930             output_queue.clear_ready_buffers();
931         }
932 
933         // Drop all decoded frames signaled as ready and cancel any reported flush.
934         self.event_queue.retain(|event| {
935             !matches!(
936                 event,
937                 DecoderEvent::PictureReady { .. } | DecoderEvent::FlushCompleted(_)
938             )
939         });
940 
941         Ok(())
942     }
943 
event_pipe(&self) -> &dyn base::AsRawDescriptor944     fn event_pipe(&self) -> &dyn base::AsRawDescriptor {
945         &self.event_queue
946     }
947 
use_output_buffer( &mut self, picture_buffer_id: i32, resource: GuestResource, ) -> VideoResult<()>948     fn use_output_buffer(
949         &mut self,
950         picture_buffer_id: i32,
951         resource: GuestResource,
952     ) -> VideoResult<()> {
953         let output_queue_state = &mut self.output_queue_state;
954         if let OutputQueueState::Drc = output_queue_state {
955             // Reusing buffers during DRC is valid, but we won't use them and can just drop them.
956             return Ok(());
957         }
958 
959         let output_queue = output_queue_state
960             .output_queue_mut()
961             .map_err(|e| VideoError::BackendFailure(anyhow!(e)))?;
962 
963         // TODO: there's a type mismatch here between the trait and the signature for `import_buffer`
964         output_queue
965             .import_buffer(picture_buffer_id as u32, resource)
966             .map_err(|e| VideoError::BackendFailure(anyhow!(e)))?;
967 
968         self.drain_ready_queue()
969             .map_err(VideoError::BackendFailure)?;
970 
971         Ok(())
972     }
973 
reuse_output_buffer(&mut self, picture_buffer_id: i32) -> VideoResult<()>974     fn reuse_output_buffer(&mut self, picture_buffer_id: i32) -> VideoResult<()> {
975         let output_queue_state = &mut self.output_queue_state;
976         if let OutputQueueState::Drc = output_queue_state {
977             // Reusing buffers during DRC is valid, but we won't use them and can just drop them.
978             return Ok(());
979         }
980 
981         let output_queue = output_queue_state
982             .output_queue_mut()
983             .map_err(|e| VideoError::BackendFailure(anyhow!(e)))?;
984 
985         // TODO: there's a type mismatch here between the trait and the signature for `import_buffer`
986         output_queue
987             .reuse_buffer(picture_buffer_id as u32)
988             .map_err(|e| VideoError::BackendFailure(anyhow!(e)))?;
989 
990         self.try_make_progress()?;
991 
992         if self.flushing {
993             // Try flushing again now that we have a new buffer. This might let
994             // us progress further in the flush operation.
995             self.flush()?;
996         }
997         Ok(())
998     }
999 
read_event(&mut self) -> VideoResult<DecoderEvent>1000     fn read_event(&mut self) -> VideoResult<DecoderEvent> {
1001         self.event_queue
1002             .dequeue_event()
1003             .map_err(|e| VideoError::BackendFailure(anyhow!("Can't read event {}", e)))
1004     }
1005 }
1006 
1007 impl DecoderBackend for VaapiDecoder {
1008     type Session = VaapiDecoderSession;
1009 
get_capabilities(&self) -> Capability1010     fn get_capabilities(&self) -> Capability {
1011         self.caps.clone()
1012     }
1013 
new_session(&mut self, format: Format) -> VideoResult<Self::Session>1014     fn new_session(&mut self, format: Format) -> VideoResult<Self::Session> {
1015         let display = Display::open().ok_or(VideoError::BackendFailure(anyhow!(
1016             "failed to open VA display"
1017         )))?;
1018 
1019         let codec: Box<dyn VideoDecoder> = match format {
1020             Format::VP8 => Box::new(
1021                 cros_codecs::decoders::vp8::decoder::Decoder::new_vaapi(
1022                     display,
1023                     cros_codecs::decoders::BlockingMode::NonBlocking,
1024                 )
1025                 .map_err(|e| VideoError::BackendFailure(anyhow!(e)))?,
1026             ),
1027             Format::VP9 => Box::new(
1028                 cros_codecs::decoders::vp9::decoder::Decoder::new_vaapi(
1029                     display,
1030                     cros_codecs::decoders::BlockingMode::NonBlocking,
1031                 )
1032                 .map_err(|e| VideoError::BackendFailure(anyhow!(e)))?,
1033             ),
1034             Format::H264 => Box::new(
1035                 cros_codecs::decoders::h264::decoder::Decoder::new_vaapi(
1036                     display,
1037                     cros_codecs::decoders::BlockingMode::NonBlocking,
1038                 )
1039                 .map_err(|e| VideoError::BackendFailure(anyhow!(e)))?,
1040             ),
1041             _ => return Err(VideoError::InvalidFormat),
1042         };
1043 
1044         Ok(VaapiDecoderSession {
1045             codec,
1046             output_queue_state: OutputQueueState::AwaitingBufferCount,
1047             ready_queue: Default::default(),
1048             submit_queue: Default::default(),
1049             event_queue: EventQueue::new().map_err(|e| VideoError::BackendFailure(anyhow!(e)))?,
1050             flushing: Default::default(),
1051             last_display_order: Default::default(),
1052         })
1053     }
1054 }
1055 
#[cfg(test)]
mod tests {
    use super::super::tests::*;
    use super::*;

    #[test]
    // Ignore this test by default as it requires libva-compatible hardware.
    #[ignore]
    fn test_get_capabilities() {
        // The backend must advertise at least one coded and one decoded format.
        let caps = VaapiDecoder::new().unwrap().get_capabilities();
        assert!(!caps.input_formats().is_empty());
        assert!(!caps.output_formats().is_empty());
    }

    // Decode using guest memory input and output buffers.
    #[test]
    // Ignore this test by default as it requires libva-compatible hardware.
    #[ignore]
    fn test_decode_h264_guestmem_to_guestmem() {
        let mut decoder = VaapiDecoder::new().unwrap();
        decode_h264_generic(&mut decoder, build_guest_mem_handle, build_guest_mem_handle);
    }
}
1083