• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2022 The ChromiumOS Authors
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #![deny(missing_docs)]
6 
7 use std::collections::btree_map::Entry;
8 use std::collections::BTreeMap;
9 use std::collections::VecDeque;
10 use std::os::fd::FromRawFd;
11 use std::os::fd::OwnedFd;
12 use std::rc::Rc;
13 
14 use anyhow::anyhow;
15 use anyhow::Result;
16 use base::IntoRawDescriptor;
17 use base::MappedRegion;
18 use base::MemoryMappingArena;
19 use cros_codecs::decoder::stateless::h264::H264;
20 use cros_codecs::decoder::stateless::h265::H265;
21 use cros_codecs::decoder::stateless::vp8::Vp8;
22 use cros_codecs::decoder::stateless::vp9::Vp9;
23 use cros_codecs::decoder::stateless::DecodeError;
24 use cros_codecs::decoder::stateless::StatelessVideoDecoder;
25 use cros_codecs::decoder::DecodedHandle;
26 use cros_codecs::libva;
27 use cros_codecs::libva::Display;
28 use cros_codecs::multiple_desc_type;
29 use cros_codecs::utils::DmabufFrame;
30 use cros_codecs::DecodedFormat;
31 use cros_codecs::FrameLayout;
32 use cros_codecs::PlaneLayout;
33 
34 use crate::virtio::video::decoder::Capability;
35 use crate::virtio::video::decoder::DecoderBackend;
36 use crate::virtio::video::decoder::DecoderEvent;
37 use crate::virtio::video::decoder::DecoderSession;
38 use crate::virtio::video::error::VideoError;
39 use crate::virtio::video::error::VideoResult;
40 use crate::virtio::video::format::Format;
41 use crate::virtio::video::format::FormatDesc;
42 use crate::virtio::video::format::FormatRange;
43 use crate::virtio::video::format::FrameFormat;
44 use crate::virtio::video::format::Level;
45 use crate::virtio::video::format::Profile;
46 use crate::virtio::video::format::Rect;
47 use crate::virtio::video::resource::BufferHandle;
48 use crate::virtio::video::resource::GuestMemHandle;
49 use crate::virtio::video::resource::GuestResource;
50 use crate::virtio::video::resource::GuestResourceHandle;
51 use crate::virtio::video::utils::EventQueue;
52 
/// A guest memory descriptor that uses a managed buffer as a shadow that will be copied into the
/// guest memory once decoding is over.
///
/// The wrapped handle is the guest-memory destination; the copy happens in
/// `VaapiDecoderSession::output_picture`.
struct GuestMemDescriptor(GuestMemHandle);
56 
impl libva::SurfaceMemoryDescriptor for GuestMemDescriptor {
    fn add_attrs(
        &mut self,
        attrs: &mut Vec<libva::VASurfaceAttrib>,
    ) -> Option<Box<dyn std::any::Any>> {
        // Decode into a managed buffer.
        // NOTE(review): delegating to the `()` descriptor presumably requests no
        // extra surface attributes, letting libva allocate its own backing memory —
        // confirm against the `SurfaceMemoryDescriptor` impl for `()` in cros-codecs.
        ().add_attrs(attrs)
    }
}
66 
// Declares the `BufferDescriptor` enum, which can wrap either kind of backing
// memory for a decoded frame. NOTE(review): the `multiple_desc_type!` macro from
// cros-codecs presumably also generates a `SurfaceMemoryDescriptor` impl that
// forwards to the active variant — confirm in the cros-codecs source.
multiple_desc_type! {
    enum BufferDescriptor {
        GuestMem(GuestMemDescriptor),
        Dmabuf(DmabufFrame),
    }
}
73 
/// A buffer descriptor paired with the picture buffer ID the client knows it by.
struct BufferDescWithPicId {
    /// Backing memory for the decoded frame.
    desc: BufferDescriptor,
    /// ID used to identify this buffer in events (e.g. `PictureReady`) sent back
    /// to the client.
    picture_buffer_id: i32,
}
78 
impl libva::SurfaceMemoryDescriptor for BufferDescWithPicId {
    fn add_attrs(
        &mut self,
        attrs: &mut Vec<libva::VASurfaceAttrib>,
    ) -> Option<Box<dyn std::any::Any>> {
        // Forward to the wrapped descriptor; the picture buffer ID plays no role
        // in surface allocation.
        self.desc.add_attrs(attrs)
    }
}
87 
/// Represents a buffer we have not yet sent to the accelerator.
struct PendingJob {
    /// Resource ID echoed back in `NotifyEndOfBitstreamBuffer` once the buffer is
    /// fully consumed.
    resource_id: u32,
    /// Timestamp forwarded to the codec's `decode()` call.
    timestamp: u64,
    /// Input resource holding the bitstream data.
    resource: GuestResourceHandle,
    /// Offset into `resource` at which the bitstream data starts.
    offset: usize,
    /// Total number of bitstream bytes in this buffer.
    bytes_used: usize,
    /// Bytes not yet consumed by the decoder; `bytes_used - remaining` is the
    /// position of the next slice to submit.
    remaining: usize,
}
97 
98 impl TryFrom<DecodedFormat> for Format {
99     type Error = anyhow::Error;
100 
try_from(value: DecodedFormat) -> Result<Self, Self::Error>101     fn try_from(value: DecodedFormat) -> Result<Self, Self::Error> {
102         match value {
103             DecodedFormat::NV12 => Ok(Format::NV12),
104             _ => Err(anyhow!("Unsupported format")),
105         }
106     }
107 }
108 
109 impl TryFrom<Format> for DecodedFormat {
110     type Error = anyhow::Error;
111 
try_from(value: Format) -> Result<Self, Self::Error>112     fn try_from(value: Format) -> Result<Self, Self::Error> {
113         match value {
114             Format::NV12 => Ok(DecodedFormat::NV12),
115             _ => Err(anyhow!("Unsupported format")),
116         }
117     }
118 }
119 
impl TryFrom<libva::VAProfile::Type> for Profile {
    type Error = anyhow::Error;

    /// Converts a VA-API profile constant into the virtio-video `Profile` enum.
    ///
    /// Profiles with no virtio-video equivalent are rejected; `VaapiDecoder::new`
    /// simply skips such profiles when building the capability list.
    fn try_from(value: libva::VAProfile::Type) -> Result<Self, Self::Error> {
        match value {
            libva::VAProfile::VAProfileH264Baseline => Ok(Self::H264Baseline),
            libva::VAProfile::VAProfileH264Main => Ok(Self::H264Main),
            libva::VAProfile::VAProfileH264High => Ok(Self::H264High),
            libva::VAProfile::VAProfileH264StereoHigh => Ok(Self::H264StereoHigh),
            libva::VAProfile::VAProfileH264MultiviewHigh => Ok(Self::H264MultiviewHigh),
            libva::VAProfile::VAProfileHEVCMain => Ok(Self::HevcMain),
            libva::VAProfile::VAProfileHEVCMain10 => Ok(Self::HevcMain10),
            // VA exposes a single profile covering VP8 versions 0-3.
            libva::VAProfile::VAProfileVP8Version0_3 => Ok(Self::VP8Profile0),
            libva::VAProfile::VAProfileVP9Profile0 => Ok(Self::VP9Profile0),
            libva::VAProfile::VAProfileVP9Profile1 => Ok(Self::VP9Profile1),
            libva::VAProfile::VAProfileVP9Profile2 => Ok(Self::VP9Profile2),
            libva::VAProfile::VAProfileVP9Profile3 => Ok(Self::VP9Profile3),
            _ => Err(anyhow!(
                "Conversion failed for unexpected VAProfile: {}",
                value
            )),
        }
    }
}
144 
/// The state for the output queue containing the buffers that will receive the
/// decoded data.
enum OutputQueueState {
    /// Waiting for the client to call `set_output_buffer_count`.
    AwaitingBufferCount,
    /// Codec is capable of decoding frames.
    Decoding,
    /// Dynamic Resolution Change - we can still accept buffers in the old
    /// format, but are waiting for new parameters before doing any decoding.
    Drc,
}
156 
/// A safe decoder abstraction over libva for a single vaContext.
pub struct VaapiDecoder {
    /// The capabilities for the decoder.
    caps: Capability,
}
162 
/// The VA capabilities for the coded side.
struct CodedCap {
    /// VA profile these capabilities apply to.
    profile: libva::VAProfile::Type,
    /// Maximum supported coded width, in pixels.
    max_width: u32,
    /// Maximum supported coded height, in pixels.
    max_height: u32,
}
169 
/// The VA capabilities for the raw side.
struct RawCap {
    /// FourCC code of the raw pixel format.
    fourcc: u32,
    /// Minimum supported frame width, in pixels.
    min_width: u32,
    /// Minimum supported frame height, in pixels.
    min_height: u32,
    /// Maximum supported frame width, in pixels.
    max_width: u32,
    /// Maximum supported frame height, in pixels.
    max_height: u32,
}
178 
179 impl VaapiDecoder {
180     // Query the capabilities for the coded format
get_coded_cap( display: &libva::Display, profile: libva::VAProfile::Type, ) -> Result<CodedCap>181     fn get_coded_cap(
182         display: &libva::Display,
183         profile: libva::VAProfile::Type,
184     ) -> Result<CodedCap> {
185         let mut attrs = vec![
186             libva::VAConfigAttrib {
187                 type_: libva::VAConfigAttribType::VAConfigAttribMaxPictureWidth,
188                 value: 0,
189             },
190             libva::VAConfigAttrib {
191                 type_: libva::VAConfigAttribType::VAConfigAttribMaxPictureHeight,
192                 value: 0,
193             },
194         ];
195 
196         display.get_config_attributes(profile, libva::VAEntrypoint::VAEntrypointVLD, &mut attrs)?;
197 
198         let mut max_width = 1u32;
199         let mut max_height = 1u32;
200 
201         for attr in &attrs {
202             if attr.value == libva::constants::VA_ATTRIB_NOT_SUPPORTED {
203                 continue;
204             }
205 
206             match attr.type_ {
207                 libva::VAConfigAttribType::VAConfigAttribMaxPictureWidth => max_width = attr.value,
208                 libva::VAConfigAttribType::VAConfigAttribMaxPictureHeight => {
209                     max_height = attr.value
210                 }
211 
212                 _ => panic!("Unexpected VAConfigAttribType {}", attr.type_),
213             }
214         }
215 
216         Ok(CodedCap {
217             profile,
218             max_width,
219             max_height,
220         })
221     }
222 
223     // Query the capabilities for the raw format
get_raw_caps(display: Rc<libva::Display>, coded_cap: &CodedCap) -> Result<Vec<RawCap>>224     fn get_raw_caps(display: Rc<libva::Display>, coded_cap: &CodedCap) -> Result<Vec<RawCap>> {
225         let mut raw_caps = Vec::new();
226 
227         let mut config = display.create_config(
228             vec![],
229             coded_cap.profile,
230             libva::VAEntrypoint::VAEntrypointVLD,
231         )?;
232 
233         let fourccs = config.query_surface_attributes_by_type(
234             libva::VASurfaceAttribType::VASurfaceAttribPixelFormat,
235         )?;
236 
237         for fourcc in fourccs {
238             let fourcc = match fourcc {
239                 libva::GenericValue::Integer(i) => i as u32,
240                 other => panic!("Unexpected VAGenericValue {:?}", other),
241             };
242 
243             let min_width = config.query_surface_attributes_by_type(
244                 libva::VASurfaceAttribType::VASurfaceAttribMinWidth,
245             )?;
246 
247             let min_width = match min_width.get(0) {
248                 Some(libva::GenericValue::Integer(i)) => *i as u32,
249                 Some(other) => panic!("Unexpected VAGenericValue {:?}", other),
250                 None => 1,
251             };
252 
253             let min_height = config.query_surface_attributes_by_type(
254                 libva::VASurfaceAttribType::VASurfaceAttribMinHeight,
255             )?;
256             let min_height = match min_height.get(0) {
257                 Some(libva::GenericValue::Integer(i)) => *i as u32,
258                 Some(other) => panic!("Unexpected VAGenericValue {:?}", other),
259                 None => 1,
260             };
261 
262             let max_width = config.query_surface_attributes_by_type(
263                 libva::VASurfaceAttribType::VASurfaceAttribMaxWidth,
264             )?;
265             let max_width = match max_width.get(0) {
266                 Some(libva::GenericValue::Integer(i)) => *i as u32,
267                 Some(other) => panic!("Unexpected VAGenericValue {:?}", other),
268                 None => coded_cap.max_width,
269             };
270 
271             let max_height = config.query_surface_attributes_by_type(
272                 libva::VASurfaceAttribType::VASurfaceAttribMaxHeight,
273             )?;
274             let max_height = match max_height.get(0) {
275                 Some(libva::GenericValue::Integer(i)) => *i as u32,
276                 Some(other) => panic!("Unexpected VAGenericValue {:?}", other),
277                 None => coded_cap.max_height,
278             };
279 
280             raw_caps.push(RawCap {
281                 fourcc,
282                 min_width,
283                 min_height,
284                 max_width,
285                 max_height,
286             });
287         }
288 
289         Ok(raw_caps)
290     }
291 
292     /// Creates a new instance of the Vaapi decoder.
new() -> Result<Self>293     pub fn new() -> Result<Self> {
294         let display = libva::Display::open().ok_or_else(|| anyhow!("failed to open VA display"))?;
295 
296         let va_profiles = display.query_config_profiles()?;
297 
298         let mut in_fmts = Vec::new();
299         let mut out_fmts = Vec::new();
300         let mut profiles_map: BTreeMap<Format, Vec<Profile>> = Default::default();
301 
302         // VA has no API for querying the levels supported by the driver.
303         // vaQueryProcessingRate is close, but not quite a solution here
304         // for all codecs.
305         let levels: BTreeMap<Format, Vec<Level>> = Default::default();
306 
307         for va_profile in va_profiles {
308             let mut profiles = Vec::new();
309 
310             let entrypoints = display.query_config_entrypoints(va_profile)?;
311             if !entrypoints
312                 .iter()
313                 .any(|e| *e == libva::VAEntrypoint::VAEntrypointVLD)
314             {
315                 // All formats we are aiming to support require
316                 // VAEntrypointVLD.
317                 continue;
318             }
319 
320             let profile = match Profile::try_from(va_profile) {
321                 Ok(p) => p,
322                 // Skip if we cannot convert to a valid virtio format
323                 Err(_) => continue,
324             };
325 
326             // Manually push all VP8 profiles, since VA exposes only a single
327             // VP8 profile for all of these
328             if va_profile == libva::VAProfile::VAProfileVP8Version0_3 {
329                 profiles.push(Profile::VP8Profile0);
330                 profiles.push(Profile::VP8Profile1);
331                 profiles.push(Profile::VP8Profile2);
332                 profiles.push(Profile::VP8Profile3);
333             } else {
334                 profiles.push(profile);
335             }
336 
337             let coded_cap = VaapiDecoder::get_coded_cap(display.as_ref(), va_profile)?;
338             let raw_caps = VaapiDecoder::get_raw_caps(Rc::clone(&display), &coded_cap)?;
339 
340             let coded_frame_fmt = FrameFormat {
341                 width: FormatRange {
342                     min: 1,
343                     max: coded_cap.max_width,
344                     step: 1,
345                 },
346 
347                 height: FormatRange {
348                     min: 1,
349                     max: coded_cap.max_height,
350                     step: 1,
351                 },
352 
353                 bitrates: Default::default(),
354             };
355 
356             let coded_format = profile.to_format();
357             match profiles_map.entry(coded_format) {
358                 Entry::Vacant(e) => {
359                     e.insert(profiles);
360                 }
361                 Entry::Occupied(mut ps) => {
362                     ps.get_mut().push(profile);
363                 }
364             }
365 
366             let mut n_out = 0;
367             for raw_cap in raw_caps {
368                 if raw_cap.fourcc != libva::constants::VA_FOURCC_NV12 {
369                     // Apparently only NV12 is currently supported by virtio video
370                     continue;
371                 }
372 
373                 let raw_frame_fmt = FrameFormat {
374                     width: FormatRange {
375                         min: raw_cap.min_width,
376                         max: raw_cap.max_width,
377                         step: 1,
378                     },
379 
380                     height: FormatRange {
381                         min: raw_cap.min_height,
382                         max: raw_cap.max_height,
383                         step: 1,
384                     },
385 
386                     bitrates: Default::default(),
387                 };
388 
389                 out_fmts.push(FormatDesc {
390                     mask: 0,
391                     format: Format::NV12,
392                     frame_formats: vec![raw_frame_fmt],
393                     plane_align: 1,
394                 });
395 
396                 n_out += 1;
397             }
398 
399             let mask = !(u64::MAX << n_out) << (out_fmts.len() - n_out);
400 
401             if mask != 0 {
402                 in_fmts.push(FormatDesc {
403                     mask,
404                     format: coded_format,
405                     frame_formats: vec![coded_frame_fmt],
406                     plane_align: 1,
407                 });
408             }
409         }
410 
411         Ok(Self {
412             caps: Capability::new(in_fmts, out_fmts, profiles_map, levels),
413         })
414     }
415 }
416 
/// A frame resolution in pixels.
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
pub struct Resolution {
    width: u32,
    height: u32,
}
422 
/// Allows treating different resource types uniformly as a `BufferHandle` so
/// they can be mapped via `BufferMapping`.
trait AsBufferHandle {
    /// The concrete handle type exposed by the implementor.
    type BufferHandle: BufferHandle;
    /// Returns the buffer handle backing `self`.
    fn as_buffer_handle(&self) -> &Self::BufferHandle;
}
427 
impl AsBufferHandle for GuestResource {
    type BufferHandle = GuestResourceHandle;

    fn as_buffer_handle(&self) -> &Self::BufferHandle {
        // Project out the handle field of the resource.
        &self.handle
    }
}
435 
impl AsBufferHandle for GuestMemHandle {
    type BufferHandle = Self;

    // A handle is trivially its own buffer handle.
    fn as_buffer_handle(&self) -> &Self::BufferHandle {
        self
    }
}
443 
impl AsBufferHandle for GuestResourceHandle {
    type BufferHandle = Self;

    // A handle is trivially its own buffer handle.
    fn as_buffer_handle(&self) -> &Self::BufferHandle {
        self
    }
}
451 
/// A convenience type implementing persistent slice access for BufferHandles.
struct BufferMapping<'a, T: AsBufferHandle> {
    #[allow(dead_code)]
    /// The underlying resource. Must be kept so as not to drop the BufferHandle
    resource: &'a T,
    /// The mapping that backs the underlying slices returned by AsRef and AsMut
    mapping: MemoryMappingArena,
}
460 
461 impl<'a, T: AsBufferHandle> BufferMapping<'a, T> {
462     /// Creates a new BufferMap
new(resource: &'a T, offset: usize, size: usize) -> Result<Self>463     pub fn new(resource: &'a T, offset: usize, size: usize) -> Result<Self> {
464         let mapping = resource.as_buffer_handle().get_mapping(offset, size)?;
465 
466         Ok(Self { resource, mapping })
467     }
468 }
469 
impl<'a, T: AsBufferHandle> AsRef<[u8]> for BufferMapping<'a, T> {
    /// Returns the mapped region as a read-only byte slice.
    fn as_ref(&self) -> &[u8] {
        let mapping = &self.mapping;
        // SAFETY:
        // Safe because the mapping is linear and we own it, so it will not be unmapped during
        // the lifetime of this slice.
        unsafe { std::slice::from_raw_parts(mapping.as_ptr(), mapping.size()) }
    }
}
479 
impl<'a, T: AsBufferHandle> AsMut<[u8]> for BufferMapping<'a, T> {
    /// Returns the mapped region as a mutable byte slice.
    fn as_mut(&mut self) -> &mut [u8] {
        let mapping = &self.mapping;
        // SAFETY:
        // Safe because the mapping is linear and we own it, so it will not be unmapped during
        // the lifetime of this slice. Exclusive access is guaranteed by the
        // `&mut self` borrow held for the slice's lifetime.
        unsafe { std::slice::from_raw_parts_mut(mapping.as_ptr(), mapping.size()) }
    }
}
489 
/// A frame that is currently not available for being decoded into, either because it has been
/// decoded and is waiting for us to release it (`Decoded`), or because we temporarily removed it
/// from the decoder pool after a reset and are waiting for the client to tell us we can use it
/// (`Held`).
enum BorrowedFrame {
    /// Decoded frame awaiting release by the client.
    Decoded(Box<dyn DecodedHandle<Descriptor = BufferDescWithPicId>>),
    /// Frame withheld from the decoder pool until the client re-queues it.
    Held(Box<dyn AsRef<BufferDescWithPicId>>),
}
498 
/// A decoder session for the libva backend.
pub struct VaapiDecoderSession {
    /// The implementation for the codec specific logic.
    codec: Box<dyn StatelessVideoDecoder<BufferDescWithPicId>>,
    /// The state for the output queue. Updated when `set_output_buffer_count`
    /// is called or when we detect a dynamic resolution change.
    output_queue_state: OutputQueueState,
    /// Frames currently held by us, indexed by `picture_buffer_id`.
    held_frames: BTreeMap<i32, BorrowedFrame>,
    /// Queue containing the buffers we have not yet submitted to the codec.
    submit_queue: VecDeque<PendingJob>,
    /// The event queue we can use to signal new events.
    event_queue: EventQueue<DecoderEvent>,
    /// Whether the decoder is currently flushing.
    flushing: bool,
}
515 
516 impl VaapiDecoderSession {
517     /// Copy raw decoded data from `image` into the output buffer
output_picture( decoded_frame: &dyn DecodedHandle<Descriptor = BufferDescWithPicId>, event_queue: &mut EventQueue<DecoderEvent>, ) -> Result<()>518     fn output_picture(
519         decoded_frame: &dyn DecodedHandle<Descriptor = BufferDescWithPicId>,
520         event_queue: &mut EventQueue<DecoderEvent>,
521     ) -> Result<()> {
522         let display_resolution = decoded_frame.display_resolution();
523         let timestamp = decoded_frame.timestamp();
524 
525         let buffer_desc = decoded_frame.resource();
526         let picture_buffer_id = buffer_desc.picture_buffer_id;
527 
528         // Sync the frame if it is in guest memory, as we are going to map and read it.
529         // This statement is in its own block so we can drop the `buffer_desc` reference
530         // before calling `sync`, which does a mutable borrow.
531         if let BufferDescriptor::GuestMem(_) = &buffer_desc.desc {
532             drop(buffer_desc);
533             decoded_frame.sync()?;
534         }
535 
536         // Copy guest memory buffers into their destination.
537         if let BufferDescriptor::GuestMem(handle) = &decoded_frame.resource().desc {
538             let picture = decoded_frame.dyn_picture();
539             let mut backend_handle = picture.dyn_mappable_handle()?;
540             let buffer_size = backend_handle.image_size();
541 
542             // Get a mapping from the start of the buffer to the size of the
543             // underlying decoded data in the Image.
544             let mut output_map = BufferMapping::new(&handle.0, 0, buffer_size)?;
545             let output_bytes = output_map.as_mut();
546 
547             backend_handle.read(output_bytes)?;
548         }
549 
550         // Say that we are done decoding this picture.
551         event_queue
552             .queue_event(DecoderEvent::PictureReady {
553                 picture_buffer_id,
554                 timestamp,
555                 visible_rect: Rect {
556                     left: 0,
557                     top: 0,
558                     right: display_resolution.width as i32,
559                     bottom: display_resolution.height as i32,
560                 },
561             })
562             .map_err(|e| {
563                 VideoError::BackendFailure(anyhow!("Can't queue the PictureReady event {}", e))
564             })?;
565 
566         Ok(())
567     }
568 
try_emit_flush_completed(&mut self) -> Result<()>569     fn try_emit_flush_completed(&mut self) -> Result<()> {
570         if self.submit_queue.is_empty() {
571             self.flushing = false;
572 
573             let event_queue = &mut self.event_queue;
574 
575             event_queue
576                 .queue_event(DecoderEvent::FlushCompleted(Ok(())))
577                 .map_err(|e| anyhow!("Can't queue the PictureReady event {}", e))
578         } else {
579             Ok(())
580         }
581     }
582 
drain_submit_queue(&mut self) -> VideoResult<()>583     fn drain_submit_queue(&mut self) -> VideoResult<()> {
584         while let Some(job) = self.submit_queue.front_mut() {
585             let bitstream_map = BufferMapping::new(&job.resource, job.offset, job.bytes_used)
586                 .map_err(VideoError::BackendFailure)?;
587 
588             let slice_start = job.bytes_used - job.remaining;
589             match self
590                 .codec
591                 .decode(job.timestamp, &bitstream_map.as_ref()[slice_start..])
592             {
593                 Ok(processed) => {
594                     job.remaining = job.remaining.saturating_sub(processed);
595                     // We have completed the buffer.
596                     if job.remaining == 0 {
597                         // We are always done with the input buffer after decode returns.
598                         self.event_queue
599                             .queue_event(DecoderEvent::NotifyEndOfBitstreamBuffer(job.resource_id))
600                             .map_err(|e| {
601                                 VideoError::BackendFailure(anyhow!(
602                                     "Can't queue the NotifyEndOfBitstream event {}",
603                                     e
604                                 ))
605                             })?;
606                         self.submit_queue.pop_front();
607                     }
608                 }
609                 Err(DecodeError::CheckEvents) => {
610                     self.process_decoder_events()?;
611                     break;
612                 }
613                 // We will succeed once buffers are returned by the client. This could be optimized
614                 // to only retry decoding once buffers are effectively returned.
615                 Err(DecodeError::NotEnoughOutputBuffers(_)) => break,
616                 // TODO add an InvalidInput error to cros-codecs so we can detect these cases and
617                 // just throw a warning instead of a fatal error?
618                 Err(e) => {
619                     self.event_queue
620                         .queue_event(DecoderEvent::NotifyError(VideoError::BackendFailure(
621                             anyhow!("Decoding buffer {} failed", job.resource_id),
622                         )))
623                         .map_err(|e| {
624                             VideoError::BackendFailure(anyhow!(
625                                 "Can't queue the NotifyError event {}",
626                                 e
627                             ))
628                         })?;
629                     return Err(VideoError::BackendFailure(e.into()));
630                 }
631             }
632         }
633 
634         Ok(())
635     }
636 
process_decoder_events(&mut self) -> VideoResult<()>637     fn process_decoder_events(&mut self) -> VideoResult<()> {
638         while let Some(event) = self.codec.next_event() {
639             match event {
640                 cros_codecs::decoder::DecoderEvent::FrameReady(frame) => {
641                     Self::output_picture(frame.as_ref(), &mut self.event_queue)
642                         .map_err(VideoError::BackendFailure)?;
643                     let picture_id = frame.resource().picture_buffer_id;
644                     self.held_frames
645                         .insert(picture_id, BorrowedFrame::Decoded(frame));
646                 }
647                 cros_codecs::decoder::DecoderEvent::FormatChanged(mut format) => {
648                     let coded_resolution = format.stream_info().coded_resolution;
649                     let display_resolution = format.stream_info().display_resolution;
650 
651                     // Ask the client for new buffers.
652                     self.event_queue
653                         .queue_event(DecoderEvent::ProvidePictureBuffers {
654                             min_num_buffers: format.stream_info().min_num_frames as u32,
655                             width: coded_resolution.width as i32,
656                             height: coded_resolution.height as i32,
657                             visible_rect: Rect {
658                                 left: 0,
659                                 top: 0,
660                                 right: display_resolution.width as i32,
661                                 bottom: display_resolution.height as i32,
662                             },
663                         })
664                         .map_err(|e| VideoError::BackendFailure(e.into()))?;
665 
666                     format.frame_pool().clear();
667 
668                     // Drop our output queue and wait for the new number of output buffers.
669                     self.output_queue_state = match &self.output_queue_state {
670                         // If this is part of the initialization step, then do not switch states.
671                         OutputQueueState::AwaitingBufferCount => {
672                             OutputQueueState::AwaitingBufferCount
673                         }
674                         OutputQueueState::Decoding => OutputQueueState::Drc,
675                         OutputQueueState::Drc => {
676                             return Err(VideoError::BackendFailure(anyhow!(
677                                 "Invalid state during DRC."
678                             )))
679                         }
680                     };
681                 }
682             }
683         }
684 
685         Ok(())
686     }
687 
try_make_progress(&mut self) -> VideoResult<()>688     fn try_make_progress(&mut self) -> VideoResult<()> {
689         self.process_decoder_events()?;
690         self.drain_submit_queue()?;
691 
692         Ok(())
693     }
694 }
695 
696 impl DecoderSession for VaapiDecoderSession {
set_output_parameters(&mut self, _: usize, _: Format) -> VideoResult<()>697     fn set_output_parameters(&mut self, _: usize, _: Format) -> VideoResult<()> {
698         let output_queue_state = &mut self.output_queue_state;
699 
700         // This logic can still be improved, in particular it needs better
701         // support at the virtio-video protocol level.
702         //
703         // We must ensure that set_output_parameters is only called after we are
704         // sure that we have processed some stream metadata, which currently is
705         // not the case. In particular, the {SET|GET}_PARAMS logic currently
706         // takes place *before* we had a chance to parse any stream metadata at
707         // all.
708         //
709         // This can lead to a situation where we accept a format (say, NV12),
710         // but then discover we are unable to decode it after processing some
711         // buffers (because the stream indicates that the bit depth is 10, for
712         // example). Note that there is no way to reject said stream as of right
713         // now unless we hardcode NV12 in cros-codecs itself.
714         //
715         // Nevertheless, the support is already in place in cros-codecs: the
716         // decoders will queue buffers until they read some metadata. At this
717         // point, it will allow for the negotiation of the decoded format until
718         // a new call to decode() is made. At the crosvm level, we can use this
719         // window of time to try different decoded formats with .try_format().
720         //
721         // For now, we accept the default format chosen by cros-codecs instead.
722         // In practice, this means NV12 if it the stream can be decoded into
723         // NV12 and if the hardware can do so.
724 
725         match output_queue_state {
726             OutputQueueState::AwaitingBufferCount | OutputQueueState::Drc => {
727                 // Accept the default format chosen by cros-codecs instead.
728                 //
729                 // if let Some(backend_format) = self.backend.backend().format() {
730                 //     let backend_format = Format::try_from(backend_format);
731 
732                 //     let format_matches = match backend_format {
733                 //         Ok(backend_format) => backend_format != format,
734                 //         Err(_) => false,
735                 //     };
736 
737                 //     if !format_matches {
738                 //         let format =
739                 //             DecodedFormat::try_from(format).map_err(VideoError::BackendFailure)?;
740 
741                 //         self.backend.backend().try_format(format).map_err(|e| {
742                 //             VideoError::BackendFailure(anyhow!(
743                 //                 "Failed to set the codec backend format: {}",
744                 //                 e
745                 //             ))
746                 //         })?;
747                 //     }
748                 // }
749 
750                 *output_queue_state = OutputQueueState::Decoding;
751 
752                 Ok(())
753             }
754             OutputQueueState::Decoding => {
755                 // Covers the slightly awkward ffmpeg v4l2 stateful
756                 // implementation for the capture queue setup.
757                 //
758                 // ffmpeg will queue a single OUTPUT buffer and immediately
759                 // follow up with a VIDIOC_G_FMT call on the CAPTURE queue.
760                 // This leads to a race condition, because it takes some
761                 // appreciable time for the real resolution to propagate back to
762                 // the guest as the virtio machinery processes and delivers the
763                 // event.
764                 //
765                 // In the event that VIDIOC_G_FMT(capture) returns the default
766                 // format, ffmpeg allocates buffers of the default resolution
767                 // (640x480) only to immediately reallocate as soon as it
768                 // processes the SRC_CH v4l2 event. Otherwise (if the resolution
769                 // has propagated in time), this path will not be taken during
770                 // the initialization.
771                 //
772                 // This leads to the following workflow in the virtio video
773                 // worker:
774                 // RESOURCE_QUEUE -> QUEUE_CLEAR -> RESOURCE_QUEUE
775                 //
776                 // Failing to accept this (as we previously did), leaves us
777                 // with bad state and completely breaks the decoding process. We
778                 // should replace the queue even if this is not 100% according
779                 // to spec.
780                 //
781                 // On the other hand, this branch still exists to highlight the
782                 // fact that we should assert that we have emitted a buffer with
783                 // the LAST flag when support for buffer flags is implemented in
784                 // a future CL. If a buffer with the LAST flag hasn't been
785                 // emitted, it's technically a mistake to be here because we
786                 // still have buffers of the old resolution to deliver.
787                 *output_queue_state = OutputQueueState::Decoding;
788 
789                 // TODO: check whether we have emitted a buffer with the LAST
790                 // flag before returning.
791                 Ok(())
792             }
793         }
794     }
795 
decode( &mut self, resource_id: u32, timestamp: u64, resource: GuestResourceHandle, offset: u32, bytes_used: u32, ) -> VideoResult<()>796     fn decode(
797         &mut self,
798         resource_id: u32,
799         timestamp: u64,
800         resource: GuestResourceHandle,
801         offset: u32,
802         bytes_used: u32,
803     ) -> VideoResult<()> {
804         let job = PendingJob {
805             resource_id,
806             timestamp,
807             resource,
808             offset: offset as usize,
809             bytes_used: bytes_used as usize,
810             remaining: bytes_used as usize,
811         };
812 
813         self.submit_queue.push_back(job);
814         self.try_make_progress()?;
815 
816         Ok(())
817     }
818 
flush(&mut self) -> VideoResult<()>819     fn flush(&mut self) -> VideoResult<()> {
820         self.flushing = true;
821 
822         self.try_make_progress()?;
823 
824         if !self.submit_queue.is_empty() {
825             return Ok(());
826         }
827 
828         // Retrieve ready frames from the codec, if any.
829         self.codec
830             .flush()
831             .map_err(|e| VideoError::BackendFailure(e.into()))?;
832         self.process_decoder_events()?;
833 
834         self.try_emit_flush_completed()
835             .map_err(VideoError::BackendFailure)
836     }
837 
reset(&mut self) -> VideoResult<()>838     fn reset(&mut self) -> VideoResult<()> {
839         self.submit_queue.clear();
840 
841         // Make sure the codec is not active.
842         self.codec
843             .flush()
844             .map_err(|e| VideoError::BackendFailure(e.into()))?;
845 
846         self.process_decoder_events()?;
847 
848         // Drop the queued output buffers.
849         self.clear_output_buffers()?;
850 
851         self.event_queue
852             .queue_event(DecoderEvent::ResetCompleted(Ok(())))
853             .map_err(|e| {
854                 VideoError::BackendFailure(anyhow!("Can't queue the ResetCompleted event {}", e))
855             })?;
856 
857         Ok(())
858     }
859 
clear_output_buffers(&mut self) -> VideoResult<()>860     fn clear_output_buffers(&mut self) -> VideoResult<()> {
861         // Cancel any ongoing flush.
862         self.flushing = false;
863 
864         // Drop all decoded frames signaled as ready and cancel any reported flush.
865         self.event_queue.retain(|event| {
866             !matches!(
867                 event,
868                 DecoderEvent::PictureReady { .. } | DecoderEvent::FlushCompleted(_)
869             )
870         });
871 
872         // Now hold all the imported frames until reuse_output_buffer is called on them.
873         let frame_pool = self.codec.frame_pool();
874         while let Some(frame) = frame_pool.take_free_frame() {
875             let picture_id = (*frame).as_ref().picture_buffer_id;
876             self.held_frames
877                 .insert(picture_id, BorrowedFrame::Held(frame));
878         }
879 
880         Ok(())
881     }
882 
    // Returns the descriptor the client polls to learn that decoder events
    // are ready to be read via `read_event` — here the event queue itself.
    fn event_pipe(&self) -> &dyn base::AsRawDescriptor {
        &self.event_queue
    }
886 
use_output_buffer( &mut self, picture_buffer_id: i32, resource: GuestResource, ) -> VideoResult<()>887     fn use_output_buffer(
888         &mut self,
889         picture_buffer_id: i32,
890         resource: GuestResource,
891     ) -> VideoResult<()> {
892         let output_queue_state = &mut self.output_queue_state;
893         if let OutputQueueState::Drc = output_queue_state {
894             // Reusing buffers during DRC is valid, but we won't use them and can just drop them.
895             return Ok(());
896         }
897 
898         let desc = match resource.handle {
899             GuestResourceHandle::GuestPages(handle) => {
900                 BufferDescriptor::GuestMem(GuestMemDescriptor(handle))
901             }
902             GuestResourceHandle::VirtioObject(handle) => {
903                 // SAFETY: descriptor is expected to be valid
904                 let fd = unsafe { OwnedFd::from_raw_fd(handle.desc.into_raw_descriptor()) };
905                 let modifier = handle.modifier;
906 
907                 let frame = DmabufFrame {
908                     fds: vec![fd],
909                     layout: FrameLayout {
910                         format: (cros_codecs::Fourcc::from(b"NV12"), modifier),
911                         size: cros_codecs::Resolution::from((resource.width, resource.height)),
912                         planes: resource
913                             .planes
914                             .iter()
915                             .map(|p| PlaneLayout {
916                                 buffer_index: 0,
917                                 offset: p.offset,
918                                 stride: p.stride,
919                             })
920                             .collect(),
921                     },
922                 };
923 
924                 BufferDescriptor::Dmabuf(frame)
925             }
926         };
927 
928         let desc_with_pic_id = BufferDescWithPicId {
929             desc,
930             picture_buffer_id,
931         };
932 
933         self.codec
934             .frame_pool()
935             .add_frames(vec![desc_with_pic_id])
936             .map_err(VideoError::BackendFailure)?;
937 
938         self.try_make_progress()
939     }
940 
reuse_output_buffer(&mut self, picture_buffer_id: i32) -> VideoResult<()>941     fn reuse_output_buffer(&mut self, picture_buffer_id: i32) -> VideoResult<()> {
942         let output_queue_state = &mut self.output_queue_state;
943         if let OutputQueueState::Drc = output_queue_state {
944             // Reusing buffers during DRC is valid, but we won't use them and can just drop them.
945             return Ok(());
946         }
947 
948         self.held_frames.remove(&picture_buffer_id);
949 
950         self.try_make_progress()?;
951 
952         if self.flushing {
953             // Try flushing again now that we have a new buffer. This might let
954             // us progress further in the flush operation.
955             self.flush()?;
956         }
957         Ok(())
958     }
959 
read_event(&mut self) -> VideoResult<DecoderEvent>960     fn read_event(&mut self) -> VideoResult<DecoderEvent> {
961         self.event_queue
962             .dequeue_event()
963             .map_err(|e| VideoError::BackendFailure(anyhow!("Can't read event {}", e)))
964     }
965 }
966 
967 impl DecoderBackend for VaapiDecoder {
968     type Session = VaapiDecoderSession;
969 
get_capabilities(&self) -> Capability970     fn get_capabilities(&self) -> Capability {
971         self.caps.clone()
972     }
973 
new_session(&mut self, format: Format) -> VideoResult<Self::Session>974     fn new_session(&mut self, format: Format) -> VideoResult<Self::Session> {
975         let display = Display::open()
976             .ok_or_else(|| VideoError::BackendFailure(anyhow!("failed to open VA display")))?;
977 
978         let codec: Box<dyn StatelessVideoDecoder<BufferDescWithPicId>> = match format {
979             Format::VP8 => Box::new(
980                 cros_codecs::decoder::stateless::StatelessDecoder::<Vp8, _>::new_vaapi(
981                     display,
982                     cros_codecs::decoder::BlockingMode::NonBlocking,
983                 ),
984             ),
985             Format::VP9 => Box::new(
986                 cros_codecs::decoder::stateless::StatelessDecoder::<Vp9, _>::new_vaapi(
987                     display,
988                     cros_codecs::decoder::BlockingMode::NonBlocking,
989                 ),
990             ),
991             Format::H264 => Box::new(
992                 cros_codecs::decoder::stateless::StatelessDecoder::<H264, _>::new_vaapi(
993                     display,
994                     cros_codecs::decoder::BlockingMode::NonBlocking,
995                 ),
996             ),
997             Format::Hevc => Box::new(
998                 cros_codecs::decoder::stateless::StatelessDecoder::<H265, _>::new_vaapi(
999                     display,
1000                     cros_codecs::decoder::BlockingMode::NonBlocking,
1001                 ),
1002             ),
1003             _ => return Err(VideoError::InvalidFormat),
1004         };
1005 
1006         Ok(VaapiDecoderSession {
1007             codec,
1008             output_queue_state: OutputQueueState::AwaitingBufferCount,
1009             held_frames: Default::default(),
1010             submit_queue: Default::default(),
1011             event_queue: EventQueue::new().map_err(|e| VideoError::BackendFailure(anyhow!(e)))?,
1012             flushing: Default::default(),
1013         })
1014     }
1015 }
1016 
#[cfg(test)]
mod tests {
    use super::super::tests::*;
    use super::*;

    // Ignored by default: requires libva-compatible hardware.
    #[test]
    #[ignore]
    fn test_get_capabilities() {
        let decoder = VaapiDecoder::new().unwrap();
        let caps = decoder.get_capabilities();
        assert!(!caps.input_formats().is_empty());
        assert!(!caps.output_formats().is_empty());
    }

    // Decode using guest memory input and output buffers.
    //
    // Ignored by default: requires libva-compatible hardware.
    #[test]
    #[ignore]
    fn test_decode_h264_guestmem_to_guestmem() {
        let mut decoder = VaapiDecoder::new().unwrap();
        decode_h264_generic(&mut decoder, build_guest_mem_handle, build_guest_mem_handle);
    }
}
1044