• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2022 The ChromiumOS Authors
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 use std::collections::VecDeque;
6 use std::rc::Rc;
7 
8 use anyhow::anyhow;
9 use anyhow::Context as AnyhowContext;
10 use anyhow::Result;
11 use libva::BufferType;
12 use libva::Display;
13 use libva::IQMatrix;
14 use libva::IQMatrixBufferH264;
15 use libva::Picture as VaPicture;
16 use libva::PictureNew;
17 use libva::PictureParameter;
18 use libva::PictureParameterBufferH264;
19 use libva::SliceParameter;
20 use log::debug;
21 
22 use crate::decoders::h264::backends::Result as StatelessBackendResult;
23 use crate::decoders::h264::backends::StatelessDecoderBackend;
24 use crate::decoders::h264::decoder::Decoder;
25 use crate::decoders::h264::dpb::Dpb;
26 use crate::decoders::h264::dpb::DpbEntry;
27 use crate::decoders::h264::parser::Level;
28 use crate::decoders::h264::parser::Pps;
29 use crate::decoders::h264::parser::Profile;
30 use crate::decoders::h264::parser::Slice;
31 use crate::decoders::h264::parser::Sps;
32 use crate::decoders::h264::picture::Field;
33 use crate::decoders::h264::picture::PictureData;
34 use crate::decoders::h264::picture::Reference;
35 use crate::decoders::BlockingMode;
36 use crate::decoders::DecodedHandle;
37 use crate::decoders::Result as DecoderResult;
38 use crate::decoders::StatelessBackendError;
39 use crate::decoders::VideoDecoderBackend;
40 use crate::utils::vaapi::DecodedHandle as VADecodedHandle;
41 use crate::utils::vaapi::NegotiationStatus;
42 use crate::utils::vaapi::StreamInfo;
43 use crate::utils::vaapi::VaapiBackend;
44 use crate::DecodedFormat;
45 use crate::Resolution;
46 
#[cfg(test)]
#[derive(Default)]
/// Captures the last set of VA-API buffers submitted for a picture so tests
/// can assert on the exact metadata sent to the driver.
struct TestParams {
    // Last picture parameter buffer built by `build_pic_param`.
    pic_param: Option<BufferType>,
    // Last IQ matrix buffer built by `build_iq_matrix`.
    iq_matrix: Option<BufferType>,
    // Last slice parameter buffer built by `build_slice_param`.
    slice_param: Option<BufferType>,
    // Raw slice data (NALU bytes) of the last decoded slice.
    slice_data: Option<BufferType>,
}
55 
#[cfg(test)]
impl TestParams {
    /// Saves the picture parameter and IQ matrix buffers submitted for the
    /// current picture.
    fn save_pic_params(&mut self, pic_param: BufferType, iq_matrix: BufferType) {
        self.pic_param = Some(pic_param);
        self.iq_matrix = Some(iq_matrix);
    }

    /// Saves the slice parameter buffer submitted for the current slice.
    fn save_slice_params(&mut self, slice_param: BufferType) {
        self.slice_param = Some(slice_param);
    }

    /// Saves the raw slice data buffer submitted for the current slice.
    fn save_slice_data(&mut self, slice_data: BufferType) {
        self.slice_data = Some(slice_data);
    }
}
71 
72 impl StreamInfo for &Sps {
va_profile(&self) -> anyhow::Result<i32>73     fn va_profile(&self) -> anyhow::Result<i32> {
74         let profile_idc = self.profile_idc();
75         let profile = Profile::n(profile_idc)
76             .with_context(|| format!("Invalid profile_idc {:?}", profile_idc))?;
77 
78         match profile {
79             Profile::Baseline => {
80                 if self.constraint_set0_flag() {
81                     Ok(libva::VAProfile::VAProfileH264ConstrainedBaseline)
82                 } else {
83                     Err(anyhow!(
84                         "Unsupported stream: profile_idc=66, but constraint_set0_flag is unset"
85                     ))
86                 }
87             }
88             Profile::Main => Ok(libva::VAProfile::VAProfileH264Main),
89             Profile::High => Ok(libva::VAProfile::VAProfileH264High),
90         }
91     }
92 
rt_format(&self) -> anyhow::Result<u32>93     fn rt_format(&self) -> anyhow::Result<u32> {
94         let bit_depth_luma = self.bit_depth_chroma_minus8() + 8;
95         let chroma_format_idc = self.chroma_format_idc();
96 
97         match bit_depth_luma {
98             8 => match chroma_format_idc {
99                 0 | 1 => Ok(libva::constants::VA_RT_FORMAT_YUV420),
100                 _ => Err(anyhow!(
101                     "Unsupported chroma_format_idc: {}",
102                     chroma_format_idc
103                 )),
104             },
105             _ => Err(anyhow!("Unsupported bit depth: {}", bit_depth_luma)),
106         }
107     }
108 
min_num_surfaces(&self) -> usize109     fn min_num_surfaces(&self) -> usize {
110         self.max_dpb_frames().unwrap() + 4
111     }
112 
coded_size(&self) -> (u32, u32)113     fn coded_size(&self) -> (u32, u32) {
114         (self.width(), self.height())
115     }
116 
visible_rect(&self) -> ((u32, u32), (u32, u32))117     fn visible_rect(&self) -> ((u32, u32), (u32, u32)) {
118         let rect = self.visible_rectangle();
119 
120         ((rect.min.x, rect.min.y), (rect.max.x, rect.max.y))
121     }
122 }
123 
124 /// H.264 stateless decoder backend for VA-API.
125 struct Backend {
126     backend: VaapiBackend<Sps>,
127 
128     /// The current picture being worked on.
129     current_picture: Option<VaPicture<PictureNew>>,
130 
131     #[cfg(test)]
132     /// Test params. Saves the metadata sent to VA-API for the purposes of
133     /// testing.
134     test_params: TestParams,
135 }
136 
137 impl Backend {
138     /// Creates a new codec backend for H.264.
new(display: Rc<libva::Display>) -> Result<Self>139     fn new(display: Rc<libva::Display>) -> Result<Self> {
140         Ok(Self {
141             backend: VaapiBackend::new(display),
142             current_picture: Default::default(),
143 
144             #[cfg(test)]
145             test_params: Default::default(),
146         })
147     }
148 
149     /// Gets the VASurfaceID for the given `picture`.
surface_id(handle: &Option<VADecodedHandle>) -> libva::VASurfaceID150     fn surface_id(handle: &Option<VADecodedHandle>) -> libva::VASurfaceID {
151         match handle {
152             None => libva::constants::VA_INVALID_SURFACE,
153             Some(handle) => handle.handle().surface_id(),
154         }
155     }
156 
157     /// Fills the internal `va_pic` picture parameter with data from `h264_pic`
fill_va_h264_pic( h264_pic: &PictureData, surface_id: libva::VASurfaceID, merge_other_field: bool, ) -> libva::PictureH264158     fn fill_va_h264_pic(
159         h264_pic: &PictureData,
160         surface_id: libva::VASurfaceID,
161         merge_other_field: bool,
162     ) -> libva::PictureH264 {
163         let mut flags = 0;
164         let frame_idx = if matches!(h264_pic.reference(), Reference::LongTerm) {
165             flags |= libva::constants::VA_PICTURE_H264_LONG_TERM_REFERENCE;
166             h264_pic.long_term_frame_idx
167         } else {
168             if matches!(h264_pic.reference(), Reference::ShortTerm { .. }) {
169                 flags |= libva::constants::VA_PICTURE_H264_SHORT_TERM_REFERENCE;
170             }
171 
172             h264_pic.frame_num
173         };
174 
175         let top_field_order_cnt;
176         let bottom_field_order_cnt;
177 
178         match h264_pic.field {
179             Field::Frame => {
180                 top_field_order_cnt = h264_pic.top_field_order_cnt;
181                 bottom_field_order_cnt = h264_pic.bottom_field_order_cnt;
182             }
183             Field::Top => {
184                 if merge_other_field && h264_pic.other_field().is_some() {
185                     bottom_field_order_cnt = h264_pic
186                         .other_field_unchecked()
187                         .borrow()
188                         .bottom_field_order_cnt;
189                 } else {
190                     flags |= libva::constants::VA_PICTURE_H264_TOP_FIELD;
191                     bottom_field_order_cnt = 0;
192                 }
193 
194                 top_field_order_cnt = h264_pic.top_field_order_cnt;
195             }
196             Field::Bottom => {
197                 if merge_other_field && h264_pic.other_field().is_some() {
198                     top_field_order_cnt = h264_pic
199                         .other_field_unchecked()
200                         .borrow()
201                         .top_field_order_cnt;
202                 } else {
203                     flags |= libva::constants::VA_PICTURE_H264_BOTTOM_FIELD;
204                     top_field_order_cnt = 0;
205                 }
206 
207                 bottom_field_order_cnt = h264_pic.bottom_field_order_cnt;
208             }
209         }
210 
211         libva::PictureH264::new(
212             surface_id,
213             frame_idx as u32,
214             flags,
215             top_field_order_cnt,
216             bottom_field_order_cnt,
217         )
218     }
219 
220     /// Builds an invalid VaPictureH264. These pictures are used to fill empty
221     /// array slots there is no data to fill them with.
build_invalid_va_h264_pic() -> libva::PictureH264222     fn build_invalid_va_h264_pic() -> libva::PictureH264 {
223         libva::PictureH264::new(
224             libva::constants::VA_INVALID_ID,
225             0,
226             libva::constants::VA_PICTURE_H264_INVALID,
227             0,
228             0,
229         )
230     }
231 
build_iq_matrix(pps: &Pps) -> BufferType232     fn build_iq_matrix(pps: &Pps) -> BufferType {
233         let mut scaling_list4x4 = [[0; 16]; 6];
234         let mut scaling_list8x8 = [[0; 64]; 2];
235 
236         (0..6).for_each(|i| {
237             Decoder::<VADecodedHandle>::get_raster_from_zigzag_4x4(
238                 pps.scaling_lists_4x4()[i],
239                 &mut scaling_list4x4[i],
240             );
241         });
242 
243         (0..2).for_each(|i| {
244             Decoder::<VADecodedHandle>::get_raster_from_zigzag_8x8(
245                 pps.scaling_lists_8x8()[i],
246                 &mut scaling_list8x8[i],
247             );
248         });
249 
250         BufferType::IQMatrix(IQMatrix::H264(IQMatrixBufferH264::new(
251             scaling_list4x4,
252             scaling_list8x8,
253         )))
254     }
255 
build_pic_param( slice: &Slice<impl AsRef<[u8]>>, current_picture: &PictureData, current_surface_id: libva::VASurfaceID, dpb: &Dpb<VADecodedHandle>, sps: &Sps, pps: &Pps, ) -> Result<BufferType>256     fn build_pic_param(
257         slice: &Slice<impl AsRef<[u8]>>,
258         current_picture: &PictureData,
259         current_surface_id: libva::VASurfaceID,
260         dpb: &Dpb<VADecodedHandle>,
261         sps: &Sps,
262         pps: &Pps,
263     ) -> Result<BufferType> {
264         let curr_pic = Backend::fill_va_h264_pic(current_picture, current_surface_id, false);
265 
266         let mut refs = vec![];
267         let mut va_refs = vec![];
268 
269         dpb.get_short_term_refs(&mut refs);
270         refs.retain(|handle| {
271             let pic = handle.0.borrow();
272             !pic.nonexisting && !pic.is_second_field()
273         });
274 
275         for handle in &refs {
276             let ref_pic = handle.0.borrow();
277             let surface_id = Backend::surface_id(&handle.1);
278             let pic = Backend::fill_va_h264_pic(&ref_pic, surface_id, true);
279             va_refs.push(pic);
280         }
281 
282         refs.clear();
283 
284         dpb.get_long_term_refs(&mut refs);
285         refs.retain(|handle| {
286             let pic = handle.0.borrow();
287             !pic.is_second_field()
288         });
289 
290         for handle in &refs {
291             let ref_pic = handle.0.borrow();
292             let surface_id = Backend::surface_id(&handle.1);
293             let pic = Backend::fill_va_h264_pic(&ref_pic, surface_id, true);
294             va_refs.push(pic);
295         }
296 
297         for _ in va_refs.len()..16 {
298             va_refs.push(Backend::build_invalid_va_h264_pic());
299         }
300 
301         refs.clear();
302 
303         let seq_fields = libva::H264SeqFields::new(
304             sps.chroma_format_idc() as u32,
305             sps.separate_colour_plane_flag() as u32,
306             sps.gaps_in_frame_num_value_allowed_flag() as u32,
307             sps.frame_mbs_only_flag() as u32,
308             sps.mb_adaptive_frame_field_flag() as u32,
309             sps.direct_8x8_inference_flag() as u32,
310             (sps.level_idc() > Level::L3_1) as u32, /* see A.3.3.2 */
311             sps.log2_max_frame_num_minus4() as u32,
312             sps.pic_order_cnt_type() as u32,
313             sps.log2_max_pic_order_cnt_lsb_minus4() as u32,
314             sps.delta_pic_order_always_zero_flag() as u32,
315         );
316         let interlaced = !sps.frame_mbs_only_flag() as u32;
317         let picture_height_in_mbs_minus1 =
318             ((sps.pic_height_in_map_units_minus1() + 1) << interlaced) - 1;
319 
320         let pic_fields = libva::H264PicFields::new(
321             pps.entropy_coding_mode_flag() as u32,
322             pps.weighted_pred_flag() as u32,
323             pps.weighted_bipred_idc() as u32,
324             pps.transform_8x8_mode_flag() as u32,
325             slice.header().field_pic_flag() as u32,
326             pps.constrained_intra_pred_flag() as u32,
327             pps.bottom_field_pic_order_in_frame_present_flag() as u32,
328             pps.deblocking_filter_control_present_flag() as u32,
329             pps.redundant_pic_cnt_present_flag() as u32,
330             (current_picture.nal_ref_idc != 0) as u32,
331         );
332 
333         let va_refs = va_refs.try_into();
334         let va_refs = match va_refs {
335             Ok(va_refs) => va_refs,
336             Err(_) => {
337                 panic!("Bug: wrong number of references, expected 16");
338             }
339         };
340 
341         let pic_param = PictureParameterBufferH264::new(
342             curr_pic,
343             va_refs,
344             u16::try_from(sps.pic_width_in_mbs_minus1())?,
345             u16::try_from(picture_height_in_mbs_minus1)?,
346             sps.bit_depth_luma_minus8(),
347             sps.bit_depth_chroma_minus8(),
348             u8::try_from(sps.max_num_ref_frames())?,
349             &seq_fields,
350             0, /* FMO not supported by VA */
351             0, /* FMO not supported by VA */
352             0, /* FMO not supported by VA */
353             pps.pic_init_qp_minus26(),
354             pps.pic_init_qs_minus26(),
355             pps.chroma_qp_index_offset(),
356             pps.second_chroma_qp_index_offset(),
357             &pic_fields,
358             slice.header().frame_num(),
359         );
360 
361         Ok(BufferType::PictureParameter(PictureParameter::H264(
362             pic_param,
363         )))
364     }
365 
fill_ref_pic_list(ref_list_x: &[DpbEntry<VADecodedHandle>]) -> [libva::PictureH264; 32]366     fn fill_ref_pic_list(ref_list_x: &[DpbEntry<VADecodedHandle>]) -> [libva::PictureH264; 32] {
367         let mut va_pics = vec![];
368 
369         for handle in ref_list_x {
370             let pic = handle.0.borrow();
371             let surface_id = Backend::surface_id(&handle.1);
372             let merge = matches!(pic.field, Field::Frame);
373             let va_pic = Backend::fill_va_h264_pic(&pic, surface_id, merge);
374 
375             va_pics.push(va_pic);
376         }
377 
378         for _ in va_pics.len()..32 {
379             va_pics.push(Backend::build_invalid_va_h264_pic());
380         }
381 
382         let va_pics: [libva::PictureH264; 32] = match va_pics.try_into() {
383             Ok(va_pics) => va_pics,
384             Err(e) => panic!(
385                 "Bug: wrong number of references, expected 32, got {:?}",
386                 e.len()
387             ),
388         };
389 
390         va_pics
391     }
392 
build_slice_param( slice: &Slice<impl AsRef<[u8]>>, ref_list_0: &[DpbEntry<VADecodedHandle>], ref_list_1: &[DpbEntry<VADecodedHandle>], sps: &Sps, pps: &Pps, ) -> Result<BufferType>393     fn build_slice_param(
394         slice: &Slice<impl AsRef<[u8]>>,
395         ref_list_0: &[DpbEntry<VADecodedHandle>],
396         ref_list_1: &[DpbEntry<VADecodedHandle>],
397         sps: &Sps,
398         pps: &Pps,
399     ) -> Result<BufferType> {
400         let hdr = slice.header();
401         let nalu = slice.nalu();
402 
403         let ref_list_0 = Backend::fill_ref_pic_list(ref_list_0);
404         let ref_list_1 = Backend::fill_ref_pic_list(ref_list_1);
405         let pwt = hdr.pred_weight_table();
406 
407         let mut luma_weight_l0_flag = false;
408         let mut chroma_weight_l0_flag = false;
409         let mut luma_weight_l0 = [0i16; 32];
410         let mut luma_offset_l0 = [0i16; 32];
411         let mut chroma_weight_l0: [[i16; 2]; 32] = [[0i16; 2]; 32];
412         let mut chroma_offset_l0: [[i16; 2]; 32] = [[0i16; 2]; 32];
413 
414         let mut luma_weight_l1_flag = false;
415         let mut chroma_weight_l1_flag = false;
416         let mut luma_weight_l1 = [0i16; 32];
417         let mut luma_offset_l1 = [0i16; 32];
418         let mut chroma_weight_l1: [[i16; 2]; 32] = [[0i16; 2]; 32];
419         let mut chroma_offset_l1: [[i16; 2]; 32] = [[0i16; 2]; 32];
420 
421         let mut fill_l0 = false;
422         let mut fill_l1 = false;
423 
424         if pps.weighted_pred_flag() && (hdr.slice_type().is_p() || hdr.slice_type().is_sp()) {
425             fill_l0 = true;
426         } else if pps.weighted_bipred_idc() == 1 && hdr.slice_type().is_b() {
427             fill_l0 = true;
428             fill_l1 = true;
429         }
430 
431         if fill_l0 {
432             luma_weight_l0_flag = true;
433 
434             for i in 0..=hdr.num_ref_idx_l0_active_minus1() as usize {
435                 luma_weight_l0[i] = pwt.luma_weight_l0()[i];
436                 luma_offset_l0[i] = i16::from(pwt.luma_offset_l0()[i]);
437             }
438 
439             chroma_weight_l0_flag = sps.chroma_array_type() != 0;
440             if chroma_weight_l0_flag {
441                 for i in 0..=hdr.num_ref_idx_l0_active_minus1() as usize {
442                     for j in 0..2 {
443                         chroma_weight_l0[i][j] = pwt.chroma_weight_l0()[i][j];
444                         chroma_offset_l0[i][j] = i16::from(pwt.chroma_offset_l0()[i][j]);
445                     }
446                 }
447             }
448         }
449 
450         if fill_l1 {
451             luma_weight_l1_flag = true;
452 
453             luma_weight_l1[..(hdr.num_ref_idx_l1_active_minus1() as usize + 1)].clone_from_slice(
454                 &pwt.luma_weight_l1()[..(hdr.num_ref_idx_l1_active_minus1() as usize + 1)],
455             );
456             luma_offset_l1[..(hdr.num_ref_idx_l1_active_minus1() as usize + 1)].clone_from_slice(
457                 &pwt.luma_offset_l1()[..(hdr.num_ref_idx_l1_active_minus1() as usize + 1)],
458             );
459 
460             chroma_weight_l1_flag = sps.chroma_array_type() != 0;
461             if chroma_weight_l1_flag {
462                 for i in 0..=hdr.num_ref_idx_l1_active_minus1() as usize {
463                     for j in 0..2 {
464                         chroma_weight_l1[i][j] = pwt.chroma_weight_l1()[i][j];
465                         chroma_offset_l1[i][j] = i16::from(pwt.chroma_offset_l1()[i][j]);
466                     }
467                 }
468             }
469         }
470 
471         let slice_param = libva::SliceParameterBufferH264::new(
472             nalu.size() as u32,
473             0,
474             libva::constants::VA_SLICE_DATA_FLAG_ALL,
475             hdr.header_bit_size() as u16,
476             hdr.first_mb_in_slice() as u16,
477             *hdr.slice_type() as u8,
478             hdr.direct_spatial_mv_pred_flag() as u8,
479             hdr.num_ref_idx_l0_active_minus1(),
480             hdr.num_ref_idx_l1_active_minus1(),
481             hdr.cabac_init_idc(),
482             hdr.slice_qp_delta(),
483             hdr.disable_deblocking_filter_idc(),
484             hdr.slice_alpha_c0_offset_div2(),
485             hdr.slice_beta_offset_div2(),
486             ref_list_0,
487             ref_list_1,
488             pwt.luma_log2_weight_denom(),
489             pwt.chroma_log2_weight_denom(),
490             luma_weight_l0_flag as u8,
491             luma_weight_l0,
492             luma_offset_l0,
493             chroma_weight_l0_flag as u8,
494             chroma_weight_l0,
495             chroma_offset_l0,
496             luma_weight_l1_flag as u8,
497             luma_weight_l1,
498             luma_offset_l1,
499             chroma_weight_l1_flag as u8,
500             chroma_weight_l1,
501             chroma_offset_l1,
502         );
503 
504         Ok(BufferType::SliceParameter(SliceParameter::H264(
505             slice_param,
506         )))
507     }
508 }
509 
510 impl VideoDecoderBackend for Backend {
511     type Handle = VADecodedHandle;
512 
num_resources_total(&self) -> usize513     fn num_resources_total(&self) -> usize {
514         self.backend.num_resources_total()
515     }
516 
num_resources_left(&self) -> usize517     fn num_resources_left(&self) -> usize {
518         self.backend.num_resources_left()
519     }
520 
format(&self) -> Option<DecodedFormat>521     fn format(&self) -> Option<DecodedFormat> {
522         self.backend.format()
523     }
524 
try_format(&mut self, format: DecodedFormat) -> DecoderResult<()>525     fn try_format(&mut self, format: DecodedFormat) -> DecoderResult<()> {
526         self.backend.try_format(format)
527     }
528 
coded_resolution(&self) -> Option<Resolution>529     fn coded_resolution(&self) -> Option<Resolution> {
530         self.backend.coded_resolution()
531     }
532 
display_resolution(&self) -> Option<Resolution>533     fn display_resolution(&self) -> Option<Resolution> {
534         self.backend.display_resolution()
535     }
536 
poll(&mut self, blocking_mode: BlockingMode) -> DecoderResult<VecDeque<Self::Handle>>537     fn poll(&mut self, blocking_mode: BlockingMode) -> DecoderResult<VecDeque<Self::Handle>> {
538         self.backend.poll(blocking_mode)
539     }
540 
handle_is_ready(&self, handle: &Self::Handle) -> bool541     fn handle_is_ready(&self, handle: &Self::Handle) -> bool {
542         self.backend.handle_is_ready(handle)
543     }
544 
block_on_handle(&mut self, handle: &Self::Handle) -> StatelessBackendResult<()>545     fn block_on_handle(&mut self, handle: &Self::Handle) -> StatelessBackendResult<()> {
546         self.backend.block_on_handle(handle)
547     }
548 }
549 
550 impl StatelessDecoderBackend for Backend {
new_sequence(&mut self, sps: &Sps) -> StatelessBackendResult<()>551     fn new_sequence(&mut self, sps: &Sps) -> StatelessBackendResult<()> {
552         self.backend.metadata_state.open(sps, None)?;
553         self.backend.negotiation_status = NegotiationStatus::Possible(Box::new(sps.clone()));
554 
555         Ok(())
556     }
557 
handle_picture( &mut self, picture: &PictureData, timestamp: u64, sps: &Sps, pps: &Pps, dpb: &Dpb<Self::Handle>, slice: &Slice<&[u8]>, ) -> StatelessBackendResult<()>558     fn handle_picture(
559         &mut self,
560         picture: &PictureData,
561         timestamp: u64,
562         sps: &Sps,
563         pps: &Pps,
564         dpb: &Dpb<Self::Handle>,
565         slice: &Slice<&[u8]>,
566     ) -> StatelessBackendResult<()> {
567         debug!(
568             "Va-API backend: handle_picture for timestamp {:?}",
569             timestamp
570         );
571 
572         self.backend.negotiation_status = NegotiationStatus::Negotiated;
573 
574         let metadata = self.backend.metadata_state.get_parsed()?;
575         let context = &metadata.context;
576 
577         let va_pic = &mut self.current_picture.as_mut().unwrap();
578         let surface_id = va_pic.surface().id();
579 
580         let pic_param = Backend::build_pic_param(slice, picture, surface_id, dpb, sps, pps)?;
581         let pic_param = context.create_buffer(pic_param)?;
582 
583         let iq_matrix = Backend::build_iq_matrix(pps);
584         let iq_matrix = context.create_buffer(iq_matrix)?;
585 
586         #[cfg(test)]
587         self.test_params.save_pic_params(
588             Backend::build_pic_param(slice, picture, surface_id, dpb, sps, pps)?,
589             Backend::build_iq_matrix(pps),
590         );
591 
592         va_pic.add_buffer(pic_param);
593         va_pic.add_buffer(iq_matrix);
594 
595         Ok(())
596     }
597 
decode_slice( &mut self, slice: &Slice<&[u8]>, sps: &Sps, pps: &Pps, _: &Dpb<Self::Handle>, ref_pic_list0: &[DpbEntry<Self::Handle>], ref_pic_list1: &[DpbEntry<Self::Handle>], ) -> StatelessBackendResult<()>598     fn decode_slice(
599         &mut self,
600         slice: &Slice<&[u8]>,
601         sps: &Sps,
602         pps: &Pps,
603         _: &Dpb<Self::Handle>,
604         ref_pic_list0: &[DpbEntry<Self::Handle>],
605         ref_pic_list1: &[DpbEntry<Self::Handle>],
606     ) -> StatelessBackendResult<()> {
607         let metadata = self.backend.metadata_state.get_parsed()?;
608         let context = &metadata.context;
609 
610         let slice_param = context.create_buffer(Backend::build_slice_param(
611             slice,
612             ref_pic_list0,
613             ref_pic_list1,
614             sps,
615             pps,
616         )?)?;
617 
618         #[cfg(test)]
619         self.test_params
620             .save_slice_params(Backend::build_slice_param(
621                 slice,
622                 ref_pic_list0,
623                 ref_pic_list1,
624                 sps,
625                 pps,
626             )?);
627 
628         let cur_va_pic = &mut self.current_picture.as_mut().unwrap();
629         cur_va_pic.add_buffer(slice_param);
630 
631         let slice_data =
632             context.create_buffer(BufferType::SliceData(Vec::from(slice.nalu().as_ref())))?;
633 
634         #[cfg(test)]
635         self.test_params
636             .save_slice_data(BufferType::SliceData(Vec::from(slice.nalu().as_ref())));
637 
638         cur_va_pic.add_buffer(slice_data);
639 
640         Ok(())
641     }
642 
submit_picture( &mut self, _: &PictureData, block: BlockingMode, ) -> StatelessBackendResult<Self::Handle>643     fn submit_picture(
644         &mut self,
645         _: &PictureData,
646         block: BlockingMode,
647     ) -> StatelessBackendResult<Self::Handle> {
648         let current_picture = self.current_picture.take().unwrap();
649 
650         self.backend.process_picture(current_picture, block)
651     }
652 
new_picture(&mut self, _: &PictureData, timestamp: u64) -> StatelessBackendResult<()>653     fn new_picture(&mut self, _: &PictureData, timestamp: u64) -> StatelessBackendResult<()> {
654         let metadata = self.backend.metadata_state.get_parsed_mut()?;
655 
656         let surface = metadata
657             .surface_pool
658             .get_surface()
659             .ok_or(StatelessBackendError::OutOfResources)?;
660 
661         let va_pic = VaPicture::new(timestamp, Rc::clone(&metadata.context), surface);
662 
663         self.current_picture = Some(va_pic);
664 
665         Ok(())
666     }
667 
new_field_picture( &mut self, _: &PictureData, timestamp: u64, first_field: &Self::Handle, ) -> StatelessBackendResult<()>668     fn new_field_picture(
669         &mut self,
670         _: &PictureData,
671         timestamp: u64,
672         first_field: &Self::Handle,
673     ) -> StatelessBackendResult<()> {
674         // Block on the first field if it is not ready yet.
675         let backend_handle = first_field.handle();
676         if !backend_handle.is_ready() {
677             drop(backend_handle);
678             self.block_on_handle(first_field)?;
679         }
680 
681         // Decode to the same surface as the first field picture.
682         let first_va_handle = first_field.handle();
683         let va_picture = first_va_handle
684             .picture()
685             .expect("no valid backend handle after blocking on it");
686         self.current_picture = Some(VaPicture::new_from_same_surface(timestamp, va_picture));
687 
688         Ok(())
689     }
690 
691     #[cfg(test)]
get_test_params(&self) -> &dyn std::any::Any692     fn get_test_params(&self) -> &dyn std::any::Any {
693         &self.test_params
694     }
695 }
696 
697 impl Decoder<VADecodedHandle> {
698     // Creates a new instance of the decoder using the VAAPI backend.
new_vaapi(display: Rc<Display>, blocking_mode: BlockingMode) -> Result<Self>699     pub fn new_vaapi(display: Rc<Display>, blocking_mode: BlockingMode) -> Result<Self> {
700         Self::new(Box::new(Backend::new(display)?), blocking_mode)
701     }
702 }
703 
704 #[cfg(test)]
705 mod tests {
706     use std::collections::HashSet;
707 
708     use libva::Display;
709 
710     use crate::decoders::h264::backends::vaapi::TestParams;
711     use crate::decoders::h264::backends::StatelessDecoderBackend;
712     use crate::decoders::h264::decoder::tests::process_ready_frames;
713     use crate::decoders::h264::decoder::tests::run_decoding_loop;
714     use crate::decoders::h264::decoder::Decoder;
715     use crate::decoders::BlockingMode;
716     use crate::decoders::DecodedHandle;
717     use crate::decoders::DynHandle;
718     use crate::utils::vaapi::DecodedHandle as VADecodedHandle;
719 
get_test_params( backend: &dyn StatelessDecoderBackend<Handle = VADecodedHandle>, ) -> &TestParams720     fn get_test_params(
721         backend: &dyn StatelessDecoderBackend<Handle = VADecodedHandle>,
722     ) -> &TestParams {
723         backend
724             .get_test_params()
725             .downcast_ref::<TestParams>()
726             .unwrap()
727     }
728 
process_handle( handle: &VADecodedHandle, dump_yuv: bool, expected_crcs: Option<&mut HashSet<&str>>, frame_num: i32, )729     fn process_handle(
730         handle: &VADecodedHandle,
731         dump_yuv: bool,
732         expected_crcs: Option<&mut HashSet<&str>>,
733         frame_num: i32,
734     ) {
735         let mut picture = handle.handle_mut();
736         let mut backend_handle = picture.dyn_mappable_handle_mut();
737 
738         let buffer_size = backend_handle.image_size();
739         let mut nv12 = vec![0; buffer_size];
740 
741         backend_handle.read(&mut nv12).unwrap();
742 
743         if dump_yuv {
744             std::fs::write(format!("/tmp/frame{}.yuv", frame_num), &nv12).unwrap();
745         }
746 
747         let frame_crc = crc32fast::hash(&nv12);
748 
749         if let Some(expected_crcs) = expected_crcs {
750             let frame_crc = format!("{:08x}", frame_crc);
751             let removed = expected_crcs.remove(frame_crc.as_str());
752             assert!(
753                 removed,
754                 "CRC not found: {:?}, iteration: {:?}",
755                 frame_crc, frame_num
756             );
757         }
758     }
759 
760     #[test]
761     // Ignore this test by default as it requires libva-compatible hardware.
762     #[ignore]
test_16x16_progressive_i()763     fn test_16x16_progressive_i() {
764         /// This test is the same as
765         /// h264::decoders::tests::test_16x16_progressive_i, but with an actual
766         /// backend to test whether the backend specific logic works and the
767         /// produced CRCs match their expected values.
768         const TEST_STREAM: &[u8] = include_bytes!("../test_data/16x16-I.h264");
769         let blocking_modes = [BlockingMode::Blocking, BlockingMode::NonBlocking];
770 
771         for blocking_mode in blocking_modes {
772             let mut frame_num = 0;
773             let display = Display::open().unwrap();
774             let mut decoder = Decoder::new_vaapi(display, blocking_mode).unwrap();
775 
776             // CRC from the GStreamer decoder, should be good enough for us for now.
777             let mut expected_crcs = HashSet::from(["2737596b"]);
778 
779             run_decoding_loop(&mut decoder, TEST_STREAM, |decoder| {
780                 process_ready_frames(decoder, &mut |decoder, handle| {
781                     // Contains the params used to decode the picture. Useful if we want to
782                     // write assertions against any particular value used therein.
783                     let _params = get_test_params(decoder.backend());
784 
785                     process_handle(handle, false, Some(&mut expected_crcs), frame_num);
786 
787                     frame_num += 1;
788                 });
789             });
790 
791             assert!(
792                 expected_crcs.is_empty(),
793                 "Some CRCs were not produced by the decoder: {:?}",
794                 expected_crcs
795             );
796         }
797     }
798 
799     #[test]
800     // Ignore this test by default as it requires libva-compatible hardware.
801     #[ignore]
test_16x16_progressive_i_and_p()802     fn test_16x16_progressive_i_and_p() {
803         /// This test is the same as
804         /// h264::decoders::tests::test_16x16_progressive_i_and_p, but with an
805         /// actual backend to test whether the backend specific logic works and
806         /// the produced CRCs match their expected values.
807         const TEST_STREAM: &[u8] = include_bytes!("../test_data/16x16-I-P.h264");
808         let blocking_modes = [BlockingMode::Blocking, BlockingMode::NonBlocking];
809 
810         for blocking_mode in blocking_modes {
811             let mut frame_num = 0;
812             let display = Display::open().unwrap();
813             let mut decoder = Decoder::new_vaapi(display, blocking_mode).unwrap();
814 
815             let mut expected_crcs = HashSet::from(["1d0295c6", "2563d883"]);
816 
817             run_decoding_loop(&mut decoder, TEST_STREAM, |decoder| {
818                 // Contains the VA-API params used to decode the picture. Useful
819                 // if we want to write assertions against any particular value
820                 // used therein.
821                 process_ready_frames(decoder, &mut |decoder, handle| {
822                     let _params = get_test_params(decoder.backend());
823                     process_handle(handle, false, Some(&mut expected_crcs), frame_num);
824 
825                     frame_num += 1;
826                 });
827             });
828 
829             assert!(
830                 expected_crcs.is_empty(),
831                 "Some CRCs were not produced by the decoder: {:?}",
832                 expected_crcs
833             );
834         }
835     }
836 
837     #[test]
838     // Ignore this test by default as it requires libva-compatible hardware.
839     #[ignore]
test_16x16_progressive_i_p_b_p()840     fn test_16x16_progressive_i_p_b_p() {
841         /// This test is the same as
842         /// h264::decoders::tests::test_16x16_progressive_i_p_b_p, but with an
843         /// actual backend to test whether the backend specific logic works and
844         /// the produced CRCs match their expected values.
845         const TEST_STREAM: &[u8] = include_bytes!("../test_data/16x16-I-P-B-P.h264");
846         const STREAM_CRCS: &str = include_str!("../test_data/16x16-I-P-B-P.h264.crc");
847         let blocking_modes = [BlockingMode::Blocking, BlockingMode::NonBlocking];
848 
849         for blocking_mode in blocking_modes {
850             let mut expected_crcs = STREAM_CRCS.lines().collect::<HashSet<_>>();
851             let mut frame_num = 0;
852             let display = Display::open().unwrap();
853             let mut decoder = Decoder::new_vaapi(display, blocking_mode).unwrap();
854 
855             run_decoding_loop(&mut decoder, TEST_STREAM, |decoder| {
856                 process_ready_frames(decoder, &mut |decoder, handle| {
857                     // Contains the VA-API params used to decode the picture.
858                     // Useful if we want to write assertions against any
859                     // particular value used therein.
860                     let _params = get_test_params(decoder.backend());
861 
862                     process_handle(handle, false, Some(&mut expected_crcs), frame_num);
863 
864                     frame_num += 1;
865                 });
866             });
867 
868             assert!(
869                 expected_crcs.is_empty(),
870                 "Some CRCs were not produced by the decoder: {:?}",
871                 expected_crcs
872             );
873         }
874     }
875 
876     #[test]
877     // Ignore this test by default as it requires libva-compatible hardware.
878     #[ignore]
test_16x16_progressive_i_p_b_p_high()879     fn test_16x16_progressive_i_p_b_p_high() {
880         /// This test is the same as
881         /// h264::decoders::tests::test_16x16_progressive_i_p_b_p_high, but with
882         /// an actual backend to test whether the backend specific logic works
883         /// and the produced CRCs match their expected values.
884         const TEST_STREAM: &[u8] = include_bytes!("../test_data/16x16-I-P-B-P-high.h264");
885         const STREAM_CRCS: &str = include_str!("../test_data/16x16-I-P-B-P-high.h264.crc");
886         let blocking_modes = [BlockingMode::Blocking, BlockingMode::NonBlocking];
887 
888         for blocking_mode in blocking_modes {
889             let mut expected_crcs = STREAM_CRCS.lines().collect::<HashSet<_>>();
890             let mut frame_num = 0;
891             let display = Display::open().unwrap();
892             let mut decoder = Decoder::new_vaapi(display, blocking_mode).unwrap();
893 
894             run_decoding_loop(&mut decoder, TEST_STREAM, |decoder| {
895                 process_ready_frames(decoder, &mut |decoder, handle| {
896                     // Contains the VA-API params used to decode the picture.
897                     // Useful if we want to write assertions against any
898                     // particular value used therein.
899                     let _params = get_test_params(decoder.backend());
900 
901                     process_handle(handle, false, Some(&mut expected_crcs), frame_num);
902 
903                     frame_num += 1;
904                 });
905             });
906 
907             assert!(
908                 expected_crcs.is_empty(),
909                 "Some CRCs were not produced by the decoder: {:?}",
910                 expected_crcs
911             );
912         }
913     }
914 
915     #[test]
916     // Ignore this test by default as it requires libva-compatible hardware.
917     #[ignore]
test_25fps_interlaced_h264()918     fn test_25fps_interlaced_h264() {
919         /// This test is the same as
920         /// h264::decoders::tests::test_25fps_interlaced_h264, but with an
921         /// actual backend to test whether the backend specific logic works and
922         /// the produced CRCs match their expected values.
923         const TEST_STREAM: &[u8] = include_bytes!("../test_data/test-25fps-interlaced.h264");
924         const STREAM_CRCS: &str = include_str!("../test_data/test-25fps-interlaced.h264.crc");
925         let blocking_modes = [BlockingMode::Blocking, BlockingMode::NonBlocking];
926 
927         for blocking_mode in blocking_modes {
928             let mut expected_crcs = STREAM_CRCS.lines().collect::<HashSet<_>>();
929             let mut frame_num = 0;
930             let display = Display::open().unwrap();
931             let mut decoder = Decoder::new_vaapi(display, blocking_mode).unwrap();
932 
933             run_decoding_loop(&mut decoder, TEST_STREAM, |decoder| {
934                 process_ready_frames(decoder, &mut |decoder, handle| {
935                     // Contains the VA-API params used to decode the picture.
936                     // Useful if we want to write assertions against any
937                     // particular value used therein.
938                     let _params = get_test_params(decoder.backend());
939 
940                     process_handle(handle, false, Some(&mut expected_crcs), frame_num);
941 
942                     frame_num += 1;
943                 });
944             });
945 
946             assert!(
947                 expected_crcs.is_empty(),
948                 "Some CRCs were not produced by the decoder: {:?}",
949                 expected_crcs
950             );
951         }
952     }
953 
954     #[test]
955     // Ignore this test by default as it requires libva-compatible hardware.
956     #[ignore]
test_25fps_h264()957     fn test_25fps_h264() {
958         /// This test is the same as h264::decoders::tests::test_25fps_h264, but
959         /// with an actual backend to test whether the backend specific logic
960         /// works and the produced CRCs match their expected values.
961         const TEST_STREAM: &[u8] = include_bytes!("../test_data/test-25fps.h264");
962         const STREAM_CRCS: &str = include_str!("../test_data/test-25fps.h264.crc");
963         let blocking_modes = [BlockingMode::Blocking, BlockingMode::NonBlocking];
964 
965         for blocking_mode in blocking_modes {
966             let mut expected_crcs = STREAM_CRCS.lines().collect::<HashSet<_>>();
967             let mut frame_num = 0;
968             let display = Display::open().unwrap();
969             let mut decoder = Decoder::new_vaapi(display, blocking_mode).unwrap();
970 
971             run_decoding_loop(&mut decoder, TEST_STREAM, |decoder| {
972                 process_ready_frames(decoder, &mut |decoder, handle| {
973                     // Contains the VA-API params used to decode the picture.
974                     // Useful if we want to write assertions against any
975                     // particular value used therein.
976                     let _params = get_test_params(decoder.backend());
977 
978                     process_handle(handle, false, Some(&mut expected_crcs), frame_num);
979 
980                     frame_num += 1;
981                 });
982             });
983 
984             assert!(
985                 expected_crcs.is_empty(),
986                 "Some CRCs were not produced by the decoder: {:?}",
987                 expected_crcs
988             );
989         }
990     }
991 }
992