• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2022 The ChromiumOS Authors
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 use std::rc::Rc;
6 
7 use anyhow::anyhow;
8 use anyhow::Context as AnyhowContext;
9 use libva::{
10     BufferType, Display, IQMatrix, IQMatrixBufferH264, PictureParameter,
11     PictureParameterBufferH264, SliceParameter,
12 };
13 
14 use crate::backend::vaapi::decoder::va_surface_id;
15 use crate::backend::vaapi::decoder::DecodedHandle as VADecodedHandle;
16 use crate::backend::vaapi::decoder::VaStreamInfo;
17 use crate::backend::vaapi::decoder::VaapiBackend;
18 use crate::backend::vaapi::decoder::VaapiPicture;
19 use crate::codec::h264::dpb::Dpb;
20 use crate::codec::h264::dpb::DpbEntry;
21 use crate::codec::h264::parser::Level;
22 use crate::codec::h264::parser::Pps;
23 use crate::codec::h264::parser::Profile;
24 use crate::codec::h264::parser::Slice;
25 use crate::codec::h264::parser::SliceHeader;
26 use crate::codec::h264::parser::Sps;
27 use crate::codec::h264::picture::Field;
28 use crate::codec::h264::picture::PictureData;
29 use crate::codec::h264::picture::Reference;
30 use crate::decoder::stateless::h264::StatelessH264DecoderBackend;
31 use crate::decoder::stateless::h264::H264;
32 use crate::decoder::stateless::NewPictureError;
33 use crate::decoder::stateless::NewPictureResult;
34 use crate::decoder::stateless::NewStatelessDecoderError;
35 use crate::decoder::stateless::StatelessBackendResult;
36 use crate::decoder::stateless::StatelessDecoder;
37 use crate::decoder::stateless::StatelessDecoderBackend;
38 use crate::decoder::stateless::StatelessDecoderBackendPicture;
39 use crate::decoder::BlockingMode;
40 use crate::decoder::DecodedHandle;
41 use crate::video_frame::VideoFrame;
42 use crate::Rect;
43 use crate::Resolution;
44 
45 impl VaStreamInfo for &Rc<Sps> {
va_profile(&self) -> anyhow::Result<i32>46     fn va_profile(&self) -> anyhow::Result<i32> {
47         let profile_idc = self.profile_idc;
48         let profile = Profile::try_from(profile_idc)
49             .map_err(|err| anyhow!(err))
50             .with_context(|| format!("Invalid profile_idc {:?}", profile_idc))?;
51 
52         match profile {
53             Profile::Baseline => {
54                 if self.constraint_set0_flag {
55                     Ok(libva::VAProfile::VAProfileH264ConstrainedBaseline)
56                 } else {
57                     Err(anyhow!(
58                         "Unsupported stream: profile_idc=66, but constraint_set0_flag is unset"
59                     ))
60                 }
61             }
62             Profile::Main => Ok(libva::VAProfile::VAProfileH264Main),
63             Profile::Extended => {
64                 if self.constraint_set1_flag {
65                     Ok(libva::VAProfile::VAProfileH264Main)
66                 } else {
67                     Err(anyhow!(
68                         "Unsupported stream: profile_idc=88, but constraint_set1_flag is unset"
69                     ))
70                 }
71             }
72             Profile::High | Profile::High422P | Profile::High10 => {
73                 Ok(libva::VAProfile::VAProfileH264High)
74             }
75         }
76     }
77 
rt_format(&self) -> anyhow::Result<u32>78     fn rt_format(&self) -> anyhow::Result<u32> {
79         let bit_depth_luma = self.bit_depth_chroma_minus8 + 8;
80         let chroma_format_idc = self.chroma_format_idc;
81 
82         match (bit_depth_luma, chroma_format_idc) {
83             (8, 0) | (8, 1) => Ok(libva::VA_RT_FORMAT_YUV420),
84             (8, 2) => Ok(libva::VA_RT_FORMAT_YUV422),
85             (8, 3) => Ok(libva::VA_RT_FORMAT_YUV444),
86             (10, 0) | (10, 1) => Ok(libva::VA_RT_FORMAT_YUV420_10),
87             (10, 2) => Ok(libva::VA_RT_FORMAT_YUV422_10),
88             (10, 3) => Ok(libva::VA_RT_FORMAT_YUV444_10),
89             (12, 0) | (12, 1) => Ok(libva::VA_RT_FORMAT_YUV420_12),
90             (12, 2) => Ok(libva::VA_RT_FORMAT_YUV422_12),
91             (12, 3) => Ok(libva::VA_RT_FORMAT_YUV444_12),
92             _ => Err(anyhow!(
93                 "unsupported bit depth/chroma format pair {}, {}",
94                 bit_depth_luma,
95                 chroma_format_idc
96             )),
97         }
98     }
99 
min_num_surfaces(&self) -> usize100     fn min_num_surfaces(&self) -> usize {
101         self.max_dpb_frames() + 4
102     }
103 
coded_size(&self) -> Resolution104     fn coded_size(&self) -> Resolution {
105         Resolution::from((self.width(), self.height()))
106     }
107 
visible_rect(&self) -> Rect108     fn visible_rect(&self) -> Rect {
109         let rect = self.visible_rectangle();
110 
111         Rect { x: rect.min.x, y: rect.min.y, width: rect.max.x, height: rect.max.y }
112     }
113 }
114 
115 /// Fills the internal `va_pic` picture parameter with data from `h264_pic`
fill_va_h264_pic( h264_pic: &PictureData, surface_id: libva::VASurfaceID, merge_other_field: bool, ) -> libva::PictureH264116 fn fill_va_h264_pic(
117     h264_pic: &PictureData,
118     surface_id: libva::VASurfaceID,
119     merge_other_field: bool,
120 ) -> libva::PictureH264 {
121     let mut flags = 0;
122     let frame_idx = if matches!(h264_pic.reference(), Reference::LongTerm) {
123         flags |= libva::VA_PICTURE_H264_LONG_TERM_REFERENCE;
124         h264_pic.long_term_frame_idx
125     } else {
126         if matches!(h264_pic.reference(), Reference::ShortTerm { .. }) {
127             flags |= libva::VA_PICTURE_H264_SHORT_TERM_REFERENCE;
128         }
129 
130         h264_pic.frame_num
131     };
132 
133     let top_field_order_cnt;
134     let bottom_field_order_cnt;
135 
136     match h264_pic.field {
137         Field::Frame => {
138             top_field_order_cnt = h264_pic.top_field_order_cnt;
139             bottom_field_order_cnt = h264_pic.bottom_field_order_cnt;
140         }
141         Field::Top => {
142             match (merge_other_field, h264_pic.other_field()) {
143                 (true, Some(other_field)) => {
144                     bottom_field_order_cnt = other_field.borrow().bottom_field_order_cnt
145                 }
146                 (_, _) => {
147                     flags |= libva::VA_PICTURE_H264_TOP_FIELD;
148                     bottom_field_order_cnt = 0;
149                 }
150             }
151 
152             top_field_order_cnt = h264_pic.top_field_order_cnt;
153         }
154         Field::Bottom => {
155             match (merge_other_field, h264_pic.other_field()) {
156                 (true, Some(other_field)) => {
157                     top_field_order_cnt = other_field.borrow().top_field_order_cnt
158                 }
159                 (_, _) => {
160                     flags |= libva::VA_PICTURE_H264_BOTTOM_FIELD;
161                     top_field_order_cnt = 0;
162                 }
163             }
164 
165             bottom_field_order_cnt = h264_pic.bottom_field_order_cnt;
166         }
167     }
168 
169     libva::PictureH264::new(
170         surface_id,
171         frame_idx,
172         flags,
173         top_field_order_cnt,
174         bottom_field_order_cnt,
175     )
176 }
177 
178 /// Builds an invalid VaPictureH264. These pictures are used to fill empty
179 /// array slots there is no data to fill them with.
build_invalid_va_h264_pic() -> libva::PictureH264180 fn build_invalid_va_h264_pic() -> libva::PictureH264 {
181     libva::PictureH264::new(libva::VA_INVALID_ID, 0, libva::VA_PICTURE_H264_INVALID, 0, 0)
182 }
183 
build_iq_matrix(pps: &Pps) -> BufferType184 fn build_iq_matrix(pps: &Pps) -> BufferType {
185     let mut scaling_list4x4 = [[0; 16]; 6];
186     let mut scaling_list8x8 = [[0; 64]; 2];
187 
188     (0..6).for_each(|i| {
189         super::get_raster_from_zigzag_4x4(pps.scaling_lists_4x4[i], &mut scaling_list4x4[i]);
190     });
191 
192     (0..2).for_each(|i| {
193         super::get_raster_from_zigzag_8x8(pps.scaling_lists_8x8[i], &mut scaling_list8x8[i]);
194     });
195 
196     BufferType::IQMatrix(IQMatrix::H264(IQMatrixBufferH264::new(scaling_list4x4, scaling_list8x8)))
197 }
198 
build_pic_param<V: VideoFrame>( hdr: &SliceHeader, current_picture: &PictureData, current_surface_id: libva::VASurfaceID, dpb: &Dpb<VADecodedHandle<V>>, sps: &Sps, pps: &Pps, ) -> anyhow::Result<BufferType>199 fn build_pic_param<V: VideoFrame>(
200     hdr: &SliceHeader,
201     current_picture: &PictureData,
202     current_surface_id: libva::VASurfaceID,
203     dpb: &Dpb<VADecodedHandle<V>>,
204     sps: &Sps,
205     pps: &Pps,
206 ) -> anyhow::Result<BufferType> {
207     let curr_pic = fill_va_h264_pic(current_picture, current_surface_id, false);
208 
209     let mut refs: Vec<_> = dpb
210         .short_term_refs_iter()
211         .filter(|handle| {
212             let pic = handle.pic.borrow();
213             !pic.nonexisting && !pic.is_second_field()
214         })
215         .cloned()
216         .collect();
217 
218     let mut va_refs = vec![];
219 
220     for handle in &refs {
221         let surface_id = va_surface_id(&handle.reference);
222         let ref_pic = handle.pic.borrow();
223         let pic = fill_va_h264_pic(&ref_pic, surface_id, true);
224         va_refs.push(pic);
225     }
226 
227     refs.clear();
228 
229     let mut refs: Vec<_> = dpb
230         .long_term_refs_iter()
231         .filter(|handle| {
232             let pic = handle.pic.borrow();
233             !pic.is_second_field()
234         })
235         .cloned()
236         .collect();
237 
238     for handle in &refs {
239         let surface_id = va_surface_id(&handle.reference);
240         let ref_pic = handle.pic.borrow();
241         let pic = fill_va_h264_pic(&ref_pic, surface_id, true);
242         va_refs.push(pic);
243     }
244 
245     for _ in va_refs.len()..16 {
246         va_refs.push(build_invalid_va_h264_pic());
247     }
248 
249     refs.clear();
250 
251     let seq_fields = libva::H264SeqFields::new(
252         sps.chroma_format_idc as u32,
253         sps.separate_colour_plane_flag as u32,
254         sps.gaps_in_frame_num_value_allowed_flag as u32,
255         sps.frame_mbs_only_flag as u32,
256         sps.mb_adaptive_frame_field_flag as u32,
257         sps.direct_8x8_inference_flag as u32,
258         (sps.level_idc >= Level::L3_1) as u32, /* see A.3.3.2 */
259         sps.log2_max_frame_num_minus4 as u32,
260         sps.pic_order_cnt_type as u32,
261         sps.log2_max_pic_order_cnt_lsb_minus4 as u32,
262         sps.delta_pic_order_always_zero_flag as u32,
263     );
264     let interlaced = !sps.frame_mbs_only_flag as u32;
265     let picture_height_in_mbs_minus1 = ((sps.pic_height_in_map_units_minus1 + 1) << interlaced) - 1;
266 
267     let pic_fields = libva::H264PicFields::new(
268         pps.entropy_coding_mode_flag as u32,
269         pps.weighted_pred_flag as u32,
270         pps.weighted_bipred_idc as u32,
271         pps.transform_8x8_mode_flag as u32,
272         hdr.field_pic_flag as u32,
273         pps.constrained_intra_pred_flag as u32,
274         pps.bottom_field_pic_order_in_frame_present_flag as u32,
275         pps.deblocking_filter_control_present_flag as u32,
276         pps.redundant_pic_cnt_present_flag as u32,
277         (current_picture.nal_ref_idc != 0) as u32,
278     );
279 
280     let va_refs = va_refs.try_into();
281     let va_refs = match va_refs {
282         Ok(va_refs) => va_refs,
283         Err(_) => {
284             panic!("Bug: wrong number of references, expected 16");
285         }
286     };
287 
288     let pic_param = PictureParameterBufferH264::new(
289         curr_pic,
290         va_refs,
291         sps.pic_width_in_mbs_minus1,
292         picture_height_in_mbs_minus1,
293         sps.bit_depth_luma_minus8,
294         sps.bit_depth_chroma_minus8,
295         sps.max_num_ref_frames,
296         &seq_fields,
297         0, /* FMO not supported by VA */
298         0, /* FMO not supported by VA */
299         0, /* FMO not supported by VA */
300         pps.pic_init_qp_minus26,
301         pps.pic_init_qs_minus26,
302         pps.chroma_qp_index_offset,
303         pps.second_chroma_qp_index_offset,
304         &pic_fields,
305         hdr.frame_num,
306     );
307 
308     Ok(BufferType::PictureParameter(PictureParameter::H264(pic_param)))
309 }
310 
fill_ref_pic_list<V: VideoFrame>( ref_list_x: &[&DpbEntry<VADecodedHandle<V>>], ) -> [libva::PictureH264; 32]311 fn fill_ref_pic_list<V: VideoFrame>(
312     ref_list_x: &[&DpbEntry<VADecodedHandle<V>>],
313 ) -> [libva::PictureH264; 32] {
314     let mut va_pics = vec![];
315 
316     for handle in ref_list_x {
317         let surface_id = va_surface_id(&handle.reference);
318         let ref_pic = handle.pic.borrow();
319         let merge = matches!(ref_pic.field, Field::Frame);
320         let va_pic = fill_va_h264_pic(&ref_pic, surface_id, merge);
321 
322         va_pics.push(va_pic);
323     }
324 
325     for _ in va_pics.len()..32 {
326         va_pics.push(build_invalid_va_h264_pic());
327     }
328 
329     let va_pics: [libva::PictureH264; 32] = match va_pics.try_into() {
330         Ok(va_pics) => va_pics,
331         Err(e) => panic!("Bug: wrong number of references, expected 32, got {:?}", e.len()),
332     };
333 
334     va_pics
335 }
336 
build_slice_param<V: VideoFrame>( hdr: &SliceHeader, slice_size: usize, ref_list_0: &[&DpbEntry<VADecodedHandle<V>>], ref_list_1: &[&DpbEntry<VADecodedHandle<V>>], sps: &Sps, pps: &Pps, ) -> anyhow::Result<BufferType>337 fn build_slice_param<V: VideoFrame>(
338     hdr: &SliceHeader,
339     slice_size: usize,
340     ref_list_0: &[&DpbEntry<VADecodedHandle<V>>],
341     ref_list_1: &[&DpbEntry<VADecodedHandle<V>>],
342     sps: &Sps,
343     pps: &Pps,
344 ) -> anyhow::Result<BufferType> {
345     let ref_list_0 = fill_ref_pic_list(ref_list_0);
346     let ref_list_1 = fill_ref_pic_list(ref_list_1);
347     let pwt = &hdr.pred_weight_table;
348 
349     let mut luma_weight_l0_flag = false;
350     let mut chroma_weight_l0_flag = false;
351     let mut luma_weight_l0 = [0i16; 32];
352     let mut luma_offset_l0 = [0i16; 32];
353     let mut chroma_weight_l0: [[i16; 2]; 32] = [[0i16; 2]; 32];
354     let mut chroma_offset_l0: [[i16; 2]; 32] = [[0i16; 2]; 32];
355 
356     let mut luma_weight_l1_flag = false;
357     let mut chroma_weight_l1_flag = false;
358     let mut luma_weight_l1 = [0i16; 32];
359     let mut luma_offset_l1 = [0i16; 32];
360     let mut chroma_weight_l1: [[i16; 2]; 32] = [[0i16; 2]; 32];
361     let mut chroma_offset_l1: [[i16; 2]; 32] = [[0i16; 2]; 32];
362 
363     let mut fill_l0 = false;
364     let mut fill_l1 = false;
365 
366     if pps.weighted_pred_flag && (hdr.slice_type.is_p() || hdr.slice_type.is_sp()) {
367         fill_l0 = true;
368     } else if pps.weighted_bipred_idc == 1 && hdr.slice_type.is_b() {
369         fill_l0 = true;
370         fill_l1 = true;
371     }
372 
373     if fill_l0 {
374         luma_weight_l0_flag = true;
375 
376         for i in 0..=hdr.num_ref_idx_l0_active_minus1 as usize {
377             luma_weight_l0[i] = pwt.luma_weight_l0[i];
378             luma_offset_l0[i] = i16::from(pwt.luma_offset_l0[i]);
379         }
380 
381         chroma_weight_l0_flag = sps.chroma_array_type() != 0;
382         if chroma_weight_l0_flag {
383             for i in 0..=hdr.num_ref_idx_l0_active_minus1 as usize {
384                 for j in 0..2 {
385                     chroma_weight_l0[i][j] = pwt.chroma_weight_l0[i][j];
386                     chroma_offset_l0[i][j] = i16::from(pwt.chroma_offset_l0[i][j]);
387                 }
388             }
389         }
390     }
391 
392     if fill_l1 {
393         luma_weight_l1_flag = true;
394 
395         luma_weight_l1[..(hdr.num_ref_idx_l1_active_minus1 as usize + 1)].clone_from_slice(
396             &pwt.luma_weight_l1[..(hdr.num_ref_idx_l1_active_minus1 as usize + 1)],
397         );
398         luma_offset_l1[..(hdr.num_ref_idx_l1_active_minus1 as usize + 1)].clone_from_slice(
399             &pwt.luma_offset_l1[..(hdr.num_ref_idx_l1_active_minus1 as usize + 1)],
400         );
401 
402         chroma_weight_l1_flag = sps.chroma_array_type() != 0;
403         if chroma_weight_l1_flag {
404             for i in 0..=hdr.num_ref_idx_l1_active_minus1 as usize {
405                 for j in 0..2 {
406                     chroma_weight_l1[i][j] = pwt.chroma_weight_l1[i][j];
407                     chroma_offset_l1[i][j] = i16::from(pwt.chroma_offset_l1[i][j]);
408                 }
409             }
410         }
411     }
412 
413     let slice_param = libva::SliceParameterBufferH264::new(
414         slice_size as u32,
415         0,
416         libva::VA_SLICE_DATA_FLAG_ALL,
417         hdr.header_bit_size as u16,
418         hdr.first_mb_in_slice as u16,
419         hdr.slice_type as u8,
420         hdr.direct_spatial_mv_pred_flag as u8,
421         hdr.num_ref_idx_l0_active_minus1,
422         hdr.num_ref_idx_l1_active_minus1,
423         hdr.cabac_init_idc,
424         hdr.slice_qp_delta,
425         hdr.disable_deblocking_filter_idc,
426         hdr.slice_alpha_c0_offset_div2,
427         hdr.slice_beta_offset_div2,
428         ref_list_0,
429         ref_list_1,
430         pwt.luma_log2_weight_denom,
431         pwt.chroma_log2_weight_denom,
432         luma_weight_l0_flag as u8,
433         luma_weight_l0,
434         luma_offset_l0,
435         chroma_weight_l0_flag as u8,
436         chroma_weight_l0,
437         chroma_offset_l0,
438         luma_weight_l1_flag as u8,
439         luma_weight_l1,
440         luma_offset_l1,
441         chroma_weight_l1_flag as u8,
442         chroma_weight_l1,
443         chroma_offset_l1,
444     );
445 
446     Ok(BufferType::SliceParameter(SliceParameter::H264(slice_param)))
447 }
448 
// The per-frame decoding state for H.264 on the VAAPI backend is a VA picture.
impl<V: VideoFrame> StatelessDecoderBackendPicture<H264> for VaapiBackend<V> {
    type Picture = VaapiPicture<V>;
}
452 
453 impl<V: VideoFrame> StatelessH264DecoderBackend for VaapiBackend<V> {
new_sequence(&mut self, sps: &Rc<Sps>) -> StatelessBackendResult<()>454     fn new_sequence(&mut self, sps: &Rc<Sps>) -> StatelessBackendResult<()> {
455         self.new_sequence(sps)
456     }
457 
start_picture( &mut self, picture: &mut Self::Picture, picture_data: &PictureData, sps: &Sps, pps: &Pps, dpb: &Dpb<Self::Handle>, hdr: &SliceHeader, ) -> StatelessBackendResult<()>458     fn start_picture(
459         &mut self,
460         picture: &mut Self::Picture,
461         picture_data: &PictureData,
462         sps: &Sps,
463         pps: &Pps,
464         dpb: &Dpb<Self::Handle>,
465         hdr: &SliceHeader,
466     ) -> StatelessBackendResult<()> {
467         let context = &self.context;
468 
469         let surface_id = picture.surface().id();
470 
471         let pic_param = build_pic_param(hdr, picture_data, surface_id, dpb, sps, pps)?;
472         let pic_param =
473             context.create_buffer(pic_param).context("while creating picture parameter buffer")?;
474 
475         let iq_matrix = build_iq_matrix(pps);
476         let iq_matrix =
477             context.create_buffer(iq_matrix).context("while creating IQ matrix buffer")?;
478 
479         picture.add_buffer(pic_param);
480         picture.add_buffer(iq_matrix);
481 
482         Ok(())
483     }
484 
decode_slice( &mut self, picture: &mut Self::Picture, slice: &Slice, sps: &Sps, pps: &Pps, ref_pic_list0: &[&DpbEntry<Self::Handle>], ref_pic_list1: &[&DpbEntry<Self::Handle>], ) -> StatelessBackendResult<()>485     fn decode_slice(
486         &mut self,
487         picture: &mut Self::Picture,
488         slice: &Slice,
489         sps: &Sps,
490         pps: &Pps,
491         ref_pic_list0: &[&DpbEntry<Self::Handle>],
492         ref_pic_list1: &[&DpbEntry<Self::Handle>],
493     ) -> StatelessBackendResult<()> {
494         let context = &self.context;
495 
496         let slice_param = context
497             .create_buffer(build_slice_param(
498                 &slice.header,
499                 slice.nalu.size,
500                 ref_pic_list0,
501                 ref_pic_list1,
502                 sps,
503                 pps,
504             )?)
505             .context("while creating slice params buffer")?;
506 
507         picture.add_buffer(slice_param);
508 
509         let slice_data = context
510             .create_buffer(BufferType::SliceData(Vec::from(slice.nalu.as_ref())))
511             .context("while creating slice data buffer")?;
512 
513         picture.add_buffer(slice_data);
514 
515         Ok(())
516     }
517 
submit_picture(&mut self, picture: Self::Picture) -> StatelessBackendResult<Self::Handle>518     fn submit_picture(&mut self, picture: Self::Picture) -> StatelessBackendResult<Self::Handle> {
519         self.process_picture::<H264>(picture)
520     }
521 
new_picture( &mut self, timestamp: u64, alloc_cb: &mut dyn FnMut() -> Option< <<Self as StatelessDecoderBackend>::Handle as DecodedHandle>::Frame, >, ) -> NewPictureResult<Self::Picture>522     fn new_picture(
523         &mut self,
524         timestamp: u64,
525         alloc_cb: &mut dyn FnMut() -> Option<
526             <<Self as StatelessDecoderBackend>::Handle as DecodedHandle>::Frame,
527         >,
528     ) -> NewPictureResult<Self::Picture> {
529         Ok(VaapiPicture::new(
530             timestamp,
531             Rc::clone(&self.context),
532             alloc_cb().ok_or(NewPictureError::OutOfOutputBuffers)?,
533         ))
534     }
535 
new_field_picture( &mut self, timestamp: u64, first_field: &Self::Handle, ) -> NewPictureResult<Self::Picture>536     fn new_field_picture(
537         &mut self,
538         timestamp: u64,
539         first_field: &Self::Handle,
540     ) -> NewPictureResult<Self::Picture> {
541         // Decode to the same surface as the first field picture.
542         Ok(first_field.borrow().new_picture_from_same_surface(timestamp))
543     }
544 }
545 
546 impl<V: VideoFrame> StatelessDecoder<H264, VaapiBackend<V>> {
547     // Creates a new instance of the decoder using the VAAPI backend.
new_vaapi( display: Rc<Display>, blocking_mode: BlockingMode, ) -> Result<Self, NewStatelessDecoderError>548     pub fn new_vaapi(
549         display: Rc<Display>,
550         blocking_mode: BlockingMode,
551     ) -> Result<Self, NewStatelessDecoderError> {
552         Self::new(VaapiBackend::new(display, false), blocking_mode)
553     }
554 }
555 
#[cfg(test)]
mod tests {
    use libva::Display;

    use crate::bitstream_utils::NalIterator;
    use crate::codec::h264::parser::Nalu;
    use crate::decoder::stateless::h264::tests::DECODE_64X64_PROGRESSIVE_I;
    use crate::decoder::stateless::h264::tests::DECODE_64X64_PROGRESSIVE_I_P;
    use crate::decoder::stateless::h264::tests::DECODE_64X64_PROGRESSIVE_I_P_B_P;
    use crate::decoder::stateless::h264::tests::DECODE_64X64_PROGRESSIVE_I_P_B_P_HIGH;
    use crate::decoder::stateless::h264::tests::DECODE_TEST_25FPS;
    use crate::decoder::stateless::h264::tests::DECODE_TEST_25FPS_INTERLACED;
    use crate::decoder::stateless::h264::H264;
    use crate::decoder::stateless::tests::test_decode_stream;
    use crate::decoder::stateless::tests::TestStream;
    use crate::decoder::stateless::StatelessDecoder;
    use crate::decoder::BlockingMode;
    use crate::utils::simple_playback_loop;
    use crate::utils::simple_playback_loop_owned_frames;
    use crate::DecodedFormat;

    /// Run `test` using the vaapi decoder, in both blocking and non-blocking modes.
    fn test_decoder_vaapi(
        test: &TestStream,
        output_format: DecodedFormat,
        blocking_mode: BlockingMode,
    ) {
        let display = Display::open().unwrap();
        let decoder = StatelessDecoder::<H264, _>::new_vaapi::<()>(display, blocking_mode).unwrap();

        test_decode_stream(
            |d, s, f| {
                simple_playback_loop(
                    d,
                    NalIterator::<Nalu>::new(s),
                    f,
                    &mut simple_playback_loop_owned_frames,
                    output_format,
                    blocking_mode,
                )
            },
            decoder,
            test,
            true,
            false,
        );
    }

    #[test]
    // Ignore this test by default as it requires libva-compatible hardware.
    #[ignore]
    fn test_64x64_progressive_i_block() {
        test_decoder_vaapi(&DECODE_64X64_PROGRESSIVE_I, DecodedFormat::NV12, BlockingMode::Blocking);
    }

    #[test]
    // Ignore this test by default as it requires libva-compatible hardware.
    #[ignore]
    fn test_64x64_progressive_i_nonblock() {
        test_decoder_vaapi(
            &DECODE_64X64_PROGRESSIVE_I,
            DecodedFormat::NV12,
            BlockingMode::NonBlocking,
        );
    }

    #[test]
    // Ignore this test by default as it requires libva-compatible hardware.
    #[ignore]
    fn test_64x64_progressive_i_p_block() {
        test_decoder_vaapi(
            &DECODE_64X64_PROGRESSIVE_I_P,
            DecodedFormat::NV12,
            BlockingMode::Blocking,
        );
    }

    #[test]
    // Ignore this test by default as it requires libva-compatible hardware.
    #[ignore]
    fn test_64x64_progressive_i_p_nonblock() {
        test_decoder_vaapi(
            &DECODE_64X64_PROGRESSIVE_I_P,
            DecodedFormat::NV12,
            BlockingMode::NonBlocking,
        );
    }

    #[test]
    // Ignore this test by default as it requires libva-compatible hardware.
    #[ignore]
    fn test_64x64_progressive_i_p_b_p_block() {
        test_decoder_vaapi(
            &DECODE_64X64_PROGRESSIVE_I_P_B_P,
            DecodedFormat::NV12,
            BlockingMode::Blocking,
        );
    }

    #[test]
    // Ignore this test by default as it requires libva-compatible hardware.
    #[ignore]
    fn test_64x64_progressive_i_p_b_p_nonblock() {
        test_decoder_vaapi(
            &DECODE_64X64_PROGRESSIVE_I_P_B_P,
            DecodedFormat::NV12,
            BlockingMode::NonBlocking,
        );
    }

    #[test]
    // Ignore this test by default as it requires libva-compatible hardware.
    #[ignore]
    fn test_64x64_progressive_i_p_b_p_high_block() {
        test_decoder_vaapi(
            &DECODE_64X64_PROGRESSIVE_I_P_B_P_HIGH,
            DecodedFormat::NV12,
            BlockingMode::Blocking,
        );
    }

    #[test]
    // Ignore this test by default as it requires libva-compatible hardware.
    #[ignore]
    fn test_64x64_progressive_i_p_b_p_high_nonblock() {
        test_decoder_vaapi(
            &DECODE_64X64_PROGRESSIVE_I_P_B_P_HIGH,
            DecodedFormat::NV12,
            BlockingMode::NonBlocking,
        );
    }

    #[test]
    // Ignore this test by default as it requires libva-compatible hardware.
    #[ignore]
    fn test_25fps_block() {
        test_decoder_vaapi(&DECODE_TEST_25FPS, DecodedFormat::NV12, BlockingMode::Blocking);
    }

    #[test]
    // Ignore this test by default as it requires libva-compatible hardware.
    #[ignore]
    fn test_25fps_nonblock() {
        test_decoder_vaapi(&DECODE_TEST_25FPS, DecodedFormat::NV12, BlockingMode::NonBlocking);
    }

    #[test]
    // Ignore this test by default as it requires libva-compatible hardware.
    #[ignore]
    fn test_25fps_interlaced_block() {
        test_decoder_vaapi(
            &DECODE_TEST_25FPS_INTERLACED,
            DecodedFormat::NV12,
            BlockingMode::Blocking,
        );
    }

    #[test]
    // Ignore this test by default as it requires libva-compatible hardware.
    #[ignore]
    fn test_25fps_interlaced_nonblock() {
        test_decoder_vaapi(
            &DECODE_TEST_25FPS_INTERLACED,
            DecodedFormat::NV12,
            BlockingMode::NonBlocking,
        );
    }
}
734