// Copyright 2023 The ChromiumOS Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#[cfg(any(test, fuzzing))]
mod dummy;
#[cfg(feature = "v4l2")]
mod v4l2;
#[cfg(feature = "vaapi")]
mod vaapi;

use std::collections::btree_map::Entry;
use std::io::Cursor;
use std::os::fd::AsFd;
use std::os::fd::BorrowedFd;
use std::rc::Rc;

use anyhow::anyhow;
use anyhow::Context;
use log::debug;
use thiserror::Error;

use crate::codec::h264::dpb::Dpb;
use crate::codec::h264::dpb::DpbEntry;
use crate::codec::h264::dpb::DpbPicRefList;
use crate::codec::h264::dpb::MmcoError;
use crate::codec::h264::dpb::ReferencePicLists;
use crate::codec::h264::parser::MaxLongTermFrameIdx;
use crate::codec::h264::parser::Nalu;
use crate::codec::h264::parser::NaluType;
use crate::codec::h264::parser::Parser;
use crate::codec::h264::parser::Pps;
use crate::codec::h264::parser::RefPicListModification;
use crate::codec::h264::parser::Slice;
use crate::codec::h264::parser::SliceHeader;
use crate::codec::h264::parser::SliceType;
use crate::codec::h264::parser::Sps;
use crate::codec::h264::picture::Field;
use crate::codec::h264::picture::FieldRank;
use crate::codec::h264::picture::IsIdr;
use crate::codec::h264::picture::PictureData;
use crate::codec::h264::picture::RcPictureData;
use crate::codec::h264::picture::Reference;
use crate::decoder::stateless::DecodeError;
use crate::decoder::stateless::DecodingState;
use crate::decoder::stateless::NewPictureResult;
use crate::decoder::stateless::StatelessBackendResult;
use crate::decoder::stateless::StatelessCodec;
use crate::decoder::stateless::StatelessDecoder;
use crate::decoder::stateless::StatelessDecoderBackend;
use crate::decoder::stateless::StatelessDecoderBackendPicture;
use crate::decoder::stateless::StatelessVideoDecoder;
use crate::decoder::BlockingMode;
use crate::decoder::DecodedHandle;
use crate::decoder::DecoderEvent;
use crate::decoder::StreamInfo;
use crate::Resolution;

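/// Rewrites a 64-element array from the zig-zag scan order used in the bitstream into raster
/// order, writing the result into `dst`.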
pub fn get_raster_from_zigzag_8x8(src: [u8; 64], dst: &mut [u8; 64]) {
    const ZIGZAG_8X8: [usize; 64] = [
        0, 1, 8, 16, 9, 2, 3, 10, 17, 24, 32, 25, 18, 11, 4, 5, 12, 19, 26, 33, 40, 48, 41, 34, 27,
        20, 13, 6, 7, 14, 21, 28, 35, 42, 49, 56, 57, 50, 43, 36, 29, 22, 15, 23, 30, 37, 44, 51,
        58, 59, 52, 45, 38, 31, 39, 46, 53, 60, 61, 54, 47, 55, 62, 63,
    ];

    for i in 0..64 {
        dst[ZIGZAG_8X8[i]] = src[i];
    }
}

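/// Rewrites a 16-element array from the zig-zag scan order used in the bitstream into raster
/// order, writing the result into `dst`.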
pub fn get_raster_from_zigzag_4x4(src: [u8; 16], dst: &mut [u8; 16]) {
    const ZIGZAG_4X4: [usize; 16] = [0, 1, 4, 8, 5, 2, 3, 6, 9, 12, 13, 10, 7, 11, 14, 15];

    for i in 0..16 {
        dst[ZIGZAG_4X4[i]] = src[i];
    }
}

/// Stateless backend methods specific to H.264.
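///
/// For a given picture, the decoder calls these methods roughly in the following order:
/// `new_sequence` whenever a new SPS becomes active, `new_picture` (or `new_field_picture` for
/// the second field of an interlaced frame), `start_picture`, `decode_slice` once per slice, and
/// finally `submit_picture`.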
pub trait StatelessH264DecoderBackend:
    StatelessDecoderBackend + StatelessDecoderBackendPicture<H264>
{
    /// Called when a new SPS is parsed.
    fn new_sequence(&mut self, sps: &Rc<Sps>) -> StatelessBackendResult<()>;

    /// Called when the decoder determines that a frame or field was found.
    fn new_picture(
        &mut self,
        timestamp: u64,
        alloc_cb: &mut dyn FnMut() -> Option<
            <<Self as StatelessDecoderBackend>::Handle as DecodedHandle>::Frame,
        >,
    ) -> NewPictureResult<Self::Picture>;

    /// Called when the decoder determines that a second field was found.
    /// Indicates that the underlying BackendHandle is to be shared between the
    /// two pictures. This is so both fields decode to the same underlying
    /// resource and can thus be presented together as a single frame.
    fn new_field_picture(
        &mut self,
        timestamp: u64,
        first_field: &Self::Handle,
    ) -> NewPictureResult<Self::Picture>;

    /// Called by the decoder when starting a new frame or field.
    fn start_picture(
        &mut self,
        picture: &mut Self::Picture,
        picture_data: &PictureData,
        sps: &Sps,
        pps: &Pps,
        dpb: &Dpb<Self::Handle>,
        hdr: &SliceHeader,
    ) -> StatelessBackendResult<()>;

    /// Called to dispatch a decode operation to the backend.
    #[allow(clippy::too_many_arguments)]
    fn decode_slice(
        &mut self,
        picture: &mut Self::Picture,
        slice: &Slice,
        sps: &Sps,
        pps: &Pps,
        ref_pic_list0: &[&DpbEntry<Self::Handle>],
        ref_pic_list1: &[&DpbEntry<Self::Handle>],
    ) -> StatelessBackendResult<()>;

    /// Called when the decoder wants the backend to finish the decoding
    /// operations for `picture`. At this point, `decode_slice` has been called
    /// for all slices.
    ///
    /// This call will assign the ownership of the BackendHandle to the Picture
    /// and then assign the ownership of the Picture to the Handle.
    fn submit_picture(&mut self, picture: Self::Picture) -> StatelessBackendResult<Self::Handle>;
}

/// Keeps track of the last values seen for negotiation purposes.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
struct NegotiationInfo {
    /// The current coded resolution.
    coded_resolution: Resolution,
    /// Same meaning as in the specification.
    profile_idc: u8,
    /// Same meaning as in the specification.
    bit_depth_luma_minus8: u8,
    /// Same meaning as in the specification.
    bit_depth_chroma_minus8: u8,
    /// Same meaning as in the specification.
    chroma_format_idc: u8,
    /// The maximum size of the DPB in frames.
    max_dpb_frames: usize,
    /// Whether this is an interlaced stream.
    interlaced: bool,
}

impl From<&Sps> for NegotiationInfo {
    fn from(sps: &Sps) -> Self {
        NegotiationInfo {
            coded_resolution: Resolution::from((sps.width(), sps.height())),
            profile_idc: sps.profile_idc,
            bit_depth_luma_minus8: sps.bit_depth_luma_minus8,
            bit_depth_chroma_minus8: sps.bit_depth_chroma_minus8,
            chroma_format_idc: sps.chroma_format_idc,
            max_dpb_frames: sps.max_dpb_frames(),
            interlaced: !sps.frame_mbs_only_flag,
        }
    }
}

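/// Identifies which of the two reference picture lists (RefPicList0 or RefPicList1 in the
/// specification) an operation applies to.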
#[derive(Copy, Clone, Debug)]
enum RefPicList {
    RefPicList0,
    RefPicList1,
}

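/// Cached state from the previous reference picture, used when computing the picture order count
/// of subsequent pictures and when detecting gaps in `frame_num`.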
pub struct PrevReferencePicInfo {
    frame_num: u32,
    has_mmco_5: bool,
    top_field_order_cnt: i32,
    pic_order_cnt_msb: i32,
    pic_order_cnt_lsb: i32,
    field: Field,
}

impl Default for PrevReferencePicInfo {
    fn default() -> Self {
        Self {
            frame_num: Default::default(),
            has_mmco_5: Default::default(),
            top_field_order_cnt: Default::default(),
            pic_order_cnt_msb: Default::default(),
            pic_order_cnt_lsb: Default::default(),
            field: Field::Frame,
        }
    }
}

impl PrevReferencePicInfo {
    /// Store some variables related to the previous reference picture. These
    /// will be used in the decoding of future pictures.
    fn fill(&mut self, pic: &PictureData) {
        self.has_mmco_5 = pic.has_mmco_5;
        self.top_field_order_cnt = pic.top_field_order_cnt;
        self.pic_order_cnt_msb = pic.pic_order_cnt_msb;
        self.pic_order_cnt_lsb = pic.pic_order_cnt_lsb;
        self.field = pic.field;
        self.frame_num = pic.frame_num;
    }
}

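/// Cached state from the previous picture (reference or not), used when computing
/// `frame_num_offset` for subsequent pictures.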
#[derive(Default)]
pub struct PrevPicInfo {
    frame_num: u32,
    frame_num_offset: u32,
    has_mmco_5: bool,
}

impl PrevPicInfo {
    /// Store some variables related to the previous picture. These will be used
    /// in the decoding of future pictures.
    fn fill(&mut self, pic: &PictureData) {
        self.frame_num = pic.frame_num;
        self.has_mmco_5 = pic.has_mmco_5;
        self.frame_num_offset = pic.frame_num_offset;
    }
}

/// Corresponds to RefPicList0 and RefPicList1 in the specification. Computed for every slice,
/// points to the pictures in the DPB.
struct RefPicLists<'a, T> {
    ref_pic_list0: DpbPicRefList<'a, T>,
    ref_pic_list1: DpbPicRefList<'a, T>,
}

/// Used to track that first_mb_in_slice increases monotonically.
enum CurrentMacroblockTracking {
    SeparateColorPlane(std::collections::BTreeMap<u8, u32>),
    NonSeparateColorPlane(u32),
}

/// State of the picture being currently decoded.
///
/// Stored between calls to [`StatelessDecoder::handle_slice`] that belong to the same picture.
struct CurrentPicState<P> {
    /// Data for the current picture as extracted from the stream.
    pic: PictureData,
    /// PPS at the time of the current picture.
    pps: Rc<Pps>,
    /// Backend-specific data for that picture.
    backend_pic: P,
    /// List of reference pictures, used once per slice.
    ref_pic_lists: ReferencePicLists,
    /// The current macroblock we are processing.
    current_macroblock: CurrentMacroblockTracking,
}

/// State of the H.264 decoder.
///
/// `H` is the type of the decoded handles and `P` the backend's picture type.
pub struct H264DecoderState<H: DecodedHandle, P> {
    /// H.264 bitstream parser.
    parser: Parser,
    /// Keeps track of the last stream parameters seen for negotiation purposes.
    negotiation_info: NegotiationInfo,

    /// The decoded picture buffer.
    dpb: Dpb<H>,

    /// Cached variables from the previous reference picture.
    prev_ref_pic_info: PrevReferencePicInfo,
    /// Cached variables from the previous picture.
    prev_pic_info: PrevPicInfo,
    /// Maximum index of the long-term frame.
    max_long_term_frame_idx: MaxLongTermFrameIdx,

    /// The picture currently being decoded. We need to preserve it between calls to `decode`
    /// because multiple slices will be processed in different calls to `decode`.
    current_pic: Option<CurrentPicState<P>>,
}

impl<H, P> Default for H264DecoderState<H, P>
where
    H: DecodedHandle,
{
    fn default() -> Self {
        H264DecoderState {
            parser: Default::default(),
            negotiation_info: Default::default(),
            dpb: Default::default(),
            prev_ref_pic_info: Default::default(),
            prev_pic_info: Default::default(),
            max_long_term_frame_idx: Default::default(),
            current_pic: None,
        }
    }
}

/// [`StatelessCodec`] structure to use in order to create an H.264 stateless decoder.
///
/// # Accepted input
///
/// A decoder using this codec processes exactly one NAL unit of input per call to
/// [`StatelessDecoder::decode`], and returns the number of bytes until the end of this NAL unit.
/// This makes it possible to call [`Decode`](StatelessDecoder::decode) repeatedly on some unsplit
/// Annex B stream, shrinking it by the number of bytes processed after each call, until the
/// stream ends up being empty.
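///
/// A minimal sketch of that calling pattern (`decoder`, `alloc_cb` and `handle_events` are
/// placeholders for client-side code, not items provided by this module):
///
/// ```ignore
/// let mut remaining: &[u8] = annex_b_stream;
/// while !remaining.is_empty() {
///     match decoder.decode(timestamp, remaining, &mut alloc_cb) {
///         // `processed` covers exactly one NAL unit; drop it and continue.
///         Ok(processed) => remaining = &remaining[processed..],
///         // The decoder has pending events (e.g. a format change) to handle first.
///         Err(DecodeError::CheckEvents) => handle_events(&mut decoder),
///         Err(e) => panic!("decode error: {}", e),
///     }
/// }
/// ```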
pub struct H264;

impl StatelessCodec for H264 {
    type FormatInfo = Rc<Sps>;
    type DecoderState<H: DecodedHandle, P> = H264DecoderState<H, P>;
}

#[derive(Debug, Error)]
enum FindFirstFieldError {
    #[error("expected complementary field {0:?}, got {1:?}")]
    ExpectedComplementaryField(Field, Field),
    #[error("the previous field's frame_num value {0} differs from the current one's {1}")]
    FrameNumDiffers(u32, u32),
}

impl<H, P> H264DecoderState<H, P>
where
    H: DecodedHandle + Clone,
{
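    /// Compute the picture order count of `pic` following the process described in section 8.2.1
    /// of the specification.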
    fn compute_pic_order_count(&mut self, pic: &mut PictureData, sps: &Sps) -> anyhow::Result<()> {
        match pic.pic_order_cnt_type {
            // Spec 8.2.1.1
            0 => {
                let prev_pic_order_cnt_msb;
                let prev_pic_order_cnt_lsb;

                if matches!(pic.is_idr, IsIdr::Yes { .. }) {
                    prev_pic_order_cnt_lsb = 0;
                    prev_pic_order_cnt_msb = 0;
                } else if self.prev_ref_pic_info.has_mmco_5 {
                    if !matches!(self.prev_ref_pic_info.field, Field::Bottom) {
                        prev_pic_order_cnt_msb = 0;
                        prev_pic_order_cnt_lsb = self.prev_ref_pic_info.top_field_order_cnt;
                    } else {
                        prev_pic_order_cnt_msb = 0;
                        prev_pic_order_cnt_lsb = 0;
                    }
                } else {
                    prev_pic_order_cnt_msb = self.prev_ref_pic_info.pic_order_cnt_msb;
                    prev_pic_order_cnt_lsb = self.prev_ref_pic_info.pic_order_cnt_lsb;
                }

                let max_pic_order_cnt_lsb = 1 << (sps.log2_max_pic_order_cnt_lsb_minus4 + 4);

                pic.pic_order_cnt_msb = if (pic.pic_order_cnt_lsb
                    < self.prev_ref_pic_info.pic_order_cnt_lsb)
                    && (prev_pic_order_cnt_lsb - pic.pic_order_cnt_lsb >= max_pic_order_cnt_lsb / 2)
                {
                    prev_pic_order_cnt_msb + max_pic_order_cnt_lsb
                } else if (pic.pic_order_cnt_lsb > prev_pic_order_cnt_lsb)
                    && (pic.pic_order_cnt_lsb - prev_pic_order_cnt_lsb > max_pic_order_cnt_lsb / 2)
                {
                    prev_pic_order_cnt_msb - max_pic_order_cnt_lsb
                } else {
                    prev_pic_order_cnt_msb
                };

                if !matches!(pic.field, Field::Bottom) {
                    pic.top_field_order_cnt = pic.pic_order_cnt_msb + pic.pic_order_cnt_lsb;
                }

                if !matches!(pic.field, Field::Top) {
                    if matches!(pic.field, Field::Frame) {
                        pic.bottom_field_order_cnt =
                            pic.top_field_order_cnt + pic.delta_pic_order_cnt_bottom;
                    } else {
                        pic.bottom_field_order_cnt = pic.pic_order_cnt_msb + pic.pic_order_cnt_lsb;
                    }
                }
            }

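            // Spec 8.2.1.2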
            1 => {
                if self.prev_pic_info.has_mmco_5 {
                    self.prev_pic_info.frame_num_offset = 0;
                }

                if matches!(pic.is_idr, IsIdr::Yes { .. }) {
                    pic.frame_num_offset = 0;
                } else if self.prev_pic_info.frame_num > pic.frame_num {
                    pic.frame_num_offset =
                        self.prev_pic_info.frame_num_offset + sps.max_frame_num();
                } else {
                    pic.frame_num_offset = self.prev_pic_info.frame_num_offset;
                }

                let mut abs_frame_num = if sps.num_ref_frames_in_pic_order_cnt_cycle != 0 {
                    pic.frame_num_offset + pic.frame_num
                } else {
                    0
                };

                if pic.nal_ref_idc == 0 && abs_frame_num > 0 {
                    abs_frame_num -= 1;
                }

                let mut expected_pic_order_cnt = 0;

                if abs_frame_num > 0 {
                    if sps.num_ref_frames_in_pic_order_cnt_cycle == 0 {
                        return Err(anyhow!("Invalid num_ref_frames_in_pic_order_cnt_cycle"));
                    }

                    let pic_order_cnt_cycle_cnt =
                        (abs_frame_num - 1) / sps.num_ref_frames_in_pic_order_cnt_cycle as u32;
                    let frame_num_in_pic_order_cnt_cycle =
                        (abs_frame_num - 1) % sps.num_ref_frames_in_pic_order_cnt_cycle as u32;
                    expected_pic_order_cnt =
                        pic_order_cnt_cycle_cnt as i32 * sps.expected_delta_per_pic_order_cnt_cycle;

                    assert!(frame_num_in_pic_order_cnt_cycle < 255);

                    for i in 0..sps.num_ref_frames_in_pic_order_cnt_cycle {
                        expected_pic_order_cnt += sps.offset_for_ref_frame[i as usize];
                    }
                }

                if pic.nal_ref_idc == 0 {
                    expected_pic_order_cnt += sps.offset_for_non_ref_pic;
                }

                if matches!(pic.field, Field::Frame) {
                    pic.top_field_order_cnt = expected_pic_order_cnt + pic.delta_pic_order_cnt0;

                    pic.bottom_field_order_cnt = pic.top_field_order_cnt
                        + sps.offset_for_top_to_bottom_field
                        + pic.delta_pic_order_cnt1;
                } else if !matches!(pic.field, Field::Bottom) {
                    pic.top_field_order_cnt = expected_pic_order_cnt + pic.delta_pic_order_cnt0;
                } else {
                    pic.bottom_field_order_cnt = expected_pic_order_cnt
                        + sps.offset_for_top_to_bottom_field
                        + pic.delta_pic_order_cnt0;
                }
            }

            2 => {
                // Spec 8.2.1.3
                if self.prev_pic_info.has_mmco_5 {
                    self.prev_pic_info.frame_num_offset = 0;
                }

                if matches!(pic.is_idr, IsIdr::Yes { .. }) {
                    pic.frame_num_offset = 0;
                } else if self.prev_pic_info.frame_num > pic.frame_num {
                    pic.frame_num_offset =
                        self.prev_pic_info.frame_num_offset + sps.max_frame_num();
                } else {
                    pic.frame_num_offset = self.prev_pic_info.frame_num_offset;
                }

                let pic_order_cnt = if matches!(pic.is_idr, IsIdr::Yes { .. }) {
                    0
                } else if pic.nal_ref_idc == 0 {
                    2 * (pic.frame_num_offset + pic.frame_num) as i32 - 1
                } else {
                    2 * (pic.frame_num_offset + pic.frame_num) as i32
                };

                if matches!(pic.field, Field::Frame | Field::Top) {
                    pic.top_field_order_cnt = pic_order_cnt;
                }
                if matches!(pic.field, Field::Frame | Field::Bottom) {
                    pic.bottom_field_order_cnt = pic_order_cnt;
                }
            }

            _ => return Err(anyhow!("Invalid pic_order_cnt_type: {}", sps.pic_order_cnt_type)),
        }

        match pic.field {
            Field::Frame => {
                pic.pic_order_cnt =
                    std::cmp::min(pic.top_field_order_cnt, pic.bottom_field_order_cnt);
            }
            Field::Top => {
                pic.pic_order_cnt = pic.top_field_order_cnt;
            }
            Field::Bottom => {
                pic.pic_order_cnt = pic.bottom_field_order_cnt;
            }
        }

        Ok(())
    }

    /// Returns an iterator of the handles of the frames that need to be bumped into the ready
    /// queue.
    fn bump_as_needed(&mut self, current_pic: &PictureData) -> impl Iterator<Item = H> {
        self.dpb.bump_as_needed(current_pic).into_iter().flatten()
    }

    /// Returns an iterator of the handles of all the frames still present in the DPB.
    fn drain(&mut self) -> impl Iterator<Item = H> {
        let pics = self.dpb.drain();

        pics.into_iter().flatten()
    }

    /// Find the first field for the picture started by `slice`, if any.
    fn find_first_field(
        &self,
        hdr: &SliceHeader,
    ) -> Result<Option<(RcPictureData, H)>, FindFirstFieldError> {
        let mut prev_field = None;

        if self.dpb.interlaced() {
            if let Some(last_dpb_entry) = self.dpb.entries().last() {
                // Use the last entry in the DPB
                let last_pic = last_dpb_entry.pic.borrow();

                // If the picture is interlaced but doesn't have its other field set yet,
                // then it must be the first field.
                if !matches!(last_pic.field, Field::Frame)
                    && matches!(last_pic.field_rank(), FieldRank::Single)
                {
                    if let Some(handle) = &last_dpb_entry.reference {
                        // Still waiting for the second field
                        prev_field = Some((last_dpb_entry.pic.clone(), handle.clone()));
                    }
                }
            }
        }

        let prev_field = match prev_field {
            None => return Ok(None),
            Some(prev_field) => prev_field,
        };

        let prev_field_pic = prev_field.0.borrow();

        if prev_field_pic.frame_num != u32::from(hdr.frame_num) {
            return Err(FindFirstFieldError::FrameNumDiffers(
                prev_field_pic.frame_num,
                hdr.frame_num as u32,
            ));
        }

        let cur_field = if hdr.bottom_field_flag { Field::Bottom } else { Field::Top };

        if !hdr.field_pic_flag || cur_field == prev_field_pic.field {
            let field = prev_field_pic.field;

            return Err(FindFirstFieldError::ExpectedComplementaryField(field.opposite(), field));
        }

        drop(prev_field_pic);
        Ok(Some(prev_field))
    }

    // 8.2.4.3.1 Modification process of reference picture lists for short-term
    // reference pictures
    #[allow(clippy::too_many_arguments)]
    fn short_term_pic_list_modification<'a>(
        cur_pic: &PictureData,
        dpb: &'a Dpb<H>,
        ref_pic_list_x: &mut DpbPicRefList<'a, H>,
        num_ref_idx_lx_active_minus1: u8,
        max_pic_num: i32,
        rplm: &RefPicListModification,
        pic_num_lx_pred: &mut i32,
        ref_idx_lx: &mut usize,
    ) -> anyhow::Result<()> {
        let pic_num_lx_no_wrap;
        let abs_diff_pic_num = rplm.abs_diff_pic_num_minus1 as i32 + 1;
        let modification_of_pic_nums_idc = rplm.modification_of_pic_nums_idc;

        if modification_of_pic_nums_idc == 0 {
            if *pic_num_lx_pred - abs_diff_pic_num < 0 {
                pic_num_lx_no_wrap = *pic_num_lx_pred - abs_diff_pic_num + max_pic_num;
            } else {
                pic_num_lx_no_wrap = *pic_num_lx_pred - abs_diff_pic_num;
            }
        } else if modification_of_pic_nums_idc == 1 {
            if *pic_num_lx_pred + abs_diff_pic_num >= max_pic_num {
                pic_num_lx_no_wrap = *pic_num_lx_pred + abs_diff_pic_num - max_pic_num;
            } else {
                pic_num_lx_no_wrap = *pic_num_lx_pred + abs_diff_pic_num;
            }
        } else {
            anyhow::bail!(
                "unexpected value for modification_of_pic_nums_idc {:?}",
                rplm.modification_of_pic_nums_idc
            );
        }

        *pic_num_lx_pred = pic_num_lx_no_wrap;

        let pic_num_lx = if pic_num_lx_no_wrap > cur_pic.pic_num {
            pic_num_lx_no_wrap - max_pic_num
        } else {
            pic_num_lx_no_wrap
        };

        let handle = dpb
            .find_short_term_with_pic_num(pic_num_lx)
            .with_context(|| format!("No ShortTerm reference found with pic_num {}", pic_num_lx))?;

        if *ref_idx_lx >= ref_pic_list_x.len() {
            anyhow::bail!("invalid ref_idx_lx index");
        }
        ref_pic_list_x.insert(*ref_idx_lx, handle);
        *ref_idx_lx += 1;

        let mut nidx = *ref_idx_lx;

        for cidx in *ref_idx_lx..=usize::from(num_ref_idx_lx_active_minus1) + 1 {
            if cidx == ref_pic_list_x.len() {
                break;
            }

            let target = &ref_pic_list_x[cidx].pic;

            if target.borrow().pic_num_f(max_pic_num) != pic_num_lx {
                ref_pic_list_x[nidx] = ref_pic_list_x[cidx];
                nidx += 1;
            }
        }

        while ref_pic_list_x.len() > (usize::from(num_ref_idx_lx_active_minus1) + 1) {
            ref_pic_list_x.pop();
        }

        Ok(())
    }

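    // 8.2.4.3.2 Modification process of reference picture lists for long-term
    // reference pictures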
    fn long_term_pic_list_modification<'a>(
        dpb: &'a Dpb<H>,
        ref_pic_list_x: &mut DpbPicRefList<'a, H>,
        num_ref_idx_lx_active_minus1: u8,
        max_long_term_frame_idx: MaxLongTermFrameIdx,
        rplm: &RefPicListModification,
        ref_idx_lx: &mut usize,
    ) -> anyhow::Result<()> {
        let long_term_pic_num = rplm.long_term_pic_num;

        let handle =
            dpb.find_long_term_with_long_term_pic_num(long_term_pic_num).with_context(|| {
                format!("No LongTerm reference found with long_term_pic_num {}", long_term_pic_num)
            })?;

        if *ref_idx_lx >= ref_pic_list_x.len() {
            anyhow::bail!("invalid ref_idx_lx index");
        }
        ref_pic_list_x.insert(*ref_idx_lx, handle);
        *ref_idx_lx += 1;

        let mut nidx = *ref_idx_lx;

        for cidx in *ref_idx_lx..=usize::from(num_ref_idx_lx_active_minus1) + 1 {
            if cidx == ref_pic_list_x.len() {
                break;
            }

            let target = &ref_pic_list_x[cidx].pic;
            if target.borrow().long_term_pic_num_f(max_long_term_frame_idx) != long_term_pic_num {
                ref_pic_list_x[nidx] = ref_pic_list_x[cidx];
                nidx += 1;
            }
        }

        while ref_pic_list_x.len() > (usize::from(num_ref_idx_lx_active_minus1) + 1) {
            ref_pic_list_x.pop();
        }

        Ok(())
    }

    fn modify_ref_pic_list(
        &self,
        cur_pic: &PictureData,
        hdr: &SliceHeader,
        ref_pic_list_type: RefPicList,
        ref_pic_list_indices: &[usize],
    ) -> anyhow::Result<DpbPicRefList<H>> {
        let (ref_pic_list_modification_flag_lx, num_ref_idx_lx_active_minus1, rplm) =
            match ref_pic_list_type {
                RefPicList::RefPicList0 => (
                    hdr.ref_pic_list_modification_flag_l0,
                    hdr.num_ref_idx_l0_active_minus1,
                    &hdr.ref_pic_list_modification_l0,
                ),
                RefPicList::RefPicList1 => (
                    hdr.ref_pic_list_modification_flag_l1,
                    hdr.num_ref_idx_l1_active_minus1,
                    &hdr.ref_pic_list_modification_l1,
                ),
            };

        let mut ref_pic_list: Vec<_> = ref_pic_list_indices
            .iter()
            .map(|&i| &self.dpb.entries()[i])
            .take(usize::from(num_ref_idx_lx_active_minus1) + 1)
            .collect();

        if !ref_pic_list_modification_flag_lx {
            return Ok(ref_pic_list);
        }

        let mut pic_num_lx_pred = cur_pic.pic_num;
        let mut ref_idx_lx = 0;

        for modification in rplm {
            let idc = modification.modification_of_pic_nums_idc;

            match idc {
                0 | 1 => {
                    Self::short_term_pic_list_modification(
                        cur_pic,
                        &self.dpb,
                        &mut ref_pic_list,
                        num_ref_idx_lx_active_minus1,
                        hdr.max_pic_num as i32,
                        modification,
                        &mut pic_num_lx_pred,
                        &mut ref_idx_lx,
                    )?;
                }
                2 => Self::long_term_pic_list_modification(
                    &self.dpb,
                    &mut ref_pic_list,
                    num_ref_idx_lx_active_minus1,
                    self.max_long_term_frame_idx,
                    modification,
                    &mut ref_idx_lx,
                )?,
                3 => break,
                _ => anyhow::bail!("unexpected modification_of_pic_nums_idc {:?}", idc),
            }
        }

        Ok(ref_pic_list)
    }

    /// Generate RefPicList0 and RefPicList1 in the specification. Computed for every slice, points
    /// to the pictures in the DPB.
    fn create_ref_pic_lists(
        &mut self,
        cur_pic: &PictureData,
        hdr: &SliceHeader,
        ref_pic_lists: &ReferencePicLists,
    ) -> anyhow::Result<RefPicLists<H>> {
        let ref_pic_list0 = match hdr.slice_type {
            SliceType::P | SliceType::Sp => self.modify_ref_pic_list(
                cur_pic,
                hdr,
                RefPicList::RefPicList0,
                &ref_pic_lists.ref_pic_list_p0,
            )?,
            SliceType::B => self.modify_ref_pic_list(
                cur_pic,
                hdr,
                RefPicList::RefPicList0,
                &ref_pic_lists.ref_pic_list_b0,
            )?,
            _ => Vec::new(),
        };

        let ref_pic_list1 = match hdr.slice_type {
            SliceType::B => self.modify_ref_pic_list(
                cur_pic,
                hdr,
                RefPicList::RefPicList1,
                &ref_pic_lists.ref_pic_list_b1,
            )?,
            _ => Vec::new(),
        };

        Ok(RefPicLists { ref_pic_list0, ref_pic_list1 })
    }

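    /// Apply the memory management control operations (MMCO) carried by `pic` to the DPB, as per
    /// section 8.2.5.4 of the specification.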
    fn handle_memory_management_ops(&mut self, pic: &mut PictureData) -> Result<(), MmcoError> {
        let markings = pic.ref_pic_marking.clone();

        for marking in &markings.inner {
            match marking.memory_management_control_operation {
                0 => break,
                1 => self.dpb.mmco_op_1(pic, marking)?,
                2 => self.dpb.mmco_op_2(pic, marking)?,
                3 => self.dpb.mmco_op_3(pic, marking)?,
                4 => self.max_long_term_frame_idx = self.dpb.mmco_op_4(marking),
                5 => self.max_long_term_frame_idx = self.dpb.mmco_op_5(pic),
                6 => self.dpb.mmco_op_6(pic, marking),
                other => return Err(MmcoError::UnknownMmco(other)),
            }
        }

        Ok(())
    }

    fn reference_pic_marking(&mut self, pic: &mut PictureData, sps: &Sps) -> anyhow::Result<()> {
        /* 8.2.5.1 */
        if matches!(pic.is_idr, IsIdr::Yes { .. }) {
            self.dpb.mark_all_as_unused_for_ref();

            if pic.ref_pic_marking.long_term_reference_flag {
                pic.set_reference(Reference::LongTerm, false);
                pic.long_term_frame_idx = 0;
                self.max_long_term_frame_idx = MaxLongTermFrameIdx::Idx(0);
            } else {
                pic.set_reference(Reference::ShortTerm, false);
                self.max_long_term_frame_idx = MaxLongTermFrameIdx::NoLongTermFrameIndices;
            }

            return Ok(());
        }

        if pic.ref_pic_marking.adaptive_ref_pic_marking_mode_flag {
            self.handle_memory_management_ops(pic)?;
        } else {
            self.dpb.sliding_window_marking(pic, sps);
        }

        Ok(())
    }

    // Apply the parameters of `sps` to the decoding state.
    fn apply_sps(&mut self, sps: &Sps) {
        self.negotiation_info = NegotiationInfo::from(sps);

        let max_dpb_frames = sps.max_dpb_frames();
        let interlaced = !sps.frame_mbs_only_flag;
        let max_num_order_frames = sps.max_num_order_frames() as usize;
        let max_num_reorder_frames =
            if max_num_order_frames > max_dpb_frames { 0 } else { max_num_order_frames };

        self.dpb.set_limits(max_dpb_frames, max_num_reorder_frames);
        self.dpb.set_interlaced(interlaced);
    }
}

impl<B> StatelessDecoder<H264, B>
where
    B: StatelessH264DecoderBackend,
    B::Handle: Clone,
{
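    /// Returns whether the parameters of `sps` differ from `old_negotiation_info`, i.e. whether a
    /// new round of format negotiation with the client is required.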
    fn negotiation_possible(sps: &Sps, old_negotiation_info: &NegotiationInfo) -> bool {
        let negotiation_info = NegotiationInfo::from(sps);
        *old_negotiation_info != negotiation_info
    }

    fn renegotiate_if_needed(&mut self, sps: &Rc<Sps>) -> anyhow::Result<()> {
        if Self::negotiation_possible(sps, &self.codec.negotiation_info) {
            // Make sure all the frames we decoded so far are in the ready queue.
            self.drain()?;
            self.backend.new_sequence(sps)?;
            self.await_format_change(sps.clone());
        }

        Ok(())
    }

    // Apply the parameters of `sps` to the decoder.
    fn apply_sps(&mut self, sps: &Sps) {
        self.codec.apply_sps(sps);

        self.coded_resolution = Resolution::from((sps.width(), sps.height()));
    }

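    /// Finish the picture currently being decoded (if any) and move all the frames remaining in
    /// the DPB into the ready queue.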
    fn drain(&mut self) -> anyhow::Result<()> {
        // Finish the current picture if there is one pending.
        if let Some(cur_pic) = self.codec.current_pic.take() {
            self.finish_picture(cur_pic)?;
        }

        self.ready_queue.extend(self.codec.drain());

        Ok(())
    }

    /// Adds picture to the ready queue if it could not be added to the DPB.
    fn add_to_ready_queue(&mut self, pic: PictureData, handle: B::Handle) {
        if matches!(pic.field, Field::Frame) {
            self.ready_queue.push(handle);
        } else if let FieldRank::Second(..) = pic.field_rank() {
            self.ready_queue.push(handle)
        }
    }

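    /// Finish the current picture: submit it to the backend, perform reference picture marking,
    /// and store it into the DPB or push it to the ready queue.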
    fn finish_picture(&mut self, pic: CurrentPicState<B::Picture>) -> anyhow::Result<()> {
        debug!("Finishing picture POC {:?}", pic.pic.pic_order_cnt);

        // Submit the picture to the backend.
        let handle = self.submit_picture(pic.backend_pic)?;
        let pps = pic.pps;
        let mut pic = pic.pic;

        if matches!(pic.reference(), Reference::ShortTerm | Reference::LongTerm) {
            self.codec.reference_pic_marking(&mut pic, &pps.sps)?;
            self.codec.prev_ref_pic_info.fill(&pic);
        }

        self.codec.prev_pic_info.fill(&pic);

        if pic.has_mmco_5 {
            // C.4.5.3 "Bumping process"
            // The bumping process is invoked in the following cases:
            // Clause 3:
            // The current picture has memory_management_control_operation equal
            // to 5, as specified in clause C.4.4.
            self.drain()?;
        }

        // Bump the DPB as per C.4.5.3 to cover clauses 1, 4, 5 and 6.
        self.ready_queue.extend(self.codec.bump_as_needed(&pic));

        // C.4.5.1, C.4.5.2
        // If the current decoded picture is the second field of a complementary
        // reference field pair, add to DPB.
        // C.4.5.1
        // For a reference decoded picture, the "bumping" process is invoked
        // repeatedly until there is an empty frame buffer, by which point it is
        // added to the DPB. Notice that Dpb::needs_bumping already accounts for
        // this.
        // C.4.5.2
        // For a non-reference decoded picture, if there is an empty frame buffer
        // after bumping the smaller POC, add to DPB. Otherwise, add it to the
        // ready queue.
        if pic.is_second_field_of_complementary_ref_pair()
            || pic.is_ref()
            || self.codec.dpb.has_empty_frame_buffer()
        {
            if self.codec.dpb.interlaced() && matches!(pic.field, Field::Frame) {
                // Split the Frame into two complementary fields so reference
                // marking is easier. This is inspired by the GStreamer implementation.
                let (first_field, second_field) = PictureData::split_frame(pic);

                self.codec.dpb.store_picture(first_field, Some(handle.clone()))?;
                self.codec.dpb.store_picture(second_field, Some(handle))?;
            } else {
                self.codec.dpb.store_picture(pic.into_rc(), Some(handle))?;
            }
        } else {
            self.add_to_ready_queue(pic, handle);
        }

        Ok(())
    }

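    /// Handle a gap in `frame_num` by inserting "non-existing" short-term reference pictures into
    /// the DPB, as allowed when `gaps_in_frame_num_value_allowed_flag` is set.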
    fn handle_frame_num_gap(
        &mut self,
        sps: &Sps,
        frame_num: u32,
        timestamp: u64,
    ) -> anyhow::Result<()> {
        if self.codec.dpb.is_empty() {
            return Ok(());
        }

        debug!("frame_num gap detected.");

        if !sps.gaps_in_frame_num_value_allowed_flag {
            return Err(anyhow!(
                "Invalid frame_num: {}. Assuming unintentional loss of pictures",
                frame_num
            ));
        }

        let mut unused_short_term_frame_num =
            (self.codec.prev_ref_pic_info.frame_num + 1) % sps.max_frame_num();
        while unused_short_term_frame_num != frame_num {
            let max_frame_num = sps.max_frame_num();

            let mut pic = PictureData::new_non_existing(unused_short_term_frame_num, timestamp);
            self.codec.compute_pic_order_count(&mut pic, sps)?;

            self.codec.dpb.update_pic_nums(unused_short_term_frame_num, max_frame_num, &pic);

            self.codec.dpb.sliding_window_marking(&mut pic, sps);

            self.ready_queue.extend(self.codec.bump_as_needed(&pic));

            if self.codec.dpb.interlaced() {
                let (first_field, second_field) = PictureData::split_frame(pic);

                self.codec.dpb.store_picture(first_field, None)?;
                self.codec.dpb.store_picture(second_field, None)?;
            } else {
                self.codec.dpb.store_picture(pic.into_rc(), None)?;
            }

            unused_short_term_frame_num += 1;
            unused_short_term_frame_num %= max_frame_num;
        }

        Ok(())
    }

    /// Init the current picture being decoded.
    fn init_current_pic(
        &mut self,
        slice: &Slice,
        sps: &Sps,
        first_field: Option<&RcPictureData>,
        timestamp: u64,
    ) -> anyhow::Result<PictureData> {
        let mut pic = PictureData::new_from_slice(slice, sps, timestamp, first_field);
        self.codec.compute_pic_order_count(&mut pic, sps)?;

        if matches!(pic.is_idr, IsIdr::Yes { .. }) {
            // C.4.5.3 "Bumping process"
            // The bumping process is invoked in the following cases:
            // Clause 2:
            // The current picture is an IDR picture and
            // no_output_of_prior_pics_flag is not equal to 1 and is not
            // inferred to be equal to 1, as specified in clause C.4.4.
            if !pic.ref_pic_marking.no_output_of_prior_pics_flag {
                self.drain()?;
            } else {
                // C.4.4 When no_output_of_prior_pics_flag is equal to 1 or is
                // inferred to be equal to 1, all frame buffers in the DPB are
                // emptied without output of the pictures they contain, and DPB
                // fullness is set to 0.
                self.codec.dpb.clear();
            }
        }

        self.codec.dpb.update_pic_nums(
            u32::from(slice.header.frame_num),
            sps.max_frame_num(),
            &pic,
        );

        Ok(pic)
    }

    /// Called once per picture to start it.
    fn begin_picture(
        &mut self,
        timestamp: u64,
        slice: &Slice,
        alloc_cb: &mut dyn FnMut() -> Option<
            <<B as StatelessDecoderBackend>::Handle as DecodedHandle>::Frame,
        >,
    ) -> Result<CurrentPicState<B::Picture>, DecodeError> {
        let hdr = &slice.header;
        let pps = Rc::clone(
            self.codec
                .parser
                .get_pps(hdr.pic_parameter_set_id)
                .context("Invalid PPS in handle_picture")?,
        );

        // A picture's SPS may require negotiation.
        self.renegotiate_if_needed(&pps.sps)?;
        if let DecodingState::AwaitingFormat(_) = &self.decoding_state {
            return Err(DecodeError::CheckEvents);
        }

        // Start by securing the backend picture before modifying our state.
        let first_field =
            self.codec.find_first_field(&slice.header).context("while looking for first field")?;
        let mut backend_pic = if let Some(first_field) = &first_field {
            self.backend.new_field_picture(timestamp, &first_field.1)
        } else {
            self.backend.new_picture(timestamp, alloc_cb)
        }?;

        let nalu_hdr = &slice.nalu.header;

        if nalu_hdr.idr_pic_flag {
            self.codec.prev_ref_pic_info.frame_num = 0;
        }

        let frame_num = u32::from(hdr.frame_num);

        let current_macroblock = match pps.sps.separate_colour_plane_flag {
            true => CurrentMacroblockTracking::SeparateColorPlane(Default::default()),
            false => CurrentMacroblockTracking::NonSeparateColorPlane(0),
        };

        if frame_num != self.codec.prev_ref_pic_info.frame_num
            && frame_num != (self.codec.prev_ref_pic_info.frame_num + 1) % pps.sps.max_frame_num()
        {
            self.handle_frame_num_gap(&pps.sps, frame_num, timestamp)?;
        }

        let pic =
            self.init_current_pic(slice, &pps.sps, first_field.as_ref().map(|f| &f.0), timestamp)?;
        let ref_pic_lists = self.codec.dpb.build_ref_pic_lists(&pic);

        debug!("Decode picture POC {:?}", pic.pic_order_cnt);

        self.backend.start_picture(
            &mut backend_pic,
            &pic,
            pps.sps.as_ref(),
            pps.as_ref(),
            &self.codec.dpb,
            &slice.header,
        )?;

        Ok(CurrentPicState { pic, pps, backend_pic, ref_pic_lists, current_macroblock })
    }

    // Check whether first_mb_in_slice increases monotonically for the current
    // picture as required by the specification.
    fn check_first_mb_in_slice(
        &mut self,
        current_macroblock: &mut CurrentMacroblockTracking,
        slice: &Slice,
    ) {
        match current_macroblock {
            CurrentMacroblockTracking::SeparateColorPlane(current_macroblock) => {
                match current_macroblock.entry(slice.header.colour_plane_id) {
                    Entry::Vacant(current_macroblock) => {
                        current_macroblock.insert(slice.header.first_mb_in_slice);
                    }
                    Entry::Occupied(mut current_macroblock) => {
                        let current_macroblock = current_macroblock.get_mut();
                        if slice.header.first_mb_in_slice >= *current_macroblock {
                            log::trace!("first_mb_in_slice does not increase monotonically, expect corrupted output");
                        }
                        *current_macroblock = slice.header.first_mb_in_slice;
                    }
                }
            }
            CurrentMacroblockTracking::NonSeparateColorPlane(current_macroblock) => {
                if slice.header.first_mb_in_slice >= *current_macroblock {
                    log::trace!(
                        "first_mb_in_slice does not increase monotonically, expect corrupted output"
                    );
                }
                *current_macroblock = slice.header.first_mb_in_slice;
            }
        }
    }

    /// Handle a slice. Called once per slice NALU.
    fn handle_slice(
        &mut self,
        cur_pic: &mut CurrentPicState<B::Picture>,
        slice: &Slice,
    ) -> anyhow::Result<()> {
        self.check_first_mb_in_slice(&mut cur_pic.current_macroblock, slice);

        // A slice can technically refer to another PPS.
        let pps =
            self.codec.parser.get_pps(slice.header.pic_parameter_set_id).context("Invalid PPS")?;
        cur_pic.pps = Rc::clone(pps);

        // Make sure that no renegotiation is needed mid-picture: we would lose the context of the
        // previously decoded slices.
        if Self::negotiation_possible(&cur_pic.pps.sps, &self.codec.negotiation_info) {
            anyhow::bail!("invalid stream: inter-frame renegotiation requested");
        }

        let RefPicLists { ref_pic_list0, ref_pic_list1 } =
            self.codec.create_ref_pic_lists(&cur_pic.pic, &slice.header, &cur_pic.ref_pic_lists)?;

        self.backend.decode_slice(
            &mut cur_pic.backend_pic,
            slice,
            cur_pic.pps.sps.as_ref(),
            cur_pic.pps.as_ref(),
            &ref_pic_list0,
            &ref_pic_list1,
        )?;

        Ok(())
    }

    /// Submits the picture to the accelerator.
    fn submit_picture(&mut self, backend_pic: B::Picture) -> Result<B::Handle, DecodeError> {
        let handle = self.backend.submit_picture(backend_pic)?;

        if self.blocking_mode == BlockingMode::Blocking {
            handle.sync()?;
        }

        Ok(handle)
    }

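    /// Process a single NAL unit. Parameter sets update the parser state, while slice NAL units
    /// start or extend the current picture.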
    fn process_nalu(
        &mut self,
        timestamp: u64,
        nalu: Nalu,
        alloc_cb: &mut dyn FnMut() -> Option<
            <<B as StatelessDecoderBackend>::Handle as DecodedHandle>::Frame,
        >,
    ) -> Result<(), DecodeError> {
        match nalu.header.type_ {
            NaluType::Sps => {
                self.codec
                    .parser
                    .parse_sps(&nalu)
                    .map_err(|err| DecodeError::ParseFrameError(err))?;
            }
            NaluType::Pps => {
                self.codec
                    .parser
                    .parse_pps(&nalu)
                    .map_err(|err| DecodeError::ParseFrameError(err))?;
            }
            NaluType::Slice
            | NaluType::SliceDpa
            | NaluType::SliceDpb
            | NaluType::SliceDpc
            | NaluType::SliceIdr
            | NaluType::SliceExt => {
                let slice = self
                    .codec
                    .parser
                    .parse_slice_header(nalu)
                    .map_err(|err| DecodeError::ParseFrameError(err))?;
                let mut cur_pic = match self.codec.current_pic.take() {
                    // No current picture, start a new one.
                    None => self.begin_picture(timestamp, &slice, alloc_cb)?,
                    // We have a current picture but are starting a new field, or first_mb_in_slice
                    // indicates that a new picture is starting: finish the current picture and
                    // start a new one.
                    Some(cur_pic)
                        if (self.codec.dpb.interlaced()
                            && matches!(cur_pic.pic.field, Field::Frame)
                            && !cur_pic.pic.is_second_field()
                            && cur_pic.pic.field != slice.header.field())
                            || (slice.header.first_mb_in_slice == 0) =>
                    {
                        self.finish_picture(cur_pic)?;
                        self.begin_picture(timestamp, &slice, alloc_cb)?
                    }
                    // This slice is part of the current picture.
                    Some(cur_pic) => cur_pic,
                };

                self.handle_slice(&mut cur_pic, &slice)?;
                self.codec.current_pic = Some(cur_pic);
            }
            other => {
                debug!("Unsupported NAL unit type {:?}", other);
            }
        }

        Ok(())
    }
}

1243 impl<B> StatelessVideoDecoder for StatelessDecoder<H264, B>
1244 where
1245     B: StatelessH264DecoderBackend,
1246     B::Handle: Clone + 'static,
1247 {
1248     type Handle = B::Handle;
1249 
decode( &mut self, timestamp: u64, bitstream: &[u8], alloc_cb: &mut dyn FnMut() -> Option< <<B as StatelessDecoderBackend>::Handle as DecodedHandle>::Frame, >, ) -> Result<usize, DecodeError>1250     fn decode(
1251         &mut self,
1252         timestamp: u64,
1253         bitstream: &[u8],
1254         alloc_cb: &mut dyn FnMut() -> Option<
1255             <<B as StatelessDecoderBackend>::Handle as DecodedHandle>::Frame,
1256         >,
1257     ) -> Result<usize, DecodeError> {
1258         self.wait_for_drc_flush()?;
1259 
1260         let mut cursor = Cursor::new(bitstream);
1261         let nalu = Nalu::next(&mut cursor).map_err(|err| DecodeError::ParseFrameError(err))?;
1262 
1263         if nalu.header.type_ == NaluType::Sps {
1264             let sps = self
1265                 .codec
1266                 .parser
1267                 .parse_sps(&nalu)
1268                 .map_err(|err| DecodeError::ParseFrameError(err))?
1269                 .clone();
1270 
1271             if Self::negotiation_possible(&sps, &self.codec.negotiation_info)
1272                 && matches!(self.decoding_state, DecodingState::Decoding)
1273             {
1274                 // DRC occurs when an SPS packet is received that indicates an IDR,
1275                 // the format is different, and the decoder is already decoding frames.
                self.flush()?;
                self.decoding_state = DecodingState::FlushingForDRC;
                // Start signaling the awaiting format event to process a format change.
                self.awaiting_format_event.write(1).unwrap();
                return Err(DecodeError::CheckEvents);
            }

            if matches!(self.decoding_state, DecodingState::AwaitingStreamInfo) {
                // If more SPS come along we will renegotiate in begin_picture().
                self.renegotiate_if_needed(&sps)?;
            } else if matches!(self.decoding_state, DecodingState::Reset) {
                // We can resume decoding since the decoding parameters have not changed.
                self.decoding_state = DecodingState::Decoding;
            }
        } else if matches!(self.decoding_state, DecodingState::Reset) {
            let mut cursor = Cursor::new(bitstream);

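            // Scan the remaining NALUs of this input for an IDR slice from which
            // decoding can restart.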
            while let Ok(nalu) = Nalu::next(&mut cursor) {
                // In the Reset state we can resume decoding from any key frame.
                if matches!(nalu.header.type_, NaluType::SliceIdr) {
                    self.decoding_state = DecodingState::Decoding;
                    break;
                }
            }
        }

        let nalu_len = nalu.offset + nalu.size;

        match &mut self.decoding_state {
            // Process parameter sets, but skip input until we get information
            // from the stream.
            DecodingState::AwaitingStreamInfo | DecodingState::Reset => {
                if matches!(nalu.header.type_, NaluType::Pps) {
                    self.process_nalu(timestamp, nalu, alloc_cb)?;
                }
            }
            // Ask the client to confirm the format before we can process this.
            DecodingState::FlushingForDRC | DecodingState::AwaitingFormat(_) => {
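                // The NALU is not consumed here: the caller is expected to service
                // pending events (e.g. via next_event()) and then resubmit this input.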
                return Err(DecodeError::CheckEvents)
            }
            DecodingState::Decoding => {
                self.process_nalu(timestamp, nalu, alloc_cb)?;
            }
        }

        Ok(nalu_len)
    }

    fn flush(&mut self) -> Result<(), DecodeError> {
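        // Output all frames still held in the DPB, then require a new sync point
        // (an SPS with unchanged parameters or an IDR slice) before decoding resumes.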
        self.drain()?;
        self.decoding_state = DecodingState::Reset;

        Ok(())
    }

    fn next_event(&mut self) -> Option<DecoderEvent<B::Handle>> {
        self.query_next_event(|decoder, sps| {
            // Apply the SPS settings to the decoder so we don't enter the AwaitingFormat state
            // on the next decode() call.
            decoder.apply_sps(sps);
        })
    }

    fn stream_info(&self) -> Option<&StreamInfo> {
        self.backend.stream_info()
    }

    fn poll_fd(&self) -> BorrowedFd {
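        // Expose the decoder's epoll fd so that callers can poll for pending events.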
        self.epoll_fd.0.as_fd()
    }
}

#[cfg(test)]
pub mod tests {
    use crate::bitstream_utils::NalIterator;
    use crate::codec::h264::parser::Nalu;
    use crate::decoder::stateless::h264::H264;
    use crate::decoder::stateless::tests::test_decode_stream;
    use crate::decoder::stateless::tests::TestStream;
    use crate::decoder::stateless::StatelessDecoder;
    use crate::decoder::BlockingMode;
    use crate::utils::simple_playback_loop;
    use crate::utils::simple_playback_loop_owned_frames;
    use crate::DecodedFormat;

    /// Run `test` using the dummy decoder with the given blocking mode.
    fn test_decoder_dummy(test: &TestStream, blocking_mode: BlockingMode) {
        let decoder = StatelessDecoder::<H264, _>::new_dummy(blocking_mode).unwrap();

        test_decode_stream(
            |d, s, f| {
                simple_playback_loop(
                    d,
                    NalIterator::<Nalu>::new(s),
                    f,
                    &mut simple_playback_loop_owned_frames,
                    DecodedFormat::NV12,
                    blocking_mode,
                )
            },
            decoder,
            test,
            false,
            false,
        );
    }

    /// A 64x64 progressive byte-stream encoded I-frame to make it easier to
    /// spot errors on the libva trace.
    /// Encoded with the following GStreamer pipeline:
    ///
    /// gst-launch-1.0 videotestsrc num-buffers=1 ! video/x-raw,format=I420,width=64,height=64 ! x264enc ! video/x-h264,profile=constrained-baseline,stream-format=byte-stream ! filesink location="64x64-I.h264"
    pub const DECODE_64X64_PROGRESSIVE_I: TestStream = TestStream {
        stream: include_bytes!("../../codec/h264/test_data/64x64-I.h264"),
        crcs: include_str!("../../codec/h264/test_data/64x64-I.h264.crc"),
    };

    #[test]
    fn test_64x64_progressive_i_block() {
        test_decoder_dummy(&DECODE_64X64_PROGRESSIVE_I, BlockingMode::Blocking);
    }

    #[test]
    fn test_64x64_progressive_i_nonblock() {
        test_decoder_dummy(&DECODE_64X64_PROGRESSIVE_I, BlockingMode::NonBlocking);
    }
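
    /// Minimal sanity-check sketch of the Annex B splitting used by the playback loop
    /// above. It assumes that `NalIterator` yields one item per NALU, so a byte stream
    /// containing a single IDR frame (SPS + PPS + slice) should produce at least one
    /// item. Illustrative only; it does not replace the CRC-based decode tests.
    #[test]
    fn test_64x64_progressive_i_nalu_split_sketch() {
        // Count the NALUs found in the simplest test stream.
        let nalu_count = NalIterator::<Nalu>::new(DECODE_64X64_PROGRESSIVE_I.stream).count();
        assert!(nalu_count > 0, "expected at least one NALU in 64x64-I.h264");
    }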

    /// A 64x64 progressive byte-stream encoded I-frame and P-frame to make
    /// it easier to spot errors on the libva trace.
    /// Encoded with the following GStreamer pipeline:
    /// gst-launch-1.0 videotestsrc num-buffers=2 ! video/x-raw,format=I420,width=64,height=64 ! x264enc b-adapt=false ! video/x-h264,profile=constrained-baseline,stream-format=byte-stream ! filesink location="64x64-I-P.h264"
    pub const DECODE_64X64_PROGRESSIVE_I_P: TestStream = TestStream {
        stream: include_bytes!("../../codec/h264/test_data/64x64-I-P.h264"),
        crcs: include_str!("../../codec/h264/test_data/64x64-I-P.h264.crc"),
    };

    #[test]
    fn test_64x64_progressive_i_p_block() {
        test_decoder_dummy(&DECODE_64X64_PROGRESSIVE_I_P, BlockingMode::Blocking);
    }

    #[test]
    fn test_64x64_progressive_i_p_nonblock() {
        test_decoder_dummy(&DECODE_64X64_PROGRESSIVE_I_P, BlockingMode::NonBlocking);
    }

    /// A 64x64 progressive byte-stream encoded I-P-B-P sequence to make it
    /// easier to spot errors on the libva trace.
    /// Encoded with the following GStreamer pipeline:
    /// gst-launch-1.0 videotestsrc num-buffers=3 ! video/x-raw,format=I420,width=64,height=64 ! x264enc b-adapt=false bframes=1 ! video/x-h264,profile=constrained-baseline,stream-format=byte-stream ! filesink location="64x64-I-P-B-P.h264"
    pub const DECODE_64X64_PROGRESSIVE_I_P_B_P: TestStream = TestStream {
        stream: include_bytes!("../../codec/h264/test_data/64x64-I-P-B-P.h264"),
        crcs: include_str!("../../codec/h264/test_data/64x64-I-P-B-P.h264.crc"),
    };

    #[test]
    fn test_64x64_progressive_i_p_b_p_block() {
        test_decoder_dummy(&DECODE_64X64_PROGRESSIVE_I_P_B_P, BlockingMode::Blocking);
    }

    #[test]
    fn test_64x64_progressive_i_p_b_p_nonblock() {
        test_decoder_dummy(&DECODE_64X64_PROGRESSIVE_I_P_B_P, BlockingMode::NonBlocking);
    }

    /// A 64x64 progressive byte-stream encoded I-P-B-P sequence to make it
    /// easier to spot errors on the libva trace.
    /// Also tests whether the decoder supports the high profile.
    ///
    /// Encoded with the following GStreamer pipeline:
    /// gst-launch-1.0 videotestsrc num-buffers=3 ! video/x-raw,format=I420,width=64,height=64 ! x264enc b-adapt=false bframes=1 ! video/x-h264,profile=high,stream-format=byte-stream ! filesink location="64x64-I-P-B-P-high.h264"
    pub const DECODE_64X64_PROGRESSIVE_I_P_B_P_HIGH: TestStream = TestStream {
        stream: include_bytes!("../../codec/h264/test_data/64x64-I-P-B-P-high.h264"),
        crcs: include_str!("../../codec/h264/test_data/64x64-I-P-B-P-high.h264.crc"),
    };

    #[test]
    fn test_64x64_progressive_i_p_b_p_high_block() {
        test_decoder_dummy(&DECODE_64X64_PROGRESSIVE_I_P_B_P_HIGH, BlockingMode::Blocking);
    }

    #[test]
    fn test_64x64_progressive_i_p_b_p_high_nonblock() {
        test_decoder_dummy(&DECODE_64X64_PROGRESSIVE_I_P_B_P_HIGH, BlockingMode::NonBlocking);
    }

    /// Same as Chromium's test-25fps.h264
    pub const DECODE_TEST_25FPS: TestStream = TestStream {
        stream: include_bytes!("../../codec/h264/test_data/test-25fps.h264"),
        crcs: include_str!("../../codec/h264/test_data/test-25fps.h264.crc"),
    };

    #[test]
    fn test_25fps_block() {
        test_decoder_dummy(&DECODE_TEST_25FPS, BlockingMode::Blocking);
    }

    #[test]
    fn test_25fps_nonblock() {
        test_decoder_dummy(&DECODE_TEST_25FPS, BlockingMode::NonBlocking);
    }

    // Adapted from Chromium's test-25fps.h264. Same file, but encoded as
    // interlaced instead, using the following ffmpeg command:
    // ffmpeg -i
    // src/third_party/blink/web_tests/media/content/test-25fps.mp4
    // -flags +ilme+ildct  -vbsf h264_mp4toannexb -an test-25fps.h264
    //
    // This test makes sure that the interlaced logic in the decoder
    // actually works, especially that "frame splitting" works, as the fields
    // here were encoded as frames.
    pub const DECODE_TEST_25FPS_INTERLACED: TestStream = TestStream {
        stream: include_bytes!("../../codec/h264/test_data/test-25fps-interlaced.h264"),
        crcs: include_str!("../../codec/h264/test_data/test-25fps-interlaced.h264.crc"),
    };

    #[test]
    fn test_25fps_interlaced_block() {
        test_decoder_dummy(&DECODE_TEST_25FPS_INTERLACED, BlockingMode::Blocking);
    }

    #[test]
    fn test_25fps_interlaced_nonblock() {
        test_decoder_dummy(&DECODE_TEST_25FPS_INTERLACED, BlockingMode::NonBlocking);
    }
}