• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2024 The ChromiumOS Authors
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 use super::BackendRequest;
6 use super::EncoderConfig;
7 use crate::codec::vp9::parser::BitDepth;
8 use crate::codec::vp9::parser::FrameType;
9 use crate::codec::vp9::parser::Header;
10 use crate::codec::vp9::parser::Profile;
11 use crate::codec::vp9::parser::QuantizationParams;
12 use crate::encoder::stateless::predictor::LowDelay;
13 use crate::encoder::stateless::predictor::LowDelayDelegate;
14 use crate::encoder::stateless::vp9::ReferenceUse;
15 use crate::encoder::stateless::EncodeResult;
16 use crate::encoder::FrameMetadata;
17 use crate::encoder::RateControl;
18 use crate::encoder::Tunings;
19 
/// Smallest valid VP9 quantizer index (lower bound for `base_q_idx`).
pub(crate) const MIN_Q_IDX: u8 = 0;
/// Largest valid VP9 quantizer index (upper bound for `base_q_idx`).
pub(crate) const MAX_Q_IDX: u8 = 255;
22 
/// Delegate state for the low-delay VP9 predictor: holds the encoder
/// configuration consulted when building per-frame headers (resolution,
/// bit depth, initial tunings).
pub(crate) struct LowDelayVP9Delegate {
    // Immutable encoder configuration captured at construction time.
    config: EncoderConfig,
}
26 
/// Low-delay (no frame reordering) predictor specialized for VP9: pairs the
/// generic [`LowDelay`] scheduler with the VP9 delegate and backend request.
pub(crate) type LowDelayVP9<Picture, Reference> =
    LowDelay<Picture, Reference, LowDelayVP9Delegate, BackendRequest<Picture, Reference>>;
29 
30 impl<Picture, Reference> LowDelayVP9<Picture, Reference> {
new(config: EncoderConfig, limit: u16) -> Self31     pub(super) fn new(config: EncoderConfig, limit: u16) -> Self {
32         Self {
33             queue: Default::default(),
34             references: Default::default(),
35             counter: 0,
36             limit,
37             tunings: config.initial_tunings.clone(),
38             delegate: LowDelayVP9Delegate { config },
39             tunings_queue: Default::default(),
40             _phantom: Default::default(),
41         }
42     }
43 
create_frame_header(&mut self, frame_type: FrameType) -> Header44     fn create_frame_header(&mut self, frame_type: FrameType) -> Header {
45         let width = self.delegate.config.resolution.width;
46         let height = self.delegate.config.resolution.height;
47 
48         let profile = match self.delegate.config.bit_depth {
49             BitDepth::Depth8 => Profile::Profile0,
50             BitDepth::Depth10 | BitDepth::Depth12 => Profile::Profile2,
51         };
52 
53         let base_q_idx = if let RateControl::ConstantQuality(base_q_idx) = self.tunings.rate_control
54         {
55             // Limit Q index to valid values
56             base_q_idx.clamp(MIN_Q_IDX as u32, MAX_Q_IDX as u32) as u8
57         } else {
58             // Pick middle Q index
59             (MAX_Q_IDX + MIN_Q_IDX) / 2
60         };
61 
62         Header {
63             profile,
64             bit_depth: BitDepth::Depth10,
65             frame_type,
66             show_frame: true,
67             error_resilient_mode: true,
68             width,
69             height,
70             render_and_frame_size_different: false,
71             intra_only: matches!(frame_type, FrameType::KeyFrame),
72             refresh_frame_flags: 0x01,
73             ref_frame_idx: [0, 0, 0],
74             quant: QuantizationParams { base_q_idx, ..Default::default() },
75 
76             ..Default::default()
77         }
78     }
79 }
80 
81 impl<Picture, Reference> LowDelayDelegate<Picture, Reference, BackendRequest<Picture, Reference>>
82     for LowDelayVP9<Picture, Reference>
83 {
request_keyframe( &mut self, input: Picture, input_meta: FrameMetadata, _idr: bool, ) -> EncodeResult<BackendRequest<Picture, Reference>>84     fn request_keyframe(
85         &mut self,
86         input: Picture,
87         input_meta: FrameMetadata,
88         _idr: bool,
89     ) -> EncodeResult<BackendRequest<Picture, Reference>> {
90         log::trace!("Requested keyframe timestamp={}", input_meta.timestamp);
91 
92         let request = BackendRequest {
93             header: self.create_frame_header(FrameType::KeyFrame),
94             input,
95             input_meta,
96             last_frame_ref: None,
97             golden_frame_ref: None,
98             altref_frame_ref: None,
99             tunings: self.tunings.clone(),
100             coded_output: Vec::new(),
101         };
102 
103         Ok(request)
104     }
105 
request_interframe( &mut self, input: Picture, input_meta: FrameMetadata, ) -> EncodeResult<BackendRequest<Picture, Reference>>106     fn request_interframe(
107         &mut self,
108         input: Picture,
109         input_meta: FrameMetadata,
110     ) -> EncodeResult<BackendRequest<Picture, Reference>> {
111         log::trace!("Requested interframe timestamp={}", input_meta.timestamp);
112 
113         let ref_frame = self.references.pop_front().unwrap();
114 
115         let request = BackendRequest {
116             header: self.create_frame_header(FrameType::InterFrame),
117             input,
118             input_meta,
119             last_frame_ref: Some((ref_frame, ReferenceUse::Single)),
120             golden_frame_ref: None,
121             altref_frame_ref: None,
122             tunings: self.tunings.clone(),
123             coded_output: Vec::new(),
124         };
125 
126         self.references.clear();
127 
128         Ok(request)
129     }
130 
try_tunings(&self, _tunings: &Tunings) -> EncodeResult<()>131     fn try_tunings(&self, _tunings: &Tunings) -> EncodeResult<()> {
132         Ok(())
133     }
134 
apply_tunings(&mut self, _tunings: &Tunings) -> EncodeResult<()>135     fn apply_tunings(&mut self, _tunings: &Tunings) -> EncodeResult<()> {
136         Ok(())
137     }
138 }
139