// Copyright 2020 The ChromiumOS Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

//! Implementation of the `Encoder` struct, which is responsible for translation between the
//! virtio protocols and LibVDA APIs.

pub mod backend;
mod encoder;

use std::collections::BTreeMap;
use std::collections::BTreeSet;

use backend::*;
use base::debug;
use base::error;
use base::info;
use base::warn;
use base::Tube;
use base::WaitContext;
use vm_memory::GuestMemory;

use crate::virtio::video::async_cmd_desc_map::AsyncCmdDescMap;
use crate::virtio::video::command::QueueType;
use crate::virtio::video::command::VideoCmd;
use crate::virtio::video::control::*;
use crate::virtio::video::device::AsyncCmdResponse;
use crate::virtio::video::device::AsyncCmdTag;
use crate::virtio::video::device::Device;
use crate::virtio::video::device::Token;
use crate::virtio::video::device::VideoCmdResponseType;
use crate::virtio::video::device::VideoEvtResponseType;
use crate::virtio::video::encoder::encoder::EncoderEvent;
use crate::virtio::video::encoder::encoder::InputBufferId;
use crate::virtio::video::encoder::encoder::OutputBufferId;
use crate::virtio::video::encoder::encoder::SessionConfig;
use crate::virtio::video::error::*;
use crate::virtio::video::event::EvtType;
use crate::virtio::video::event::VideoEvt;
use crate::virtio::video::format::Bitrate;
use crate::virtio::video::format::BitrateMode;
use crate::virtio::video::format::Format;
use crate::virtio::video::format::Level;
use crate::virtio::video::format::PlaneFormat;
use crate::virtio::video::format::Profile;
use crate::virtio::video::params::Params;
use crate::virtio::video::protocol;
use crate::virtio::video::resource::*;
use crate::virtio::video::response::CmdResponse;
use crate::virtio::video::EosBufferManager;

#[derive(Debug)]
struct QueuedInputResourceParams {
    timestamp: u64,
    in_queue: bool,
}

struct InputResource {
    resource: GuestResource,
    queue_params: Option<QueuedInputResourceParams>,
}

#[derive(Debug)]
struct QueuedOutputResourceParams {
    in_queue: bool,
}

struct OutputResource {
    resource: GuestResource,
    offset: u32,
    queue_params: Option<QueuedOutputResourceParams>,
}

#[derive(Debug, PartialEq, Eq, Hash, Ord, PartialOrd)]
enum PendingCommand {
    // TODO(b/193202566): remove this is_ext parameter throughout the code along with
    // support for the old GET_PARAMS and SET_PARAMS commands.
    GetSrcParams { is_ext: bool },
    GetDstParams { is_ext: bool },
    Drain,
    SrcQueueClear,
    DstQueueClear,
}

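/// Per-stream state: the parameters negotiated for the input and output queues, the active
/// encoder session (if any), and bookkeeping for resources and commands that are still in flight.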
struct Stream<T: EncoderSession> {
    id: u32,
    src_params: Params,
    dst_params: Params,
    dst_bitrate: Bitrate,
    dst_profile: Profile,
    dst_h264_level: Option<Level>,
    force_keyframe: bool,

    encoder_session: Option<T>,
    received_input_buffers_event: bool,

    src_resources: BTreeMap<u32, InputResource>,
    encoder_input_buffer_ids: BTreeMap<InputBufferId, u32>,

    dst_resources: BTreeMap<u32, OutputResource>,
    encoder_output_buffer_ids: BTreeMap<OutputBufferId, u32>,

    pending_commands: BTreeSet<PendingCommand>,
    eos_manager: EosBufferManager,
}

impl<T: EncoderSession> Stream<T> {
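    /// Create the state for a new stream, using `encoder`'s capabilities to populate default
    /// source and destination parameters for the `desired_format`.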
    fn new<E: Encoder<Session = T>>(
        id: u32,
        src_resource_type: ResourceType,
        dst_resource_type: ResourceType,
        desired_format: Format,
        encoder: &EncoderDevice<E>,
    ) -> VideoResult<Self> {
        const MIN_BUFFERS: u32 = 1;
        const MAX_BUFFERS: u32 = 342;
        const DEFAULT_WIDTH: u32 = 640;
        const DEFAULT_HEIGHT: u32 = 480;
        const DEFAULT_BITRATE_TARGET: u32 = 6000;
        const DEFAULT_BITRATE_PEAK: u32 = DEFAULT_BITRATE_TARGET * 2;
        const DEFAULT_BITRATE: Bitrate = Bitrate::Vbr {
            target: DEFAULT_BITRATE_TARGET,
            peak: DEFAULT_BITRATE_PEAK,
        };
        const DEFAULT_BUFFER_SIZE: u32 = 2097152; // 2MB; chosen empirically for 1080p video
        const DEFAULT_FPS: u32 = 30;

        let mut src_params = Params {
            frame_rate: DEFAULT_FPS,
            min_buffers: MIN_BUFFERS,
            max_buffers: MAX_BUFFERS,
            resource_type: src_resource_type,
            ..Default::default()
        };

        let cros_capabilities = &encoder.cros_capabilities;

        cros_capabilities
            .populate_src_params(
                &mut src_params,
                Format::NV12,
                DEFAULT_WIDTH,
                DEFAULT_HEIGHT,
                0,
            )
            .map_err(|_| VideoError::InvalidArgument)?;

        let mut dst_params = Params {
            resource_type: dst_resource_type,
            frame_rate: DEFAULT_FPS,
            frame_width: DEFAULT_WIDTH,
            frame_height: DEFAULT_HEIGHT,
            ..Default::default()
        };

        // In order to support requesting encoder params change, we must know the default frame
        // rate, because VEA's request_encoding_params_change requires both framerate and
        // bitrate to be specified.
        cros_capabilities
            .populate_dst_params(&mut dst_params, desired_format, DEFAULT_BUFFER_SIZE)
            .map_err(|_| VideoError::InvalidArgument)?;
        // `format` is an Option since, for the decoder, it is not populated until decoding has
        // started. For the encoder, it should always be populated.
        let dest_format = dst_params.format.ok_or(VideoError::InvalidArgument)?;

        let dst_profile = cros_capabilities
            .get_default_profile(&dest_format)
            .ok_or(VideoError::InvalidArgument)?;

        let dst_h264_level = if dest_format == Format::H264 {
            Some(Level::H264_1_0)
        } else {
            None
        };

        Ok(Self {
            id,
            src_params,
            dst_params,
            dst_bitrate: DEFAULT_BITRATE,
            dst_profile,
            dst_h264_level,
            force_keyframe: false,
            encoder_session: None,
            received_input_buffers_event: false,
            src_resources: Default::default(),
            encoder_input_buffer_ids: Default::default(),
            dst_resources: Default::default(),
            encoder_output_buffer_ids: Default::default(),
            pending_commands: Default::default(),
            eos_manager: EosBufferManager::new(id),
        })
    }

    fn has_encode_session(&self) -> bool {
        self.encoder_session.is_some()
    }

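    /// Start an encoder session configured from the stream's current parameters and register its
    /// event pipe with `wait_ctx`. Fails if a session already exists.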
    fn set_encode_session<U: Encoder<Session = T>>(
        &mut self,
        encoder: &mut U,
        wait_ctx: &WaitContext<Token>,
    ) -> VideoResult<()> {
        if self.encoder_session.is_some() {
            error!(
                "stream {}: tried to add encode session when one already exists.",
                self.id
            );
            return Err(VideoError::InvalidOperation);
        }

        let new_session = encoder
            .start_session(SessionConfig {
                src_params: self.src_params.clone(),
                dst_params: self.dst_params.clone(),
                dst_profile: self.dst_profile,
                dst_bitrate: self.dst_bitrate,
                dst_h264_level: self.dst_h264_level,
                frame_rate: self.dst_params.frame_rate,
            })
            .map_err(|_| VideoError::InvalidOperation)?;

        let event_pipe = new_session.event_pipe();

        wait_ctx
            .add(event_pipe, Token::Event { id: self.id })
            .map_err(|e| {
                error!(
                    "stream {}: failed to add FD to poll context: {}",
                    self.id, e
                );
                VideoError::InvalidOperation
            })?;
        self.encoder_session.replace(new_session);
        self.received_input_buffers_event = false;
        Ok(())
    }

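    /// Stop polling the current encoder session's event pipe and drop the session, if any.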
    fn clear_encode_session(&mut self, wait_ctx: &WaitContext<Token>) -> VideoResult<()> {
        if let Some(session) = self.encoder_session.take() {
            let event_pipe = session.event_pipe();
            wait_ctx.delete(event_pipe).map_err(|e| {
                error!(
                    "stream: {}: failed to remove fd from poll context: {}",
                    self.id, e
                );
                VideoError::InvalidOperation
            })?;
        }
        Ok(())
    }

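    /// Handle the session's `RequireInputBuffers` event: record the buffer requirements reported
    /// by the backend and answer any `GetParams` commands that were waiting for this event.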
    fn require_input_buffers(
        &mut self,
        input_count: u32,
        input_frame_width: u32,
        input_frame_height: u32,
        output_buffer_size: u32,
    ) -> Option<Vec<VideoEvtResponseType>> {
        // TODO(alexlau): Does this always arrive after start_session,
        // but before the first encode call?
        // TODO(alexlau): set plane info from input_frame_width and input_frame_height
        self.src_params.min_buffers = input_count;
        self.src_params.max_buffers = 32;
        self.src_params.frame_width = input_frame_width;
        self.src_params.frame_height = input_frame_height;
        self.dst_params.plane_formats[0].plane_size = output_buffer_size;
        self.received_input_buffers_event = true;

        let mut responses = vec![];

        // Respond to any GetParams commands that were waiting.
        let pending_get_src_params = if self
            .pending_commands
            .remove(&PendingCommand::GetSrcParams { is_ext: false })
        {
            Some(false)
        } else if self
            .pending_commands
            .remove(&PendingCommand::GetSrcParams { is_ext: true })
        {
            Some(true)
        } else {
            None
        };
        if let Some(is_ext) = pending_get_src_params {
            responses.push(VideoEvtResponseType::AsyncCmd(
                AsyncCmdResponse::from_response(
                    AsyncCmdTag::GetParams {
                        stream_id: self.id,
                        queue_type: QueueType::Input,
                    },
                    CmdResponse::GetParams {
                        queue_type: QueueType::Input,
                        params: self.src_params.clone(),
                        is_ext,
                    },
                ),
            ));
        }
        let pending_get_dst_params = if self
            .pending_commands
            .remove(&PendingCommand::GetDstParams { is_ext: false })
        {
            Some(false)
        } else if self
            .pending_commands
            .remove(&PendingCommand::GetDstParams { is_ext: true })
        {
            Some(true)
        } else {
            None
        };
        if let Some(is_ext) = pending_get_dst_params {
            responses.push(VideoEvtResponseType::AsyncCmd(
                AsyncCmdResponse::from_response(
                    AsyncCmdTag::GetParams {
                        stream_id: self.id,
                        queue_type: QueueType::Output,
                    },
                    CmdResponse::GetParams {
                        queue_type: QueueType::Output,
                        params: self.dst_params.clone(),
                        is_ext,
                    },
                ),
            ));
        }

        if responses.len() > 0 {
            Some(responses)
        } else {
            None
        }
    }

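    /// Handle the session's notification that input buffer `input_buffer_id` has been consumed,
    /// completing the corresponding `ResourceQueue` command if the resource is still queued.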
    fn processed_input_buffer(
        &mut self,
        input_buffer_id: InputBufferId,
    ) -> Option<Vec<VideoEvtResponseType>> {
        let resource_id = *match self.encoder_input_buffer_ids.get(&input_buffer_id) {
            Some(id) => id,
            None => {
                warn!("Received processed input buffer event for input buffer id {}, but missing resource, ResourceDestroyAll?", input_buffer_id);
                return None;
            }
        };

        let resource = match self.src_resources.get_mut(&resource_id) {
            Some(r) => r,
            None => {
                error!(
                    "Received processed input buffer event but missing resource with id {}",
                    resource_id
                );
                return None;
            }
        };

        let queue_params = match resource.queue_params.take() {
            Some(p) => p,
            None => {
                error!(
                    "Received processed input buffer event but resource with id {} was not queued.",
                    resource_id
                );
                return None;
            }
        };

        if !queue_params.in_queue {
            // A QueueClear command occurred after this buffer was queued.
            return None;
        }

        let tag = AsyncCmdTag::Queue {
            stream_id: self.id,
            queue_type: QueueType::Input,
            resource_id,
        };

        let resp = CmdResponse::ResourceQueue {
            timestamp: queue_params.timestamp,
            flags: 0,
            size: 0,
        };

        Some(vec![VideoEvtResponseType::AsyncCmd(
            AsyncCmdResponse::from_response(tag, resp),
        )])
    }

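    /// Handle the session's notification that output buffer `output_buffer_id` now holds
    /// `bytesused` bytes of bitstream, completing the corresponding `ResourceQueue` command if
    /// the resource is still queued.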
    fn processed_output_buffer(
        &mut self,
        output_buffer_id: OutputBufferId,
        bytesused: u32,
        keyframe: bool,
        timestamp: u64,
    ) -> Option<Vec<VideoEvtResponseType>> {
        let resource_id = *match self.encoder_output_buffer_ids.get(&output_buffer_id) {
            Some(id) => id,
            None => {
                warn!("Received processed output buffer event for output buffer id {}, but missing resource, ResourceDestroyAll?", output_buffer_id);
                return None;
            }
        };

        let resource = match self.dst_resources.get_mut(&resource_id) {
            Some(r) => r,
            None => {
                error!(
                    "Received processed output buffer event but missing resource with id {}",
                    resource_id
                );
                return None;
            }
        };

        let queue_params = match resource.queue_params.take() {
            Some(p) => p,
            None => {
                error!("Received processed output buffer event but resource with id {} was not queued.", resource_id);
                return None;
            }
        };

        if !queue_params.in_queue {
            // A QueueClear command occurred after this buffer was queued.
            return None;
        }

        let tag = AsyncCmdTag::Queue {
            stream_id: self.id,
            queue_type: QueueType::Output,
            resource_id,
        };

        let resp = CmdResponse::ResourceQueue {
            timestamp,
            // At the moment, a buffer is saved in `eos_notification_buffer`, and
            // the EOS flag is populated and returned after a flush() command.
            // TODO(b/149725148): Populate flags once libvda supports it.
            flags: if keyframe {
                protocol::VIRTIO_VIDEO_BUFFER_FLAG_IFRAME
            } else {
                0
            },
            size: bytesused,
        };

        Some(vec![VideoEvtResponseType::AsyncCmd(
            AsyncCmdResponse::from_response(tag, resp),
        )])
    }

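    /// Handle the session's flush completion: answer any pending `Drain` or `QueueClear` commands
    /// and, if an EOS buffer has been reserved, return it with the EOS flag set.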
    fn flush_response(&mut self, flush_done: bool) -> Option<Vec<VideoEvtResponseType>> {
        let command_response = if flush_done {
            CmdResponse::NoData
        } else {
            error!("Flush could not be completed for stream {}", self.id);
            VideoError::InvalidOperation.into()
        };

        let mut async_responses = vec![];

        // First gather the responses for all completed commands.
        if self.pending_commands.remove(&PendingCommand::Drain) {
            async_responses.push(VideoEvtResponseType::AsyncCmd(
                AsyncCmdResponse::from_response(
                    AsyncCmdTag::Drain { stream_id: self.id },
                    command_response.clone(),
                ),
            ));
        }

        if self.pending_commands.remove(&PendingCommand::SrcQueueClear) {
            async_responses.push(VideoEvtResponseType::AsyncCmd(
                AsyncCmdResponse::from_response(
                    AsyncCmdTag::Clear {
                        stream_id: self.id,
                        queue_type: QueueType::Input,
                    },
                    command_response.clone(),
                ),
            ));
        }

        if self.pending_commands.remove(&PendingCommand::DstQueueClear) {
            async_responses.push(VideoEvtResponseType::AsyncCmd(
                AsyncCmdResponse::from_response(
                    AsyncCmdTag::Clear {
                        stream_id: self.id,
                        queue_type: QueueType::Output,
                    },
                    command_response,
                ),
            ));
        }

        // Then add the EOS buffer to the responses if it is available.
        self.eos_manager.try_complete_eos(async_responses)
    }

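    /// Report an encoder session error to the guest as an error event.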
    #[allow(clippy::unnecessary_wraps)]
    fn notify_error(&self, error: VideoError) -> Option<Vec<VideoEvtResponseType>> {
        error!(
            "Received encoder error event for stream {}: {}",
            self.id, error
        );
        Some(vec![VideoEvtResponseType::Event(VideoEvt {
            typ: EvtType::Error,
            stream_id: self.id,
        })])
    }
}

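/// Virtio video encoder device, generic over the encoder backend `T`. Owns the backend, the
/// per-stream state, and the handles needed to resolve guest resources.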
pub struct EncoderDevice<T: Encoder> {
    cros_capabilities: encoder::EncoderCapabilities,
    encoder: T,
    streams: BTreeMap<u32, Stream<T::Session>>,
    resource_bridge: Tube,
    mem: GuestMemory,
}

impl<T: Encoder> EncoderDevice<T> {
    /// Build a new encoder using the provided `backend`.
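    ///
    /// A minimal construction sketch (hypothetical setup; it assumes the caller already has a
    /// `backend` implementing [`Encoder`], a `resource_bridge` [`Tube`] and the guest memory
    /// `mem`):
    ///
    /// ```ignore
    /// let device = EncoderDevice::new(backend, resource_bridge, mem)?;
    /// ```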
    pub fn new(backend: T, resource_bridge: Tube, mem: GuestMemory) -> VideoResult<Self> {
        Ok(Self {
            cros_capabilities: backend.query_capabilities()?,
            encoder: backend,
            streams: Default::default(),
            resource_bridge,
            mem,
        })
    }

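    /// Answer a QueryCapability command with the format descriptors supported by the backend for
    /// `queue_type`.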
    #[allow(clippy::unnecessary_wraps)]
    fn query_capabilities(&self, queue_type: QueueType) -> VideoResult<VideoCmdResponseType> {
        let descs = match queue_type {
            QueueType::Input => self.cros_capabilities.input_format_descs.clone(),
            QueueType::Output => self.cros_capabilities.output_format_descs.clone(),
        };
        Ok(VideoCmdResponseType::Sync(CmdResponse::QueryCapability(
            descs,
        )))
    }

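    /// Create stream `stream_id` encoding into `desired_format`, with default parameters derived
    /// from the backend's capabilities.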
    fn stream_create(
        &mut self,
        stream_id: u32,
        desired_format: Format,
        src_resource_type: ResourceType,
        dst_resource_type: ResourceType,
    ) -> VideoResult<VideoCmdResponseType> {
        if self.streams.contains_key(&stream_id) {
            return Err(VideoError::InvalidStreamId(stream_id));
        }
        let new_stream = Stream::new(
            stream_id,
            src_resource_type,
            dst_resource_type,
            desired_format,
            self,
        )?;

        self.streams.insert(stream_id, new_stream);
        Ok(VideoCmdResponseType::Sync(CmdResponse::NoData))
    }

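    /// Destroy stream `stream_id`, stopping its encoder session if one is running.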
    fn stream_destroy(&mut self, stream_id: u32) -> VideoResult<VideoCmdResponseType> {
        let mut stream = self
            .streams
            .remove(&stream_id)
            .ok_or(VideoError::InvalidStreamId(stream_id))?;
        // TODO(alexlau): Handle resources that have been queued.
        if let Some(session) = stream.encoder_session.take() {
            if let Err(e) = self.encoder.stop_session(session) {
                error!("Failed to stop encode session {}: {}", stream_id, e);
            }
        }
        Ok(VideoCmdResponseType::Sync(CmdResponse::NoData))
    }

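    /// Handle a Drain command by flushing the encoder session; the command completes
    /// asynchronously once the flush is done. If no session exists yet, reply immediately.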
    fn stream_drain(&mut self, stream_id: u32) -> VideoResult<VideoCmdResponseType> {
        let stream = self
            .streams
            .get_mut(&stream_id)
            .ok_or(VideoError::InvalidStreamId(stream_id))?;
        match stream.encoder_session {
            Some(ref mut session) => {
                if stream.pending_commands.contains(&PendingCommand::Drain) {
                    error!("A pending Drain command already exists.");
                    return Err(VideoError::InvalidOperation);
                }
                stream.pending_commands.insert(PendingCommand::Drain);

                if !stream
                    .pending_commands
                    .contains(&PendingCommand::SrcQueueClear)
                    && !stream
                        .pending_commands
                        .contains(&PendingCommand::DstQueueClear)
                {
                    // If a source or dest QueueClear is underway, a flush has
                    // already been sent.
                    if let Err(e) = session.flush() {
                        error!("Flush failed for stream id {}: {}", stream_id, e);
                    }
                }
                Ok(VideoCmdResponseType::Async(AsyncCmdTag::Drain {
                    stream_id,
                }))
            }
            None => {
                // Return an OK response since nothing has been queued yet.
                Ok(VideoCmdResponseType::Sync(CmdResponse::NoData))
            }
        }
    }

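    /// Register guest resource `resource_id` with queue `queue_type` of stream `stream_id`,
    /// resolving its entries into a `GuestResource`. The encoder session is created lazily here
    /// if it does not exist yet.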
    fn resource_create(
        &mut self,
        wait_ctx: &WaitContext<Token>,
        stream_id: u32,
        queue_type: QueueType,
        resource_id: u32,
        plane_offsets: Vec<u32>,
        plane_entries: Vec<Vec<UnresolvedResourceEntry>>,
    ) -> VideoResult<VideoCmdResponseType> {
        let stream = self
            .streams
            .get_mut(&stream_id)
            .ok_or(VideoError::InvalidStreamId(stream_id))?;

        if !stream.has_encode_session() {
            // No encode session would have been created upon the first
            // QBUF if there was no previous S_FMT call.
            stream.set_encode_session(&mut self.encoder, wait_ctx)?;
        }

        let num_planes = plane_offsets.len();

        // We only support single-buffer resources for now.
        let entries = if plane_entries.len() != 1 {
            return Err(VideoError::InvalidArgument);
        } else {
            // unwrap() is safe because we just tested that `plane_entries` had exactly one element.
            plane_entries.get(0).unwrap()
        };

        match queue_type {
            QueueType::Input => {
                // We currently only support single-buffer formats, but some clients may mistake
                // color planes with memory planes and submit several planes to us. This doesn't
                // matter as we will only consider the first one.
                if num_planes < 1 {
                    return Err(VideoError::InvalidParameter);
                }

                if stream.src_resources.contains_key(&resource_id) {
                    debug!("Replacing source resource with id {}", resource_id);
                }

                let resource = match stream.src_params.resource_type {
                    ResourceType::VirtioObject => {
                        // Virtio object resources only have one entry.
                        if entries.len() != 1 {
                            return Err(VideoError::InvalidArgument);
                        }
                        GuestResource::from_virtio_object_entry(
                            // Safe because we confirmed the correct type for the resource.
                            // unwrap() is also safe here because we just tested above that `entries` had
                            // exactly one element.
                            unsafe { entries.get(0).unwrap().object },
                            &self.resource_bridge,
                            &stream.src_params,
                        )
                        .map_err(|_| VideoError::InvalidArgument)?
                    }
                    ResourceType::GuestPages => GuestResource::from_virtio_guest_mem_entry(
                        // Safe because we confirmed the correct type for the resource.
                        unsafe {
                            std::slice::from_raw_parts(
                                entries.as_ptr() as *const protocol::virtio_video_mem_entry,
                                entries.len(),
                            )
                        },
                        &self.mem,
                        &stream.src_params,
                    )
                    .map_err(|_| VideoError::InvalidArgument)?,
                };

                stream.src_resources.insert(
                    resource_id,
                    InputResource {
                        resource,
                        queue_params: None,
                    },
                );
            }
            QueueType::Output => {
                // Bitstream buffers always have only one plane.
                if num_planes != 1 {
                    return Err(VideoError::InvalidParameter);
                }

                if stream.dst_resources.contains_key(&resource_id) {
                    debug!("Replacing dest resource with id {}", resource_id);
                }

                let resource = match stream.dst_params.resource_type {
                    ResourceType::VirtioObject => {
                        // Virtio object resources only have one entry.
                        if entries.len() != 1 {
                            return Err(VideoError::InvalidArgument);
                        }
                        GuestResource::from_virtio_object_entry(
                            // Safe because we confirmed the correct type for the resource.
                            // unwrap() is also safe here because we just tested above that `entries` had
                            // exactly one element.
                            unsafe { entries.get(0).unwrap().object },
                            &self.resource_bridge,
                            &stream.dst_params,
                        )
                        .map_err(|_| VideoError::InvalidArgument)?
                    }
                    ResourceType::GuestPages => GuestResource::from_virtio_guest_mem_entry(
                        // Safe because we confirmed the correct type for the resource.
                        unsafe {
                            std::slice::from_raw_parts(
                                entries.as_ptr() as *const protocol::virtio_video_mem_entry,
                                entries.len(),
                            )
                        },
                        &self.mem,
                        &stream.dst_params,
                    )
                    .map_err(|_| VideoError::InvalidArgument)?,
                };

                let offset = plane_offsets[0];
                stream.dst_resources.insert(
                    resource_id,
                    OutputResource {
                        resource,
                        offset,
                        queue_params: None,
                    },
                );
            }
        }

        Ok(VideoCmdResponseType::Sync(CmdResponse::NoData))
    }

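    /// Queue resource `resource_id` on `queue_type`: input resources are submitted to the encoder
    /// session for encoding, output resources are handed to the session as bitstream buffers (or
    /// reserved for EOS signalling). Completes asynchronously when the session returns the
    /// buffer.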
    fn resource_queue(
        &mut self,
        stream_id: u32,
        queue_type: QueueType,
        resource_id: u32,
        timestamp: u64,
        data_sizes: Vec<u32>,
    ) -> VideoResult<VideoCmdResponseType> {
        let stream = self
            .streams
            .get_mut(&stream_id)
            .ok_or(VideoError::InvalidStreamId(stream_id))?;

        let encoder_session = match stream.encoder_session {
            Some(ref mut e) => e,
            None => {
                // The encoder session is created on the first ResourceCreate,
                // so it should exist here.
                error!("Encoder session did not exist at resource_queue.");
                return Err(VideoError::InvalidOperation);
            }
        };

        match queue_type {
            QueueType::Input => {
                // We currently only support single-buffer formats, but some clients may mistake
                // color planes with memory planes and submit several planes to us. This doesn't
                // matter as we will only consider the first one.
                if data_sizes.len() < 1 {
                    return Err(VideoError::InvalidParameter);
                }

                let src_resource = stream.src_resources.get_mut(&resource_id).ok_or(
                    VideoError::InvalidResourceId {
                        stream_id,
                        resource_id,
                    },
                )?;

                let force_keyframe = std::mem::replace(&mut stream.force_keyframe, false);

                match encoder_session.encode(
                    src_resource
                        .resource
                        .try_clone()
                        .map_err(|_| VideoError::InvalidArgument)?,
                    timestamp,
                    force_keyframe,
                ) {
                    Ok(input_buffer_id) => {
                        if let Some(last_resource_id) = stream
                            .encoder_input_buffer_ids
                            .insert(input_buffer_id, resource_id)
                        {
                            error!(
                                "encoder input id {} was already mapped to resource id {}",
                                input_buffer_id, last_resource_id
                            );
                            return Err(VideoError::InvalidOperation);
                        }
                        let queue_params = QueuedInputResourceParams {
                            timestamp,
                            in_queue: true,
                        };
                        if let Some(last_queue_params) =
                            src_resource.queue_params.replace(queue_params)
                        {
                            if last_queue_params.in_queue {
                                error!(
                                    "resource {} was already queued ({:?})",
                                    resource_id, last_queue_params
                                );
                                return Err(VideoError::InvalidOperation);
                            }
                        }
                    }
                    Err(e) => {
                        // TODO(alexlau): Return the actual error
                        error!("encode failed: {}", e);
                        return Err(VideoError::InvalidOperation);
                    }
                }
                Ok(VideoCmdResponseType::Async(AsyncCmdTag::Queue {
                    stream_id,
                    queue_type: QueueType::Input,
                    resource_id,
                }))
            }
            QueueType::Output => {
                // Bitstream buffers always have only one plane.
                if data_sizes.len() != 1 {
                    return Err(VideoError::InvalidParameter);
                }

                let dst_resource = stream.dst_resources.get_mut(&resource_id).ok_or(
                    VideoError::InvalidResourceId {
                        stream_id,
                        resource_id,
                    },
                )?;

                // data_sizes is always 0 for output buffers. We should fetch them from the
                // negotiated parameters, although right now the VirtioObject backend uses the
                // buffer's metadata instead.
                let buffer_size = dst_resource.resource.planes[0].size as u32;

                // Stores an output buffer to notify EOS.
                // This is necessary because libvda is unable to indicate EOS along with returned buffers.
                // For now, when a `Flush()` completes, this saved resource will be returned as a zero-sized
                // buffer with the EOS flag.
                if stream.eos_manager.try_reserve_eos_buffer(resource_id) {
                    return Ok(VideoCmdResponseType::Async(AsyncCmdTag::Queue {
                        stream_id,
                        queue_type: QueueType::Output,
                        resource_id,
                    }));
                }

                match encoder_session.use_output_buffer(
                    dst_resource
                        .resource
                        .handle
                        .try_clone()
                        .map_err(|_| VideoError::InvalidParameter)?,
                    dst_resource.offset,
                    buffer_size,
                ) {
                    Ok(output_buffer_id) => {
                        if let Some(last_resource_id) = stream
                            .encoder_output_buffer_ids
                            .insert(output_buffer_id, resource_id)
                        {
                            error!(
                                "encoder output id {} was already mapped to resource id {}",
                                output_buffer_id, last_resource_id
                            );
                        }
                        let queue_params = QueuedOutputResourceParams { in_queue: true };
                        if let Some(last_queue_params) =
                            dst_resource.queue_params.replace(queue_params)
                        {
                            if last_queue_params.in_queue {
                                error!(
                                    "resource {} was already queued ({:?})",
                                    resource_id, last_queue_params
                                );
                            }
                        }
                    }
                    Err(e) => {
                        error!("use_output_buffer failed: {}", e);
                        return Err(VideoError::InvalidOperation);
                    }
                }
                Ok(VideoCmdResponseType::Async(AsyncCmdTag::Queue {
                    stream_id,
                    queue_type: QueueType::Output,
                    resource_id,
                }))
            }
        }
    }

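    /// Drop all resources registered with stream `stream_id` and reset its EOS state.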
    fn resource_destroy_all(&mut self, stream_id: u32) -> VideoResult<VideoCmdResponseType> {
        let stream = self
            .streams
            .get_mut(&stream_id)
            .ok_or(VideoError::InvalidStreamId(stream_id))?;
        stream.src_resources.clear();
        stream.encoder_input_buffer_ids.clear();
        stream.dst_resources.clear();
        stream.encoder_output_buffer_ids.clear();
        stream.eos_manager.reset();
        Ok(VideoCmdResponseType::Sync(CmdResponse::NoData))
    }

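    /// Handle a QueueClear command by marking every resource currently queued on `queue_type` as
    /// no longer in the queue, so that its completion event is ignored when the backend returns
    /// it (see the comment below for why the queue cannot actually be cleared).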
    fn queue_clear(
        &mut self,
        stream_id: u32,
        queue_type: QueueType,
    ) -> VideoResult<VideoCmdResponseType> {
        // Unfortunately, there is no way to clear the queue with VEA.
        // VDA has Reset() which also isn't done on a per-queue basis,
        // but VEA has no such API.
        // Doing a Flush() here and waiting for the flush response is also
        // not an option, because the virtio-video driver expects a prompt
        // response (search for "timed out waiting for queue clear" in
        // virtio_video_enc.c).
        // So for now, we do a Flush(), but also mark each currently
        // queued resource as no longer `in_queue`, and skip them when they
        // are returned.
        // TODO(b/153406792): Support per-queue clearing.
        let stream = self
            .streams
            .get_mut(&stream_id)
            .ok_or(VideoError::InvalidStreamId(stream_id))?;

        match queue_type {
            QueueType::Input => {
                for src_resource in stream.src_resources.values_mut() {
                    if let Some(ref mut queue_params) = src_resource.queue_params {
                        queue_params.in_queue = false;
                    }
                }
            }
            QueueType::Output => {
                for dst_resource in stream.dst_resources.values_mut() {
                    if let Some(ref mut queue_params) = dst_resource.queue_params {
                        queue_params.in_queue = false;
                    }
                }
                stream.eos_manager.reset();
            }
        }
        Ok(VideoCmdResponseType::Sync(CmdResponse::NoData))
    }

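    /// Handle a GetParams command. If an encoder session exists but has not reported its
    /// `RequireInputBuffers` event yet, the reply is deferred until that event arrives so it
    /// carries the final buffer sizes.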
    fn get_params(
        &mut self,
        stream_id: u32,
        queue_type: QueueType,
        is_ext: bool,
    ) -> VideoResult<VideoCmdResponseType> {
        let stream = self
            .streams
            .get_mut(&stream_id)
            .ok_or(VideoError::InvalidStreamId(stream_id))?;

        if stream.encoder_session.is_some() && !stream.received_input_buffers_event {
            // If we haven't yet received a RequireInputBuffers
            // event, we need to wait for that before replying so that
            // the G_FMT response has the correct data.
            let pending_command = match queue_type {
                QueueType::Input => PendingCommand::GetSrcParams { is_ext },
                QueueType::Output => PendingCommand::GetDstParams { is_ext },
            };

            if !stream.pending_commands.insert(pending_command) {
                // There is already a G_FMT call waiting.
                error!("Pending get params call already exists.");
                return Err(VideoError::InvalidOperation);
            }

            Ok(VideoCmdResponseType::Async(AsyncCmdTag::GetParams {
                stream_id,
                queue_type,
            }))
        } else {
            let params = match queue_type {
                QueueType::Input => stream.src_params.clone(),
                QueueType::Output => stream.dst_params.clone(),
            };
            Ok(VideoCmdResponseType::Sync(CmdResponse::GetParams {
                queue_type,
                params,
                is_ext,
            }))
        }
    }

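    /// Handle a SetParams command: update the parameters of `queue_type` and recreate the encoder
    /// session when a change (other than a dynamic framerate update) requires it and no resources
    /// have been queued yet.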
    fn set_params(
        &mut self,
        wait_ctx: &WaitContext<Token>,
        stream_id: u32,
        queue_type: QueueType,
        format: Option<Format>,
        frame_width: u32,
        frame_height: u32,
        frame_rate: u32,
        plane_formats: Vec<PlaneFormat>,
        resource_type: Option<ResourceType>,
    ) -> VideoResult<VideoCmdResponseType> {
        let stream = self
            .streams
            .get_mut(&stream_id)
            .ok_or(VideoError::InvalidStreamId(stream_id))?;

        let mut create_session = stream.encoder_session.is_none();
        // TODO(ishitatsuyuki): We should additionally check that no resources are *attached* while
        //                      a params is being set.
        let src_resources_queued = stream.src_resources.len() > 0;
        let dst_resources_queued = stream.dst_resources.len() > 0;

        // Dynamic framerate changes are allowed. The framerate can be set on either the input or
        // output queue. Changing the framerate can influence the selected H.264 level, as the
        // level might be adjusted to conform to the minimum requirements for the selected bitrate
        // and framerate. As dynamic level changes are not supported we will just recreate the
        // encoder session as long as no resources have been queued yet. If an encoder session is
        // active we will request a dynamic framerate change instead, and it's up to the encoder
        // backend to return an error on invalid requests.
        if stream.dst_params.frame_rate != frame_rate {
            stream.src_params.frame_rate = frame_rate;
            stream.dst_params.frame_rate = frame_rate;
            if let Some(ref mut encoder_session) = stream.encoder_session {
                if !(src_resources_queued || dst_resources_queued) {
                    create_session = true;
                } else if let Err(e) = encoder_session.request_encoding_params_change(
                    stream.dst_bitrate,
                    stream.dst_params.frame_rate,
                ) {
                    error!("failed to dynamically request framerate change: {}", e);
                    return Err(VideoError::InvalidOperation);
                }
            }
        }

        match queue_type {
            QueueType::Input => {
                if stream.src_params.frame_width != frame_width
                    || stream.src_params.frame_height != frame_height
                    || stream.src_params.format != format
                    || stream.src_params.plane_formats != plane_formats
                    || resource_type
                        .map(|resource_type| stream.src_params.resource_type != resource_type)
                        .unwrap_or(false)
                {
                    if src_resources_queued {
                        // Buffers have already been queued and encoding has already started.
                        return Err(VideoError::InvalidOperation);
                    }

                    let desired_format =
                        format.or(stream.src_params.format).unwrap_or(Format::NV12);
                    self.cros_capabilities.populate_src_params(
                        &mut stream.src_params,
                        desired_format,
                        frame_width,
                        frame_height,
                        plane_formats.get(0).map(|fmt| fmt.stride).unwrap_or(0),
                    )?;

                    stream.dst_params.frame_width = frame_width;
                    stream.dst_params.frame_height = frame_height;

                    if let Some(resource_type) = resource_type {
                        stream.src_params.resource_type = resource_type;
                    }

                    create_session = true
                }
            }
            QueueType::Output => {
                if stream.dst_params.format != format
                    || stream.dst_params.plane_formats != plane_formats
                    || resource_type
                        .map(|resource_type| stream.dst_params.resource_type != resource_type)
                        .unwrap_or(false)
                {
                    if dst_resources_queued {
                        // Buffers have already been queued and encoding has already started.
                        return Err(VideoError::InvalidOperation);
                    }

                    let desired_format =
                        format.or(stream.dst_params.format).unwrap_or(Format::H264);

                    // There should be exactly one output buffer.
                    if plane_formats.len() != 1 {
                        return Err(VideoError::InvalidArgument);
                    }

                    self.cros_capabilities.populate_dst_params(
                        &mut stream.dst_params,
                        desired_format,
                        plane_formats[0].plane_size,
                    )?;

                    // Format is always populated for encoder.
                    let new_format = stream
                        .dst_params
                        .format
                        .ok_or(VideoError::InvalidArgument)?;

                    // If the selected profile no longer corresponds to the selected coded format,
                    // reset it.
                    stream.dst_profile = self
                        .cros_capabilities
                        .get_default_profile(&new_format)
                        .ok_or(VideoError::InvalidArgument)?;

                    if new_format == Format::H264 {
                        stream.dst_h264_level = Some(Level::H264_1_0);
                    } else {
                        stream.dst_h264_level = None;
                    }

                    if let Some(resource_type) = resource_type {
                        stream.dst_params.resource_type = resource_type;
                    }

                    create_session = true;
                }
            }
        }

        if create_session {
            // An encoder session has to be created immediately upon a SetParams
            // (S_FMT) call, because we need to receive the RequireInputBuffers
            // callback which has output buffer size info, in order to populate
            // dst_params to have the correct size on subsequent GetParams (G_FMT) calls.
            if stream.encoder_session.is_some() {
                stream.clear_encode_session(wait_ctx)?;
                if !stream.received_input_buffers_event {
                    // This could happen if two SetParams calls are occurring at the same time.
                    // For example, the user calls SetParams for the input queue on one thread,
                    // and a new encode session is created. Then on another thread, SetParams
                    // is called for the output queue before the first SetParams call has returned.
                    // At this point, there is a new EncodeSession being created that has not
                    // yet received a RequireInputBuffers event.
                    // Even if we clear the encoder session and recreate it, this case
                    // is handled because stream.pending_commands will still contain
                    // the waiting GetParams responses, which will then receive fresh data once
                    // the new session's RequireInputBuffers event happens.
                    warn!(
                        "New encoder session being created while waiting for RequireInputBuffers."
                    )
                }
            }
            stream.set_encode_session(&mut self.encoder, wait_ctx)?;
        }
        Ok(VideoCmdResponseType::Sync(CmdResponse::NoData))
    }

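    /// Answer a QueryControl command with the values supported for the requested control
    /// (profiles for a given format, or H.264 levels).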
    fn query_control(&self, query_ctrl_type: QueryCtrlType) -> VideoResult<VideoCmdResponseType> {
        let query_ctrl_response = match query_ctrl_type {
            QueryCtrlType::Profile(format) => match self.cros_capabilities.get_profiles(&format) {
                Some(profiles) => QueryCtrlResponse::Profile(profiles.clone()),
                None => {
                    return Err(VideoError::UnsupportedControl(CtrlType::Profile));
                }
            },
            QueryCtrlType::Level(format) => {
                match format {
                    Format::H264 => QueryCtrlResponse::Level(vec![
                        Level::H264_1_0,
                        Level::H264_1_1,
                        Level::H264_1_2,
                        Level::H264_1_3,
                        Level::H264_2_0,
                        Level::H264_2_1,
                        Level::H264_2_2,
                        Level::H264_3_0,
                        Level::H264_3_1,
                        Level::H264_3_2,
                        Level::H264_4_0,
                        Level::H264_4_1,
                        Level::H264_4_2,
                        Level::H264_5_0,
                        Level::H264_5_1,
                    ]),
                    _ => {
                        // Levels are only supported for H264.
                        return Err(VideoError::UnsupportedControl(CtrlType::Level));
                    }
                }
            }
        };

        Ok(VideoCmdResponseType::Sync(CmdResponse::QueryControl(
            query_ctrl_response,
        )))
    }

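    /// Answer a GetControl command with the current value of `ctrl_type` for stream `stream_id`.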
get_control( &self, stream_id: u32, ctrl_type: CtrlType, ) -> VideoResult<VideoCmdResponseType>1220     fn get_control(
1221         &self,
1222         stream_id: u32,
1223         ctrl_type: CtrlType,
1224     ) -> VideoResult<VideoCmdResponseType> {
1225         let stream = self
1226             .streams
1227             .get(&stream_id)
1228             .ok_or(VideoError::InvalidStreamId(stream_id))?;
1229         let ctrl_val = match ctrl_type {
1230             CtrlType::BitrateMode => CtrlVal::BitrateMode(stream.dst_bitrate.mode()),
1231             CtrlType::Bitrate => CtrlVal::Bitrate(stream.dst_bitrate.target()),
1232             CtrlType::BitratePeak => CtrlVal::BitratePeak(match stream.dst_bitrate {
1233                 Bitrate::Vbr { peak, .. } => peak,
1234                 // For CBR there is no peak, so return the target (which is technically correct).
1235                 Bitrate::Cbr { target } => target,
1236             }),
1237             CtrlType::Profile => CtrlVal::Profile(stream.dst_profile),
1238             CtrlType::Level => {
1239                 let format = stream
1240                     .dst_params
1241                     .format
1242                     .ok_or(VideoError::InvalidArgument)?;
1243                 match format {
1244                     Format::H264 => CtrlVal::Level(stream.dst_h264_level.ok_or_else(|| {
1245                         error!("H264 level not set");
1246                         VideoError::InvalidArgument
1247                     })?),
1248                     _ => {
1249                         return Err(VideoError::UnsupportedControl(ctrl_type));
1250                     }
1251                 }
1252             }
1253             // Button controls should not be queried.
1254             CtrlType::ForceKeyframe => return Err(VideoError::UnsupportedControl(ctrl_type)),
1255             // Prepending SPS and PPS to IDR is always enabled in the libvda backend.
1256             // TODO (b/161495502): account for other backends
1257             CtrlType::PrependSpsPpsToIdr => CtrlVal::PrependSpsPpsToIdr(true),
1258         };
1259         Ok(VideoCmdResponseType::Sync(CmdResponse::GetControl(
1260             ctrl_val,
1261         )))
1262     }
1263 
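         // Applies a SET_CONTROL request to the stream. Some controls can only be changed before
         // any resource has been queued and require recreating the encoder session.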
1264     fn set_control(
1265         &mut self,
1266         wait_ctx: &WaitContext<Token>,
1267         stream_id: u32,
1268         ctrl_val: CtrlVal,
1269     ) -> VideoResult<VideoCmdResponseType> {
1270         let stream = self
1271             .streams
1272             .get_mut(&stream_id)
1273             .ok_or(VideoError::InvalidStreamId(stream_id))?;
1274         let mut recreate_session = false;
1275         let resources_queued = !stream.src_resources.is_empty() || !stream.dst_resources.is_empty();
1276 
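             // Bitrate and peak bitrate changes are forwarded to a running session via
             // request_encoding_params_change(); bitrate mode, profile and level changes are only
             // accepted before any resource is queued and mark the session for recreation.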
1277         match ctrl_val {
1278             CtrlVal::BitrateMode(bitrate_mode) => {
1279                 if stream.dst_bitrate.mode() != bitrate_mode {
1280                     if resources_queued {
1281                         error!("set control called for bitrate mode but already encoding.");
1282                         return Err(VideoError::InvalidOperation);
1283                     }
1284                     stream.dst_bitrate = match bitrate_mode {
1285                         BitrateMode::Cbr => Bitrate::Cbr {
1286                             target: stream.dst_bitrate.target(),
1287                         },
1288                         BitrateMode::Vbr => Bitrate::Vbr {
1289                             target: stream.dst_bitrate.target(),
1290                             peak: stream.dst_bitrate.target(),
1291                         },
1292                     };
1293                     recreate_session = true;
1294                 }
1295             }
1296             CtrlVal::Bitrate(bitrate) => {
1297                 if stream.dst_bitrate.target() != bitrate {
1298                     let mut new_bitrate = stream.dst_bitrate;
1299                     match &mut new_bitrate {
1300                         Bitrate::Cbr { target } | Bitrate::Vbr { target, .. } => *target = bitrate,
1301                     }
1302                     if let Some(ref mut encoder_session) = stream.encoder_session {
1303                         if let Err(e) = encoder_session.request_encoding_params_change(
1304                             new_bitrate,
1305                             stream.dst_params.frame_rate,
1306                         ) {
1307                             error!("failed to dynamically request target bitrate change: {}", e);
1308                             return Err(VideoError::InvalidOperation);
1309                         }
1310                     }
1311                     stream.dst_bitrate = new_bitrate;
1312                 }
1313             }
1314             CtrlVal::BitratePeak(bitrate) => {
1315                 match stream.dst_bitrate {
1316                     Bitrate::Vbr { peak, .. } => {
1317                         if peak != bitrate {
1318                             let new_bitrate = Bitrate::Vbr {
1319                                 target: stream.dst_bitrate.target(),
1320                                 peak: bitrate,
1321                             };
1322                             if let Some(ref mut encoder_session) = stream.encoder_session {
1323                                 if let Err(e) = encoder_session.request_encoding_params_change(
1324                                     new_bitrate,
1325                                     stream.dst_params.frame_rate,
1326                                 ) {
1327                                     error!(
1328                                         "failed to dynamically request peak bitrate change: {}",
1329                                         e
1330                                     );
1331                                     return Err(VideoError::InvalidOperation);
1332                                 }
1333                             }
1334                             stream.dst_bitrate = new_bitrate;
1335                         }
1336                     }
1337                     // Setting the peak bitrate while in constant bitrate mode is not an
1338                     // error; the request is simply ignored.
1339                     Bitrate::Cbr { .. } => {}
1340                 }
1341             }
1342             CtrlVal::Profile(profile) => {
1343                 if stream.dst_profile != profile {
1344                     if resources_queued {
1345                         error!("set control called for profile but already encoding.");
1346                         return Err(VideoError::InvalidOperation);
1347                     }
1348                     let format = stream
1349                         .dst_params
1350                         .format
1351                         .ok_or(VideoError::InvalidArgument)?;
1352                     if format != profile.to_format() {
1353                         error!(
1354                             "specified profile does not correspond to the selected format ({})",
1355                             format
1356                         );
1357                         return Err(VideoError::InvalidOperation);
1358                     }
1359                     stream.dst_profile = profile;
1360                     recreate_session = true;
1361                 }
1362             }
1363             CtrlVal::Level(level) => {
1364                 if stream.dst_h264_level != Some(level) {
1365                     if resources_queued {
1366                         error!("set control called for level but already encoding.");
1367                         return Err(VideoError::InvalidOperation);
1368                     }
1369                     let format = stream
1370                         .dst_params
1371                         .format
1372                         .ok_or(VideoError::InvalidArgument)?;
1373                     if format != Format::H264 {
1374                         error!(
1375                             "set control called for level but format is not H264 ({})",
1376                             format
1377                         );
1378                         return Err(VideoError::InvalidOperation);
1379                     }
1380                     stream.dst_h264_level = Some(level);
1381                     recreate_session = true;
1382                 }
1383             }
1384             CtrlVal::ForceKeyframe => {
1385                 stream.force_keyframe = true;
1386             }
1387             CtrlVal::PrependSpsPpsToIdr(prepend_sps_pps_to_idr) => {
1388                 // Prepending SPS and PPS to IDR is always enabled in the libvda backend,
1389                 // so disabling it will always fail.
1390                 // TODO (b/161495502): account for other backends
1391                 if !prepend_sps_pps_to_idr {
1392                     return Err(VideoError::InvalidOperation);
1393                 }
1394             }
1395         }
1396 
1397         // No resources are queued yet (checked above), so the session can safely be recreated.
1398         if recreate_session && stream.encoder_session.is_some() {
1399             stream.clear_encode_session(wait_ctx)?;
1400             stream.set_encode_session(&mut self.encoder, wait_ctx)?;
1401         }
1402 
1403         Ok(VideoCmdResponseType::Sync(CmdResponse::SetControl))
1404     }
1405 }
1406 
1407 impl<T: Encoder> Device for EncoderDevice<T> {
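         // Dispatches a virtio-video command to its handler and returns the synchronous response,
         // plus any event responses produced as a side effect (e.g. completing a pending flush
         // when an output resource is queued).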
1408     fn process_cmd(
1409         &mut self,
1410         req: VideoCmd,
1411         wait_ctx: &WaitContext<Token>,
1412     ) -> (
1413         VideoCmdResponseType,
1414         Option<(u32, Vec<VideoEvtResponseType>)>,
1415     ) {
1416         let mut event_ret = None;
1417         let cmd_response = match req {
1418             VideoCmd::QueryCapability { queue_type } => self.query_capabilities(queue_type),
1419             VideoCmd::StreamCreate {
1420                 stream_id,
1421                 coded_format: desired_format,
1422                 input_resource_type,
1423                 output_resource_type,
1424             } => self.stream_create(
1425                 stream_id,
1426                 desired_format,
1427                 input_resource_type,
1428                 output_resource_type,
1429             ),
1430             VideoCmd::StreamDestroy { stream_id } => self.stream_destroy(stream_id),
1431             VideoCmd::StreamDrain { stream_id } => self.stream_drain(stream_id),
1432             VideoCmd::ResourceCreate {
1433                 stream_id,
1434                 queue_type,
1435                 resource_id,
1436                 plane_offsets,
1437                 plane_entries,
1438             } => self.resource_create(
1439                 wait_ctx,
1440                 stream_id,
1441                 queue_type,
1442                 resource_id,
1443                 plane_offsets,
1444                 plane_entries,
1445             ),
1446             VideoCmd::ResourceQueue {
1447                 stream_id,
1448                 queue_type,
1449                 resource_id,
1450                 timestamp,
1451                 data_sizes,
1452             } => {
1453                 let resp =
1454                     self.resource_queue(stream_id, queue_type, resource_id, timestamp, data_sizes);
1455 
1456                 if resp.is_ok() && queue_type == QueueType::Output {
1457                     if let Some(stream) = self.streams.get_mut(&stream_id) {
1458                         // If we have a flush pending, add the response for dequeueing the EOS
1459                         // buffer.
1460                         if stream.eos_manager.client_awaits_eos {
1461                             info!(
1462                                 "stream {}: using queued buffer as EOS for pending flush",
1463                                 stream_id
1464                             );
1465                             event_ret = match stream.eos_manager.try_complete_eos(vec![]) {
1466                                 Some(eos_resps) => Some((stream_id, eos_resps)),
1467                                 None => {
1468                                     error!("stream {}: try_complete_eos() should have returned a valid response. This is a bug.", stream_id);
1469                                     Some((
1470                                         stream_id,
1471                                         vec![VideoEvtResponseType::Event(VideoEvt {
1472                                             typ: EvtType::Error,
1473                                             stream_id,
1474                                         })],
1475                                     ))
1476                                 }
1477                             };
1478                         }
1479                     } else {
1480                         error!(
1481                             "stream {}: the stream ID should be valid here. This is a bug.",
1482                             stream_id
1483                         );
1484                         event_ret = Some((
1485                             stream_id,
1486                             vec![VideoEvtResponseType::Event(VideoEvt {
1487                                 typ: EvtType::Error,
1488                                 stream_id,
1489                             })],
1490                         ));
1491                     }
1492                 }
1493 
1494                 resp
1495             }
1496             VideoCmd::ResourceDestroyAll { stream_id, .. } => self.resource_destroy_all(stream_id),
1497             VideoCmd::QueueClear {
1498                 stream_id,
1499                 queue_type,
1500             } => self.queue_clear(stream_id, queue_type),
1501             VideoCmd::GetParams {
1502                 stream_id,
1503                 queue_type,
1504                 is_ext,
1505             } => self.get_params(stream_id, queue_type, is_ext),
1506             VideoCmd::SetParams {
1507                 stream_id,
1508                 queue_type,
1509                 params:
1510                     Params {
1511                         format,
1512                         frame_width,
1513                         frame_height,
1514                         frame_rate,
1515                         plane_formats,
1516                         resource_type,
1517                         ..
1518                     },
1519                 is_ext,
1520             } => self.set_params(
1521                 wait_ctx,
1522                 stream_id,
1523                 queue_type,
1524                 format,
1525                 frame_width,
1526                 frame_height,
1527                 frame_rate,
1528                 plane_formats,
1529                 if is_ext { Some(resource_type) } else { None },
1530             ),
1531             VideoCmd::QueryControl { query_ctrl_type } => self.query_control(query_ctrl_type),
1532             VideoCmd::GetControl {
1533                 stream_id,
1534                 ctrl_type,
1535             } => self.get_control(stream_id, ctrl_type),
1536             VideoCmd::SetControl {
1537                 stream_id,
1538                 ctrl_val,
1539             } => self.set_control(wait_ctx, stream_id, ctrl_val),
1540         };
1541         let cmd_ret = match cmd_response {
1542             Ok(r) => r,
1543             Err(e) => {
1544                 error!("returning error response: {}", &e);
1545                 VideoCmdResponseType::Sync(e.into())
1546             }
1547         };
1548         (cmd_ret, event_ret)
1549     }
1550 
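         // Reads the next event from the stream's encoder session and translates it into
         // virtio-video event responses for the guest.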
1551     fn process_event(
1552         &mut self,
1553         _desc_map: &mut AsyncCmdDescMap,
1554         stream_id: u32,
1555     ) -> Option<Vec<VideoEvtResponseType>> {
1556         let stream = match self.streams.get_mut(&stream_id) {
1557             Some(s) => s,
1558             None => {
1559                 // TODO: remove fd from poll context?
1560                 error!("Received event for missing stream id {}", stream_id);
1561                 return None;
1562             }
1563         };
1564 
1565         let encoder_session = match stream.encoder_session {
1566             Some(ref mut s) => s,
1567             None => {
1568                 error!(
1569                     "Received event for missing encoder session of stream id {}",
1570                     stream_id
1571                 );
1572                 return None;
1573             }
1574         };
1575 
1576         let event = match encoder_session.read_event() {
1577             Ok(e) => e,
1578             Err(e) => {
1579                 error!("Failed to read event for stream id {}: {}", stream_id, e);
1580                 return None;
1581             }
1582         };
1583 
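             // Each encoder event is handled by the corresponding per-stream method, which builds
             // the event responses (if any) to return.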
1584         match event {
1585             EncoderEvent::RequireInputBuffers {
1586                 input_count,
1587                 input_frame_width,
1588                 input_frame_height,
1589                 output_buffer_size,
1590             } => stream.require_input_buffers(
1591                 input_count,
1592                 input_frame_width,
1593                 input_frame_height,
1594                 output_buffer_size,
1595             ),
1596             EncoderEvent::ProcessedInputBuffer {
1597                 id: input_buffer_id,
1598             } => stream.processed_input_buffer(input_buffer_id),
1599             EncoderEvent::ProcessedOutputBuffer {
1600                 id: output_buffer_id,
1601                 bytesused,
1602                 keyframe,
1603                 timestamp,
1604             } => stream.processed_output_buffer(output_buffer_id, bytesused, keyframe, timestamp),
1605             EncoderEvent::FlushResponse { flush_done } => stream.flush_response(flush_done),
1606             EncoderEvent::NotifyError { error } => stream.notify_error(error),
1607         }
1608     }
1609 }
1610