• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2020 The Chromium OS Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
//! Implementation of the `Encoder` struct, which is responsible for translation between the
//! virtio protocols and LibVDA APIs.
7 
8 pub mod backend;
9 mod encoder;
10 
11 use base::{error, info, warn, Tube, WaitContext};
12 use std::collections::{BTreeMap, BTreeSet};
13 use vm_memory::GuestMemory;
14 
15 use crate::virtio::video::async_cmd_desc_map::AsyncCmdDescMap;
16 use crate::virtio::video::command::{QueueType, VideoCmd};
17 use crate::virtio::video::control::*;
18 use crate::virtio::video::device::VideoCmdResponseType;
19 use crate::virtio::video::device::{
20     AsyncCmdResponse, AsyncCmdTag, Device, Token, VideoEvtResponseType,
21 };
22 use crate::virtio::video::encoder::encoder::{
23     EncoderEvent, InputBufferId, OutputBufferId, SessionConfig,
24 };
25 use crate::virtio::video::error::*;
26 use crate::virtio::video::event::{EvtType, VideoEvt};
27 use crate::virtio::video::format::{Bitrate, BitrateMode, Format, Level, PlaneFormat, Profile};
28 use crate::virtio::video::params::Params;
29 use crate::virtio::video::protocol;
30 use crate::virtio::video::resource::*;
31 use crate::virtio::video::response::CmdResponse;
32 use crate::virtio::video::EosBufferManager;
33 use backend::*;
34 
35 #[derive(Debug)]
36 struct QueuedInputResourceParams {
37     encoder_id: InputBufferId,
38     timestamp: u64,
39     in_queue: bool,
40 }
41 
42 struct InputResource {
43     resource: GuestResource,
44     queue_params: Option<QueuedInputResourceParams>,
45 }
46 
47 #[derive(Debug)]
48 struct QueuedOutputResourceParams {
49     encoder_id: OutputBufferId,
50     timestamp: u64,
51     in_queue: bool,
52 }
53 
54 struct OutputResource {
55     resource: GuestResource,
56     offset: u32,
57     queue_params: Option<QueuedOutputResourceParams>,
58 }
59 
/// Commands whose responses are deferred until a matching encoder event arrives.
///
/// Stored in a `BTreeSet`, so the derived `Ord`/`Eq`/`Hash` impls define command identity:
/// a `GetSrcParams { is_ext: false }` and a `GetSrcParams { is_ext: true }` are distinct
/// pending entries.
#[derive(Debug, PartialEq, Eq, Hash, Ord, PartialOrd)]
enum PendingCommand {
    // TODO(b/193202566): remove this is_ext parameter throughout the code along with
    // support for the old GET_PARAMS and SET_PARAMS commands.
    GetSrcParams { is_ext: bool },
    GetDstParams { is_ext: bool },
    Drain,
    SrcQueueClear,
    DstQueueClear,
}
70 
71 struct Stream<T: EncoderSession> {
72     id: u32,
73     src_params: Params,
74     dst_params: Params,
75     dst_bitrate: Bitrate,
76     dst_profile: Profile,
77     dst_h264_level: Option<Level>,
78     force_keyframe: bool,
79 
80     encoder_session: Option<T>,
81     received_input_buffers_event: bool,
82 
83     src_resources: BTreeMap<u32, InputResource>,
84     encoder_input_buffer_ids: BTreeMap<InputBufferId, u32>,
85 
86     dst_resources: BTreeMap<u32, OutputResource>,
87     encoder_output_buffer_ids: BTreeMap<OutputBufferId, u32>,
88 
89     pending_commands: BTreeSet<PendingCommand>,
90     eos_manager: EosBufferManager,
91 }
92 
93 impl<T: EncoderSession> Stream<T> {
new<E: Encoder<Session = T>>( id: u32, src_resource_type: ResourceType, dst_resource_type: ResourceType, desired_format: Format, encoder: &EncoderDevice<E>, ) -> VideoResult<Self>94     fn new<E: Encoder<Session = T>>(
95         id: u32,
96         src_resource_type: ResourceType,
97         dst_resource_type: ResourceType,
98         desired_format: Format,
99         encoder: &EncoderDevice<E>,
100     ) -> VideoResult<Self> {
101         const MIN_BUFFERS: u32 = 1;
102         const MAX_BUFFERS: u32 = 342;
103         const DEFAULT_WIDTH: u32 = 640;
104         const DEFAULT_HEIGHT: u32 = 480;
105         const DEFAULT_BITRATE_TARGET: u32 = 6000;
106         const DEFAULT_BITRATE_PEAK: u32 = DEFAULT_BITRATE_TARGET * 2;
107         const DEFAULT_BITRATE: Bitrate = Bitrate::VBR {
108             target: DEFAULT_BITRATE_TARGET,
109             peak: DEFAULT_BITRATE_PEAK,
110         };
111         const DEFAULT_BUFFER_SIZE: u32 = 2097152; // 2MB; chosen empirically for 1080p video
112         const DEFAULT_FPS: u32 = 30;
113 
114         let mut src_params = Params {
115             frame_rate: DEFAULT_FPS,
116             min_buffers: MIN_BUFFERS,
117             max_buffers: MAX_BUFFERS,
118             resource_type: src_resource_type,
119             ..Default::default()
120         };
121 
122         let cros_capabilities = &encoder.cros_capabilities;
123 
124         cros_capabilities
125             .populate_src_params(
126                 &mut src_params,
127                 Format::NV12,
128                 DEFAULT_WIDTH,
129                 DEFAULT_HEIGHT,
130                 0,
131             )
132             .map_err(|_| VideoError::InvalidArgument)?;
133 
134         let mut dst_params = Params {
135             resource_type: dst_resource_type,
136             frame_rate: DEFAULT_FPS,
137             frame_width: DEFAULT_WIDTH,
138             frame_height: DEFAULT_HEIGHT,
139             ..Default::default()
140         };
141 
142         // In order to support requesting encoder params change, we must know the default frame
143         // rate, because VEA's request_encoding_params_change requires both framerate and
144         // bitrate to be specified.
145         cros_capabilities
146             .populate_dst_params(&mut dst_params, desired_format, DEFAULT_BUFFER_SIZE)
147             .map_err(|_| VideoError::InvalidArgument)?;
148         // `format` is an Option since for the decoder, it is not populated until decoding has
149         // started. for encoder, format should always be populated.
150         let dest_format = dst_params.format.ok_or(VideoError::InvalidArgument)?;
151 
152         let dst_profile = cros_capabilities
153             .get_default_profile(&dest_format)
154             .ok_or(VideoError::InvalidArgument)?;
155 
156         let dst_h264_level = if dest_format == Format::H264 {
157             Some(Level::H264_1_0)
158         } else {
159             None
160         };
161 
162         Ok(Self {
163             id,
164             src_params,
165             dst_params,
166             dst_bitrate: DEFAULT_BITRATE,
167             dst_profile,
168             dst_h264_level,
169             force_keyframe: false,
170             encoder_session: None,
171             received_input_buffers_event: false,
172             src_resources: Default::default(),
173             encoder_input_buffer_ids: Default::default(),
174             dst_resources: Default::default(),
175             encoder_output_buffer_ids: Default::default(),
176             pending_commands: Default::default(),
177             eos_manager: EosBufferManager::new(id),
178         })
179     }
180 
has_encode_session(&self) -> bool181     fn has_encode_session(&self) -> bool {
182         self.encoder_session.is_some()
183     }
184 
set_encode_session<U: Encoder<Session = T>>( &mut self, encoder: &mut U, wait_ctx: &WaitContext<Token>, ) -> VideoResult<()>185     fn set_encode_session<U: Encoder<Session = T>>(
186         &mut self,
187         encoder: &mut U,
188         wait_ctx: &WaitContext<Token>,
189     ) -> VideoResult<()> {
190         if self.encoder_session.is_some() {
191             error!(
192                 "stream {}: tried to add encode session when one already exists.",
193                 self.id
194             );
195             return Err(VideoError::InvalidOperation);
196         }
197 
198         let new_session = encoder
199             .start_session(SessionConfig {
200                 src_params: self.src_params.clone(),
201                 dst_params: self.dst_params.clone(),
202                 dst_profile: self.dst_profile,
203                 dst_bitrate: self.dst_bitrate,
204                 dst_h264_level: self.dst_h264_level,
205                 frame_rate: self.dst_params.frame_rate,
206             })
207             .map_err(|_| VideoError::InvalidOperation)?;
208 
209         let event_pipe = new_session.event_pipe();
210 
211         wait_ctx
212             .add(event_pipe, Token::Event { id: self.id })
213             .map_err(|e| {
214                 error!(
215                     "stream {}: failed to add FD to poll context: {}",
216                     self.id, e
217                 );
218                 VideoError::InvalidOperation
219             })?;
220         self.encoder_session.replace(new_session);
221         self.received_input_buffers_event = false;
222         Ok(())
223     }
224 
clear_encode_session(&mut self, wait_ctx: &WaitContext<Token>) -> VideoResult<()>225     fn clear_encode_session(&mut self, wait_ctx: &WaitContext<Token>) -> VideoResult<()> {
226         if let Some(session) = self.encoder_session.take() {
227             let event_pipe = session.event_pipe();
228             wait_ctx.delete(event_pipe).map_err(|e| {
229                 error!(
230                     "stream: {}: failed to remove fd from poll context: {}",
231                     self.id, e
232                 );
233                 VideoError::InvalidOperation
234             })?;
235         }
236         Ok(())
237     }
238 
require_input_buffers( &mut self, input_count: u32, input_frame_width: u32, input_frame_height: u32, output_buffer_size: u32, ) -> Option<Vec<VideoEvtResponseType>>239     fn require_input_buffers(
240         &mut self,
241         input_count: u32,
242         input_frame_width: u32,
243         input_frame_height: u32,
244         output_buffer_size: u32,
245     ) -> Option<Vec<VideoEvtResponseType>> {
246         // TODO(alexlau): Does this always arrive after start_session,
247         // but before the first encode call?
248         // TODO(alexlau): set plane info from input_frame_width and input_frame_height
249         self.src_params.min_buffers = input_count;
250         self.src_params.max_buffers = 32;
251         self.src_params.frame_width = input_frame_width;
252         self.src_params.frame_height = input_frame_height;
253         self.dst_params.plane_formats[0].plane_size = output_buffer_size;
254         self.received_input_buffers_event = true;
255 
256         let mut responses = vec![];
257 
258         // Respond to any GetParams commands that were waiting.
259         let pending_get_src_params = if self
260             .pending_commands
261             .remove(&PendingCommand::GetSrcParams { is_ext: false })
262         {
263             Some(false)
264         } else if self
265             .pending_commands
266             .remove(&PendingCommand::GetSrcParams { is_ext: true })
267         {
268             Some(true)
269         } else {
270             None
271         };
272         if let Some(is_ext) = pending_get_src_params {
273             responses.push(VideoEvtResponseType::AsyncCmd(
274                 AsyncCmdResponse::from_response(
275                     AsyncCmdTag::GetParams {
276                         stream_id: self.id,
277                         queue_type: QueueType::Input,
278                     },
279                     CmdResponse::GetParams {
280                         queue_type: QueueType::Input,
281                         params: self.src_params.clone(),
282                         is_ext,
283                     },
284                 ),
285             ));
286         }
287         let pending_get_dst_params = if self
288             .pending_commands
289             .remove(&PendingCommand::GetDstParams { is_ext: false })
290         {
291             Some(false)
292         } else if self
293             .pending_commands
294             .remove(&PendingCommand::GetDstParams { is_ext: true })
295         {
296             Some(true)
297         } else {
298             None
299         };
300         if let Some(is_ext) = pending_get_dst_params {
301             responses.push(VideoEvtResponseType::AsyncCmd(
302                 AsyncCmdResponse::from_response(
303                     AsyncCmdTag::GetParams {
304                         stream_id: self.id,
305                         queue_type: QueueType::Output,
306                     },
307                     CmdResponse::GetParams {
308                         queue_type: QueueType::Output,
309                         params: self.dst_params.clone(),
310                         is_ext,
311                     },
312                 ),
313             ));
314         }
315 
316         if responses.len() > 0 {
317             Some(responses)
318         } else {
319             None
320         }
321     }
322 
processed_input_buffer( &mut self, input_buffer_id: InputBufferId, ) -> Option<Vec<VideoEvtResponseType>>323     fn processed_input_buffer(
324         &mut self,
325         input_buffer_id: InputBufferId,
326     ) -> Option<Vec<VideoEvtResponseType>> {
327         let resource_id = *match self.encoder_input_buffer_ids.get(&input_buffer_id) {
328             Some(id) => id,
329             None => {
330                 warn!("Received processed input buffer event for input buffer id {}, but missing resource, ResourceDestroyAll?", input_buffer_id);
331                 return None;
332             }
333         };
334 
335         let resource = match self.src_resources.get_mut(&resource_id) {
336             Some(r) => r,
337             None => {
338                 error!(
339                     "Received processed input buffer event but missing resource with id {}",
340                     resource_id
341                 );
342                 return None;
343             }
344         };
345 
346         let queue_params = match resource.queue_params.take() {
347             Some(p) => p,
348             None => {
349                 error!(
350                     "Received processed input buffer event but resource with id {} was not queued.",
351                     resource_id
352                 );
353                 return None;
354             }
355         };
356 
357         if !queue_params.in_queue {
358             // A QueueClear command occurred after this buffer was queued.
359             return None;
360         }
361 
362         let tag = AsyncCmdTag::Queue {
363             stream_id: self.id,
364             queue_type: QueueType::Input,
365             resource_id,
366         };
367 
368         let resp = CmdResponse::ResourceQueue {
369             timestamp: queue_params.timestamp,
370             flags: 0,
371             size: 0,
372         };
373 
374         Some(vec![VideoEvtResponseType::AsyncCmd(
375             AsyncCmdResponse::from_response(tag, resp),
376         )])
377     }
378 
processed_output_buffer( &mut self, output_buffer_id: OutputBufferId, bytesused: u32, keyframe: bool, timestamp: u64, ) -> Option<Vec<VideoEvtResponseType>>379     fn processed_output_buffer(
380         &mut self,
381         output_buffer_id: OutputBufferId,
382         bytesused: u32,
383         keyframe: bool,
384         timestamp: u64,
385     ) -> Option<Vec<VideoEvtResponseType>> {
386         let resource_id = *match self.encoder_output_buffer_ids.get(&output_buffer_id) {
387             Some(id) => id,
388             None => {
389                 warn!("Received processed output buffer event for output buffer id {}, but missing resource, ResourceDestroyAll?", output_buffer_id);
390                 return None;
391             }
392         };
393 
394         let resource = match self.dst_resources.get_mut(&resource_id) {
395             Some(r) => r,
396             None => {
397                 error!(
398                     "Received processed output buffer event but missing resource with id {}",
399                     resource_id
400                 );
401                 return None;
402             }
403         };
404 
405         let queue_params = match resource.queue_params.take() {
406             Some(p) => p,
407             None => {
408                 error!("Received processed output buffer event but resource with id {} was not queued.", resource_id);
409                 return None;
410             }
411         };
412 
413         if !queue_params.in_queue {
414             // A QueueClear command occurred after this buffer was queued.
415             return None;
416         }
417 
418         let tag = AsyncCmdTag::Queue {
419             stream_id: self.id,
420             queue_type: QueueType::Output,
421             resource_id,
422         };
423 
424         let resp = CmdResponse::ResourceQueue {
425             timestamp,
426             // At the moment, a buffer is saved in `eos_notification_buffer`, and
427             // the EOS flag is populated and returned after a flush() command.
428             // TODO(b/149725148): Populate flags once libvda supports it.
429             flags: if keyframe {
430                 protocol::VIRTIO_VIDEO_BUFFER_FLAG_IFRAME
431             } else {
432                 0
433             },
434             size: bytesused,
435         };
436 
437         Some(vec![VideoEvtResponseType::AsyncCmd(
438             AsyncCmdResponse::from_response(tag, resp),
439         )])
440     }
441 
flush_response(&mut self, flush_done: bool) -> Option<Vec<VideoEvtResponseType>>442     fn flush_response(&mut self, flush_done: bool) -> Option<Vec<VideoEvtResponseType>> {
443         let command_response = if flush_done {
444             CmdResponse::NoData
445         } else {
446             error!("Flush could not be completed for stream {}", self.id);
447             VideoError::InvalidOperation.into()
448         };
449 
450         let mut async_responses = vec![];
451 
452         // First gather the responses for all completed commands.
453         if self.pending_commands.remove(&PendingCommand::Drain) {
454             async_responses.push(VideoEvtResponseType::AsyncCmd(
455                 AsyncCmdResponse::from_response(
456                     AsyncCmdTag::Drain { stream_id: self.id },
457                     command_response.clone(),
458                 ),
459             ));
460         }
461 
462         if self.pending_commands.remove(&PendingCommand::SrcQueueClear) {
463             async_responses.push(VideoEvtResponseType::AsyncCmd(
464                 AsyncCmdResponse::from_response(
465                     AsyncCmdTag::Clear {
466                         stream_id: self.id,
467                         queue_type: QueueType::Input,
468                     },
469                     command_response.clone(),
470                 ),
471             ));
472         }
473 
474         if self.pending_commands.remove(&PendingCommand::DstQueueClear) {
475             async_responses.push(VideoEvtResponseType::AsyncCmd(
476                 AsyncCmdResponse::from_response(
477                     AsyncCmdTag::Clear {
478                         stream_id: self.id,
479                         queue_type: QueueType::Output,
480                     },
481                     command_response,
482                 ),
483             ));
484         }
485 
486         // Then add the EOS buffer to the responses if it is available.
487         self.eos_manager.try_complete_eos(async_responses)
488     }
489 
490     #[allow(clippy::unnecessary_wraps)]
notify_error(&self, error: VideoError) -> Option<Vec<VideoEvtResponseType>>491     fn notify_error(&self, error: VideoError) -> Option<Vec<VideoEvtResponseType>> {
492         error!(
493             "Received encoder error event for stream {}: {}",
494             self.id, error
495         );
496         Some(vec![VideoEvtResponseType::Event(VideoEvt {
497             typ: EvtType::Error,
498             stream_id: self.id,
499         })])
500     }
501 }
502 
503 pub struct EncoderDevice<T: Encoder> {
504     cros_capabilities: encoder::EncoderCapabilities,
505     encoder: T,
506     streams: BTreeMap<u32, Stream<T::Session>>,
507     resource_bridge: Tube,
508     mem: GuestMemory,
509 }
510 
511 impl<T: Encoder> EncoderDevice<T> {
512     /// Build a new encoder using the provided `backend`.
new(backend: T, resource_bridge: Tube, mem: GuestMemory) -> VideoResult<Self>513     pub fn new(backend: T, resource_bridge: Tube, mem: GuestMemory) -> VideoResult<Self> {
514         Ok(Self {
515             cros_capabilities: backend.query_capabilities()?,
516             encoder: backend,
517             streams: Default::default(),
518             resource_bridge,
519             mem,
520         })
521     }
522 
523     #[allow(clippy::unnecessary_wraps)]
query_capabilities(&self, queue_type: QueueType) -> VideoResult<VideoCmdResponseType>524     fn query_capabilities(&self, queue_type: QueueType) -> VideoResult<VideoCmdResponseType> {
525         let descs = match queue_type {
526             QueueType::Input => self.cros_capabilities.input_format_descs.clone(),
527             QueueType::Output => self.cros_capabilities.output_format_descs.clone(),
528         };
529         Ok(VideoCmdResponseType::Sync(CmdResponse::QueryCapability(
530             descs,
531         )))
532     }
533 
stream_create( &mut self, stream_id: u32, desired_format: Format, src_resource_type: ResourceType, dst_resource_type: ResourceType, ) -> VideoResult<VideoCmdResponseType>534     fn stream_create(
535         &mut self,
536         stream_id: u32,
537         desired_format: Format,
538         src_resource_type: ResourceType,
539         dst_resource_type: ResourceType,
540     ) -> VideoResult<VideoCmdResponseType> {
541         if self.streams.contains_key(&stream_id) {
542             return Err(VideoError::InvalidStreamId(stream_id));
543         }
544         let new_stream = Stream::new(
545             stream_id,
546             src_resource_type,
547             dst_resource_type,
548             desired_format,
549             self,
550         )?;
551 
552         self.streams.insert(stream_id, new_stream);
553         Ok(VideoCmdResponseType::Sync(CmdResponse::NoData))
554     }
555 
stream_destroy(&mut self, stream_id: u32) -> VideoResult<VideoCmdResponseType>556     fn stream_destroy(&mut self, stream_id: u32) -> VideoResult<VideoCmdResponseType> {
557         let mut stream = self
558             .streams
559             .remove(&stream_id)
560             .ok_or(VideoError::InvalidStreamId(stream_id))?;
561         // TODO(alexlau): Handle resources that have been queued.
562         if let Some(session) = stream.encoder_session.take() {
563             if let Err(e) = self.encoder.stop_session(session) {
564                 error!("Failed to stop encode session {}: {}", stream_id, e);
565             }
566         }
567         Ok(VideoCmdResponseType::Sync(CmdResponse::NoData))
568     }
569 
stream_drain(&mut self, stream_id: u32) -> VideoResult<VideoCmdResponseType>570     fn stream_drain(&mut self, stream_id: u32) -> VideoResult<VideoCmdResponseType> {
571         let stream = self
572             .streams
573             .get_mut(&stream_id)
574             .ok_or(VideoError::InvalidStreamId(stream_id))?;
575         match stream.encoder_session {
576             Some(ref mut session) => {
577                 if stream.pending_commands.contains(&PendingCommand::Drain) {
578                     error!("A pending Drain command already exists.");
579                     return Err(VideoError::InvalidOperation);
580                 }
581                 stream.pending_commands.insert(PendingCommand::Drain);
582 
583                 if !stream
584                     .pending_commands
585                     .contains(&PendingCommand::SrcQueueClear)
586                     && !stream
587                         .pending_commands
588                         .contains(&PendingCommand::DstQueueClear)
589                 {
590                     // If a source or dest QueueClear is underway, a flush has
591                     // already been sent.
592                     if let Err(e) = session.flush() {
593                         error!("Flush failed for stream id {}: {}", stream_id, e);
594                     }
595                 }
596                 Ok(VideoCmdResponseType::Async(AsyncCmdTag::Drain {
597                     stream_id,
598                 }))
599             }
600             None => {
601                 // Return an OK response since nothing has been queued yet.
602                 Ok(VideoCmdResponseType::Sync(CmdResponse::NoData))
603             }
604         }
605     }
606 
resource_create( &mut self, wait_ctx: &WaitContext<Token>, stream_id: u32, queue_type: QueueType, resource_id: u32, plane_offsets: Vec<u32>, plane_entries: Vec<Vec<UnresolvedResourceEntry>>, ) -> VideoResult<VideoCmdResponseType>607     fn resource_create(
608         &mut self,
609         wait_ctx: &WaitContext<Token>,
610         stream_id: u32,
611         queue_type: QueueType,
612         resource_id: u32,
613         plane_offsets: Vec<u32>,
614         plane_entries: Vec<Vec<UnresolvedResourceEntry>>,
615     ) -> VideoResult<VideoCmdResponseType> {
616         let stream = self
617             .streams
618             .get_mut(&stream_id)
619             .ok_or(VideoError::InvalidStreamId(stream_id))?;
620 
621         if !stream.has_encode_session() {
622             // No encode session would have been created upon the first
623             // QBUF if there was no previous S_FMT call.
624             stream.set_encode_session(&mut self.encoder, wait_ctx)?;
625         }
626 
627         let num_planes = plane_offsets.len();
628 
629         // We only support single-buffer resources for now.
630         let entries = if plane_entries.len() != 1 {
631             return Err(VideoError::InvalidArgument);
632         } else {
633             // unwrap() is safe because we just tested that `plane_entries` had exactly one element.
634             plane_entries.get(0).unwrap()
635         };
636 
637         match queue_type {
638             QueueType::Input => {
639                 // We currently only support single-buffer formats, but some clients may mistake
640                 // color planes with memory planes and submit several planes to us. This doesn't
641                 // matter as we will only consider the first one.
642                 if num_planes < 1 {
643                     return Err(VideoError::InvalidParameter);
644                 }
645 
646                 if stream.src_resources.contains_key(&resource_id) {
647                     warn!("Replacing source resource with id {}", resource_id);
648                 }
649 
650                 let resource = match stream.src_params.resource_type {
651                     ResourceType::VirtioObject => {
652                         // Virtio object resources only have one entry.
653                         if entries.len() != 1 {
654                             return Err(VideoError::InvalidArgument);
655                         }
656                         GuestResource::from_virtio_object_entry(
657                             // Safe because we confirmed the correct type for the resource.
658                             // unwrap() is also safe here because we just tested above that `entries` had
659                             // exactly one element.
660                             unsafe { entries.get(0).unwrap().object },
661                             &self.resource_bridge,
662                         )
663                         .map_err(|_| VideoError::InvalidArgument)?
664                     }
665                     ResourceType::GuestPages => GuestResource::from_virtio_guest_mem_entry(
666                         // Safe because we confirmed the correct type for the resource.
667                         unsafe {
668                             std::slice::from_raw_parts(
669                                 entries.as_ptr() as *const protocol::virtio_video_mem_entry,
670                                 entries.len(),
671                             )
672                         },
673                         &self.mem,
674                         &stream.src_params.plane_formats,
675                     )
676                     .map_err(|_| VideoError::InvalidArgument)?,
677                 };
678 
679                 stream.src_resources.insert(
680                     resource_id,
681                     InputResource {
682                         resource,
683                         queue_params: None,
684                     },
685                 );
686             }
687             QueueType::Output => {
688                 // Bitstream buffers always have only one plane.
689                 if num_planes != 1 {
690                     return Err(VideoError::InvalidParameter);
691                 }
692 
693                 if stream.dst_resources.contains_key(&resource_id) {
694                     warn!("Replacing dest resource with id {}", resource_id);
695                 }
696 
697                 let resource = match stream.dst_params.resource_type {
698                     ResourceType::VirtioObject => {
699                         // Virtio object resources only have one entry.
700                         if entries.len() != 1 {
701                             return Err(VideoError::InvalidArgument);
702                         }
703                         GuestResource::from_virtio_object_entry(
704                             // Safe because we confirmed the correct type for the resource.
705                             // unwrap() is also safe here because we just tested above that `entries` had
706                             // exactly one element.
707                             unsafe { entries.get(0).unwrap().object },
708                             &self.resource_bridge,
709                         )
710                         .map_err(|_| VideoError::InvalidArgument)?
711                     }
712                     ResourceType::GuestPages => GuestResource::from_virtio_guest_mem_entry(
713                         // Safe because we confirmed the correct type for the resource.
714                         unsafe {
715                             std::slice::from_raw_parts(
716                                 entries.as_ptr() as *const protocol::virtio_video_mem_entry,
717                                 entries.len(),
718                             )
719                         },
720                         &self.mem,
721                         &stream.dst_params.plane_formats,
722                     )
723                     .map_err(|_| VideoError::InvalidArgument)?,
724                 };
725 
726                 let offset = plane_offsets[0];
727                 stream.dst_resources.insert(
728                     resource_id,
729                     OutputResource {
730                         resource,
731                         offset,
732                         queue_params: None,
733                     },
734                 );
735             }
736         }
737 
738         Ok(VideoCmdResponseType::Sync(CmdResponse::NoData))
739     }
740 
resource_queue( &mut self, stream_id: u32, queue_type: QueueType, resource_id: u32, timestamp: u64, data_sizes: Vec<u32>, ) -> VideoResult<VideoCmdResponseType>741     fn resource_queue(
742         &mut self,
743         stream_id: u32,
744         queue_type: QueueType,
745         resource_id: u32,
746         timestamp: u64,
747         data_sizes: Vec<u32>,
748     ) -> VideoResult<VideoCmdResponseType> {
749         let stream = self
750             .streams
751             .get_mut(&stream_id)
752             .ok_or(VideoError::InvalidStreamId(stream_id))?;
753 
754         let encoder_session = match stream.encoder_session {
755             Some(ref mut e) => e,
756             None => {
757                 // The encoder session is created on the first ResourceCreate,
758                 // so it should exist here.
759                 error!("Encoder session did not exist at resource_queue.");
760                 return Err(VideoError::InvalidOperation);
761             }
762         };
763 
764         match queue_type {
765             QueueType::Input => {
766                 // We currently only support single-buffer formats, but some clients may mistake
767                 // color planes with memory planes and submit several planes to us. This doesn't
768                 // matter as we will only consider the first one.
769                 if data_sizes.len() < 1 {
770                     return Err(VideoError::InvalidParameter);
771                 }
772 
773                 let src_resource = stream.src_resources.get_mut(&resource_id).ok_or(
774                     VideoError::InvalidResourceId {
775                         stream_id,
776                         resource_id,
777                     },
778                 )?;
779 
780                 let force_keyframe = std::mem::replace(&mut stream.force_keyframe, false);
781 
782                 match encoder_session.encode(
783                     src_resource
784                         .resource
785                         .try_clone()
786                         .map_err(|_| VideoError::InvalidArgument)?,
787                     timestamp,
788                     force_keyframe,
789                 ) {
790                     Ok(input_buffer_id) => {
791                         if let Some(last_resource_id) = stream
792                             .encoder_input_buffer_ids
793                             .insert(input_buffer_id, resource_id)
794                         {
795                             error!(
796                                 "encoder input id {} was already mapped to resource id {}",
797                                 input_buffer_id, last_resource_id
798                             );
799                             return Err(VideoError::InvalidOperation);
800                         }
801                         let queue_params = QueuedInputResourceParams {
802                             encoder_id: input_buffer_id,
803                             timestamp,
804                             in_queue: true,
805                         };
806                         if let Some(last_queue_params) =
807                             src_resource.queue_params.replace(queue_params)
808                         {
809                             if last_queue_params.in_queue {
810                                 error!(
811                                     "resource {} was already queued ({:?})",
812                                     resource_id, last_queue_params
813                                 );
814                                 return Err(VideoError::InvalidOperation);
815                             }
816                         }
817                     }
818                     Err(e) => {
819                         // TODO(alexlau): Return the actual error
820                         error!("encode failed: {}", e);
821                         return Err(VideoError::InvalidOperation);
822                     }
823                 }
824                 Ok(VideoCmdResponseType::Async(AsyncCmdTag::Queue {
825                     stream_id,
826                     queue_type: QueueType::Input,
827                     resource_id,
828                 }))
829             }
830             QueueType::Output => {
831                 // Bitstream buffers always have only one plane.
832                 if data_sizes.len() != 1 {
833                     return Err(VideoError::InvalidParameter);
834                 }
835 
836                 let dst_resource = stream.dst_resources.get_mut(&resource_id).ok_or(
837                     VideoError::InvalidResourceId {
838                         stream_id,
839                         resource_id,
840                     },
841                 )?;
842 
843                 let mut buffer_size = data_sizes[0];
844 
845                 // It seems that data_sizes[0] is 0 here. For now, take the stride
846                 // from resource_info instead because we're always allocating <size> x 1
847                 // blobs..
848                 // TODO(alexlau): Figure out how to fix this.
849                 if buffer_size == 0 {
850                     buffer_size = (dst_resource.resource.planes[0].offset
851                         + dst_resource.resource.planes[0].stride)
852                         as u32;
853                 }
854 
855                 // Stores an output buffer to notify EOS.
856                 // This is necessary because libvda is unable to indicate EOS along with returned buffers.
857                 // For now, when a `Flush()` completes, this saved resource will be returned as a zero-sized
858                 // buffer with the EOS flag.
859                 if stream.eos_manager.try_reserve_eos_buffer(resource_id) {
860                     return Ok(VideoCmdResponseType::Async(AsyncCmdTag::Queue {
861                         stream_id,
862                         queue_type: QueueType::Output,
863                         resource_id,
864                     }));
865                 }
866 
867                 match encoder_session.use_output_buffer(
868                     dst_resource
869                         .resource
870                         .handle
871                         .try_clone()
872                         .map_err(|_| VideoError::InvalidParameter)?,
873                     dst_resource.offset,
874                     buffer_size,
875                 ) {
876                     Ok(output_buffer_id) => {
877                         if let Some(last_resource_id) = stream
878                             .encoder_output_buffer_ids
879                             .insert(output_buffer_id, resource_id)
880                         {
881                             error!(
882                                 "encoder output id {} was already mapped to resource id {}",
883                                 output_buffer_id, last_resource_id
884                             );
885                         }
886                         let queue_params = QueuedOutputResourceParams {
887                             encoder_id: output_buffer_id,
888                             timestamp,
889                             in_queue: true,
890                         };
891                         if let Some(last_queue_params) =
892                             dst_resource.queue_params.replace(queue_params)
893                         {
894                             if last_queue_params.in_queue {
895                                 error!(
896                                     "resource {} was already queued ({:?})",
897                                     resource_id, last_queue_params
898                                 );
899                             }
900                         }
901                     }
902                     Err(e) => {
903                         error!("use_output_buffer failed: {}", e);
904                         return Err(VideoError::InvalidOperation);
905                     }
906                 }
907                 Ok(VideoCmdResponseType::Async(AsyncCmdTag::Queue {
908                     stream_id,
909                     queue_type: QueueType::Output,
910                     resource_id,
911                 }))
912             }
913         }
914     }
915 
resource_destroy_all(&mut self, stream_id: u32) -> VideoResult<VideoCmdResponseType>916     fn resource_destroy_all(&mut self, stream_id: u32) -> VideoResult<VideoCmdResponseType> {
917         let stream = self
918             .streams
919             .get_mut(&stream_id)
920             .ok_or(VideoError::InvalidStreamId(stream_id))?;
921         stream.src_resources.clear();
922         stream.encoder_input_buffer_ids.clear();
923         stream.dst_resources.clear();
924         stream.encoder_output_buffer_ids.clear();
925         stream.eos_manager.reset();
926         Ok(VideoCmdResponseType::Sync(CmdResponse::NoData))
927     }
928 
queue_clear( &mut self, stream_id: u32, queue_type: QueueType, ) -> VideoResult<VideoCmdResponseType>929     fn queue_clear(
930         &mut self,
931         stream_id: u32,
932         queue_type: QueueType,
933     ) -> VideoResult<VideoCmdResponseType> {
934         // Unfortunately, there is no way to clear the queue with VEA.
935         // VDA has Reset() which also isn't done on a per-queue basis,
936         // but VEA has no such API.
937         // Doing a Flush() here and waiting for the flush response is also
938         // not an option, because the virtio-video driver expects a prompt
939         // response (search for "timed out waiting for queue clear" in
940         // virtio_video_enc.c).
941         // So for now, we do a Flush(), but also mark each currently
942         // queued resource as no longer `in_queue`, and skip them when they
943         // are returned.
944         // TODO(b/153406792): Support per-queue clearing.
945         let stream = self
946             .streams
947             .get_mut(&stream_id)
948             .ok_or(VideoError::InvalidStreamId(stream_id))?;
949 
950         match queue_type {
951             QueueType::Input => {
952                 for src_resource in stream.src_resources.values_mut() {
953                     if let Some(ref mut queue_params) = src_resource.queue_params {
954                         queue_params.in_queue = false;
955                     }
956                 }
957             }
958             QueueType::Output => {
959                 for dst_resource in stream.dst_resources.values_mut() {
960                     if let Some(ref mut queue_params) = dst_resource.queue_params {
961                         queue_params.in_queue = false;
962                     }
963                 }
964                 stream.eos_manager.reset();
965             }
966         }
967         Ok(VideoCmdResponseType::Sync(CmdResponse::NoData))
968     }
969 
get_params( &mut self, stream_id: u32, queue_type: QueueType, is_ext: bool, ) -> VideoResult<VideoCmdResponseType>970     fn get_params(
971         &mut self,
972         stream_id: u32,
973         queue_type: QueueType,
974         is_ext: bool,
975     ) -> VideoResult<VideoCmdResponseType> {
976         let stream = self
977             .streams
978             .get_mut(&stream_id)
979             .ok_or(VideoError::InvalidStreamId(stream_id))?;
980 
981         if stream.encoder_session.is_some() && !stream.received_input_buffers_event {
982             // If we haven't yet received an RequireInputBuffers
983             // event, we need to wait for that before replying so that
984             // the G_FMT response has the correct data.
985             let pending_command = match queue_type {
986                 QueueType::Input => PendingCommand::GetSrcParams { is_ext },
987                 QueueType::Output => PendingCommand::GetDstParams { is_ext },
988             };
989 
990             if !stream.pending_commands.insert(pending_command) {
991                 // There is already a G_FMT call waiting.
992                 error!("Pending get params call already exists.");
993                 return Err(VideoError::InvalidOperation);
994             }
995 
996             Ok(VideoCmdResponseType::Async(AsyncCmdTag::GetParams {
997                 stream_id,
998                 queue_type,
999             }))
1000         } else {
1001             let params = match queue_type {
1002                 QueueType::Input => stream.src_params.clone(),
1003                 QueueType::Output => stream.dst_params.clone(),
1004             };
1005             Ok(VideoCmdResponseType::Sync(CmdResponse::GetParams {
1006                 queue_type,
1007                 params,
1008                 is_ext,
1009             }))
1010         }
1011     }
1012 
set_params( &mut self, wait_ctx: &WaitContext<Token>, stream_id: u32, queue_type: QueueType, format: Option<Format>, frame_width: u32, frame_height: u32, frame_rate: u32, plane_formats: Vec<PlaneFormat>, _is_ext: bool, ) -> VideoResult<VideoCmdResponseType>1013     fn set_params(
1014         &mut self,
1015         wait_ctx: &WaitContext<Token>,
1016         stream_id: u32,
1017         queue_type: QueueType,
1018         format: Option<Format>,
1019         frame_width: u32,
1020         frame_height: u32,
1021         frame_rate: u32,
1022         plane_formats: Vec<PlaneFormat>,
1023         _is_ext: bool,
1024     ) -> VideoResult<VideoCmdResponseType> {
1025         let stream = self
1026             .streams
1027             .get_mut(&stream_id)
1028             .ok_or(VideoError::InvalidStreamId(stream_id))?;
1029 
1030         let mut create_session = stream.encoder_session.is_none();
1031         let resources_queued = stream.src_resources.len() > 0 || stream.dst_resources.len() > 0;
1032 
1033         // Dynamic framerate changes are allowed. The framerate can be set on either the input or
1034         // output queue. Changing the framerate can influence the selected H.264 level, as the
1035         // level might be adjusted to conform to the minimum requirements for the selected bitrate
1036         // and framerate. As dynamic level changes are not supported we will just recreate the
1037         // encoder session as long as no resources have been queued yet. If an encoder session is
1038         // active we will request a dynamic framerate change instead, and it's up to the encoder
1039         // backend to return an error on invalid requests.
1040         if stream.dst_params.frame_rate != frame_rate {
1041             stream.src_params.frame_rate = frame_rate;
1042             stream.dst_params.frame_rate = frame_rate;
1043             if let Some(ref mut encoder_session) = stream.encoder_session {
1044                 if !resources_queued {
1045                     create_session = true;
1046                 } else if let Err(e) = encoder_session.request_encoding_params_change(
1047                     stream.dst_bitrate,
1048                     stream.dst_params.frame_rate,
1049                 ) {
1050                     error!("failed to dynamically request framerate change: {}", e);
1051                     return Err(VideoError::InvalidOperation);
1052                 }
1053             }
1054         }
1055 
1056         match queue_type {
1057             QueueType::Input => {
1058                 if stream.src_params.frame_width != frame_width
1059                     || stream.src_params.frame_height != frame_height
1060                     || stream.src_params.format != format
1061                     || stream.src_params.plane_formats != plane_formats
1062                 {
1063                     if resources_queued {
1064                         // Buffers have already been queued and encoding has already started.
1065                         return Err(VideoError::InvalidOperation);
1066                     }
1067 
1068                     // There should be at least a single plane.
1069                     if plane_formats.is_empty() {
1070                         return Err(VideoError::InvalidArgument);
1071                     }
1072 
1073                     let desired_format =
1074                         format.or(stream.src_params.format).unwrap_or(Format::NV12);
1075                     self.cros_capabilities.populate_src_params(
1076                         &mut stream.src_params,
1077                         desired_format,
1078                         frame_width,
1079                         frame_height,
1080                         plane_formats[0].stride,
1081                     )?;
1082 
1083                     stream.dst_params.frame_width = frame_width;
1084                     stream.dst_params.frame_height = frame_height;
1085 
1086                     create_session = true
1087                 }
1088             }
1089             QueueType::Output => {
1090                 if stream.dst_params.format != format
1091                     || stream.dst_params.plane_formats != plane_formats
1092                 {
1093                     if resources_queued {
1094                         // Buffers have already been queued and encoding has already started.
1095                         return Err(VideoError::InvalidOperation);
1096                     }
1097 
1098                     let desired_format =
1099                         format.or(stream.dst_params.format).unwrap_or(Format::H264);
1100 
1101                     // There should be exactly one output buffer.
1102                     if plane_formats.len() != 1 {
1103                         return Err(VideoError::InvalidArgument);
1104                     }
1105 
1106                     self.cros_capabilities.populate_dst_params(
1107                         &mut stream.dst_params,
1108                         desired_format,
1109                         plane_formats[0].plane_size,
1110                     )?;
1111 
1112                     // Format is always populated for encoder.
1113                     let new_format = stream
1114                         .dst_params
1115                         .format
1116                         .ok_or(VideoError::InvalidArgument)?;
1117 
1118                     // If the selected profile no longer corresponds to the selected coded format,
1119                     // reset it.
1120                     stream.dst_profile = self
1121                         .cros_capabilities
1122                         .get_default_profile(&new_format)
1123                         .ok_or(VideoError::InvalidArgument)?;
1124 
1125                     if new_format == Format::H264 {
1126                         stream.dst_h264_level = Some(Level::H264_1_0);
1127                     } else {
1128                         stream.dst_h264_level = None;
1129                     }
1130 
1131                     create_session = true;
1132                 }
1133             }
1134         }
1135 
1136         if create_session {
1137             // An encoder session has to be created immediately upon a SetParams
1138             // (S_FMT) call, because we need to receive the RequireInputBuffers
1139             // callback which has output buffer size info, in order to populate
1140             // dst_params to have the correct size on subsequent GetParams (G_FMT) calls.
1141             if stream.encoder_session.is_some() {
1142                 stream.clear_encode_session(wait_ctx)?;
1143                 if !stream.received_input_buffers_event {
1144                     // This could happen if two SetParams calls are occuring at the same time.
1145                     // For example, the user calls SetParams for the input queue on one thread,
1146                     // and a new encode session is created. Then on another thread, SetParams
1147                     // is called for the output queue before the first SetParams call has returned.
1148                     // At this point, there is a new EncodeSession being created that has not
1149                     // yet received a RequireInputBuffers event.
1150                     // Even if we clear the encoder session and recreate it, this case
1151                     // is handled because stream.pending_commands will still contain
1152                     // the waiting GetParams responses, which will then receive fresh data once
1153                     // the new session's RequireInputBuffers event happens.
1154                     warn!(
1155                         "New encoder session being created while waiting for RequireInputBuffers."
1156                     )
1157                 }
1158             }
1159             stream.set_encode_session(&mut self.encoder, wait_ctx)?;
1160         }
1161         Ok(VideoCmdResponseType::Sync(CmdResponse::NoData))
1162     }
1163 
query_control(&self, query_ctrl_type: QueryCtrlType) -> VideoResult<VideoCmdResponseType>1164     fn query_control(&self, query_ctrl_type: QueryCtrlType) -> VideoResult<VideoCmdResponseType> {
1165         let query_ctrl_response = match query_ctrl_type {
1166             QueryCtrlType::Profile(format) => match self.cros_capabilities.get_profiles(&format) {
1167                 Some(profiles) => QueryCtrlResponse::Profile(profiles.clone()),
1168                 None => {
1169                     return Err(VideoError::UnsupportedControl(CtrlType::Profile));
1170                 }
1171             },
1172             QueryCtrlType::Level(format) => {
1173                 match format {
1174                     Format::H264 => QueryCtrlResponse::Level(vec![
1175                         Level::H264_1_0,
1176                         Level::H264_1_1,
1177                         Level::H264_1_2,
1178                         Level::H264_1_3,
1179                         Level::H264_2_0,
1180                         Level::H264_2_1,
1181                         Level::H264_2_2,
1182                         Level::H264_3_0,
1183                         Level::H264_3_1,
1184                         Level::H264_3_2,
1185                         Level::H264_4_0,
1186                         Level::H264_4_1,
1187                         Level::H264_4_2,
1188                         Level::H264_5_0,
1189                         Level::H264_5_1,
1190                     ]),
1191                     _ => {
1192                         // Levels are only supported for H264.
1193                         return Err(VideoError::UnsupportedControl(CtrlType::Level));
1194                     }
1195                 }
1196             }
1197         };
1198 
1199         Ok(VideoCmdResponseType::Sync(CmdResponse::QueryControl(
1200             query_ctrl_response,
1201         )))
1202     }
1203 
get_control( &self, stream_id: u32, ctrl_type: CtrlType, ) -> VideoResult<VideoCmdResponseType>1204     fn get_control(
1205         &self,
1206         stream_id: u32,
1207         ctrl_type: CtrlType,
1208     ) -> VideoResult<VideoCmdResponseType> {
1209         let stream = self
1210             .streams
1211             .get(&stream_id)
1212             .ok_or(VideoError::InvalidStreamId(stream_id))?;
1213         let ctrl_val = match ctrl_type {
1214             CtrlType::BitrateMode => CtrlVal::BitrateMode(stream.dst_bitrate.mode()),
1215             CtrlType::Bitrate => CtrlVal::Bitrate(stream.dst_bitrate.target()),
1216             CtrlType::BitratePeak => CtrlVal::BitratePeak(match stream.dst_bitrate {
1217                 Bitrate::VBR { peak, .. } => peak,
1218                 // For CBR there is no peak, so return the target (which is technically correct).
1219                 Bitrate::CBR { target } => target,
1220             }),
1221             CtrlType::Profile => CtrlVal::Profile(stream.dst_profile),
1222             CtrlType::Level => {
1223                 let format = stream
1224                     .dst_params
1225                     .format
1226                     .ok_or(VideoError::InvalidArgument)?;
1227                 match format {
1228                     Format::H264 => CtrlVal::Level(stream.dst_h264_level.ok_or_else(|| {
1229                         error!("H264 level not set");
1230                         VideoError::InvalidArgument
1231                     })?),
1232                     _ => {
1233                         return Err(VideoError::UnsupportedControl(ctrl_type));
1234                     }
1235                 }
1236             }
1237             // Button controls should not be queried.
1238             CtrlType::ForceKeyframe => return Err(VideoError::UnsupportedControl(ctrl_type)),
1239             // Prepending SPS and PPS to IDR is always enabled in the libvda backend.
1240             // TODO (b/161495502): account for other backends
1241             CtrlType::PrependSpsPpsToIdr => CtrlVal::PrependSpsPpsToIdr(true),
1242         };
1243         Ok(VideoCmdResponseType::Sync(CmdResponse::GetControl(
1244             ctrl_val,
1245         )))
1246     }
1247 
set_control( &mut self, wait_ctx: &WaitContext<Token>, stream_id: u32, ctrl_val: CtrlVal, ) -> VideoResult<VideoCmdResponseType>1248     fn set_control(
1249         &mut self,
1250         wait_ctx: &WaitContext<Token>,
1251         stream_id: u32,
1252         ctrl_val: CtrlVal,
1253     ) -> VideoResult<VideoCmdResponseType> {
1254         let stream = self
1255             .streams
1256             .get_mut(&stream_id)
1257             .ok_or(VideoError::InvalidStreamId(stream_id))?;
1258         let mut recreate_session = false;
1259         let resources_queued = stream.src_resources.len() > 0 || stream.dst_resources.len() > 0;
1260 
1261         match ctrl_val {
1262             CtrlVal::BitrateMode(bitrate_mode) => {
1263                 if stream.dst_bitrate.mode() != bitrate_mode {
1264                     if resources_queued {
1265                         error!("set control called for bitrate mode but already encoding.");
1266                         return Err(VideoError::InvalidOperation);
1267                     }
1268                     stream.dst_bitrate = match bitrate_mode {
1269                         BitrateMode::CBR => Bitrate::CBR {
1270                             target: stream.dst_bitrate.target(),
1271                         },
1272                         BitrateMode::VBR => Bitrate::VBR {
1273                             target: stream.dst_bitrate.target(),
1274                             peak: stream.dst_bitrate.target(),
1275                         },
1276                     };
1277                     recreate_session = true;
1278                 }
1279             }
1280             CtrlVal::Bitrate(bitrate) => {
1281                 if stream.dst_bitrate.target() != bitrate {
1282                     let mut new_bitrate = stream.dst_bitrate;
1283                     match &mut new_bitrate {
1284                         Bitrate::CBR { target } | Bitrate::VBR { target, .. } => *target = bitrate,
1285                     }
1286                     if let Some(ref mut encoder_session) = stream.encoder_session {
1287                         if let Err(e) = encoder_session.request_encoding_params_change(
1288                             new_bitrate,
1289                             stream.dst_params.frame_rate,
1290                         ) {
1291                             error!("failed to dynamically request target bitrate change: {}", e);
1292                             return Err(VideoError::InvalidOperation);
1293                         }
1294                     }
1295                     stream.dst_bitrate = new_bitrate;
1296                 }
1297             }
1298             CtrlVal::BitratePeak(bitrate) => {
1299                 match stream.dst_bitrate {
1300                     Bitrate::VBR { peak, .. } => {
1301                         if peak != bitrate {
1302                             let new_bitrate = Bitrate::VBR {
1303                                 target: stream.dst_bitrate.target(),
1304                                 peak: bitrate,
1305                             };
1306                             if let Some(ref mut encoder_session) = stream.encoder_session {
1307                                 if let Err(e) = encoder_session.request_encoding_params_change(
1308                                     new_bitrate,
1309                                     stream.dst_params.frame_rate,
1310                                 ) {
1311                                     error!(
1312                                         "failed to dynamically request peak bitrate change: {}",
1313                                         e
1314                                     );
1315                                     return Err(VideoError::InvalidOperation);
1316                                 }
1317                             }
1318                             stream.dst_bitrate = new_bitrate;
1319                         }
1320                     }
1321                     // Trying to set the peak bitrate while in constant mode. This is not
1322                     // an error, just ignored.
1323                     Bitrate::CBR { .. } => {}
1324                 }
1325             }
1326             CtrlVal::Profile(profile) => {
1327                 if stream.dst_profile != profile {
1328                     if resources_queued {
1329                         error!("set control called for profile but already encoding.");
1330                         return Err(VideoError::InvalidOperation);
1331                     }
1332                     let format = stream
1333                         .dst_params
1334                         .format
1335                         .ok_or(VideoError::InvalidArgument)?;
1336                     if format != profile.to_format() {
1337                         error!(
1338                             "specified profile does not correspond to the selected format ({})",
1339                             format
1340                         );
1341                         return Err(VideoError::InvalidOperation);
1342                     }
1343                     stream.dst_profile = profile;
1344                     recreate_session = true;
1345                 }
1346             }
1347             CtrlVal::Level(level) => {
1348                 if stream.dst_h264_level != Some(level) {
1349                     if resources_queued {
1350                         error!("set control called for level but already encoding.");
1351                         return Err(VideoError::InvalidOperation);
1352                     }
1353                     let format = stream
1354                         .dst_params
1355                         .format
1356                         .ok_or(VideoError::InvalidArgument)?;
1357                     if format != Format::H264 {
1358                         error!(
1359                             "set control called for level but format is not H264 ({})",
1360                             format
1361                         );
1362                         return Err(VideoError::InvalidOperation);
1363                     }
1364                     stream.dst_h264_level = Some(level);
1365                     recreate_session = true;
1366                 }
1367             }
1368             CtrlVal::ForceKeyframe => {
1369                 stream.force_keyframe = true;
1370             }
1371             CtrlVal::PrependSpsPpsToIdr(prepend_sps_pps_to_idr) => {
1372                 // Prepending SPS and PPS to IDR is always enabled in the libvda backend,
1373                 // disabling it will always fail.
1374                 // TODO (b/161495502): account for other backends
1375                 if !prepend_sps_pps_to_idr {
1376                     return Err(VideoError::InvalidOperation);
1377                 }
1378             }
1379         }
1380 
1381         // We can safely recreate the encoder session if no resources were queued yet.
1382         if recreate_session && stream.encoder_session.is_some() {
1383             stream.clear_encode_session(wait_ctx)?;
1384             stream.set_encode_session(&mut self.encoder, wait_ctx)?;
1385         }
1386 
1387         Ok(VideoCmdResponseType::Sync(CmdResponse::SetControl))
1388     }
1389 }
1390 
1391 impl<T: Encoder> Device for EncoderDevice<T> {
process_cmd( &mut self, req: VideoCmd, wait_ctx: &WaitContext<Token>, ) -> ( VideoCmdResponseType, Option<(u32, Vec<VideoEvtResponseType>)>, )1392     fn process_cmd(
1393         &mut self,
1394         req: VideoCmd,
1395         wait_ctx: &WaitContext<Token>,
1396     ) -> (
1397         VideoCmdResponseType,
1398         Option<(u32, Vec<VideoEvtResponseType>)>,
1399     ) {
1400         let mut event_ret = None;
1401         let cmd_response = match req {
1402             VideoCmd::QueryCapability { queue_type } => self.query_capabilities(queue_type),
1403             VideoCmd::StreamCreate {
1404                 stream_id,
1405                 coded_format: desired_format,
1406                 input_resource_type,
1407                 output_resource_type,
1408             } => self.stream_create(
1409                 stream_id,
1410                 desired_format,
1411                 input_resource_type,
1412                 output_resource_type,
1413             ),
1414             VideoCmd::StreamDestroy { stream_id } => self.stream_destroy(stream_id),
1415             VideoCmd::StreamDrain { stream_id } => self.stream_drain(stream_id),
1416             VideoCmd::ResourceCreate {
1417                 stream_id,
1418                 queue_type,
1419                 resource_id,
1420                 plane_offsets,
1421                 plane_entries,
1422             } => self.resource_create(
1423                 wait_ctx,
1424                 stream_id,
1425                 queue_type,
1426                 resource_id,
1427                 plane_offsets,
1428                 plane_entries,
1429             ),
1430             VideoCmd::ResourceQueue {
1431                 stream_id,
1432                 queue_type,
1433                 resource_id,
1434                 timestamp,
1435                 data_sizes,
1436             } => {
1437                 let resp =
1438                     self.resource_queue(stream_id, queue_type, resource_id, timestamp, data_sizes);
1439 
1440                 if resp.is_ok() && queue_type == QueueType::Output {
1441                     if let Some(stream) = self.streams.get_mut(&stream_id) {
1442                         // If we have a flush pending, add the response for dequeueing the EOS
1443                         // buffer.
1444                         if stream.eos_manager.client_awaits_eos {
1445                             info!(
1446                                 "stream {}: using queued buffer as EOS for pending flush",
1447                                 stream_id
1448                             );
1449                             event_ret = match stream.eos_manager.try_complete_eos(vec![]) {
1450                                 Some(eos_resps) => Some((stream_id, eos_resps)),
1451                                 None => {
1452                                     error!("stream {}: try_get_eos_buffer() should have returned a valid response. This is a bug.", stream_id);
1453                                     Some((
1454                                         stream_id,
1455                                         vec![VideoEvtResponseType::Event(VideoEvt {
1456                                             typ: EvtType::Error,
1457                                             stream_id,
1458                                         })],
1459                                     ))
1460                                 }
1461                             };
1462                         }
1463                     } else {
1464                         error!(
1465                             "stream {}: the stream ID should be valid here. This is a bug.",
1466                             stream_id
1467                         );
1468                         event_ret = Some((
1469                             stream_id,
1470                             vec![VideoEvtResponseType::Event(VideoEvt {
1471                                 typ: EvtType::Error,
1472                                 stream_id,
1473                             })],
1474                         ));
1475                     }
1476                 }
1477 
1478                 resp
1479             }
1480             VideoCmd::ResourceDestroyAll { stream_id, .. } => self.resource_destroy_all(stream_id),
1481             VideoCmd::QueueClear {
1482                 stream_id,
1483                 queue_type,
1484             } => self.queue_clear(stream_id, queue_type),
1485             VideoCmd::GetParams {
1486                 stream_id,
1487                 queue_type,
1488                 is_ext,
1489             } => self.get_params(stream_id, queue_type, is_ext),
1490             VideoCmd::SetParams {
1491                 stream_id,
1492                 queue_type,
1493                 params:
1494                     Params {
1495                         format,
1496                         frame_width,
1497                         frame_height,
1498                         frame_rate,
1499                         plane_formats,
1500                         ..
1501                     },
1502                 is_ext,
1503             } => self.set_params(
1504                 wait_ctx,
1505                 stream_id,
1506                 queue_type,
1507                 format,
1508                 frame_width,
1509                 frame_height,
1510                 frame_rate,
1511                 plane_formats,
1512                 is_ext,
1513             ),
1514             VideoCmd::QueryControl { query_ctrl_type } => self.query_control(query_ctrl_type),
1515             VideoCmd::GetControl {
1516                 stream_id,
1517                 ctrl_type,
1518             } => self.get_control(stream_id, ctrl_type),
1519             VideoCmd::SetControl {
1520                 stream_id,
1521                 ctrl_val,
1522             } => self.set_control(wait_ctx, stream_id, ctrl_val),
1523         };
1524         let cmd_ret = match cmd_response {
1525             Ok(r) => r,
1526             Err(e) => {
1527                 error!("returning error response: {}", &e);
1528                 VideoCmdResponseType::Sync(e.into())
1529             }
1530         };
1531         (cmd_ret, event_ret)
1532     }
1533 
process_event( &mut self, _desc_map: &mut AsyncCmdDescMap, stream_id: u32, ) -> Option<Vec<VideoEvtResponseType>>1534     fn process_event(
1535         &mut self,
1536         _desc_map: &mut AsyncCmdDescMap,
1537         stream_id: u32,
1538     ) -> Option<Vec<VideoEvtResponseType>> {
1539         let stream = match self.streams.get_mut(&stream_id) {
1540             Some(s) => s,
1541             None => {
1542                 // TODO: remove fd from poll context?
1543                 error!("Received event for missing stream id {}", stream_id);
1544                 return None;
1545             }
1546         };
1547 
1548         let encoder_session = match stream.encoder_session {
1549             Some(ref mut s) => s,
1550             None => {
1551                 error!(
1552                     "Received event for missing encoder session of stream id {}",
1553                     stream_id
1554                 );
1555                 return None;
1556             }
1557         };
1558 
1559         let event = match encoder_session.read_event() {
1560             Ok(e) => e,
1561             Err(e) => {
1562                 error!("Failed to read event for stream id {}: {}", stream_id, e);
1563                 return None;
1564             }
1565         };
1566 
1567         match event {
1568             EncoderEvent::RequireInputBuffers {
1569                 input_count,
1570                 input_frame_width,
1571                 input_frame_height,
1572                 output_buffer_size,
1573             } => stream.require_input_buffers(
1574                 input_count,
1575                 input_frame_width,
1576                 input_frame_height,
1577                 output_buffer_size,
1578             ),
1579             EncoderEvent::ProcessedInputBuffer {
1580                 id: input_buffer_id,
1581             } => stream.processed_input_buffer(input_buffer_id),
1582             EncoderEvent::ProcessedOutputBuffer {
1583                 id: output_buffer_id,
1584                 bytesused,
1585                 keyframe,
1586                 timestamp,
1587             } => stream.processed_output_buffer(output_buffer_id, bytesused, keyframe, timestamp),
1588             EncoderEvent::FlushResponse { flush_done } => stream.flush_response(flush_done),
1589             EncoderEvent::NotifyError { error } => stream.notify_error(error),
1590         }
1591     }
1592 }
1593