// Copyright 2024 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use crate::codecs::Decoder;
use crate::codecs::DecoderConfig;
use crate::decoder::CodecChoice;
use crate::decoder::GridImageHelper;
use crate::image::Image;
use crate::image::YuvRange;
use crate::internal_utils::pixels::*;
use crate::internal_utils::stream::IStream;
use crate::internal_utils::*;
use crate::*;

use ndk_sys::bindings::*;

use std::ffi::CString;
use std::os::raw::c_char;
use std::ptr;

#[cfg(android_soong)]
include!(concat!(env!("OUT_DIR"), "/mediaimage2_bindgen.rs"));

// This sub-module is used by non-soong Android builds. It contains the bindings necessary to
// infer the YUV format that comes out of MediaCodec. The C struct source is here:
// https://cs.android.com/android/platform/superproject/main/+/main:frameworks/native/headers/media_plugin/media/hardware/VideoAPI.h;l=60;drc=a68f3a49e36e043b1640fe85010b0005d1bdb875
#[allow(non_camel_case_types, non_snake_case, unused)]
#[cfg(not(android_soong))]
mod android_soong_placeholder {
    #[repr(C)]
    #[derive(Clone, Copy)]
    pub(crate) struct android_MediaImage2_PlaneInfo {
        pub mOffset: u32,
        pub mColInc: i32,
        pub mRowInc: i32,
        pub mHorizSubsampling: u32,
        pub mVertSubsampling: u32,
    }

    #[derive(Clone, Copy)]
    #[repr(C)]
    pub(crate) struct android_MediaImage2 {
        pub mType: u32,
        pub mNumPlanes: u32,
        pub mWidth: u32,
        pub mHeight: u32,
        pub mBitDepth: u32,
        pub mBitDepthAllocated: u32,
        pub mPlane: [android_MediaImage2_PlaneInfo; 4usize],
    }

    #[allow(non_upper_case_globals)]
    pub(crate) const android_MediaImage2_Type_MEDIA_IMAGE_TYPE_YUV: u32 = 1;
}

#[cfg(not(android_soong))]
use android_soong_placeholder::*;

#[derive(Debug)]
struct MediaFormat {
    format: *mut AMediaFormat,
}

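// Binds `$var` to a raw pointer to a NUL-terminated copy of `$str`. `$var_tmp` owns the backing
// CString so that the pointer in `$var` stays valid for the rest of the enclosing scope.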
macro_rules! c_str {
    ($var: ident, $var_tmp:ident, $str:expr) => {
        let $var_tmp = CString::new($str).unwrap();
        let $var = $var_tmp.as_ptr();
    };
}

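// Layout of the YUV planes within a MediaCodec output buffer: the byte offset of each plane and
// its row and column strides.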
#[derive(Debug, Default)]
struct PlaneInfo {
    color_format: AndroidMediaCodecOutputColorFormat,
    offset: [isize; 3],
    row_stride: [u32; 3],
    column_stride: [u32; 3],
}

impl PlaneInfo {
    fn pixel_format(&self) -> PixelFormat {
        match self.color_format {
            AndroidMediaCodecOutputColorFormat::P010 => PixelFormat::AndroidP010,
            AndroidMediaCodecOutputColorFormat::Yuv420Flexible => {
                let u_before_v = self.offset[2] == self.offset[1] + 1;
                let v_before_u = self.offset[1] == self.offset[2] + 1;
                let is_nv_format = self.column_stride == [1, 2, 2] && (u_before_v || v_before_u);
                match (is_nv_format, u_before_v) {
                    (true, true) => PixelFormat::AndroidNv12,
                    (true, false) => PixelFormat::AndroidNv21,
                    (false, _) => PixelFormat::Yuv420,
                }
            }
        }
    }

    fn depth(&self) -> u8 {
        match self.color_format {
            AndroidMediaCodecOutputColorFormat::P010 => 16,
            AndroidMediaCodecOutputColorFormat::Yuv420Flexible => 8,
        }
    }
}

impl MediaFormat {
    // These constants are documented in
    // https://developer.android.com/reference/android/media/MediaFormat
    const COLOR_RANGE_LIMITED: i32 = 2;

    const COLOR_STANDARD_BT709: i32 = 1;
    const COLOR_STANDARD_BT601_PAL: i32 = 2;
    const COLOR_STANDARD_BT601_NTSC: i32 = 4;
    const COLOR_STANDARD_BT2020: i32 = 6;

    const COLOR_TRANSFER_LINEAR: i32 = 1;
    const COLOR_TRANSFER_SDR_VIDEO: i32 = 3;
    const COLOR_TRANSFER_HLG: i32 = 7;

    fn get_i32(&self, key: *const c_char) -> Option<i32> {
        let mut value: i32 = 0;
        match unsafe { AMediaFormat_getInt32(self.format, key, &mut value as *mut _) } {
            true => Some(value),
            false => None,
        }
    }

    fn get_i32_from_str(&self, key: &str) -> Option<i32> {
        c_str!(key_str, key_str_tmp, key);
        self.get_i32(key_str)
    }

    fn width(&self) -> AvifResult<i32> {
        self.get_i32(unsafe { AMEDIAFORMAT_KEY_WIDTH })
            .ok_or(AvifError::UnknownError("".into()))
    }

    fn height(&self) -> AvifResult<i32> {
        self.get_i32(unsafe { AMEDIAFORMAT_KEY_HEIGHT })
            .ok_or(AvifError::UnknownError("".into()))
    }

    fn slice_height(&self) -> AvifResult<i32> {
        self.get_i32(unsafe { AMEDIAFORMAT_KEY_SLICE_HEIGHT })
            .ok_or(AvifError::UnknownError("".into()))
    }

    fn stride(&self) -> AvifResult<i32> {
        self.get_i32(unsafe { AMEDIAFORMAT_KEY_STRIDE })
            .ok_or(AvifError::UnknownError("".into()))
    }

    fn color_format(&self) -> AvifResult<i32> {
        self.get_i32(unsafe { AMEDIAFORMAT_KEY_COLOR_FORMAT })
            .ok_or(AvifError::UnknownError("".into()))
    }

    fn color_range(&self) -> YuvRange {
        // color-range is documented but isn't exposed as a constant in the NDK:
        // https://developer.android.com/reference/android/media/MediaFormat#KEY_COLOR_RANGE
        let color_range = self
            .get_i32_from_str("color-range")
            .unwrap_or(Self::COLOR_RANGE_LIMITED);
        if color_range == Self::COLOR_RANGE_LIMITED {
            YuvRange::Limited
        } else {
            YuvRange::Full
        }
    }

    fn color_primaries(&self) -> ColorPrimaries {
        // color-standard is documented but isn't exposed as a constant in the NDK:
        // https://developer.android.com/reference/android/media/MediaFormat#KEY_COLOR_STANDARD
        let color_standard = self.get_i32_from_str("color-standard").unwrap_or(-1);
        match color_standard {
            Self::COLOR_STANDARD_BT709 => ColorPrimaries::Bt709,
            Self::COLOR_STANDARD_BT2020 => ColorPrimaries::Bt2020,
            Self::COLOR_STANDARD_BT601_PAL | Self::COLOR_STANDARD_BT601_NTSC => {
                ColorPrimaries::Bt601
            }
            _ => ColorPrimaries::Unspecified,
        }
    }

    fn transfer_characteristics(&self) -> TransferCharacteristics {
        // color-transfer is documented but isn't exposed as a constant in the NDK:
        // https://developer.android.com/reference/android/media/MediaFormat#KEY_COLOR_TRANSFER
        match self.get_i32_from_str("color-transfer").unwrap_or(-1) {
            Self::COLOR_TRANSFER_LINEAR => TransferCharacteristics::Linear,
            Self::COLOR_TRANSFER_HLG => TransferCharacteristics::Hlg,
            Self::COLOR_TRANSFER_SDR_VIDEO => TransferCharacteristics::Bt601,
            _ => TransferCharacteristics::Unspecified,
        }
    }

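    // Infers the plane layout from the width/height/stride/slice-height keys of the output
    // format. Used as a fallback when the "image-data" (MediaImage2) buffer is not available.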
    fn guess_plane_info(&self) -> AvifResult<PlaneInfo> {
        let height = self.height()?;
        let slice_height = self.slice_height().unwrap_or(height);
        let stride = self.stride()?;
        let color_format: AndroidMediaCodecOutputColorFormat = self.color_format()?.into();
        let mut plane_info = PlaneInfo {
            color_format,
            ..Default::default()
        };
        match color_format {
            AndroidMediaCodecOutputColorFormat::P010 => {
                plane_info.row_stride = [
                    u32_from_i32(stride)?,
                    u32_from_i32(stride)?,
                    0, // V plane is not used for P010.
                ];
                plane_info.column_stride = [
                    2, 2, 0, // V plane is not used for P010.
                ];
                plane_info.offset = [
                    0,
                    isize_from_i32(stride * slice_height)?,
                    0, // V plane is not used for P010.
                ];
            }
            AndroidMediaCodecOutputColorFormat::Yuv420Flexible => {
                plane_info.row_stride = [
                    u32_from_i32(stride)?,
                    u32_from_i32((stride + 1) / 2)?,
                    u32_from_i32((stride + 1) / 2)?,
                ];
                plane_info.column_stride = [1, 1, 1];
                plane_info.offset[0] = 0;
                plane_info.offset[1] = isize_from_i32(stride * slice_height)?;
                let u_plane_size = isize_from_i32(((stride + 1) / 2) * ((height + 1) / 2))?;
                // When color format is YUV_420_FLEXIBLE, the V plane comes before the U plane.
                plane_info.offset[2] = plane_info.offset[1] - u_plane_size;
            }
        }
        Ok(plane_info)
    }

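    // Returns the plane layout reported by the codec in the "image-data" (MediaImage2) buffer of
    // the output format, falling back to guess_plane_info() if it is absent or malformed.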
    fn get_plane_info(&self) -> AvifResult<PlaneInfo> {
        c_str!(key_str, key_str_tmp, "image-data");
        let mut data: *mut std::ffi::c_void = ptr::null_mut();
        let mut size: usize = 0;
        if !unsafe {
            AMediaFormat_getBuffer(
                self.format,
                key_str,
                &mut data as *mut _,
                &mut size as *mut _,
            )
        } {
            return self.guess_plane_info();
        }
        if size != std::mem::size_of::<android_MediaImage2>() {
            return self.guess_plane_info();
        }
        let image_data = unsafe { *(data as *const android_MediaImage2) };
        if image_data.mType != android_MediaImage2_Type_MEDIA_IMAGE_TYPE_YUV {
            return self.guess_plane_info();
        }
        let planes = unsafe { ptr::read_unaligned(ptr::addr_of!(image_data.mPlane)) };
        let mut plane_info = PlaneInfo {
            color_format: self.color_format()?.into(),
            ..Default::default()
        };
        // Clippy suggests using an iterator with an enumerator which does not seem more readable
        // than using explicit indices.
        #[allow(clippy::needless_range_loop)]
        for plane_index in 0usize..3 {
            plane_info.offset[plane_index] = isize_from_u32(planes[plane_index].mOffset)?;
            plane_info.row_stride[plane_index] = u32_from_i32(planes[plane_index].mRowInc)?;
            plane_info.column_stride[plane_index] = u32_from_i32(planes[plane_index].mColInc)?;
        }
        Ok(plane_info)
    }
}

enum CodecInitializer {
    ByName(String),
    ByMimeType(String),
}

#[cfg(android_soong)]
fn prefer_hardware_decoder(config: &DecoderConfig) -> bool {
    let prefer_hw = rustutils::system_properties::read_bool(
        "media.stagefright.thumbnail.prefer_hw_codecs",
        false,
    )
    .unwrap_or(false);
    if config.codec_config.is_avif() {
        // We will return true when all of the below conditions are true:
        // 1) prefer_hw is true.
        // 2) category is not Alpha and category is not Gainmap. We do not prefer hardware for
        //    decoding these categories since they generally tend to be monochrome images and
        //    using hardware for that is unreliable.
        // 3) profile is 0. As of Sep 2024, there are no AV1 hardware decoders that support
        //    anything other than profile 0.
        prefer_hw
            && config.category != Category::Alpha
            && config.category != Category::Gainmap
            && config.codec_config.profile() == 0
    } else {
        // We will return true when one of the following conditions is true:
        // 1) prefer_hw is true.
        // 2) depth is greater than 8. As of Nov 2024, the default HEVC software decoder on
        //    Android only supports 8-bit images.
        prefer_hw || config.depth > 8
    }
}

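// Returns the list of codecs to try for the given decoder configuration, in order of preference.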
fn get_codec_initializers(config: &DecoderConfig) -> Vec<CodecInitializer> {
    #[cfg(android_soong)]
    {
        // Use a specific decoder if it is requested.
        if let Ok(Some(decoder)) =
            rustutils::system_properties::read("media.crabbyavif.debug.decoder")
        {
            if !decoder.is_empty() {
                return vec![CodecInitializer::ByName(decoder)];
            }
        }
    }
    let dav1d = String::from("c2.android.av1-dav1d.decoder");
    let gav1 = String::from("c2.android.av1.decoder");
    let hevc = String::from("c2.android.hevc.decoder");
    // As of Sep 2024, c2.android.av1.decoder is the only known decoder to support 12-bit AV1. So
    // prefer that for 12 bit images.
    let prefer_gav1 = config.depth == 12;
    let is_avif = config.codec_config.is_avif();
    let mime_type = if is_avif { MediaCodec::AV1_MIME } else { MediaCodec::HEVC_MIME };
    let prefer_hw = false;
    #[cfg(android_soong)]
    let prefer_hw = prefer_hardware_decoder(config);
    match (prefer_hw, is_avif, prefer_gav1) {
        (true, false, _) => vec![
            CodecInitializer::ByMimeType(mime_type.to_string()),
            CodecInitializer::ByName(hevc),
        ],
        (false, false, _) => vec![
            CodecInitializer::ByName(hevc),
            CodecInitializer::ByMimeType(mime_type.to_string()),
        ],
        (true, true, true) => vec![
            CodecInitializer::ByName(gav1),
            CodecInitializer::ByMimeType(mime_type.to_string()),
            CodecInitializer::ByName(dav1d),
        ],
        (true, true, false) => vec![
            CodecInitializer::ByMimeType(mime_type.to_string()),
            CodecInitializer::ByName(dav1d),
            CodecInitializer::ByName(gav1),
        ],
        (false, true, true) => vec![
            CodecInitializer::ByName(gav1),
            CodecInitializer::ByName(dav1d),
            CodecInitializer::ByMimeType(mime_type.to_string()),
        ],
        (false, true, false) => vec![
            CodecInitializer::ByName(dav1d),
            CodecInitializer::ByName(gav1),
            CodecInitializer::ByMimeType(mime_type.to_string()),
        ],
    }
}

#[derive(Default)]
pub struct MediaCodec {
    codec: Option<*mut AMediaCodec>,
    codec_index: usize,
    format: Option<MediaFormat>,
    output_buffer_index: Option<usize>,
    config: Option<DecoderConfig>,
    codec_initializers: Vec<CodecInitializer>,
}

impl MediaCodec {
    const AV1_MIME: &str = "video/av01";
    const HEVC_MIME: &str = "video/hevc";
    // Maximum number of attempts made while waiting on the input/output buffer dequeue calls.
    const MAX_RETRIES: u32 = 100;
    // Timeout (in microseconds) passed to the AMediaCodec dequeue calls.
    const TIMEOUT: u32 = 10000;

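    // Creates, configures and starts an AMediaCodec instance for the codec initializer at
    // self.codec_index.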
    fn initialize_impl(&mut self, low_latency: bool) -> AvifResult<()> {
        let config = self.config.unwrap_ref();
        if self.codec_index >= self.codec_initializers.len() {
            return Err(AvifError::NoCodecAvailable);
        }
        let format = unsafe { AMediaFormat_new() };
        if format.is_null() {
            return Err(AvifError::UnknownError("".into()));
        }
        c_str!(
            mime_type,
            mime_type_tmp,
            if config.codec_config.is_avif() { Self::AV1_MIME } else { Self::HEVC_MIME }
        );
        unsafe {
            AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, mime_type);
            AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_WIDTH, i32_from_u32(config.width)?);
            AMediaFormat_setInt32(
                format,
                AMEDIAFORMAT_KEY_HEIGHT,
                i32_from_u32(config.height)?,
            );
            AMediaFormat_setInt32(
                format,
                AMEDIAFORMAT_KEY_COLOR_FORMAT,
                if config.depth == 8 {
                    // For 8-bit images, always use Yuv420Flexible.
                    AndroidMediaCodecOutputColorFormat::Yuv420Flexible
                } else {
                    // For all other images, use whatever format is requested.
                    config.android_mediacodec_output_color_format
                } as i32,
            );
            if low_latency {
                // low-latency is documented but isn't exposed as a constant in the NDK:
                // https://developer.android.com/reference/android/media/MediaFormat#KEY_LOW_LATENCY
                c_str!(low_latency_str, low_latency_tmp, "low-latency");
                AMediaFormat_setInt32(format, low_latency_str, 1);
            }
            AMediaFormat_setInt32(
                format,
                AMEDIAFORMAT_KEY_MAX_INPUT_SIZE,
                i32_from_usize(config.max_input_size)?,
            );
            let codec_specific_data = config.codec_config.raw_data();
            if !codec_specific_data.is_empty() {
                AMediaFormat_setBuffer(
                    format,
                    AMEDIAFORMAT_KEY_CSD_0,
                    codec_specific_data.as_ptr() as *const _,
                    codec_specific_data.len(),
                );
            }
        }

        let codec = match &self.codec_initializers[self.codec_index] {
            CodecInitializer::ByName(name) => {
                c_str!(codec_name, codec_name_tmp, name.as_str());
                unsafe { AMediaCodec_createCodecByName(codec_name) }
            }
            CodecInitializer::ByMimeType(mime_type) => {
                c_str!(codec_mime, codec_mime_tmp, mime_type.as_str());
                unsafe { AMediaCodec_createDecoderByType(codec_mime) }
            }
        };
        if codec.is_null() {
            unsafe { AMediaFormat_delete(format) };
            return Err(AvifError::NoCodecAvailable);
        }
        let status =
            unsafe { AMediaCodec_configure(codec, format, ptr::null_mut(), ptr::null_mut(), 0) };
        if status != media_status_t_AMEDIA_OK {
            unsafe {
                AMediaCodec_delete(codec);
                AMediaFormat_delete(format);
            }
            return Err(AvifError::NoCodecAvailable);
        }
        let status = unsafe { AMediaCodec_start(codec) };
        if status != media_status_t_AMEDIA_OK {
            unsafe {
                AMediaCodec_delete(codec);
                AMediaFormat_delete(format);
            }
            return Err(AvifError::NoCodecAvailable);
        }
        self.codec = Some(codec);
        Ok(())
    }

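    // Fills `image` with pointers into the codec's output `buffer`, using the plane layout and
    // color properties reported by the current output format.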
    fn output_buffer_to_image(
        &self,
        buffer: *mut u8,
        image: &mut Image,
        category: Category,
    ) -> AvifResult<()> {
        if self.format.is_none() {
            return Err(AvifError::UnknownError("format is none".into()));
        }
        let format = self.format.unwrap_ref();
        image.width = format.width()? as u32;
        image.height = format.height()? as u32;
        image.yuv_range = format.color_range();
        let plane_info = format.get_plane_info()?;
        image.depth = plane_info.depth();
        image.yuv_format = plane_info.pixel_format();
        match category {
            Category::Alpha => {
                image.row_bytes[3] = plane_info.row_stride[0];
                image.planes[3] = Some(Pixels::from_raw_pointer(
                    unsafe { buffer.offset(plane_info.offset[0]) },
                    image.depth as u32,
                    image.height,
                    image.row_bytes[3],
                )?);
            }
            _ => {
                image.chroma_sample_position = ChromaSamplePosition::Unknown;
                image.color_primaries = format.color_primaries();
                image.transfer_characteristics = format.transfer_characteristics();
                // MediaCodec does not expose matrix coefficients. Try to infer that based on color
                // primaries to get the most accurate color conversion possible.
                image.matrix_coefficients = match image.color_primaries {
                    ColorPrimaries::Bt601 => MatrixCoefficients::Bt601,
                    ColorPrimaries::Bt709 => MatrixCoefficients::Bt709,
                    ColorPrimaries::Bt2020 => MatrixCoefficients::Bt2020Ncl,
                    _ => MatrixCoefficients::Unspecified,
                };

                for i in 0usize..3 {
                    if i == 2
                        && matches!(
                            image.yuv_format,
                            PixelFormat::AndroidP010
                                | PixelFormat::AndroidNv12
                                | PixelFormat::AndroidNv21
                        )
                    {
                        // V plane is not needed for these formats.
                        break;
                    }
                    image.row_bytes[i] = plane_info.row_stride[i];
                    let plane_height = if i == 0 { image.height } else { (image.height + 1) / 2 };
                    image.planes[i] = Some(Pixels::from_raw_pointer(
                        unsafe { buffer.offset(plane_info.offset[i]) },
                        image.depth as u32,
                        plane_height,
                        image.row_bytes[i],
                    )?);
                }
            }
        }
        Ok(())
    }

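    // Copies `payload` into the codec's input buffer at `input_index` and queues it for
    // decoding. HEIC payloads are first rewritten into start-code-prefixed NAL units.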
    fn enqueue_payload(&self, input_index: isize, payload: &[u8], flags: u32) -> AvifResult<()> {
        let codec = self.codec.unwrap();
        let mut input_buffer_size: usize = 0;
        let input_buffer = unsafe {
            AMediaCodec_getInputBuffer(
                codec,
                input_index as usize,
                &mut input_buffer_size as *mut _,
            )
        };
        if input_buffer.is_null() {
            return Err(AvifError::UnknownError(format!(
                "input buffer at index {input_index} was null"
            )));
        }
        let hevc_whole_nal_units = self.hevc_whole_nal_units(payload)?;
        let codec_payload = match &hevc_whole_nal_units {
            Some(hevc_payload) => hevc_payload,
            None => payload,
        };
        if input_buffer_size < codec_payload.len() {
            return Err(AvifError::UnknownError(format!(
                "input buffer (size {input_buffer_size}) was not big enough. required size: {}",
                codec_payload.len()
            )));
        }
        unsafe {
            ptr::copy_nonoverlapping(codec_payload.as_ptr(), input_buffer, codec_payload.len());

            if AMediaCodec_queueInputBuffer(
                codec,
                usize_from_isize(input_index)?,
                /*offset=*/ 0,
                codec_payload.len(),
                /*pts=*/ 0,
                flags,
            ) != media_status_t_AMEDIA_OK
            {
                return Err(AvifError::UnknownError("".into()));
            }
        }
        Ok(())
    }

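    // Decodes a single payload: queues it as input, dequeues the decoded output buffer (retrying
    // up to MAX_RETRIES times) and wraps the result in `image`.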
    fn get_next_image_impl(
        &mut self,
        payload: &[u8],
        _spatial_id: u8,
        image: &mut Image,
        category: Category,
    ) -> AvifResult<()> {
        if self.codec.is_none() {
            self.initialize_impl(/*low_latency=*/ true)?;
        }
        let codec = self.codec.unwrap();
        if self.output_buffer_index.is_some() {
            // Release any existing output buffer.
            unsafe {
                AMediaCodec_releaseOutputBuffer(codec, self.output_buffer_index.unwrap(), false);
            }
        }
        let mut retry_count = 0;
        unsafe {
            while retry_count < Self::MAX_RETRIES {
                retry_count += 1;
                let input_index = AMediaCodec_dequeueInputBuffer(codec, Self::TIMEOUT as _);
                if input_index >= 0 {
                    self.enqueue_payload(input_index, payload, 0)?;
                    break;
                } else if input_index == AMEDIACODEC_INFO_TRY_AGAIN_LATER as isize {
                    continue;
                } else {
                    return Err(AvifError::UnknownError(format!(
                        "got input index < 0: {input_index}"
                    )));
                }
            }
        }
        let mut buffer: Option<*mut u8> = None;
        let mut buffer_size: usize = 0;
        let mut buffer_info = AMediaCodecBufferInfo::default();
        retry_count = 0;
        while retry_count < Self::MAX_RETRIES {
            retry_count += 1;
            unsafe {
                let output_index = AMediaCodec_dequeueOutputBuffer(
                    codec,
                    &mut buffer_info as *mut _,
                    Self::TIMEOUT as _,
                );
                if output_index >= 0 {
                    let output_buffer = AMediaCodec_getOutputBuffer(
                        codec,
                        usize_from_isize(output_index)?,
                        &mut buffer_size as *mut _,
                    );
                    if output_buffer.is_null() {
                        return Err(AvifError::UnknownError("output buffer is null".into()));
                    }
                    buffer = Some(output_buffer);
                    self.output_buffer_index = Some(usize_from_isize(output_index)?);
                    break;
                } else if output_index == AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED as isize {
                    continue;
                } else if output_index == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED as isize {
                    let format = AMediaCodec_getOutputFormat(codec);
                    if format.is_null() {
                        return Err(AvifError::UnknownError("output format was null".into()));
                    }
                    self.format = Some(MediaFormat { format });
                    continue;
                } else if output_index == AMEDIACODEC_INFO_TRY_AGAIN_LATER as isize {
                    continue;
                } else {
                    return Err(AvifError::UnknownError(format!(
                        "mediacodec dequeue_output_buffer failed: {output_index}"
                    )));
                }
            }
        }
        if buffer.is_none() {
            return Err(AvifError::UnknownError(
                "did not get buffer from mediacodec".into(),
            ));
        }
        self.output_buffer_to_image(buffer.unwrap(), image, category)?;
        Ok(())
    }

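    // Decodes all the cells of a grid image, copying each decoded cell into the full grid via
    // `grid_image_helper`.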
    fn get_next_image_grid_impl(
        &mut self,
        payloads: &[Vec<u8>],
        grid_image_helper: &mut GridImageHelper,
    ) -> AvifResult<()> {
        if self.codec.is_none() {
            self.initialize_impl(/*low_latency=*/ false)?;
        }
        let codec = self.codec.unwrap();
        let mut retry_count = 0;
        let mut payloads_iter = payloads.iter().peekable();
        unsafe {
            while !grid_image_helper.is_grid_complete()? {
                // Queue as many inputs as we possibly can, then block on dequeuing outputs. After
                // getting each output, come back and queue the inputs again to keep the decoder as
                // busy as possible.
                while payloads_iter.peek().is_some() {
                    let input_index = AMediaCodec_dequeueInputBuffer(codec, 0);
                    if input_index < 0 {
                        if retry_count >= Self::MAX_RETRIES {
                            return Err(AvifError::UnknownError("max retries exceeded".into()));
                        }
                        break;
                    }
                    let payload = payloads_iter.next().unwrap();
                    self.enqueue_payload(
                        input_index,
                        payload,
                        if payloads_iter.peek().is_some() {
                            0
                        } else {
                            AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM as u32
                        },
                    )?;
                }
                loop {
                    let mut buffer_info = AMediaCodecBufferInfo::default();
                    let output_index = AMediaCodec_dequeueOutputBuffer(
                        codec,
                        &mut buffer_info as *mut _,
                        Self::TIMEOUT as _,
                    );
                    if output_index == AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED as isize {
                        continue;
                    } else if output_index == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED as isize {
                        let format = AMediaCodec_getOutputFormat(codec);
                        if format.is_null() {
                            return Err(AvifError::UnknownError("output format was null".into()));
                        }
                        self.format = Some(MediaFormat { format });
                        continue;
                    } else if output_index == AMEDIACODEC_INFO_TRY_AGAIN_LATER as isize {
                        retry_count += 1;
                        if retry_count >= Self::MAX_RETRIES {
                            return Err(AvifError::UnknownError("max retries exceeded".into()));
                        }
                        break;
                    } else if output_index < 0 {
                        return Err(AvifError::UnknownError("".into()));
                    } else {
                        let mut buffer_size: usize = 0;
                        let output_buffer = AMediaCodec_getOutputBuffer(
                            codec,
                            usize_from_isize(output_index)?,
                            &mut buffer_size as *mut _,
                        );
                        if output_buffer.is_null() {
                            return Err(AvifError::UnknownError("output buffer is null".into()));
                        }
                        let mut cell_image = Image::default();
                        self.output_buffer_to_image(
                            output_buffer,
                            &mut cell_image,
                            grid_image_helper.category,
                        )?;
                        grid_image_helper.copy_from_cell_image(&mut cell_image)?;
                        if !grid_image_helper.is_grid_complete()? {
                            // The last output buffer will be released when the codec is dropped.
                            AMediaCodec_releaseOutputBuffer(codec, output_index as _, false);
                        }
                        break;
                    }
                }
            }
        }
        Ok(())
    }

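    // Releases any outstanding output buffer and then stops and deletes the underlying codec.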
    fn drop_impl(&mut self) {
        if self.codec.is_some() {
            if self.output_buffer_index.is_some() {
                unsafe {
                    AMediaCodec_releaseOutputBuffer(
                        self.codec.unwrap(),
                        self.output_buffer_index.unwrap(),
                        false,
                    );
                }
                self.output_buffer_index = None;
            }
            unsafe {
                AMediaCodec_stop(self.codec.unwrap());
                AMediaCodec_delete(self.codec.unwrap());
            }
            self.codec = None;
        }
        self.format = None;
    }
}

impl Decoder for MediaCodec {
    fn codec(&self) -> CodecChoice {
        CodecChoice::MediaCodec
    }

    fn initialize(&mut self, config: &DecoderConfig) -> AvifResult<()> {
        self.codec_initializers = get_codec_initializers(config);
        self.config = Some(config.clone());
        // Actual codec initialization will be performed in get_next_image since we may try
        // multiple codecs.
        Ok(())
    }

    fn get_next_image(
        &mut self,
        payload: &[u8],
        spatial_id: u8,
        image: &mut Image,
        category: Category,
    ) -> AvifResult<()> {
        while self.codec_index < self.codec_initializers.len() {
            let res = self.get_next_image_impl(payload, spatial_id, image, category);
            if res.is_ok() {
                return Ok(());
            }
            // Drop the current codec and try the next one.
            self.drop_impl();
            self.codec_index += 1;
        }
        Err(AvifError::UnknownError(
            "all the codecs failed to extract an image".into(),
        ))
    }

    fn get_next_image_grid(
        &mut self,
        payloads: &[Vec<u8>],
        _spatial_id: u8,
        grid_image_helper: &mut GridImageHelper,
    ) -> AvifResult<()> {
        while self.codec_index < self.codec_initializers.len() {
            let res = self.get_next_image_grid_impl(payloads, grid_image_helper);
            if res.is_ok() {
                return Ok(());
            }
            // Drop the current codec and try the next one.
            self.drop_impl();
            self.codec_index += 1;
        }
        Err(AvifError::UnknownError(
            "all the codecs failed to extract an image".into(),
        ))
    }
}

impl MediaCodec {
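    // Converts the length-prefixed NAL units of an HEVC sample into start-code-prefixed NAL
    // units as expected by MediaCodec. Returns None for non-HEIC payloads.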
    fn hevc_whole_nal_units(&self, payload: &[u8]) -> AvifResult<Option<Vec<u8>>> {
        if !self.config.unwrap_ref().codec_config.is_heic() {
            return Ok(None);
        }
        // For HEVC, MediaCodec expects whole NAL units with each unit prefixed with a start code
        // of "\x00\x00\x00\x01".
        let nal_length_size = self.config.unwrap_ref().codec_config.nal_length_size() as usize;
        let mut offset = 0;
        let mut hevc_payload = Vec::new();
        while offset < payload.len() {
            let payload_slice = &payload[offset..];
            let mut stream = IStream::create(payload_slice);
            let nal_length = usize_from_u64(stream.read_uxx(nal_length_size as u8)?)?;
            let nal_unit_end = checked_add!(nal_length, nal_length_size)?;
            let nal_unit_range = nal_length_size..nal_unit_end;
            check_slice_range(payload_slice.len(), &nal_unit_range)?;
            // Start code.
            hevc_payload.extend_from_slice(&[0, 0, 0, 1]);
            // NAL Unit.
            hevc_payload.extend_from_slice(&payload_slice[nal_unit_range]);
            offset = checked_add!(offset, nal_unit_end)?;
        }
        Ok(Some(hevc_payload))
    }
}

impl Drop for MediaFormat {
    fn drop(&mut self) {
        unsafe { AMediaFormat_delete(self.format) };
    }
}

impl Drop for MediaCodec {
    fn drop(&mut self) {
        self.drop_impl();
    }
}