• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2016 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 //#define LOG_NDEBUG 0
18 #define LOG_TAG "OMXUtils"
19 
20 #include <string.h>
21 
22 #include <android-base/macros.h>
23 #include <media/stagefright/omx/OMXUtils.h>
24 #include <media/stagefright/foundation/ADebug.h>
25 #include <media/stagefright/foundation/AUtils.h>
26 #include <media/stagefright/foundation/MediaDefs.h>
27 #include <media/stagefright/MediaErrors.h>
28 #include <media/hardware/HardwareAPI.h>
29 #include <system/graphics-base.h>
30 
31 namespace android {
32 
StatusFromOMXError(OMX_ERRORTYPE err)33 status_t StatusFromOMXError(OMX_ERRORTYPE err) {
34     switch (err) {
35         case OMX_ErrorNone:
36             return OK;
37         case OMX_ErrorNoMore:
38             return NOT_ENOUGH_DATA;
39         case OMX_ErrorUnsupportedSetting:
40         case OMX_ErrorUnsupportedIndex:
41             return ERROR_UNSUPPORTED; // this is a media specific error
42         case OMX_ErrorBadParameter:
43             return BAD_VALUE;
44         case OMX_ErrorInsufficientResources:
45             return NO_MEMORY;
46         case OMX_ErrorInvalidComponentName:
47         case OMX_ErrorComponentNotFound:
48             return NAME_NOT_FOUND;
49         default:
50             return UNKNOWN_ERROR;
51     }
52 }
53 
54 /**************************************************************************************************/
55 
// Builds a v1 DescribeColorFormatParams query from a v2 one, copying only the
// query inputs (format, geometry, native-buffer flag). InitOMXParams runs
// first so the OMX header fields (nSize/nVersion) are set before the copy.
DescribeColorFormatParams::DescribeColorFormatParams(const DescribeColorFormat2Params &params) {
    InitOMXParams(this);

    eColorFormat = params.eColorFormat;
    nFrameWidth = params.nFrameWidth;
    nFrameHeight = params.nFrameHeight;
    nStride = params.nStride;
    nSliceHeight = params.nSliceHeight;
    bUsingNativeBuffers = params.bUsingNativeBuffers;
    // we don't copy media images as this conversion is only used pre-query
};
67 
// Populates this v2 query structure from a completed v1 query: copies the
// query inputs and up-converts the v1 media-image description via
// MediaImage2::initFromV1. InitOMXParams runs first to reset the OMX header.
void DescribeColorFormat2Params::initFromV1(const DescribeColorFormatParams &params) {
    InitOMXParams(this);

    eColorFormat = params.eColorFormat;
    nFrameWidth = params.nFrameWidth;
    nFrameHeight = params.nFrameHeight;
    nStride = params.nStride;
    nSliceHeight = params.nSliceHeight;
    bUsingNativeBuffers = params.bUsingNativeBuffers;
    sMediaImage.initFromV1(params.sMediaImage);
};
79 
initFromV1(const MediaImage & image)80 void MediaImage2::initFromV1(const MediaImage &image) {
81     memset(this, 0, sizeof(*this));
82 
83     if (image.mType != MediaImage::MEDIA_IMAGE_TYPE_YUV) {
84         mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
85         return;
86     }
87 
88     for (size_t ix = 0; ix < image.mNumPlanes; ++ix) {
89         if (image.mPlane[ix].mHorizSubsampling > INT32_MAX
90                 || image.mPlane[ix].mVertSubsampling > INT32_MAX) {
91             mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
92             return;
93         }
94     }
95 
96     mType = (MediaImage2::Type)image.mType;
97     mNumPlanes = image.mNumPlanes;
98     mWidth = image.mWidth;
99     mHeight = image.mHeight;
100     mBitDepth = image.mBitDepth;
101     mBitDepthAllocated = 8;
102     for (size_t ix = 0; ix < image.mNumPlanes; ++ix) {
103         mPlane[ix].mOffset = image.mPlane[ix].mOffset;
104         mPlane[ix].mColInc = image.mPlane[ix].mColInc;
105         mPlane[ix].mRowInc = image.mPlane[ix].mRowInc;
106         mPlane[ix].mHorizSubsampling = (int32_t)image.mPlane[ix].mHorizSubsampling;
107         mPlane[ix].mVertSubsampling = (int32_t)image.mPlane[ix].mVertSubsampling;
108     }
109 }
110 
111 /**************************************************************************************************/
112 
GetComponentRole(bool isEncoder,const char * mime)113 const char *GetComponentRole(bool isEncoder, const char *mime) {
114     struct MimeToRole {
115         const char *mime;
116         const char *decoderRole;
117         const char *encoderRole;
118     };
119 
120     static const MimeToRole kMimeToRole[] = {
121         { MEDIA_MIMETYPE_AUDIO_MPEG,
122             "audio_decoder.mp3", "audio_encoder.mp3" },
123         { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
124             "audio_decoder.mp1", "audio_encoder.mp1" },
125         { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
126             "audio_decoder.mp2", "audio_encoder.mp2" },
127         { MEDIA_MIMETYPE_AUDIO_AMR_NB,
128             "audio_decoder.amrnb", "audio_encoder.amrnb" },
129         { MEDIA_MIMETYPE_AUDIO_AMR_WB,
130             "audio_decoder.amrwb", "audio_encoder.amrwb" },
131         { MEDIA_MIMETYPE_AUDIO_AAC,
132             "audio_decoder.aac", "audio_encoder.aac" },
133         { MEDIA_MIMETYPE_AUDIO_VORBIS,
134             "audio_decoder.vorbis", "audio_encoder.vorbis" },
135         { MEDIA_MIMETYPE_AUDIO_OPUS,
136             "audio_decoder.opus", "audio_encoder.opus" },
137         { MEDIA_MIMETYPE_AUDIO_G711_MLAW,
138             "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" },
139         { MEDIA_MIMETYPE_AUDIO_G711_ALAW,
140             "audio_decoder.g711alaw", "audio_encoder.g711alaw" },
141         { MEDIA_MIMETYPE_VIDEO_AVC,
142             "video_decoder.avc", "video_encoder.avc" },
143         { MEDIA_MIMETYPE_VIDEO_HEVC,
144             "video_decoder.hevc", "video_encoder.hevc" },
145         { MEDIA_MIMETYPE_VIDEO_MPEG4,
146             "video_decoder.mpeg4", "video_encoder.mpeg4" },
147         { MEDIA_MIMETYPE_VIDEO_H263,
148             "video_decoder.h263", "video_encoder.h263" },
149         { MEDIA_MIMETYPE_VIDEO_VP8,
150             "video_decoder.vp8", "video_encoder.vp8" },
151         { MEDIA_MIMETYPE_VIDEO_VP9,
152             "video_decoder.vp9", "video_encoder.vp9" },
153         { MEDIA_MIMETYPE_VIDEO_AV1,
154             "video_decoder.av1", "video_encoder.av1" },
155         { MEDIA_MIMETYPE_AUDIO_RAW,
156             "audio_decoder.raw", "audio_encoder.raw" },
157         { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
158             "video_decoder.dolby-vision", "video_encoder.dolby-vision" },
159         { MEDIA_MIMETYPE_AUDIO_FLAC,
160             "audio_decoder.flac", "audio_encoder.flac" },
161         { MEDIA_MIMETYPE_AUDIO_MSGSM,
162             "audio_decoder.gsm", "audio_encoder.gsm" },
163         { MEDIA_MIMETYPE_VIDEO_MPEG2,
164             "video_decoder.mpeg2", "video_encoder.mpeg2" },
165         { MEDIA_MIMETYPE_AUDIO_AC3,
166             "audio_decoder.ac3", "audio_encoder.ac3" },
167         { MEDIA_MIMETYPE_AUDIO_EAC3,
168             "audio_decoder.eac3", "audio_encoder.eac3" },
169         { MEDIA_MIMETYPE_AUDIO_EAC3_JOC,
170             "audio_decoder.eac3_joc", "audio_encoder.eac3_joc" },
171         { MEDIA_MIMETYPE_AUDIO_AC4,
172             "audio_decoder.ac4", "audio_encoder.ac4" },
173         { MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC,
174             "image_decoder.heic", "image_encoder.heic" },
175         { MEDIA_MIMETYPE_IMAGE_AVIF,
176             "image_decoder.avif", "image_encoder.avif" },
177     };
178 
179     static const size_t kNumMimeToRole =
180         sizeof(kMimeToRole) / sizeof(kMimeToRole[0]);
181 
182     size_t i;
183     for (i = 0; i < kNumMimeToRole; ++i) {
184         if (!strcasecmp(mime, kMimeToRole[i].mime)) {
185             break;
186         }
187     }
188 
189     if (i == kNumMimeToRole) {
190         return NULL;
191     }
192 
193     return isEncoder ? kMimeToRole[i].encoderRole
194                   : kMimeToRole[i].decoderRole;
195 }
196 
SetComponentRole(const sp<IOMXNode> & omxNode,const char * role)197 status_t SetComponentRole(const sp<IOMXNode> &omxNode, const char *role) {
198     OMX_PARAM_COMPONENTROLETYPE roleParams;
199     InitOMXParams(&roleParams);
200 
201     strncpy((char *)roleParams.cRole,
202             role, OMX_MAX_STRINGNAME_SIZE - 1);
203 
204     roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0';
205 
206     return omxNode->setParameter(
207             OMX_IndexParamStandardComponentRole,
208             &roleParams, sizeof(roleParams));
209 }
210 
// Fills params.sMediaImage with a default plane layout for the queried color
// format, for components that do not implement the describeColorFormat
// extension. Only the YUV420 family (planar, packed-planar, semi-planar,
// packed-semi-planar, and HAL YV12) is supported; anything else leaves the
// image marked MEDIA_IMAGE_TYPE_UNKNOWN and returns false.
bool DescribeDefaultColorFormat(DescribeColorFormat2Params &params) {
    MediaImage2 &image = params.sMediaImage;
    memset(&image, 0, sizeof(image));

    // Start in the "unknown" state so early returns leave a consistent image.
    image.mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
    image.mNumPlanes = 0;

    const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat;
    image.mWidth = params.nFrameWidth;
    image.mHeight = params.nFrameHeight;

    // only supporting YUV420
    if (fmt != OMX_COLOR_FormatYUV420Planar &&
        fmt != OMX_COLOR_FormatYUV420PackedPlanar &&
        fmt != OMX_COLOR_FormatYUV420SemiPlanar &&
        fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar &&
        fmt != (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_YV12) {
        ALOGW("do not know color format 0x%x = %d", fmt, fmt);
        if (fmt == OMX_COLOR_FormatYUV420Planar16) {
            ALOGW("Cannot describe color format OMX_COLOR_FormatYUV420Planar16");
        }
        return false;
    }

    // TEMPORARY FIX for some vendors that advertise sliceHeight as 0
    if (params.nStride != 0 && params.nSliceHeight == 0) {
        ALOGW("using sliceHeight=%u instead of what codec advertised (=0)",
                params.nFrameHeight);
        params.nSliceHeight = params.nFrameHeight;
    }

    // we need stride and slice-height to be non-zero and sensible. These values were chosen to
    // prevent integer overflows further down the line, and do not indicate support for
    // 32kx32k video.
    if (params.nStride == 0 || params.nSliceHeight == 0
            || params.nStride > 32768 || params.nSliceHeight > 32768) {
        ALOGW("cannot describe color format 0x%x = %d with stride=%u and sliceHeight=%u",
                fmt, fmt, params.nStride, params.nSliceHeight);
        return false;
    }

    // set-up YUV format
    // The Y plane layout is common to every supported format; chroma plane
    // layout is filled in per-format below.
    image.mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV;
    image.mNumPlanes = 3;
    image.mBitDepth = 8;
    image.mBitDepthAllocated = 8;
    image.mPlane[image.Y].mOffset = 0;
    image.mPlane[image.Y].mColInc = 1;
    image.mPlane[image.Y].mRowInc = params.nStride;
    image.mPlane[image.Y].mHorizSubsampling = 1;
    image.mPlane[image.Y].mVertSubsampling = 1;

    // cast to int: fmt may hold the HAL value, which is outside the OMX enum
    switch ((int)fmt) {
        case HAL_PIXEL_FORMAT_YV12:
            if (params.bUsingNativeBuffers) {
                // YV12 native buffers: 16-byte-aligned strides, V plane first
                size_t ystride = align(params.nStride, 16);
                size_t cstride = align(params.nStride / 2, 16);
                image.mPlane[image.Y].mRowInc = ystride;

                image.mPlane[image.V].mOffset = ystride * params.nSliceHeight;
                image.mPlane[image.V].mColInc = 1;
                image.mPlane[image.V].mRowInc = cstride;
                image.mPlane[image.V].mHorizSubsampling = 2;
                image.mPlane[image.V].mVertSubsampling = 2;

                image.mPlane[image.U].mOffset = image.mPlane[image.V].mOffset
                        + (cstride * params.nSliceHeight / 2);
                image.mPlane[image.U].mColInc = 1;
                image.mPlane[image.U].mRowInc = cstride;
                image.mPlane[image.U].mHorizSubsampling = 2;
                image.mPlane[image.U].mVertSubsampling = 2;
                break;
            } else {
                // fall through as YV12 is used for YUV420Planar by some codecs
                FALLTHROUGH_INTENDED;
            }

        case OMX_COLOR_FormatYUV420Planar:
        case OMX_COLOR_FormatYUV420PackedPlanar:
            // I420 layout: Y plane, then U, then V, chroma stride = luma/2
            image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
            image.mPlane[image.U].mColInc = 1;
            image.mPlane[image.U].mRowInc = params.nStride / 2;
            image.mPlane[image.U].mHorizSubsampling = 2;
            image.mPlane[image.U].mVertSubsampling = 2;

            image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset
                    + (params.nStride * params.nSliceHeight / 4);
            image.mPlane[image.V].mColInc = 1;
            image.mPlane[image.V].mRowInc = params.nStride / 2;
            image.mPlane[image.V].mHorizSubsampling = 2;
            image.mPlane[image.V].mVertSubsampling = 2;
            break;

        case OMX_COLOR_FormatYUV420SemiPlanar:
            // FIXME: NV21 for sw-encoder, NV12 for decoder and hw-encoder
        case OMX_COLOR_FormatYUV420PackedSemiPlanar:
            // NV12
            // interleaved UV plane: colInc 2, V one byte after U
            image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
            image.mPlane[image.U].mColInc = 2;
            image.mPlane[image.U].mRowInc = params.nStride;
            image.mPlane[image.U].mHorizSubsampling = 2;
            image.mPlane[image.U].mVertSubsampling = 2;

            image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset + 1;
            image.mPlane[image.V].mColInc = 2;
            image.mPlane[image.V].mRowInc = params.nStride;
            image.mPlane[image.V].mHorizSubsampling = 2;
            image.mPlane[image.V].mVertSubsampling = 2;
            break;

        default:
            // unreachable: the format was validated above
            TRESPASS();
    }
    return true;
}
326 
// Queries |omxNode| for a description of the color format in
// |describeParams|, preferring the v1 describeColorFormat extension, then
// the v2 extension, and finally falling back to the built-in default
// description. Returns true if a usable (non-UNKNOWN) media image results.
//
// NOTE(review): if the v1 extension index exists but getParameter fails,
// this falls straight through to the default describer without trying v2 —
// the v2 branch is only reached when the v1 index lookup itself fails.
bool DescribeColorFormat(
        const sp<IOMXNode> &omxNode,
        DescribeColorFormat2Params &describeParams)
{
    OMX_INDEXTYPE describeColorFormatIndex;
    if (omxNode->getExtensionIndex(
            "OMX.google.android.index.describeColorFormat",
            &describeColorFormatIndex) == OK) {
        // v1 query: copy the inputs into a v1 struct, then up-convert the
        // component's answer back into the v2 struct.
        DescribeColorFormatParams describeParamsV1(describeParams);
        if (omxNode->getParameter(
                describeColorFormatIndex,
                &describeParamsV1, sizeof(describeParamsV1)) == OK) {
            describeParams.initFromV1(describeParamsV1);
            return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
        }
    } else if (omxNode->getExtensionIndex(
            "OMX.google.android.index.describeColorFormat2", &describeColorFormatIndex) == OK
               && omxNode->getParameter(
                       describeColorFormatIndex, &describeParams, sizeof(describeParams)) == OK) {
        // v2 query answered directly into describeParams
        return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
    }

    return DescribeDefaultColorFormat(describeParams);
}
351 
352 // static
IsFlexibleColorFormat(const sp<IOMXNode> & omxNode,uint32_t colorFormat,bool usingNativeBuffers,OMX_U32 * flexibleEquivalent)353 bool IsFlexibleColorFormat(
354          const sp<IOMXNode> &omxNode,
355          uint32_t colorFormat, bool usingNativeBuffers, OMX_U32 *flexibleEquivalent) {
356     DescribeColorFormat2Params describeParams;
357     InitOMXParams(&describeParams);
358     describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat;
359     // reasonable initial values (that will be overwritten)
360     describeParams.nFrameWidth = 128;
361     describeParams.nFrameHeight = 128;
362     describeParams.nStride = 128;
363     describeParams.nSliceHeight = 128;
364     describeParams.bUsingNativeBuffers = (OMX_BOOL)usingNativeBuffers;
365 
366     CHECK(flexibleEquivalent != NULL);
367 
368     if (!DescribeColorFormat(omxNode, describeParams)) {
369         return false;
370     }
371 
372     const MediaImage2 &img = describeParams.sMediaImage;
373     if (img.mType == MediaImage2::MEDIA_IMAGE_TYPE_YUV) {
374         if (img.mNumPlanes != 3
375                 || img.mPlane[img.Y].mHorizSubsampling != 1
376                 || img.mPlane[img.Y].mVertSubsampling != 1) {
377             return false;
378         }
379 
380         // YUV 420
381         if (img.mPlane[img.U].mHorizSubsampling == 2
382                 && img.mPlane[img.U].mVertSubsampling == 2
383                 && img.mPlane[img.V].mHorizSubsampling == 2
384                 && img.mPlane[img.V].mVertSubsampling == 2) {
385             // possible flexible YUV420 format
386             if (img.mBitDepth <= 8) {
387                *flexibleEquivalent = OMX_COLOR_FormatYUV420Flexible;
388                return true;
389             }
390         }
391     }
392     return false;
393 }
394 
395 }  // namespace android
396 
397