1 /*
2 * Copyright (C) 2016 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 //#define LOG_NDEBUG 0
18 #define LOG_TAG "OMXUtils"
19
20 #include <string.h>
21
22 #include <media/stagefright/omx/OMXUtils.h>
23 #include <media/stagefright/foundation/ADebug.h>
24 #include <media/stagefright/foundation/AUtils.h>
25 #include <media/stagefright/MediaErrors.h>
26 #include <media/hardware/HardwareAPI.h>
27 #include <media/MediaDefs.h>
28 #include <system/graphics-base.h>
29
30 namespace android {
31
StatusFromOMXError(OMX_ERRORTYPE err)32 status_t StatusFromOMXError(OMX_ERRORTYPE err) {
33 switch (err) {
34 case OMX_ErrorNone:
35 return OK;
36 case OMX_ErrorNoMore:
37 return NOT_ENOUGH_DATA;
38 case OMX_ErrorUnsupportedSetting:
39 case OMX_ErrorUnsupportedIndex:
40 return ERROR_UNSUPPORTED; // this is a media specific error
41 case OMX_ErrorBadParameter:
42 return BAD_VALUE;
43 case OMX_ErrorInsufficientResources:
44 return NO_MEMORY;
45 case OMX_ErrorInvalidComponentName:
46 case OMX_ErrorComponentNotFound:
47 return NAME_NOT_FOUND;
48 default:
49 return UNKNOWN_ERROR;
50 }
51 }
52
53 /**************************************************************************************************/
54
DescribeColorFormatParams(const DescribeColorFormat2Params & params)55 DescribeColorFormatParams::DescribeColorFormatParams(const DescribeColorFormat2Params ¶ms) {
56 InitOMXParams(this);
57
58 eColorFormat = params.eColorFormat;
59 nFrameWidth = params.nFrameWidth;
60 nFrameHeight = params.nFrameHeight;
61 nStride = params.nStride;
62 nSliceHeight = params.nSliceHeight;
63 bUsingNativeBuffers = params.bUsingNativeBuffers;
64 // we don't copy media images as this conversion is only used pre-query
65 };
66
initFromV1(const DescribeColorFormatParams & params)67 void DescribeColorFormat2Params::initFromV1(const DescribeColorFormatParams ¶ms) {
68 InitOMXParams(this);
69
70 eColorFormat = params.eColorFormat;
71 nFrameWidth = params.nFrameWidth;
72 nFrameHeight = params.nFrameHeight;
73 nStride = params.nStride;
74 nSliceHeight = params.nSliceHeight;
75 bUsingNativeBuffers = params.bUsingNativeBuffers;
76 sMediaImage.initFromV1(params.sMediaImage);
77 };
78
initFromV1(const MediaImage & image)79 void MediaImage2::initFromV1(const MediaImage &image) {
80 memset(this, 0, sizeof(*this));
81
82 if (image.mType != MediaImage::MEDIA_IMAGE_TYPE_YUV) {
83 mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
84 return;
85 }
86
87 for (size_t ix = 0; ix < image.mNumPlanes; ++ix) {
88 if (image.mPlane[ix].mHorizSubsampling > INT32_MAX
89 || image.mPlane[ix].mVertSubsampling > INT32_MAX) {
90 mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
91 return;
92 }
93 }
94
95 mType = (MediaImage2::Type)image.mType;
96 mNumPlanes = image.mNumPlanes;
97 mWidth = image.mWidth;
98 mHeight = image.mHeight;
99 mBitDepth = image.mBitDepth;
100 mBitDepthAllocated = 8;
101 for (size_t ix = 0; ix < image.mNumPlanes; ++ix) {
102 mPlane[ix].mOffset = image.mPlane[ix].mOffset;
103 mPlane[ix].mColInc = image.mPlane[ix].mColInc;
104 mPlane[ix].mRowInc = image.mPlane[ix].mRowInc;
105 mPlane[ix].mHorizSubsampling = (int32_t)image.mPlane[ix].mHorizSubsampling;
106 mPlane[ix].mVertSubsampling = (int32_t)image.mPlane[ix].mVertSubsampling;
107 }
108 }
109
110 /**************************************************************************************************/
111
GetComponentRole(bool isEncoder,const char * mime)112 const char *GetComponentRole(bool isEncoder, const char *mime) {
113 struct MimeToRole {
114 const char *mime;
115 const char *decoderRole;
116 const char *encoderRole;
117 };
118
119 static const MimeToRole kMimeToRole[] = {
120 { MEDIA_MIMETYPE_AUDIO_MPEG,
121 "audio_decoder.mp3", "audio_encoder.mp3" },
122 { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
123 "audio_decoder.mp1", "audio_encoder.mp1" },
124 { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
125 "audio_decoder.mp2", "audio_encoder.mp2" },
126 { MEDIA_MIMETYPE_AUDIO_AMR_NB,
127 "audio_decoder.amrnb", "audio_encoder.amrnb" },
128 { MEDIA_MIMETYPE_AUDIO_AMR_WB,
129 "audio_decoder.amrwb", "audio_encoder.amrwb" },
130 { MEDIA_MIMETYPE_AUDIO_AAC,
131 "audio_decoder.aac", "audio_encoder.aac" },
132 { MEDIA_MIMETYPE_AUDIO_VORBIS,
133 "audio_decoder.vorbis", "audio_encoder.vorbis" },
134 { MEDIA_MIMETYPE_AUDIO_OPUS,
135 "audio_decoder.opus", "audio_encoder.opus" },
136 { MEDIA_MIMETYPE_AUDIO_G711_MLAW,
137 "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" },
138 { MEDIA_MIMETYPE_AUDIO_G711_ALAW,
139 "audio_decoder.g711alaw", "audio_encoder.g711alaw" },
140 { MEDIA_MIMETYPE_VIDEO_AVC,
141 "video_decoder.avc", "video_encoder.avc" },
142 { MEDIA_MIMETYPE_VIDEO_HEVC,
143 "video_decoder.hevc", "video_encoder.hevc" },
144 { MEDIA_MIMETYPE_VIDEO_MPEG4,
145 "video_decoder.mpeg4", "video_encoder.mpeg4" },
146 { MEDIA_MIMETYPE_VIDEO_H263,
147 "video_decoder.h263", "video_encoder.h263" },
148 { MEDIA_MIMETYPE_VIDEO_VP8,
149 "video_decoder.vp8", "video_encoder.vp8" },
150 { MEDIA_MIMETYPE_VIDEO_VP9,
151 "video_decoder.vp9", "video_encoder.vp9" },
152 { MEDIA_MIMETYPE_AUDIO_RAW,
153 "audio_decoder.raw", "audio_encoder.raw" },
154 { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
155 "video_decoder.dolby-vision", "video_encoder.dolby-vision" },
156 { MEDIA_MIMETYPE_AUDIO_FLAC,
157 "audio_decoder.flac", "audio_encoder.flac" },
158 { MEDIA_MIMETYPE_AUDIO_MSGSM,
159 "audio_decoder.gsm", "audio_encoder.gsm" },
160 { MEDIA_MIMETYPE_VIDEO_MPEG2,
161 "video_decoder.mpeg2", "video_encoder.mpeg2" },
162 { MEDIA_MIMETYPE_AUDIO_AC3,
163 "audio_decoder.ac3", "audio_encoder.ac3" },
164 { MEDIA_MIMETYPE_AUDIO_EAC3,
165 "audio_decoder.eac3", "audio_encoder.eac3" },
166 };
167
168 static const size_t kNumMimeToRole =
169 sizeof(kMimeToRole) / sizeof(kMimeToRole[0]);
170
171 size_t i;
172 for (i = 0; i < kNumMimeToRole; ++i) {
173 if (!strcasecmp(mime, kMimeToRole[i].mime)) {
174 break;
175 }
176 }
177
178 if (i == kNumMimeToRole) {
179 return NULL;
180 }
181
182 return isEncoder ? kMimeToRole[i].encoderRole
183 : kMimeToRole[i].decoderRole;
184 }
185
// Sets the standard component role (e.g. "video_decoder.avc") on the given
// OMX node via OMX_IndexParamStandardComponentRole.
// Returns the status reported by IOMXNode::setParameter.
status_t SetComponentRole(const sp<IOMXNode> &omxNode, const char *role) {
    OMX_PARAM_COMPONENTROLETYPE roleParams;
    InitOMXParams(&roleParams);

    // Copy at most OMX_MAX_STRINGNAME_SIZE - 1 characters of the role name.
    strncpy((char *)roleParams.cRole,
            role, OMX_MAX_STRINGNAME_SIZE - 1);

    // strncpy does not null-terminate when the source fills the buffer;
    // force termination at the last byte.
    roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0';

    return omxNode->setParameter(
            OMX_IndexParamStandardComponentRole,
            &roleParams, sizeof(roleParams));
}
199
// Fallback describer used when the component does not implement the
// describeColorFormat extension: fills params.sMediaImage with the default
// plane layout for the well-known YUV420 formats, derived from the frame
// size, stride and slice height in |params|.
// Returns false (leaving sMediaImage as MEDIA_IMAGE_TYPE_UNKNOWN) for
// unsupported formats or unusable stride/slice-height values.
bool DescribeDefaultColorFormat(DescribeColorFormat2Params &params) {
    MediaImage2 &image = params.sMediaImage;
    memset(&image, 0, sizeof(image));

    // Start from "unknown" so any early return leaves a consistent result.
    image.mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
    image.mNumPlanes = 0;

    const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat;
    image.mWidth = params.nFrameWidth;
    image.mHeight = params.nFrameHeight;

    // only supporting YUV420
    if (fmt != OMX_COLOR_FormatYUV420Planar &&
        fmt != OMX_COLOR_FormatYUV420PackedPlanar &&
        fmt != OMX_COLOR_FormatYUV420SemiPlanar &&
        fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar &&
        fmt != (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_YV12) {
        ALOGW("do not know color format 0x%x = %d", fmt, fmt);
        return false;
    }

    // TEMPORARY FIX for some vendors that advertise sliceHeight as 0
    if (params.nStride != 0 && params.nSliceHeight == 0) {
        ALOGW("using sliceHeight=%u instead of what codec advertised (=0)",
                params.nFrameHeight);
        params.nSliceHeight = params.nFrameHeight;
    }

    // we need stride and slice-height to be non-zero and sensible. These values were chosen to
    // prevent integer overflows further down the line, and do not indicate support for
    // 32kx32k video.
    if (params.nStride == 0 || params.nSliceHeight == 0
            || params.nStride > 32768 || params.nSliceHeight > 32768) {
        ALOGW("cannot describe color format 0x%x = %d with stride=%u and sliceHeight=%u",
                fmt, fmt, params.nStride, params.nSliceHeight);
        return false;
    }

    // set-up YUV format
    // The luma plane layout is identical for all supported formats; chroma
    // layout is filled in per-format below.
    image.mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV;
    image.mNumPlanes = 3;
    image.mBitDepth = 8;
    image.mBitDepthAllocated = 8;
    image.mPlane[image.Y].mOffset = 0;
    image.mPlane[image.Y].mColInc = 1;
    image.mPlane[image.Y].mRowInc = params.nStride;
    image.mPlane[image.Y].mHorizSubsampling = 1;
    image.mPlane[image.Y].mVertSubsampling = 1;

    switch ((int)fmt) {
        case HAL_PIXEL_FORMAT_YV12:
            if (params.bUsingNativeBuffers) {
                // YV12 native buffers: 16-byte-aligned strides, V plane
                // precedes U (unlike I420).
                size_t ystride = align(params.nStride, 16);
                size_t cstride = align(params.nStride / 2, 16);
                image.mPlane[image.Y].mRowInc = ystride;

                image.mPlane[image.V].mOffset = ystride * params.nSliceHeight;
                image.mPlane[image.V].mColInc = 1;
                image.mPlane[image.V].mRowInc = cstride;
                image.mPlane[image.V].mHorizSubsampling = 2;
                image.mPlane[image.V].mVertSubsampling = 2;

                image.mPlane[image.U].mOffset = image.mPlane[image.V].mOffset
                        + (cstride * params.nSliceHeight / 2);
                image.mPlane[image.U].mColInc = 1;
                image.mPlane[image.U].mRowInc = cstride;
                image.mPlane[image.U].mHorizSubsampling = 2;
                image.mPlane[image.U].mVertSubsampling = 2;
                break;
            } else {
                // fall through as YV12 is used for YUV420Planar by some codecs
                // (intentional fall-through; no break)
            }

        case OMX_COLOR_FormatYUV420Planar:
        case OMX_COLOR_FormatYUV420PackedPlanar:
            // I420: U plane follows Y, V plane follows U, each chroma row
            // is half the luma stride.
            image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
            image.mPlane[image.U].mColInc = 1;
            image.mPlane[image.U].mRowInc = params.nStride / 2;
            image.mPlane[image.U].mHorizSubsampling = 2;
            image.mPlane[image.U].mVertSubsampling = 2;

            image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset
                    + (params.nStride * params.nSliceHeight / 4);
            image.mPlane[image.V].mColInc = 1;
            image.mPlane[image.V].mRowInc = params.nStride / 2;
            image.mPlane[image.V].mHorizSubsampling = 2;
            image.mPlane[image.V].mVertSubsampling = 2;
            break;

        case OMX_COLOR_FormatYUV420SemiPlanar:
            // FIXME: NV21 for sw-encoder, NV12 for decoder and hw-encoder
        case OMX_COLOR_FormatYUV420PackedSemiPlanar:
            // NV12
            // Interleaved chroma: U and V share one plane after Y, with a
            // column increment of 2 and V offset one byte past U.
            image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
            image.mPlane[image.U].mColInc = 2;
            image.mPlane[image.U].mRowInc = params.nStride;
            image.mPlane[image.U].mHorizSubsampling = 2;
            image.mPlane[image.U].mVertSubsampling = 2;

            image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset + 1;
            image.mPlane[image.V].mColInc = 2;
            image.mPlane[image.V].mRowInc = params.nStride;
            image.mPlane[image.V].mHorizSubsampling = 2;
            image.mPlane[image.V].mVertSubsampling = 2;
            break;

        default:
            // unreachable: format was validated above
            TRESPASS();
    }
    return true;
}
311
// Queries the component for a description of the color format in
// |describeParams|, preferring the v1 describeColorFormat extension, then
// the v2 extension, and finally falling back to DescribeDefaultColorFormat.
// Note: if the v1 extension index exists but the query itself fails, we go
// straight to the default describer — the v2 extension is only tried when
// v1 is not advertised at all.
// Returns true if a usable (non-UNKNOWN) media image description resulted.
bool DescribeColorFormat(
        const sp<IOMXNode> &omxNode,
        DescribeColorFormat2Params &describeParams)
{
    OMX_INDEXTYPE describeColorFormatIndex;
    if (omxNode->getExtensionIndex(
            "OMX.google.android.index.describeColorFormat",
            &describeColorFormatIndex) == OK) {
        // v1 query: convert params down to v1, ask, then convert back up.
        DescribeColorFormatParams describeParamsV1(describeParams);
        if (omxNode->getParameter(
                describeColorFormatIndex,
                &describeParamsV1, sizeof(describeParamsV1)) == OK) {
            describeParams.initFromV1(describeParamsV1);
            return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
        }
    } else if (omxNode->getExtensionIndex(
            "OMX.google.android.index.describeColorFormat2", &describeColorFormatIndex) == OK
            && omxNode->getParameter(
                    describeColorFormatIndex, &describeParams, sizeof(describeParams)) == OK) {
        return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
    }

    // Neither extension produced a description; use the built-in defaults.
    return DescribeDefaultColorFormat(describeParams);
}
336
337 // static
IsFlexibleColorFormat(const sp<IOMXNode> & omxNode,uint32_t colorFormat,bool usingNativeBuffers,OMX_U32 * flexibleEquivalent)338 bool IsFlexibleColorFormat(
339 const sp<IOMXNode> &omxNode,
340 uint32_t colorFormat, bool usingNativeBuffers, OMX_U32 *flexibleEquivalent) {
341 DescribeColorFormat2Params describeParams;
342 InitOMXParams(&describeParams);
343 describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat;
344 // reasonable dummy values
345 describeParams.nFrameWidth = 128;
346 describeParams.nFrameHeight = 128;
347 describeParams.nStride = 128;
348 describeParams.nSliceHeight = 128;
349 describeParams.bUsingNativeBuffers = (OMX_BOOL)usingNativeBuffers;
350
351 CHECK(flexibleEquivalent != NULL);
352
353 if (!DescribeColorFormat(omxNode, describeParams)) {
354 return false;
355 }
356
357 const MediaImage2 &img = describeParams.sMediaImage;
358 if (img.mType == MediaImage2::MEDIA_IMAGE_TYPE_YUV) {
359 if (img.mNumPlanes != 3
360 || img.mPlane[img.Y].mHorizSubsampling != 1
361 || img.mPlane[img.Y].mVertSubsampling != 1) {
362 return false;
363 }
364
365 // YUV 420
366 if (img.mPlane[img.U].mHorizSubsampling == 2
367 && img.mPlane[img.U].mVertSubsampling == 2
368 && img.mPlane[img.V].mHorizSubsampling == 2
369 && img.mPlane[img.V].mVertSubsampling == 2) {
370 // possible flexible YUV420 format
371 if (img.mBitDepth <= 8) {
372 *flexibleEquivalent = OMX_COLOR_FormatYUV420Flexible;
373 return true;
374 }
375 }
376 }
377 return false;
378 }
379
380 } // namespace android
381
382