/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "FrameDecoder"

#include "include/FrameDecoder.h"
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <gui/Surface.h>
#include <inttypes.h>
#include <media/ICrypto.h>
#include <media/IMediaSource.h>
#include <media/MediaCodecBuffer.h>
#include <media/stagefright/foundation/avc_utils.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/ColorConverter.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/Utils.h>
#include <private/media/VideoFrame.h>
#include <utils/Log.h>

namespace android {

static const int64_t kBufferTimeOutUs = 10000LL; // 10 msec
static const size_t kRetryCount = 50; // must be >0

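// Allocates a shared-memory VideoFrame large enough for a width x height frame at
// dstBpp bytes per pixel (or metadata only, if metaOnly is true), copying rotation,
// display size and the ICC profile from the track metadata into the frame header.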
sp<IMemory> allocVideoFrame(const sp<MetaData>& trackMeta,
        int32_t width, int32_t height, int32_t tileWidth, int32_t tileHeight,
        int32_t dstBpp, bool metaOnly = false) {
    int32_t rotationAngle;
    if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) {
        rotationAngle = 0; // By default, no rotation
    }
    uint32_t type;
    const void *iccData;
    size_t iccSize;
    if (!trackMeta->findData(kKeyIccProfile, &type, &iccData, &iccSize)) {
        iccData = NULL;
        iccSize = 0;
    }

    int32_t sarWidth, sarHeight;
    int32_t displayWidth, displayHeight;
    if (trackMeta->findInt32(kKeySARWidth, &sarWidth)
            && trackMeta->findInt32(kKeySARHeight, &sarHeight)
            && sarHeight != 0) {
        displayWidth = (width * sarWidth) / sarHeight;
        displayHeight = height;
    } else if (trackMeta->findInt32(kKeyDisplayWidth, &displayWidth)
                && trackMeta->findInt32(kKeyDisplayHeight, &displayHeight)
                && displayWidth > 0 && displayHeight > 0
                && width > 0 && height > 0) {
        ALOGV("found display size %dx%d", displayWidth, displayHeight);
    } else {
        displayWidth = width;
        displayHeight = height;
    }

    VideoFrame frame(width, height, displayWidth, displayHeight,
            tileWidth, tileHeight, rotationAngle, dstBpp, !metaOnly, iccSize);

    size_t size = frame.getFlattenedSize();
    sp<MemoryHeapBase> heap = new MemoryHeapBase(size, 0, "MetadataRetrieverClient");
    if (heap == NULL) {
        ALOGE("failed to create MemoryHeapBase");
        return NULL;
    }
    sp<IMemory> frameMem = new MemoryBase(heap, 0, size);
    if (frameMem == NULL) {
        ALOGE("not enough memory for VideoFrame size=%zu", size);
        return NULL;
    }
    VideoFrame* frameCopy = static_cast<VideoFrame*>(frameMem->pointer());
    frameCopy->init(frame, iccData, iccSize);

    return frameMem;
}

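// Returns true if the track carries a stand-alone thumbnail, optionally reporting its
// dimensions and HEVC codec config (kKeyThumbnailHVCC) to the caller.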
bool findThumbnailInfo(
        const sp<MetaData> &trackMeta, int32_t *width, int32_t *height,
        uint32_t *type = NULL, const void **data = NULL, size_t *size = NULL) {
    uint32_t dummyType;
    const void *dummyData;
    size_t dummySize;
    return trackMeta->findInt32(kKeyThumbnailWidth, width)
            && trackMeta->findInt32(kKeyThumbnailHeight, height)
            && trackMeta->findData(kKeyThumbnailHVCC,
                    type ?: &dummyType, data ?: &dummyData, size ?: &dummySize);
}

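// Returns true if the track metadata describes a valid tile grid (positive tile size
// and grid dimensions), as used by tiled HEIF images.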
bool findGridInfo(const sp<MetaData> &trackMeta,
        int32_t *tileWidth, int32_t *tileHeight, int32_t *gridRows, int32_t *gridCols) {
    return trackMeta->findInt32(kKeyTileWidth, tileWidth) && (*tileWidth > 0)
            && trackMeta->findInt32(kKeyTileHeight, tileHeight) && (*tileHeight > 0)
            && trackMeta->findInt32(kKeyGridRows, gridRows) && (*gridRows > 0)
            && trackMeta->findInt32(kKeyGridCols, gridCols) && (*gridCols > 0);
}

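// Maps the requested HAL pixel format to the corresponding OMX color format and its
// bytes per pixel; returns false for unsupported formats.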
bool getDstColorFormat(
        android_pixel_format_t colorFormat,
        OMX_COLOR_FORMATTYPE *dstFormat,
        int32_t *dstBpp) {
    switch (colorFormat) {
        case HAL_PIXEL_FORMAT_RGB_565:
        {
            *dstFormat = OMX_COLOR_Format16bitRGB565;
            *dstBpp = 2;
            return true;
        }
        case HAL_PIXEL_FORMAT_RGBA_8888:
        {
            *dstFormat = OMX_COLOR_Format32BitRGBA8888;
            *dstBpp = 4;
            return true;
        }
        case HAL_PIXEL_FORMAT_BGRA_8888:
        {
            *dstFormat = OMX_COLOR_Format32bitBGRA8888;
            *dstBpp = 4;
            return true;
        }
        default:
        {
            ALOGE("Unsupported color format: %d", colorFormat);
            break;
        }
    }
    return false;
}

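// Builds a VideoFrame that contains only header information (dimensions, rotation,
// ICC profile), without allocating or decoding any pixel data.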
//static
sp<IMemory> FrameDecoder::getMetadataOnly(
        const sp<MetaData> &trackMeta, int colorFormat, bool thumbnail) {
    OMX_COLOR_FORMATTYPE dstFormat;
    int32_t dstBpp;
    if (!getDstColorFormat(
            (android_pixel_format_t)colorFormat, &dstFormat, &dstBpp)) {
        return NULL;
    }

    int32_t width, height, tileWidth = 0, tileHeight = 0;
    if (thumbnail) {
        if (!findThumbnailInfo(trackMeta, &width, &height)) {
            return NULL;
        }
    } else {
        CHECK(trackMeta->findInt32(kKeyWidth, &width));
        CHECK(trackMeta->findInt32(kKeyHeight, &height));

        int32_t gridRows, gridCols;
        if (!findGridInfo(trackMeta, &tileWidth, &tileHeight, &gridRows, &gridCols)) {
            tileWidth = tileHeight = 0;
        }
    }
    return allocVideoFrame(trackMeta,
            width, height, tileWidth, tileHeight, dstBpp, true /*metaOnly*/);
}

FrameDecoder::FrameDecoder(
        const AString &componentName,
        const sp<MetaData> &trackMeta,
        const sp<IMediaSource> &source)
    : mComponentName(componentName),
      mTrackMeta(trackMeta),
      mSource(source),
      mDstFormat(OMX_COLOR_Format16bitRGB565),
      mDstBpp(2),
      mHaveMoreInputs(true),
      mFirstSample(true) {
}

FrameDecoder::~FrameDecoder() {
    if (mDecoder != NULL) {
        mDecoder->release();
        mSource->stop();
    }
}

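// Resolves the destination color format, asks the subclass for the codec format and
// seek options, then creates, configures and starts the MediaCodec and the source.
// On any failure after the codec is created, it is released and mDecoder is left unset.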
status_t FrameDecoder::init(
        int64_t frameTimeUs, size_t numFrames, int option, int colorFormat) {
    if (!getDstColorFormat(
            (android_pixel_format_t)colorFormat, &mDstFormat, &mDstBpp)) {
        return ERROR_UNSUPPORTED;
    }

    sp<AMessage> videoFormat = onGetFormatAndSeekOptions(
            frameTimeUs, numFrames, option, &mReadOptions);
    if (videoFormat == NULL) {
        ALOGE("video format or seek mode not supported");
        return ERROR_UNSUPPORTED;
    }

    status_t err;
    sp<ALooper> looper = new ALooper;
    looper->start();
    sp<MediaCodec> decoder = MediaCodec::CreateByComponentName(
            looper, mComponentName, &err);
    if (decoder.get() == NULL || err != OK) {
        ALOGW("Failed to instantiate decoder [%s]", mComponentName.c_str());
        return (decoder.get() == NULL) ? NO_MEMORY : err;
    }

    err = decoder->configure(
            videoFormat, NULL /* surface */, NULL /* crypto */, 0 /* flags */);
    if (err != OK) {
        ALOGW("configure returned error %d (%s)", err, asString(err));
        decoder->release();
        return err;
    }

    err = decoder->start();
    if (err != OK) {
        ALOGW("start returned error %d (%s)", err, asString(err));
        decoder->release();
        return err;
    }

    err = mSource->start();
    if (err != OK) {
        ALOGW("source failed to start: %d (%s)", err, asString(err));
        decoder->release();
        return err;
    }
    mDecoder = decoder;

    return OK;
}

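// Typical call sequence (sketch; the concrete decoder, variables and parameters below
// are illustrative and not defined in this file):
//
//   sp<VideoFrameDecoder> decoder =
//           new VideoFrameDecoder(componentName, trackMeta, source);
//   if (decoder->init(frameTimeUs, 1 /* numFrames */,
//           MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC /* option */,
//           HAL_PIXEL_FORMAT_RGBA_8888 /* colorFormat */) == OK) {
//       sp<IMemory> frame = decoder->extractFrame(NULL /* rect */);
//   }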
sp<IMemory> FrameDecoder::extractFrame(FrameRect *rect) {
    status_t err = onExtractRect(rect);
    if (err == OK) {
        err = extractInternal();
    }
    if (err != OK) {
        return NULL;
    }

    return mFrames.size() > 0 ? mFrames[0] : NULL;
}

status_t FrameDecoder::extractFrames(std::vector<sp<IMemory> >* frames) {
    status_t err = extractInternal();
    if (err != OK) {
        return err;
    }

    for (size_t i = 0; i < mFrames.size(); i++) {
        frames->push_back(mFrames[i]);
    }
    return OK;
}

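// Main decode loop: keep the codec's input port full by reading samples from the
// source, then drain one output buffer per iteration. Output dequeue timeouts
// (kBufferTimeOutUs each) are retried up to kRetryCount times before giving up.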
status_t FrameDecoder::extractInternal() {
    status_t err = OK;
    bool done = false;
    size_t retriesLeft = kRetryCount;
    do {
        size_t index;
        int64_t ptsUs = 0LL;
        uint32_t flags = 0;

        // Queue as many inputs as we possibly can, then block on dequeuing
        // outputs. After getting each output, come back and queue the inputs
        // again to keep the decoder busy.
        while (mHaveMoreInputs) {
            err = mDecoder->dequeueInputBuffer(&index, 0);
            if (err != OK) {
                ALOGV("Timed out waiting for input");
                if (retriesLeft) {
                    err = OK;
                }
                break;
            }
            sp<MediaCodecBuffer> codecBuffer;
            err = mDecoder->getInputBuffer(index, &codecBuffer);
            if (err != OK) {
                ALOGE("failed to get input buffer %zu", index);
                break;
            }

            MediaBufferBase *mediaBuffer = NULL;

            err = mSource->read(&mediaBuffer, &mReadOptions);
            mReadOptions.clearSeekTo();
            if (err != OK) {
                mHaveMoreInputs = false;
                if (!mFirstSample && err == ERROR_END_OF_STREAM) {
                    (void)mDecoder->queueInputBuffer(
                            index, 0, 0, 0, MediaCodec::BUFFER_FLAG_EOS);
                    err = OK;
                } else {
                    ALOGW("Input Error: err=%d", err);
                }
                break;
            }

            if (mediaBuffer->range_length() > codecBuffer->capacity()) {
                ALOGE("buffer size (%zu) too large for codec input size (%zu)",
                        mediaBuffer->range_length(), codecBuffer->capacity());
                mHaveMoreInputs = false;
                err = BAD_VALUE;
            } else {
                codecBuffer->setRange(0, mediaBuffer->range_length());

                CHECK(mediaBuffer->meta_data().findInt64(kKeyTime, &ptsUs));
                memcpy(codecBuffer->data(),
                        (const uint8_t*)mediaBuffer->data() + mediaBuffer->range_offset(),
                        mediaBuffer->range_length());

                onInputReceived(codecBuffer, mediaBuffer->meta_data(), mFirstSample, &flags);
                mFirstSample = false;
            }

            mediaBuffer->release();

            if (mHaveMoreInputs) {
                ALOGV("QueueInput: size=%zu ts=%" PRId64 " us flags=%x",
                        codecBuffer->size(), ptsUs, flags);

                err = mDecoder->queueInputBuffer(
                        index,
                        codecBuffer->offset(),
                        codecBuffer->size(),
                        ptsUs,
                        flags);

                if (flags & MediaCodec::BUFFER_FLAG_EOS) {
                    mHaveMoreInputs = false;
                }
            }
        }

        while (err == OK) {
            size_t offset, size;
            // wait for a decoded buffer
            err = mDecoder->dequeueOutputBuffer(
                    &index,
                    &offset,
                    &size,
                    &ptsUs,
                    &flags,
                    kBufferTimeOutUs);

            if (err == INFO_FORMAT_CHANGED) {
                ALOGV("Received format change");
                err = mDecoder->getOutputFormat(&mOutputFormat);
            } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
                ALOGV("Output buffers changed");
                err = OK;
            } else {
                if (err == -EAGAIN /* INFO_TRY_AGAIN_LATER */ && --retriesLeft > 0) {
                    ALOGV("Timed-out waiting for output.. retries left = %zu", retriesLeft);
                    err = OK;
                } else if (err == OK) {
                    // If we're seeking with CLOSEST option and obtained a valid targetTimeUs
                    // from the extractor, decode to the specified frame. Otherwise we're done.
                    ALOGV("Received an output buffer, timeUs=%lld", (long long)ptsUs);
                    sp<MediaCodecBuffer> videoFrameBuffer;
                    err = mDecoder->getOutputBuffer(index, &videoFrameBuffer);
                    if (err != OK) {
                        ALOGE("failed to get output buffer %zu", index);
                        break;
                    }
                    err = onOutputReceived(videoFrameBuffer, mOutputFormat, ptsUs, &done);
                    mDecoder->releaseOutputBuffer(index);
                } else {
                    ALOGW("Received error %d (%s) instead of output", err, asString(err));
                    done = true;
                }
                break;
            }
        }
    } while (err == OK && !done);

    if (err != OK) {
        ALOGE("failed to get video frame (err %d)", err);
    }

    return err;
}

//////////////////////////////////////////////////////////////////////

VideoFrameDecoder::VideoFrameDecoder(
        const AString &componentName,
        const sp<MetaData> &trackMeta,
        const sp<IMediaSource> &source)
    : FrameDecoder(componentName, trackMeta, source),
      mIsAvcOrHevc(false),
      mSeekMode(MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC),
      mTargetTimeUs(-1LL),
      mNumFrames(0),
      mNumFramesDecoded(0) {
}

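// Validates the requested seek mode, seeks the source to the requested time (or to the
// track's thumbnail time if frameTimeUs < 0), and builds the codec input format from
// the track metadata. For non-CLOSEST seeks a single input/output buffer is requested,
// since only one sync frame needs to be decoded.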
sp<AMessage> VideoFrameDecoder::onGetFormatAndSeekOptions(
        int64_t frameTimeUs, size_t numFrames, int seekMode, MediaSource::ReadOptions *options) {
    mSeekMode = static_cast<MediaSource::ReadOptions::SeekMode>(seekMode);
    if (mSeekMode < MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC ||
            mSeekMode > MediaSource::ReadOptions::SEEK_FRAME_INDEX) {
        ALOGE("Unknown seek mode: %d", mSeekMode);
        return NULL;
    }
    mNumFrames = numFrames;

    const char *mime;
    if (!trackMeta()->findCString(kKeyMIMEType, &mime)) {
        ALOGE("Could not find mime type");
        return NULL;
    }

    mIsAvcOrHevc = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)
            || !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);

    if (frameTimeUs < 0) {
        int64_t thumbNailTime = -1ll;
        if (!trackMeta()->findInt64(kKeyThumbnailTime, &thumbNailTime)
                || thumbNailTime < 0) {
            thumbNailTime = 0;
        }
        options->setSeekTo(thumbNailTime, mSeekMode);
    } else {
        options->setSeekTo(frameTimeUs, mSeekMode);
    }

    sp<AMessage> videoFormat;
    if (convertMetaDataToMessage(trackMeta(), &videoFormat) != OK) {
        ALOGE("b/23680780");
        ALOGW("Failed to convert meta data to message");
        return NULL;
    }

    // TODO: Use Flexible color instead
    videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);

    // For the thumbnail extraction case, try to allocate a single buffer on both the
    // input and output ports if seeking to a sync frame. NOTE: this request may fail
    // if the component requires more than that for decoding.
    bool isSeekingClosest = (mSeekMode == MediaSource::ReadOptions::SEEK_CLOSEST)
            || (mSeekMode == MediaSource::ReadOptions::SEEK_FRAME_INDEX);
    if (!isSeekingClosest) {
        videoFormat->setInt32("android._num-input-buffers", 1);
        videoFormat->setInt32("android._num-output-buffers", 1);
    }
    return videoFormat;
}

status_t VideoFrameDecoder::onInputReceived(
        const sp<MediaCodecBuffer> &codecBuffer,
        MetaDataBase &sampleMeta, bool firstSample, uint32_t *flags) {
    bool isSeekingClosest = (mSeekMode == MediaSource::ReadOptions::SEEK_CLOSEST)
            || (mSeekMode == MediaSource::ReadOptions::SEEK_FRAME_INDEX);

    if (firstSample && isSeekingClosest) {
        sampleMeta.findInt64(kKeyTargetTime, &mTargetTimeUs);
        ALOGV("Seeking closest: targetTimeUs=%lld", (long long)mTargetTimeUs);
    }

    if (mIsAvcOrHevc && !isSeekingClosest
            && IsIDR(codecBuffer->data(), codecBuffer->size())) {
        // Only need to decode one IDR frame, unless we're seeking with CLOSEST
        // option, in which case we need to actually decode to targetTimeUs.
        *flags |= MediaCodec::BUFFER_FLAG_EOS;
    }
    return OK;
}

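// Skips frames that precede the CLOSEST-seek target time; otherwise allocates an
// output VideoFrame and color-converts the decoded buffer (honoring the codec's crop
// rect and reported color aspects) into the requested RGB format.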
status_t VideoFrameDecoder::onOutputReceived(
        const sp<MediaCodecBuffer> &videoFrameBuffer,
        const sp<AMessage> &outputFormat,
        int64_t timeUs, bool *done) {
    bool shouldOutput = (mTargetTimeUs < 0LL) || (timeUs >= mTargetTimeUs);

    // If this is not the target frame, skip the color conversion.
    if (!shouldOutput) {
        *done = false;
        return OK;
    }

    *done = (++mNumFramesDecoded >= mNumFrames);

    if (outputFormat == NULL) {
        return ERROR_MALFORMED;
    }

    int32_t width, height, stride, srcFormat;
    if (!outputFormat->findInt32("width", &width) ||
            !outputFormat->findInt32("height", &height) ||
            !outputFormat->findInt32("stride", &stride) ||
            !outputFormat->findInt32("color-format", &srcFormat)) {
        ALOGE("format missing dimension or color: %s",
                outputFormat->debugString().c_str());
        return ERROR_MALFORMED;
    }

    int32_t crop_left, crop_top, crop_right, crop_bottom;
    if (!outputFormat->findRect("crop", &crop_left, &crop_top, &crop_right, &crop_bottom)) {
        crop_left = crop_top = 0;
        crop_right = width - 1;
        crop_bottom = height - 1;
    }

    sp<IMemory> frameMem = allocVideoFrame(
            trackMeta(),
            (crop_right - crop_left + 1),
            (crop_bottom - crop_top + 1),
            0,
            0,
            dstBpp());
    addFrame(frameMem);
    VideoFrame* frame = static_cast<VideoFrame*>(frameMem->pointer());

    ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat());

    uint32_t standard, range, transfer;
    if (!outputFormat->findInt32("color-standard", (int32_t*)&standard)) {
        standard = 0;
    }
    if (!outputFormat->findInt32("color-range", (int32_t*)&range)) {
        range = 0;
    }
    if (!outputFormat->findInt32("color-transfer", (int32_t*)&transfer)) {
        transfer = 0;
    }
    converter.setSrcColorSpace(standard, range, transfer);

    if (converter.isValid()) {
        converter.convert(
                (const uint8_t *)videoFrameBuffer->data(),
                width, height, stride,
                crop_left, crop_top, crop_right, crop_bottom,
                frame->getFlattenedData(),
                frame->mWidth, frame->mHeight, frame->mRowBytes,
                crop_left, crop_top, crop_right, crop_bottom);
        return OK;
    }

    ALOGE("Unable to convert from format 0x%08x to 0x%08x",
            srcFormat, dstFormat());
    return ERROR_UNSUPPORTED;
}

////////////////////////////////////////////////////////////////////////

ImageDecoder::ImageDecoder(
        const AString &componentName,
        const sp<MetaData> &trackMeta,
        const sp<IMediaSource> &source)
    : FrameDecoder(componentName, trackMeta, source),
      mFrame(NULL),
      mWidth(0),
      mHeight(0),
      mGridRows(1),
      mGridCols(1),
      mTileWidth(0),
      mTileHeight(0),
      mTilesDecoded(0),
      mTargetTiles(0) {
}

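// For frameTimeUs < 0, decode the stand-alone thumbnail by overriding the track meta
// with the thumbnail's dimensions and HVCC. Otherwise decode the full image; if it is
// a tiled HEIF, override the meta with the tile size so the codec is configured per
// tile, and remember the grid geometry for reassembly in onOutputReceived().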
sp<AMessage> ImageDecoder::onGetFormatAndSeekOptions(
        int64_t frameTimeUs, size_t /*numFrames*/,
        int /*seekMode*/, MediaSource::ReadOptions *options) {
    sp<MetaData> overrideMeta;
    if (frameTimeUs < 0) {
        uint32_t type;
        const void *data;
        size_t size;

        // if we have a stand-alone thumbnail, set up the override meta,
        // and set seekTo time to -1.
        if (!findThumbnailInfo(trackMeta(), &mWidth, &mHeight, &type, &data, &size)) {
            ALOGE("Thumbnail not available");
            return NULL;
        }
        overrideMeta = new MetaData(*(trackMeta()));
        overrideMeta->remove(kKeyDisplayWidth);
        overrideMeta->remove(kKeyDisplayHeight);
        overrideMeta->setInt32(kKeyWidth, mWidth);
        overrideMeta->setInt32(kKeyHeight, mHeight);
        overrideMeta->setData(kKeyHVCC, type, data, size);
        options->setSeekTo(-1);
    } else {
        CHECK(trackMeta()->findInt32(kKeyWidth, &mWidth));
        CHECK(trackMeta()->findInt32(kKeyHeight, &mHeight));

        options->setSeekTo(frameTimeUs);
    }

    mGridRows = mGridCols = 1;
    if (overrideMeta == NULL) {
        // check if we're dealing with a tiled heif
        int32_t tileWidth, tileHeight, gridRows, gridCols;
        if (findGridInfo(trackMeta(), &tileWidth, &tileHeight, &gridRows, &gridCols)) {
            if (mWidth <= tileWidth * gridCols && mHeight <= tileHeight * gridRows) {
                ALOGV("grid: %dx%d, tile size: %dx%d, picture size: %dx%d",
                        gridCols, gridRows, tileWidth, tileHeight, mWidth, mHeight);

                overrideMeta = new MetaData(*(trackMeta()));
                overrideMeta->setInt32(kKeyWidth, tileWidth);
                overrideMeta->setInt32(kKeyHeight, tileHeight);
                mTileWidth = tileWidth;
                mTileHeight = tileHeight;
                mGridCols = gridCols;
                mGridRows = gridRows;
            } else {
                ALOGW("ignore bad grid: %dx%d, tile size: %dx%d, picture size: %dx%d",
                        gridCols, gridRows, tileWidth, tileHeight, mWidth, mHeight);
            }
        }
        if (overrideMeta == NULL) {
            overrideMeta = trackMeta();
        }
    }
    mTargetTiles = mGridCols * mGridRows;

    sp<AMessage> videoFormat;
    if (convertMetaDataToMessage(overrideMeta, &videoFormat) != OK) {
        ALOGE("b/23680780");
        ALOGW("Failed to convert meta data to message");
        return NULL;
    }

    // TODO: Use Flexible color instead
    videoFormat->setInt32("color-format", OMX_COLOR_FormatYUV420Planar);

    if ((mGridRows == 1) && (mGridCols == 1)) {
        videoFormat->setInt32("android._num-input-buffers", 1);
        videoFormat->setInt32("android._num-output-buffers", 1);
    }
    return videoFormat;
}

status_t ImageDecoder::onExtractRect(FrameRect *rect) {
    // TODO:
    // This callback is for verifying whether we can decode the rect,
    // and if so, set up the internal variables for decoding.
    // Currently, rect decoding is restricted to sequentially decoding one
    // row of tiles at a time. We can't decode arbitrary rects, as the image
    // track doesn't yet support seeking by tiles. So all we do here is to
    // verify the rect against what we expect.
    // When seeking by tile is supported, this code should be updated to
    // set the seek parameters.
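    //
    // For example, with a hypothetical 2048x1536 image stored as a 4x3 grid of
    // 512x512 tiles, callers are expected to request full rows top to bottom:
    //   call 1: {0,    0, 2048,  512}
    //   call 2: {0,  512, 2048, 1024}
    //   call 3: {0, 1024, 2048, 1536}
    // (left, top, right, bottom; right == mWidth and bottom is clamped to mHeight).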
    if (rect == NULL) {
        if (mTilesDecoded > 0) {
            return ERROR_UNSUPPORTED;
        }
        mTargetTiles = mGridRows * mGridCols;
        return OK;
    }

    if (mTileWidth <= 0 || mTileHeight <= 0) {
        return ERROR_UNSUPPORTED;
    }

    int32_t row = mTilesDecoded / mGridCols;
    int32_t expectedTop = row * mTileHeight;
    int32_t expectedBot = (row + 1) * mTileHeight;
    if (expectedBot > mHeight) {
        expectedBot = mHeight;
    }
    if (rect->left != 0 || rect->top != expectedTop
            || rect->right != mWidth || rect->bottom != expectedBot) {
        ALOGE("currently only support sequential decoding of slices");
        return ERROR_UNSUPPORTED;
    }

    // advance one row
    mTargetTiles = mTilesDecoded + mGridCols;
    return OK;
}

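// Color-converts each decoded tile into its slot of the full-size output frame
// (allocated on the first tile), cropping tiles that overhang the right and bottom
// edges of the image.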
status_t ImageDecoder::onOutputReceived(
        const sp<MediaCodecBuffer> &videoFrameBuffer,
        const sp<AMessage> &outputFormat, int64_t /*timeUs*/, bool *done) {
    if (outputFormat == NULL) {
        return ERROR_MALFORMED;
    }

    int32_t width, height, stride;
    CHECK(outputFormat->findInt32("width", &width));
    CHECK(outputFormat->findInt32("height", &height));
    CHECK(outputFormat->findInt32("stride", &stride));

    if (mFrame == NULL) {
        sp<IMemory> frameMem = allocVideoFrame(
                trackMeta(), mWidth, mHeight, mTileWidth, mTileHeight, dstBpp());
        mFrame = static_cast<VideoFrame*>(frameMem->pointer());

        addFrame(frameMem);
    }

    int32_t srcFormat;
    CHECK(outputFormat->findInt32("color-format", &srcFormat));

    ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat());

    uint32_t standard, range, transfer;
    if (!outputFormat->findInt32("color-standard", (int32_t*)&standard)) {
        standard = 0;
    }
    if (!outputFormat->findInt32("color-range", (int32_t*)&range)) {
        range = 0;
    }
    if (!outputFormat->findInt32("color-transfer", (int32_t*)&transfer)) {
        transfer = 0;
    }
    converter.setSrcColorSpace(standard, range, transfer);

    int32_t dstLeft, dstTop, dstRight, dstBottom;
    dstLeft = mTilesDecoded % mGridCols * width;
    dstTop = mTilesDecoded / mGridCols * height;
    dstRight = dstLeft + width - 1;
    dstBottom = dstTop + height - 1;

    int32_t crop_left, crop_top, crop_right, crop_bottom;
    if (!outputFormat->findRect("crop", &crop_left, &crop_top, &crop_right, &crop_bottom)) {
        crop_left = crop_top = 0;
        crop_right = width - 1;
        crop_bottom = height - 1;
    }

    // apply crop on bottom-right
    // TODO: need to move this into the color converter itself.
    if (dstRight >= mWidth) {
        crop_right = mWidth - dstLeft - 1;
        dstRight = dstLeft + crop_right;
    }
    if (dstBottom >= mHeight) {
        crop_bottom = mHeight - dstTop - 1;
        dstBottom = dstTop + crop_bottom;
    }

    *done = (++mTilesDecoded >= mTargetTiles);

    if (converter.isValid()) {
        converter.convert(
                (const uint8_t *)videoFrameBuffer->data(),
                width, height, stride,
                crop_left, crop_top, crop_right, crop_bottom,
                mFrame->getFlattenedData(),
                mFrame->mWidth, mFrame->mHeight, mFrame->mRowBytes,
                dstLeft, dstTop, dstRight, dstBottom);
        return OK;
    }

    ALOGE("Unable to convert from format 0x%08x to 0x%08x",
            srcFormat, dstFormat());
    return ERROR_UNSUPPORTED;
}

}  // namespace android