/*
 * Copyright 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <utils/Log.h>

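// DDD() is verbose debug logging; it compiles to a no-op unless DEBUG is set to 1.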
#define DEBUG  0
#if DEBUG
#  define  DDD(...)    ALOGD(__VA_ARGS__)
#else
#  define  DDD(...)    ((void)0)
#endif

#include "GoldfishAVCDec.h"

#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaDefs.h>
#include <OMX_VideoExt.h>
#include <inttypes.h>

#include <nativebase/nativebase.h>

#include <android/hardware/graphics/allocator/3.0/IAllocator.h>
#include <android/hardware/graphics/mapper/3.0/IMapper.h>
#include <hidl/LegacySupport.h>

using ::android::hardware::graphics::common::V1_2::PixelFormat;
using ::android::hardware::graphics::common::V1_0::BufferUsage;

namespace android {

#define componentName                   "video_decoder.avc"
#define codingType                      OMX_VIDEO_CodingAVC
#define CODEC_MIME_TYPE                 MEDIA_MIMETYPE_VIDEO_AVC

/** Function and structure definitions to keep code similar for each codec */
#define ivdec_api_function              ih264d_api_function
#define ivdext_create_ip_t              ih264d_create_ip_t
#define ivdext_create_op_t              ih264d_create_op_t
#define ivdext_delete_ip_t              ih264d_delete_ip_t
#define ivdext_delete_op_t              ih264d_delete_op_t
#define ivdext_ctl_set_num_cores_ip_t   ih264d_ctl_set_num_cores_ip_t
#define ivdext_ctl_set_num_cores_op_t   ih264d_ctl_set_num_cores_op_t

#define IVDEXT_CMD_CTL_SET_NUM_CORES    \
        (IVD_CONTROL_API_COMMAND_TYPE_T)IH264D_CMD_CTL_SET_NUM_CORES

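// Profile/level pairs advertised to the OMX framework; every supported profile
// is exposed up to Level 5.2.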
static const CodecProfileLevel kProfileLevels[] = {
    { OMX_VIDEO_AVCProfileConstrainedBaseline, OMX_VIDEO_AVCLevel52 },
    { OMX_VIDEO_AVCProfileBaseline,            OMX_VIDEO_AVCLevel52 },
    { OMX_VIDEO_AVCProfileMain,                OMX_VIDEO_AVCLevel52 },
    { OMX_VIDEO_AVCProfileConstrainedHigh,     OMX_VIDEO_AVCLevel52 },
    { OMX_VIDEO_AVCProfileHigh,                OMX_VIDEO_AVCLevel52 },
};

GoldfishAVCDec::GoldfishAVCDec(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component, RenderMode renderMode)
    : GoldfishVideoDecoderOMXComponent(
            name, componentName, codingType,
            kProfileLevels, ARRAY_SIZE(kProfileLevels),
            320 /* width */, 240 /* height */, callbacks,
            appData, component),
      mOmxColorFormat(OMX_COLOR_FormatYUV420Planar),
      mChangingResolution(false),
      mSignalledError(false),
      mInputOffset(0), mRenderMode(renderMode) {
    initPorts(
            1 /* numMinInputBuffers */, kNumBuffers, INPUT_BUF_SIZE,
            1 /* numMinOutputBuffers */, kNumBuffers, CODEC_MIME_TYPE);

    mTimeStart = mTimeEnd = systemTime();

    // If input dump is enabled, open/create an empty dump file.
    GENERATE_FILE_NAMES();
    CREATE_DUMP_FILE(mInFile);
    ALOGI("created %s %d object %p", __func__, __LINE__, this);
}

GoldfishAVCDec::~GoldfishAVCDec() {
    CHECK_EQ(deInitDecoder(), (status_t)OK);
    DDD("destroyed %s %d object %p", __func__, __LINE__, this);
}

void GoldfishAVCDec::logVersion() {
    // TODO: get emulation decoder implementation version from the host.
    ALOGI("GoldfishAVC decoder version 1.0");
}

status_t GoldfishAVCDec::resetPlugin() {
    mIsInFlush = false;
    mReceivedEOS = false;

    /* Initialize both start and end times */
    mTimeStart = mTimeEnd = systemTime();

    return OK;
}

status_t GoldfishAVCDec::resetDecoder() {
    if (mContext) {
        // The resolution may have changed, so our safest bet is to just destroy
        // the current context and recreate another one, with the new width and
        // height.
        mContext->destroyH264Context();
        mContext.reset(nullptr);
    }
    return OK;
}

status_t GoldfishAVCDec::setFlushMode() {
    /* Set the decoder in Flush mode, subsequent decode() calls will flush */
    mIsInFlush = true;
    mContext->flush();
    return OK;
}

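// Create the host-side H.264 decoder context. When Android native buffers are
// not enabled, fall back to guest-CPU rendering so decoded frames are read back
// into the OMX output buffers.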
status_t GoldfishAVCDec::initDecoder() {
    /* Initialize the decoder */
    if (!mEnableAndroidNativeBuffers) {
        mRenderMode = RenderMode::RENDER_BY_GUEST_CPU;
    }
    mContext.reset(new MediaH264Decoder(mRenderMode));
    mContext->initH264Context(mWidth,
                              mHeight,
                              mWidth,
                              mHeight,
                              MediaH264Decoder::PixelFormat::YUV420P);

    /* Reset the plugin state */
    resetPlugin();

    /* Get codec version */
    logVersion();

    return OK;
}

status_t GoldfishAVCDec::deInitDecoder() {
    if (mContext) {
        mContext->destroyH264Context();
        mContext.reset();
    }

    mChangingResolution = false;

    return OK;
}

void GoldfishAVCDec::onReset() {
    GoldfishVideoDecoderOMXComponent::onReset();

    mSignalledError = false;
    mInputOffset = 0;
    resetDecoder();
    resetPlugin();
}

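// Extract the VUI color description (primaries, transfer, matrix coefficients,
// full/limited range) from a decoded image and notify the framework whenever
// the bitstream color aspects change.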
bool GoldfishAVCDec::getVUIParams(h264_image_t& img) {
    int32_t primaries = img.color_primaries;
    bool fullRange = (img.color_range == 2);
    int32_t transfer = img.color_trc;
    int32_t coeffs = img.colorspace;

    ColorAspects colorAspects;
    ColorUtils::convertIsoColorAspectsToCodecAspects(
            primaries, transfer, coeffs, fullRange, colorAspects);

    DDD("img pts %lld, primaries %d, range %d transfer %d colorspace %d", (long long)img.pts,
            (int)img.color_primaries, (int)img.color_range, (int)img.color_trc, (int)img.colorspace);

    // Update color aspects if necessary.
    if (colorAspectsDiffer(colorAspects, mBitstreamColorAspects)) {
        mBitstreamColorAspects = colorAspects;
        status_t err = handleColorAspectsChange();
        CHECK(err == OK);
    }
    return true;
}

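// Capture the input/output pointers for the next decode call. The output buffer
// must be able to hold one full YUV420 frame: a Y plane plus two quarter-size
// chroma planes.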
bool GoldfishAVCDec::setDecodeArgs(
        OMX_BUFFERHEADERTYPE *inHeader,
        OMX_BUFFERHEADERTYPE *outHeader) {
    size_t sizeY = outputBufferWidth() * outputBufferHeight();
    size_t sizeUV = sizeY / 4;

    /* When in flush and after EOS with zero byte input,
     * inHeader is set to zero. Hence check for non-null */
    if (inHeader) {
        mConsumedBytes = inHeader->nFilledLen - mInputOffset;
        mInPBuffer = inHeader->pBuffer + inHeader->nOffset + mInputOffset;
        DDD("got input timestamp %lld in-addr-base %p real-data-offset %d inputoffset %d",
                (long long)(inHeader->nTimeStamp), inHeader->pBuffer,
                (int)(inHeader->nOffset + mInputOffset), (int)mInputOffset);
    } else {
        mConsumedBytes = 0;
        mInPBuffer = nullptr;
    }

    if (outHeader) {
        if (outHeader->nAllocLen < sizeY + (sizeUV * 2)) {
            ALOGE("outHeader->nAllocLen %d < needed size %d",
                    (int)outHeader->nAllocLen, (int)(sizeY + sizeUV * 2));
            android_errorWriteLog(0x534e4554, "27833616");
            return false;
        }
        mOutHeaderBuf = outHeader->pBuffer;
    } else {
        // We flush out on the host side
        mOutHeaderBuf = nullptr;
    }

    return true;
}

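// Drain and drop any decoded frames the host decoder is still holding; used
// while flushing so stale output is not returned afterwards.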
void GoldfishAVCDec::readAndDiscardAllHostBuffers() {
    while (mContext) {
        h264_image_t img = mContext->getImage();
        if (img.data != nullptr) {
            DDD("img pts %lld is discarded", (long long)img.pts);
        } else {
            return;
        }
    }
}

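// On an output-port flush, discard everything the host still buffers, reset the
// host context, and replay the cached codec config data (csd-0/csd-1) so
// decoding can resume right away.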
void GoldfishAVCDec::onPortFlushCompleted(OMX_U32 portIndex) {
    /* Once the output buffers are flushed, ignore any buffers that are held in decoder */
    if (kOutputPortIndex == portIndex) {
        setFlushMode();
        DDD("%s %d", __func__, __LINE__);
        readAndDiscardAllHostBuffers();
        mContext->resetH264Context(mWidth, mHeight, mWidth, mHeight, MediaH264Decoder::PixelFormat::YUV420P);
        if (!mCsd0.empty() && !mCsd1.empty()) {
            mContext->decodeFrame(&(mCsd0[0]), mCsd0.size(), 0);
            mContext->getImage();
            mContext->decodeFrame(&(mCsd1[0]), mCsd1.size(), 0);
            mContext->getImage();
        }
        resetPlugin();
    } else {
        mInputOffset = 0;
    }
}

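// Copy a decoded YUV420 planar frame into the output buffer line by line; used
// when the output buffer stride differs from the decoded frame width.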
void GoldfishAVCDec::copyImageData(OMX_BUFFERHEADERTYPE *outHeader, h264_image_t& img) {
    int myStride = outputBufferWidth();
    for (int i = 0; i < mHeight; ++i) {
        memcpy(outHeader->pBuffer + i * myStride, img.data + i * mWidth, mWidth);
    }
    int Y = myStride * outputBufferHeight();
    for (int i = 0; i < mHeight / 2; ++i) {
        memcpy(outHeader->pBuffer + Y + i * myStride / 2,
               img.data + mWidth * mHeight + i * mWidth / 2, mWidth / 2);
    }
    int UV = Y / 4;
    for (int i = 0; i < mHeight / 2; ++i) {
        memcpy(outHeader->pBuffer + Y + UV + i * myStride / 2,
               img.data + mWidth * mHeight * 5 / 4 + i * mWidth / 2, mWidth / 2);
    }
}

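// Look up the host color-buffer handle backing an OMX output header; returns -1
// when the header was never registered via kUseAndroidNativeBufferIndex.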
int GoldfishAVCDec::getHostColorBufferId(void* header) {
    if (mNWBuffers.find(header) == mNWBuffers.end()) {
        DDD("cannot find color buffer for header %p", header);
        return -1;
    }
    sp<ANativeWindowBuffer> nBuf = mNWBuffers[header];
    cb_handle_t *handle = (cb_handle_t*)nBuf->handle;
    DDD("found color buffer for header %p --> %d", header, handle->hostHandle);
    return handle->hostHandle;
}

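// Main decode loop: feed queued input buffers to the host decoder and fill
// queued output buffers with decoded frames, handling codec config data, EOS,
// flush, and resolution changes along the way.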
void GoldfishAVCDec::onQueueFilled(OMX_U32 portIndex) {
    static int count1 = 0;
    DDD("calling %s count %d object %p", __func__, ++count1, this);
    UNUSED(portIndex);
    OMX_BUFFERHEADERTYPE *inHeader = NULL;
    BufferInfo *inInfo = NULL;

    if (mSignalledError) {
        return;
    }
    if (mOutputPortSettingsChange != NONE) {
        return;
    }

    if (mContext == nullptr) {
        if (OK != initDecoder()) {
            ALOGE("Failed to initialize decoder");
            notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
            mSignalledError = true;
            return;
        }
    }

    List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);

    int count2 = 0;
    while (!outQueue.empty()) {
        DDD("calling %s in while loop count %d", __func__, ++count2);
        BufferInfo *outInfo;
        OMX_BUFFERHEADERTYPE *outHeader;

        if (!mIsInFlush && (NULL == inHeader)) {
            if (!inQueue.empty()) {
                inInfo = *inQueue.begin();
                inHeader = inInfo->mHeader;
                if (inHeader == NULL) {
                    inQueue.erase(inQueue.begin());
                    inInfo->mOwnedByUs = false;
                    continue;
                }
            } else {
                break;
            }
        }

        outInfo = *outQueue.begin();
        outHeader = outInfo->mHeader;
        outHeader->nFlags = 0;
        outHeader->nTimeStamp = 0;
        outHeader->nOffset = 0;

        if (inHeader != NULL) {
            if (inHeader->nFilledLen == 0) {
                // An empty buffer can be an end of stream (EOS) buffer, so
                // we'll set the decoder in flush mode if so. If it's not EOS,
                // then just release the buffer.
                inQueue.erase(inQueue.begin());
                inInfo->mOwnedByUs = false;
                notifyEmptyBufferDone(inHeader);

                if (!(inHeader->nFlags & OMX_BUFFERFLAG_EOS)) {
                    return;
                }

                mReceivedEOS = true;
                inHeader = NULL;
                setFlushMode();
            } else if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
                mReceivedEOS = true;
            }
        }

        {
            if (!setDecodeArgs(inHeader, outHeader)) {
                ALOGE("Decoder arg setup failed");
                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
                mSignalledError = true;
                return;
            }

            // TODO: We also need to send the timestamp
            h264_result_t h264Res = {(int)MediaH264Decoder::Err::NoErr, 0};
            if (inHeader != nullptr) {
                if (inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) {
                    unsigned long mysize = (inHeader->nFilledLen - mInputOffset);
                    uint8_t* mydata = mInPBuffer;
                    if (mCsd0.empty()) {
                        mCsd0.assign(mydata, mydata + mysize);
                    } else if (mCsd1.empty()) {
                        mCsd1.assign(mydata, mydata + mysize);
                    }
                }
                DDD("Decoding frame(sz=%lu)", (unsigned long)(inHeader->nFilledLen - mInputOffset));
                h264Res = mContext->decodeFrame(mInPBuffer,
                                                inHeader->nFilledLen - mInputOffset,
                                                inHeader->nTimeStamp);
                mConsumedBytes = h264Res.bytesProcessed;
                if (h264Res.ret == (int)MediaH264Decoder::Err::DecoderRestarted) {
                    mChangingResolution = true;
                }
            } else {
                DDD("No more input data. Attempting to get a decoded frame, if any.");
            }
            h264_image_t img = {};

            bool readBackPixels = true;
            if (mRenderMode == RenderMode::RENDER_BY_GUEST_CPU) {
                img = mContext->getImage();
            } else {
                int hostColorBufferId = getHostColorBufferId(outHeader);
                if (hostColorBufferId >= 0) {
                    img = mContext->renderOnHostAndReturnImageMetadata(hostColorBufferId);
                    readBackPixels = false;
                } else {
                    img = mContext->getImage();
                }
            }

            if (img.data != nullptr) {
                getVUIParams(img);
            }

            if (inHeader) {
                DDD("input time stamp %lld flag %d",
                        (long long)inHeader->nTimeStamp, (int)(inHeader->nFlags));
            }

            // If the decoder is in the changing resolution mode and there is no output present,
            // that means the switching is done and it's ready to reset the decoder and the plugin.
            if (mChangingResolution && img.data == nullptr) {
                mChangingResolution = false;
                DDD("re-create decoder because resolution changed");
                bool portWillReset = false;
                handlePortSettingsChange(&portWillReset, img.width, img.height);
                DDD("handling port reset");
                DDD("port resetting (img.width=%u, img.height=%u, mWidth=%u, mHeight=%u)",
                        img.width, img.height, mWidth, mHeight);
                //resetDecoder();
                resetPlugin();

                //mContext->destroyH264Context();
                //mContext.reset(new MediaH264Decoder());
                mContext->resetH264Context(mWidth,
                                           mHeight,
                                           mWidth,
                                           mHeight,
                                           MediaH264Decoder::PixelFormat::YUV420P);
                //mInputOffset += mConsumedBytes;
                return;
            }

            if (img.data != nullptr) {
                int myWidth = img.width;
                int myHeight = img.height;
                if (myWidth != mWidth || myHeight != mHeight) {
                    bool portWillReset = false;
                    handlePortSettingsChange(&portWillReset, myWidth, myHeight);
                    resetPlugin();
                    mWidth = myWidth;
                    mHeight = myHeight;
                    if (portWillReset) {
                        DDD("port will reset return now");
                        return;
                    } else {
                        DDD("port will NOT reset keep going now");
                    }
                }
                outHeader->nFilledLen = (outputBufferWidth() * outputBufferHeight() * 3) / 2;
                if (readBackPixels) {
                    if (outputBufferWidth() == mWidth && outputBufferHeight() == mHeight) {
                        memcpy(outHeader->pBuffer, img.data, outHeader->nFilledLen);
                    } else {
                        copyImageData(outHeader, img);
                    }
                }

                outHeader->nTimeStamp = img.pts;
                DDD("got output timestamp %lld", (long long)(img.pts));

                outInfo->mOwnedByUs = false;
                outQueue.erase(outQueue.begin());
                outInfo = NULL;
                notifyFillBufferDone(outHeader);
                outHeader = NULL;
            } else if (mIsInFlush) {
                DDD("no img.data and decoder is in flush mode");
                /* If in flush mode and no output is returned by the codec,
                 * then come out of flush mode */
                mIsInFlush = false;

                /* If EOS was received on the input port and there is no output
                 * from the codec, then signal EOS on the output port */
                if (mReceivedEOS) {
                    ALOGI("received EOS, re-create host context");
                    outHeader->nFilledLen = 0;
                    outHeader->nFlags |= OMX_BUFFERFLAG_EOS;

                    outInfo->mOwnedByUs = false;
                    outQueue.erase(outQueue.begin());
                    outInfo = NULL;
                    notifyFillBufferDone(outHeader);
                    outHeader = NULL;
                    resetPlugin();

                    //mContext->destroyH264Context();
                    //mContext.reset(new MediaH264Decoder());
                    mContext->resetH264Context(mWidth,
                                               mHeight,
                                               mWidth,
                                               mHeight,
                                               MediaH264Decoder::PixelFormat::YUV420P);
                }
            }
            mInputOffset += mConsumedBytes;
        }

        // Release the input buffer if 4 or fewer bytes remain; otherwise hold
        // on to it so the remaining data can be consumed on the next pass.
        if (inHeader != NULL && ((inHeader->nFilledLen - mInputOffset) <= 4)) {
            inInfo->mOwnedByUs = false;
            inQueue.erase(inQueue.begin());
            inInfo = NULL;
            notifyEmptyBufferDone(inHeader);
            inHeader = NULL;
            mInputOffset = 0;

            /* If input EOS is seen and the decoder is not in flush mode,
             * set the decoder in flush mode.
             * There can be a case where EOS is sent along with the last picture data.
             * In that case, only after decoding that input data should the decoder
             * be put in flush. That case is handled here. */
            if (mReceivedEOS && !mIsInFlush) {
                setFlushMode();
            }
        }
    }
}

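// OMX parameter handling for the Android native-buffer (ANB) extensions used
// when the host GPU renders decoded frames directly into gralloc buffers.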
OMX_ERRORTYPE GoldfishAVCDec::internalGetParameter(
        OMX_INDEXTYPE index, OMX_PTR params) {
    const int32_t indexFull = index;
    switch (indexFull) {
        case kGetAndroidNativeBufferUsageIndex:
        {
            DDD("calling kGetAndroidNativeBufferUsageIndex");
            GetAndroidNativeBufferUsageParams* nativeBuffersUsage =
                    (GetAndroidNativeBufferUsageParams *) params;
            nativeBuffersUsage->nUsage = (unsigned int)(BufferUsage::GPU_DATA_BUFFER);
            return OMX_ErrorNone;
        }

        default:
            return GoldfishVideoDecoderOMXComponent::internalGetParameter(index, params);
    }
}

OMX_ERRORTYPE GoldfishAVCDec::internalSetParameter(
        OMX_INDEXTYPE index, const OMX_PTR params) {
    // Include extension index OMX_INDEXEXTTYPE.
    const int32_t indexFull = index;

    switch (indexFull) {
        case kEnableAndroidNativeBuffersIndex:
        {
            DDD("calling kEnableAndroidNativeBuffersIndex");
            EnableAndroidNativeBuffersParams* enableNativeBuffers =
                    (EnableAndroidNativeBuffersParams *) params;
            if (enableNativeBuffers) {
                mEnableAndroidNativeBuffers = enableNativeBuffers->enable;
                if (mEnableAndroidNativeBuffers == false) {
                    mNWBuffers.clear();
                    DDD("disabled kEnableAndroidNativeBuffersIndex");
                } else {
                    DDD("enabled kEnableAndroidNativeBuffersIndex");
                }
            }
            return OMX_ErrorNone;
        }

        case kUseAndroidNativeBufferIndex:
        {
            if (mEnableAndroidNativeBuffers == false) {
                ALOGE("Error: Android native buffers are not enabled");
                return OMX_ErrorBadParameter;
            }
            UseAndroidNativeBufferParams *use_buffer_params =
                    (UseAndroidNativeBufferParams *)params;
            if (use_buffer_params) {
                sp<ANativeWindowBuffer> nBuf = use_buffer_params->nativeBuffer;
                cb_handle_t *handle = (cb_handle_t*)nBuf->handle;
                void* dst = NULL;
                DDD("kUseAndroidNativeBufferIndex with handle %p host color handle %d calling usebuffer",
                        handle, handle->hostHandle);
                useBufferCallerLockedAlready(use_buffer_params->bufferHeader,
                        use_buffer_params->nPortIndex,
                        use_buffer_params->pAppPrivate,
                        handle->allocatedSize(), (OMX_U8*)dst);
                mNWBuffers[*(use_buffer_params->bufferHeader)] = use_buffer_params->nativeBuffer;
            }
            return OMX_ErrorNone;
        }

        default:
            return GoldfishVideoDecoderOMXComponent::internalSetParameter(index, params);
    }
}

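// The ANB extension indices are only exposed when the host GPU does the
// rendering; in guest-CPU mode everything falls through to the base class.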
OMX_ERRORTYPE GoldfishAVCDec::getExtensionIndex(
        const char *name, OMX_INDEXTYPE *index) {

    if (mRenderMode == RenderMode::RENDER_BY_HOST_GPU) {
        if (!strcmp(name, "OMX.google.android.index.enableAndroidNativeBuffers")) {
            DDD("calling getExtensionIndex for enable ANB");
            *(int32_t*)index = kEnableAndroidNativeBuffersIndex;
            return OMX_ErrorNone;
        } else if (!strcmp(name, "OMX.google.android.index.useAndroidNativeBuffer")) {
            *(int32_t*)index = kUseAndroidNativeBufferIndex;
            return OMX_ErrorNone;
        } else if (!strcmp(name, "OMX.google.android.index.getAndroidNativeBufferUsage")) {
            *(int32_t*)index = kGetAndroidNativeBufferUsageIndex;
            return OMX_ErrorNone;
        }
    }
    return GoldfishVideoDecoderOMXComponent::getExtensionIndex(name, index);
}

int GoldfishAVCDec::getColorAspectPreference() {
    return kPreferBitstream;
}

}  // namespace android

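// Factory entry point used by the OMX plugin: goldfish-prefixed component names
// get host-GPU rendering, anything else falls back to guest-CPU readback.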
android::GoldfishOMXComponent *createGoldfishOMXComponent(
        const char *name, const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData,
        OMX_COMPONENTTYPE **component) {
    if (!strncmp("OMX.android.goldfish", name, 20)) {
        return new android::GoldfishAVCDec(name, callbacks, appData, component,
                                           RenderMode::RENDER_BY_HOST_GPU);
    } else {
        return new android::GoldfishAVCDec(name, callbacks, appData, component,
                                           RenderMode::RENDER_BY_GUEST_CPU);
    }
}