/*
 * Copyright 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#include <utils/Log.h>

#include "GoldfishAVCDec.h"

#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaDefs.h>
#include <OMX_VideoExt.h>
#include <inttypes.h>

#include <nativebase/nativebase.h>

#include <android/hardware/graphics/allocator/3.0/IAllocator.h>
#include <android/hardware/graphics/mapper/3.0/IMapper.h>
#include <hidl/LegacySupport.h>

using ::android::hardware::graphics::common::V1_2::PixelFormat;
using ::android::hardware::graphics::common::V1_0::BufferUsage;

namespace android {

#define componentName                   "video_decoder.avc"
#define codingType                      OMX_VIDEO_CodingAVC
#define CODEC_MIME_TYPE                 MEDIA_MIMETYPE_VIDEO_AVC

/** Function and structure definitions to keep code similar for each codec */
#define ivdec_api_function              ih264d_api_function
#define ivdext_create_ip_t              ih264d_create_ip_t
#define ivdext_create_op_t              ih264d_create_op_t
#define ivdext_delete_ip_t              ih264d_delete_ip_t
#define ivdext_delete_op_t              ih264d_delete_op_t
#define ivdext_ctl_set_num_cores_ip_t   ih264d_ctl_set_num_cores_ip_t
#define ivdext_ctl_set_num_cores_op_t   ih264d_ctl_set_num_cores_op_t

#define IVDEXT_CMD_CTL_SET_NUM_CORES    \
        (IVD_CONTROL_API_COMMAND_TYPE_T)IH264D_CMD_CTL_SET_NUM_CORES

static const CodecProfileLevel kProfileLevels[] = {
    { OMX_VIDEO_AVCProfileConstrainedBaseline, OMX_VIDEO_AVCLevel52 },
    { OMX_VIDEO_AVCProfileBaseline,            OMX_VIDEO_AVCLevel52 },
    { OMX_VIDEO_AVCProfileMain,                OMX_VIDEO_AVCLevel52 },
    { OMX_VIDEO_AVCProfileConstrainedHigh,     OMX_VIDEO_AVCLevel52 },
    { OMX_VIDEO_AVCProfileHigh,                OMX_VIDEO_AVCLevel52 },
};

GoldfishAVCDec::GoldfishAVCDec(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component, RenderMode renderMode)
    : GoldfishVideoDecoderOMXComponent(
            name, componentName, codingType,
            kProfileLevels, ARRAY_SIZE(kProfileLevels),
            320 /* width */, 240 /* height */, callbacks,
            appData, component),
      mOmxColorFormat(OMX_COLOR_FormatYUV420Planar),
      mChangingResolution(false),
      mSignalledError(false),
      mInputOffset(0), mRenderMode(renderMode) {
    initPorts(
            1 /* numMinInputBuffers */, kNumBuffers, INPUT_BUF_SIZE,
            1 /* numMinOutputBuffers */, kNumBuffers, CODEC_MIME_TYPE);

    mTimeStart = mTimeEnd = systemTime();
    // If input dump is enabled, then create an empty file
    GENERATE_FILE_NAMES();
    CREATE_DUMP_FILE(mInFile);
    ALOGD("created %s %d object %p", __func__, __LINE__, this);
}

GoldfishAVCDec::~GoldfishAVCDec() {
    CHECK_EQ(deInitDecoder(), (status_t)OK);
    ALOGD("destroyed %s %d object %p", __func__, __LINE__, this);
}

void GoldfishAVCDec::logVersion() {
    // TODO: get emulation decoder implementation version from the host.
    ALOGV("GoldfishAVC decoder version 1.0");
}

status_t GoldfishAVCDec::resetPlugin() {
    mIsInFlush = false;
    mReceivedEOS = false;

    /* Initialize both start and end times */
    mTimeStart = mTimeEnd = systemTime();

    return OK;
}

status_t GoldfishAVCDec::resetDecoder() {
    if (mContext) {
        // The resolution may have changed, so our safest bet is to just destroy
        // the current context and recreate another one, with the new width and
        // height.
        mContext->destroyH264Context();
        mContext.reset(nullptr);
    }
    return OK;
}

status_t GoldfishAVCDec::setFlushMode() {
    /* Set the decoder in Flush mode, subsequent decode() calls will flush */
    mIsInFlush = true;
    mContext->flush();
    return OK;
}

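// Create the host-side H.264 decoder context through the MediaH264Decoder
// proxy. The initial width/height are the current port settings; they are
// updated later once the decoded bitstream reports the real resolution.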
status_t GoldfishAVCDec::initDecoder() {
    /* Initialize the decoder */
    mContext.reset(new MediaH264Decoder(mRenderMode));
    mContext->initH264Context(mWidth,
                              mHeight,
                              mWidth,
                              mHeight,
                              MediaH264Decoder::PixelFormat::YUV420P);

    /* Reset the plugin state */
    resetPlugin();

    /* Get codec version */
    logVersion();

    return OK;
}

status_t GoldfishAVCDec::deInitDecoder() {
    if (mContext) {
        mContext->destroyH264Context();
        mContext.reset();
    }

    mChangingResolution = false;

    return OK;
}

void GoldfishAVCDec::onReset() {
    GoldfishVideoDecoderOMXComponent::onReset();

    mSignalledError = false;
    mInputOffset = 0;
    resetDecoder();
    resetPlugin();
}

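// Extract the colour description (primaries, transfer, matrix, range) that the
// host attached to the decoded image and, if it differs from what was
// previously signalled, propagate the change via handleColorAspectsChange().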
bool GoldfishAVCDec::getVUIParams(h264_image_t& img) {
    int32_t primaries = img.color_primaries;
    bool fullRange = img.color_range == 2 ? true : false;
    int32_t transfer = img.color_trc;
    int32_t coeffs = img.colorspace;

    ColorAspects colorAspects;
    ColorUtils::convertIsoColorAspectsToCodecAspects(
            primaries, transfer, coeffs, fullRange, colorAspects);

    ALOGD("img pts %lld, primaries %d, range %d transfer %d colorspace %d", (long long)img.pts,
            (int)img.color_primaries, (int)img.color_range, (int)img.color_trc, (int)img.colorspace);

    // Update color aspects if necessary.
    if (colorAspectsDiffer(colorAspects, mBitstreamColorAspects)) {
        mBitstreamColorAspects = colorAspects;
        status_t err = handleColorAspectsChange();
        CHECK(err == OK);
    }
    return true;
}

bool GoldfishAVCDec::setDecodeArgs(
        OMX_BUFFERHEADERTYPE *inHeader,
        OMX_BUFFERHEADERTYPE *outHeader) {
    size_t sizeY = outputBufferWidth() * outputBufferHeight();
    size_t sizeUV = sizeY / 4;

    /* When in flush and after EOS with zero byte input,
     * inHeader is set to zero. Hence check for non-null */
    if (inHeader) {
        mConsumedBytes = inHeader->nFilledLen - mInputOffset;
        mInPBuffer = inHeader->pBuffer + inHeader->nOffset + mInputOffset;
        ALOGD("got input timestamp %lld in-addr-base %p real-data-offset %d inputoffset %d",
                (long long)(inHeader->nTimeStamp),
                inHeader->pBuffer, (int)(inHeader->nOffset + mInputOffset), (int)mInputOffset);
    } else {
        mConsumedBytes = 0;
        mInPBuffer = nullptr;
    }

    if (outHeader) {
        if (outHeader->nAllocLen < sizeY + (sizeUV * 2)) {
            ALOGE("outHeader->nAllocLen %d < needed size %d",
                    (int)outHeader->nAllocLen, (int)(sizeY + sizeUV * 2));
            android_errorWriteLog(0x534e4554, "27833616");
            return false;
        }
        mOutHeaderBuf = outHeader->pBuffer;
    } else {
        // We flush out on the host side
        mOutHeaderBuf = nullptr;
    }

    return true;
}

void GoldfishAVCDec::readAndDiscardAllHostBuffers() {
    while (mContext) {
        h264_image_t img = mContext->getImage();
        if (img.data != nullptr) {
            ALOGD("img pts %lld is discarded", (long long)img.pts);
        } else {
            return;
        }
    }
}

void GoldfishAVCDec::onPortFlushCompleted(OMX_U32 portIndex) {
    /* Once the output buffers are flushed, ignore any buffers that are held in decoder */
    if (kOutputPortIndex == portIndex) {
        setFlushMode();
        ALOGD("%s %d", __func__, __LINE__);
        readAndDiscardAllHostBuffers();
        mContext->resetH264Context(mWidth, mHeight, mWidth, mHeight, MediaH264Decoder::PixelFormat::YUV420P);
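        // The host context was just reset, so it has lost the codec config
        // (SPS/PPS). Replay the cached config buffers (mCsd0/mCsd1) so that
        // decoding can resume without waiting for new codec-config input.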
        if (!mCsd0.empty() && !mCsd1.empty()) {
            mContext->decodeFrame(&(mCsd0[0]), mCsd0.size(), 0);
            mContext->getImage();
            mContext->decodeFrame(&(mCsd1[0]), mCsd1.size(), 0);
            mContext->getImage();
        }
        resetPlugin();
    } else {
        mInputOffset = 0;
    }
}

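// Copy a decoded YUV420 planar image into the output buffer when the output
// buffer dimensions (outputBufferWidth/Height) differ from the decoded frame
// size: each plane (Y, then U, then V) is copied row by row at the output
// stride.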
void GoldfishAVCDec::copyImageData(OMX_BUFFERHEADERTYPE *outHeader, h264_image_t& img) {
    int myStride = outputBufferWidth();
    for (int i = 0; i < mHeight; ++i) {
        memcpy(outHeader->pBuffer + i * myStride, img.data + i * mWidth, mWidth);
    }
    int Y = myStride * outputBufferHeight();
    for (int i = 0; i < mHeight / 2; ++i) {
        memcpy(outHeader->pBuffer + Y + i * myStride / 2,
               img.data + mWidth * mHeight + i * mWidth / 2, mWidth / 2);
    }
    int UV = Y / 4;
    for (int i = 0; i < mHeight / 2; ++i) {
        memcpy(outHeader->pBuffer + Y + UV + i * myStride / 2,
               img.data + mWidth * mHeight * 5 / 4 + i * mWidth / 2, mWidth / 2);
    }
}

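// Look up the host color buffer handle that was registered for this OMX
// buffer header via kUseAndroidNativeBufferIndex; returns -1 if the header
// has no associated native buffer.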
int GoldfishAVCDec::getHostColorBufferId(void* header) {
    if (mNWBuffers.find(header) == mNWBuffers.end()) {
        ALOGD("cannot find color buffer for header %p", header);
        return -1;
    }
    sp<ANativeWindowBuffer> nBuf = mNWBuffers[header];
    cb_handle_t *handle = (cb_handle_t*)nBuf->handle;
    ALOGD("found color buffer for header %p --> %d", header, handle->hostHandle);
    return handle->hostHandle;
}

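// Main work loop: for each available output buffer, feed any pending input to
// the host decoder, fetch a decoded image (either reading the pixels back or
// having the host render into the registered color buffer), and handle flush,
// EOS and resolution changes along the way.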
void GoldfishAVCDec::onQueueFilled(OMX_U32 portIndex) {
    static int count1 = 0;
    ALOGD("calling %s count %d object %p", __func__, ++count1, this);
    UNUSED(portIndex);
    OMX_BUFFERHEADERTYPE *inHeader = NULL;
    BufferInfo *inInfo = NULL;

    if (mSignalledError) {
        return;
    }
    if (mOutputPortSettingsChange != NONE) {
        return;
    }

    if (mContext == nullptr) {
        if (OK != initDecoder()) {
            ALOGE("Failed to initialize decoder");
            notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
            mSignalledError = true;
            return;
        }
    }

    List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);

    int count2 = 0;
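    // One output buffer is consumed per loop iteration; the current input
    // buffer is held across iterations until it is (almost) fully consumed or
    // we run out of output buffers.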
    while (!outQueue.empty()) {
        ALOGD("calling %s in while loop count %d", __func__, ++count2);
        BufferInfo *outInfo;
        OMX_BUFFERHEADERTYPE *outHeader;

        if (!mIsInFlush && (NULL == inHeader)) {
            if (!inQueue.empty()) {
                inInfo = *inQueue.begin();
                inHeader = inInfo->mHeader;
                if (inHeader == NULL) {
                    inQueue.erase(inQueue.begin());
                    inInfo->mOwnedByUs = false;
                    continue;
                }
            } else {
                break;
            }
        }

        outInfo = *outQueue.begin();
        outHeader = outInfo->mHeader;
        outHeader->nFlags = 0;
        outHeader->nTimeStamp = 0;
        outHeader->nOffset = 0;

        if (inHeader != NULL) {
            if (inHeader->nFilledLen == 0) {
                // An empty buffer can be an end of stream (EOS) buffer, so
                // we'll set the decoder in flush mode if so. If it's not EOS,
                // then just release the buffer.
                inQueue.erase(inQueue.begin());
                inInfo->mOwnedByUs = false;
                notifyEmptyBufferDone(inHeader);

                if (!(inHeader->nFlags & OMX_BUFFERFLAG_EOS)) {
                    return;
                }

                mReceivedEOS = true;
                inHeader = NULL;
                setFlushMode();
            } else if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
                mReceivedEOS = true;
            }
        }

        {
            nsecs_t timeDelay, timeTaken;

            if (!setDecodeArgs(inHeader, outHeader)) {
                ALOGE("Decoder arg setup failed");
                notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
                mSignalledError = true;
                return;
            }

            mTimeStart = systemTime();
            /* Compute time elapsed from the end of the previous decode()
             * to the start of the current decode() */
            timeDelay = mTimeStart - mTimeEnd;

            // TODO: We also need to send the timestamp
            h264_result_t h264Res = {(int)MediaH264Decoder::Err::NoErr, 0};
            if (inHeader != nullptr) {
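                // Cache the codec-config buffers (first SPS, then PPS) so they
                // can be replayed after the host context is reset in
                // onPortFlushCompleted().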
                if (inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) {
                    unsigned long mysize = (inHeader->nFilledLen - mInputOffset);
                    uint8_t* mydata = mInPBuffer;
                    if (mCsd0.empty()) {
                        mCsd0.assign(mydata, mydata + mysize);
                    } else if (mCsd1.empty()) {
                        mCsd1.assign(mydata, mydata + mysize);
                    }
                }
                ALOGD("Decoding frame(sz=%lu)", (unsigned long)(inHeader->nFilledLen - mInputOffset));
                h264Res = mContext->decodeFrame(mInPBuffer,
                                                inHeader->nFilledLen - mInputOffset,
                                                inHeader->nTimeStamp);
                mConsumedBytes = h264Res.bytesProcessed;
                if (h264Res.ret == (int)MediaH264Decoder::Err::DecoderRestarted) {
                    mChangingResolution = true;
                }
            } else {
                ALOGD("No more input data. Attempting to get a decoded frame, if any.");
            }
            h264_image_t img = {};

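            // In RENDER_BY_HOST_GPU mode the host renders straight into the
            // registered color buffer and only returns image metadata, so the
            // guest-side pixel copy below is skipped (readBackPixels = false).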
            bool readBackPixels = true;
            if (mRenderMode == RenderMode::RENDER_BY_GUEST_CPU) {
                img = mContext->getImage();
            } else {
                int hostColorBufferId = getHostColorBufferId(outHeader);
                if (hostColorBufferId >= 0) {
                    img = mContext->renderOnHostAndReturnImageMetadata(hostColorBufferId);
                    readBackPixels = false;
                } else {
                    img = mContext->getImage();
                }
            }

            if (img.data != nullptr) {
                getVUIParams(img);
            }

            mTimeEnd = systemTime();
            /* Compute time taken for decode() */
            timeTaken = mTimeEnd - mTimeStart;

            if (inHeader) {
                ALOGD("input time stamp %lld flag %d",
                        (long long)inHeader->nTimeStamp, (int)(inHeader->nFlags));
            }

            // If the decoder is in the changing resolution mode and there is no output present,
            // that means the switching is done and it's ready to reset the decoder and the plugin.
            if (mChangingResolution && img.data == nullptr) {
                mChangingResolution = false;
                ALOGD("re-create decoder because resolution changed");
                bool portWillReset = false;
                handlePortSettingsChange(&portWillReset, img.width, img.height);
                ALOGD("handling port reset");
                ALOGD("port resetting (img.width=%u, img.height=%u, mWidth=%u, mHeight=%u)",
                      img.width, img.height, mWidth, mHeight);
                //resetDecoder();
                resetPlugin();

                //mContext->destroyH264Context();
                //mContext.reset(new MediaH264Decoder());
                mContext->resetH264Context(mWidth,
                                           mHeight,
                                           mWidth,
                                           mHeight,
                                           MediaH264Decoder::PixelFormat::YUV420P);
                //mInputOffset += mConsumedBytes;
                return;
            }

            if (img.data != nullptr) {
                int myWidth = img.width;
                int myHeight = img.height;
                if (myWidth != mWidth || myHeight != mHeight) {
                    bool portWillReset = false;
                    handlePortSettingsChange(&portWillReset, myWidth, myHeight);
                    resetPlugin();
                    mWidth = myWidth;
                    mHeight = myHeight;
                    if (portWillReset) {
                        ALOGD("port will reset return now");
                        return;
                    } else {
                        ALOGD("port will NOT reset keep going now");
                    }
                }
                outHeader->nFilledLen = (outputBufferWidth() * outputBufferHeight() * 3) / 2;
                if (readBackPixels) {
                    if (outputBufferWidth() == mWidth && outputBufferHeight() == mHeight) {
                        memcpy(outHeader->pBuffer, img.data, outHeader->nFilledLen);
                    } else {
                        copyImageData(outHeader, img);
                    }
                }

                outHeader->nTimeStamp = img.pts;
                ALOGD("got output timestamp %lld", (long long)(img.pts));

                outInfo->mOwnedByUs = false;
                outQueue.erase(outQueue.begin());
                outInfo = NULL;
                notifyFillBufferDone(outHeader);
                outHeader = NULL;
            } else if (mIsInFlush) {
                ALOGD("no img.data and decoder is in flush mode");
                /* If in flush mode and no output is returned by the codec,
                 * then come out of flush mode */
                mIsInFlush = false;

                /* If EOS was received on the input port and there is no output
                 * from the codec, then signal EOS on the output port */
                if (mReceivedEOS) {
                    ALOGD("received EOS, re-create host context");
                    outHeader->nFilledLen = 0;
                    outHeader->nFlags |= OMX_BUFFERFLAG_EOS;

                    outInfo->mOwnedByUs = false;
                    outQueue.erase(outQueue.begin());
                    outInfo = NULL;
                    notifyFillBufferDone(outHeader);
                    outHeader = NULL;
                    resetPlugin();

                    //mContext->destroyH264Context();
                    //mContext.reset(new MediaH264Decoder());
                    mContext->resetH264Context(mWidth,
                                               mHeight,
                                               mWidth,
                                               mHeight,
                                               MediaH264Decoder::PixelFormat::YUV420P);
                }
            }
            mInputOffset += mConsumedBytes;
        }

        // If more than 4 bytes remain in the input buffer, do not release it yet.
        if (inHeader != NULL && ((inHeader->nFilledLen - mInputOffset) <= 4)) {
            inInfo->mOwnedByUs = false;
            inQueue.erase(inQueue.begin());
            inInfo = NULL;
            notifyEmptyBufferDone(inHeader);
            inHeader = NULL;
            mInputOffset = 0;

            /* If input EOS is seen and the decoder is not in flush mode,
             * set the decoder in flush mode.
             * There can be a case where EOS is sent along with the last picture data.
             * In that case, the decoder must be put in flush mode only after that
             * input data has been decoded. That case is handled here. */
            if (mReceivedEOS && !mIsInFlush) {
                setFlushMode();
            }
        }
    }
}

OMX_ERRORTYPE GoldfishAVCDec::internalGetParameter(
        OMX_INDEXTYPE index, OMX_PTR params) {
    const int32_t indexFull = index;
    switch (indexFull) {
        case kGetAndroidNativeBufferUsageIndex:
        {
            ALOGD("calling kGetAndroidNativeBufferUsageIndex");
            GetAndroidNativeBufferUsageParams* nativeBuffersUsage =
                    (GetAndroidNativeBufferUsageParams *) params;
            nativeBuffersUsage->nUsage = (unsigned int)(BufferUsage::GPU_DATA_BUFFER);
            return OMX_ErrorNone;
        }

        default:
            return GoldfishVideoDecoderOMXComponent::internalGetParameter(index, params);
    }
}

OMX_ERRORTYPE GoldfishAVCDec::internalSetParameter(
        OMX_INDEXTYPE index, const OMX_PTR params) {
    // Include extension index OMX_INDEXEXTTYPE.
    const int32_t indexFull = index;

    switch (indexFull) {
        case kEnableAndroidNativeBuffersIndex:
        {
            ALOGD("calling kEnableAndroidNativeBuffersIndex");
            EnableAndroidNativeBuffersParams* enableNativeBuffers =
                    (EnableAndroidNativeBuffersParams *) params;
            if (enableNativeBuffers) {
                mEnableAndroidNativeBuffers = enableNativeBuffers->enable;
                if (mEnableAndroidNativeBuffers == false) {
                    mNWBuffers.clear();
                    ALOGD("disabled kEnableAndroidNativeBuffersIndex");
                } else {
                    ALOGD("enabled kEnableAndroidNativeBuffersIndex");
                }
            }
            return OMX_ErrorNone;
        }

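        // Register the caller's ANativeWindowBuffer for this buffer header so
        // that getHostColorBufferId() can later resolve the gralloc host
        // handle and the host can render directly into the buffer.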
        case kUseAndroidNativeBufferIndex:
        {
            if (mEnableAndroidNativeBuffers == false) {
                ALOGE("Error: Android native buffers are not enabled");
                return OMX_ErrorBadParameter;
            }
            UseAndroidNativeBufferParams *use_buffer_params = (UseAndroidNativeBufferParams *)params;
            if (use_buffer_params) {
                sp<ANativeWindowBuffer> nBuf = use_buffer_params->nativeBuffer;
                cb_handle_t *handle = (cb_handle_t*)nBuf->handle;
                void* dst = NULL;
                ALOGD("kUseAndroidNativeBufferIndex with handle %p host color handle %d calling usebuffer",
                      handle, handle->hostHandle);
                useBufferCallerLockedAlready(use_buffer_params->bufferHeader,
                        use_buffer_params->nPortIndex,
                        use_buffer_params->pAppPrivate, handle->allocatedSize(), (OMX_U8*)dst);
                mNWBuffers[*(use_buffer_params->bufferHeader)] = use_buffer_params->nativeBuffer;
            }
            return OMX_ErrorNone;
        }

        default:
            return GoldfishVideoDecoderOMXComponent::internalSetParameter(index, params);
    }
}

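// The Android native buffer extensions are only exposed when the component was
// created in RENDER_BY_HOST_GPU mode; otherwise the request falls through to
// the base class.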
OMX_ERRORTYPE GoldfishAVCDec::getExtensionIndex(
        const char *name, OMX_INDEXTYPE *index) {

    if (mRenderMode == RenderMode::RENDER_BY_HOST_GPU) {
        if (!strcmp(name, "OMX.google.android.index.enableAndroidNativeBuffers")) {
            ALOGD("calling getExtensionIndex for enable ANB");
            *(int32_t*)index = kEnableAndroidNativeBuffersIndex;
            return OMX_ErrorNone;
        } else if (!strcmp(name, "OMX.google.android.index.useAndroidNativeBuffer")) {
            *(int32_t*)index = kUseAndroidNativeBufferIndex;
            return OMX_ErrorNone;
        } else if (!strcmp(name, "OMX.google.android.index.getAndroidNativeBufferUsage")) {
            *(int32_t*)index = kGetAndroidNativeBufferUsageIndex;
            return OMX_ErrorNone;
        }
    }
    return GoldfishVideoDecoderOMXComponent::getExtensionIndex(name, index);
}

int GoldfishAVCDec::getColorAspectPreference() {
    return kPreferBitstream;
}

}  // namespace android

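// Component factory entry point: components whose name starts with
// "OMX.android.goldfish" get the host-GPU render path; everything else decodes
// and reads the pixels back on the guest CPU.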
android::GoldfishOMXComponent *createGoldfishOMXComponent(
        const char *name, const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData,
        OMX_COMPONENTTYPE **component) {
    if (!strncmp("OMX.android.goldfish", name, 20)) {
        return new android::GoldfishAVCDec(name, callbacks, appData, component,
                                           RenderMode::RENDER_BY_HOST_GPU);
    } else {
        return new android::GoldfishAVCDec(name, callbacks, appData, component,
                                           RenderMode::RENDER_BY_GUEST_CPU);
    }
}