1 /*
2 * Copyright (c) 2009-2011 Intel Corporation.  All rights reserved.
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16 
17 #include "VideoDecoderBase.h"
18 #include "VideoDecoderTrace.h"
19 #include <string.h>
20 #include <va/va_android.h>
21 #include <va/va_tpi.h>
22 #ifdef  __SSE4_1__
23 #include "use_util_sse4.h"
24 #endif
25 
26 #define INVALID_PTS ((uint64_t)-1)
27 #define MAXIMUM_POC  0x7FFFFFFF
28 #define MINIMUM_POC  0x80000000 // most negative value when stored in the int32_t POC fields
29 #define ANDROID_DISPLAY_HANDLE 0x18C34078 // magic value written into the native Display handle passed to vaGetDisplay
30 
31 VideoDecoderBase::VideoDecoderBase(const char *mimeType, _vbp_parser_type type)
32     : mInitialized(false),
33       mLowDelay(false),
34       mDisplay(NULL),
35       mVADisplay(NULL),
36       mVAContext(VA_INVALID_ID),
37       mVAConfig(VA_INVALID_ID),
38       mVAStarted(false),
39       mCurrentPTS(INVALID_PTS),
40       mAcquiredBuffer(NULL),
41       mLastReference(NULL),
42       mForwardReference(NULL),
43       mDecodingFrame(false),
44       mSizeChanged(false),
45       mShowFrame(true),
46       mOutputWindowSize(OUTPUT_WINDOW_SIZE),
47       mRotationDegrees(0),
48       mErrReportEnabled(false),
49       mWiDiOn(false),
50       mRawOutput(false),
51       mManageReference(true),
52       mOutputMethod(OUTPUT_BY_PCT),
53       mNumSurfaces(0),
54       mSurfaceBuffers(NULL),
55       mOutputHead(NULL),
56       mOutputTail(NULL),
57       mSurfaces(NULL),
58       mVASurfaceAttrib(NULL),
59       mSurfaceUserPtr(NULL),
60       mSurfaceAcquirePos(0),
61       mNextOutputPOC(MINIMUM_POC),
62       mParserType(type),
63       mParserHandle(NULL),
64       mSignalBufferSize(0) {
65 
66     memset(&mVideoFormatInfo, 0, sizeof(VideoFormatInfo));
67     memset(&mConfigBuffer, 0, sizeof(mConfigBuffer));
68     for (int i = 0; i < MAX_GRAPHIC_BUFFER_NUM; i++) {
69          mSignalBufferPre[i] = NULL;
70     }
71     pthread_mutex_init(&mLock, NULL);
72     mVideoFormatInfo.mimeType = strdup(mimeType);
73     mUseGEN = false;
74     mLibHandle = NULL;
75     mParserOpen = NULL;
76     mParserClose = NULL;
77     mParserParse = NULL;
78     mParserQuery = NULL;
79     mParserFlush = NULL;
80     mParserUpdate = NULL;
81 }
82 
83 VideoDecoderBase::~VideoDecoderBase() {
84     pthread_mutex_destroy(&mLock);
85     stop();
86     free(mVideoFormatInfo.mimeType);
87 }
88 
89 Decode_Status VideoDecoderBase::start(VideoConfigBuffer *buffer) {
90     if (buffer == NULL) {
91         return DECODE_INVALID_DATA;
92     }
93 
94     if (mParserHandle != NULL) {
95         WTRACE("Decoder has already started.");
96         return DECODE_SUCCESS;
97     }
98     mLibHandle = dlopen("libmixvbp.so", RTLD_NOW);
99     if (mLibHandle == NULL) {
100        return DECODE_NO_PARSER;
101     }
102     mParserOpen = (OpenFunc)dlsym(mLibHandle, "vbp_open");
103     mParserClose = (CloseFunc)dlsym(mLibHandle, "vbp_close");
104     mParserParse = (ParseFunc)dlsym(mLibHandle, "vbp_parse");
105     mParserQuery = (QueryFunc)dlsym(mLibHandle, "vbp_query");
106     mParserFlush = (FlushFunc)dlsym(mLibHandle, "vbp_flush");
107     if (mParserOpen == NULL || mParserClose == NULL || mParserParse == NULL
108         || mParserQuery == NULL || mParserFlush == NULL) {
109         return DECODE_NO_PARSER;
110     }
111 #if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING)
112     mParserUpdate = (UpdateFunc)dlsym(mLibHandle, "vbp_update");
113     if (mParserUpdate == NULL) {
114         return DECODE_NO_PARSER;
115     }
116 #endif
117     if ((int32_t)mParserType != VBP_INVALID) {
118         ITRACE("mParserType = %d", mParserType);
119         if (mParserOpen(mParserType, &mParserHandle) != VBP_OK) {
120             ETRACE("Failed to open VBP parser.");
121             return DECODE_NO_PARSER;
122         }
123     }
124     // keep a copy of the configure buffer (metadata only); it can be used to override VA setup parameters.
125     mConfigBuffer = *buffer;
126     mConfigBuffer.data = NULL;
127     mConfigBuffer.size = 0;
128 
129     mVideoFormatInfo.width = buffer->width;
130     mVideoFormatInfo.height = buffer->height;
131     if (buffer->flag & USE_NATIVE_GRAPHIC_BUFFER) {
132         mVideoFormatInfo.surfaceWidth = buffer->graphicBufferWidth;
133         mVideoFormatInfo.surfaceHeight = buffer->graphicBufferHeight;
134     }
135     mLowDelay = buffer->flag & WANT_LOW_DELAY;
136     mRawOutput = buffer->flag & WANT_RAW_OUTPUT;
137     if (mRawOutput) {
138         WTRACE("Output is raw data.");
139     }
140 
141     return DECODE_SUCCESS;
142 }
143 
144 
145 Decode_Status VideoDecoderBase::reset(VideoConfigBuffer *buffer) {
146     if (buffer == NULL) {
147         return DECODE_INVALID_DATA;
148     }
149 
150     // if VA is already started, terminate VA as graphic buffers are reallocated by omxcodec
151     terminateVA();
152 
153     // reset mConfigBuffer so it can be passed to startVA.
154     mConfigBuffer = *buffer;
155     mConfigBuffer.data = NULL;
156     mConfigBuffer.size = 0;
157 
158     mVideoFormatInfo.width = buffer->width;
159     mVideoFormatInfo.height = buffer->height;
160     if (buffer->flag & USE_NATIVE_GRAPHIC_BUFFER) {
161         mVideoFormatInfo.surfaceWidth = buffer->graphicBufferWidth;
162         mVideoFormatInfo.surfaceHeight = buffer->graphicBufferHeight;
163     }
164     mVideoFormatInfo.actualBufferNeeded = mConfigBuffer.surfaceNumber;
165     mLowDelay = buffer->flag & WANT_LOW_DELAY;
166     mRawOutput = buffer->flag & WANT_RAW_OUTPUT;
167     if (mRawOutput) {
168         WTRACE("Output is raw data.");
169     }
170     return DECODE_SUCCESS;
171 }
172 
173 
174 
175 void VideoDecoderBase::stop(void) {
176     terminateVA();
177 
178     mCurrentPTS = INVALID_PTS;
179     mAcquiredBuffer = NULL;
180     mLastReference = NULL;
181     mForwardReference = NULL;
182     mDecodingFrame = false;
183     mSizeChanged = false;
184 
185     // private variables
186     mLowDelay = false;
187     mRawOutput = false;
188     mNumSurfaces = 0;
189     mSurfaceAcquirePos = 0;
190     mNextOutputPOC = MINIMUM_POC;
191     mVideoFormatInfo.valid = false;
192     if (mParserHandle){
193         mParserClose(mParserHandle);
194         mParserHandle = NULL;
195     }
196     if (mLibHandle) {
197         dlclose(mLibHandle);
198         mLibHandle = NULL;
199     }
200 }
201 
202 void VideoDecoderBase::flush(void) {
203     if (mVAStarted == false) {
204         // nothing to flush at this stage
205         return;
206     }
207 
208     endDecodingFrame(true);
209 
210     VideoSurfaceBuffer *p = mOutputHead;
211     // check if there is a buffer with the DRC (resolution change) flag in the output queue
212     while (p) {
213         if (p->renderBuffer.flag & IS_RESOLUTION_CHANGE) {
214             mSizeChanged = true;
215             break;
216         }
217         p = p->next;
218     }
219     // avoid setting mSurfaceAcquirePos  to 0 as it may cause tearing
220     // (surface is still being rendered)
221     mSurfaceAcquirePos = (mSurfaceAcquirePos  + 1) % mNumSurfaces;
222     mNextOutputPOC = MINIMUM_POC;
223     mCurrentPTS = INVALID_PTS;
224     mAcquiredBuffer = NULL;
225     mLastReference = NULL;
226     mForwardReference = NULL;
227     mOutputHead = NULL;
228     mOutputTail = NULL;
229     mDecodingFrame = false;
230 
231     // flush vbp parser
232     if (mParserHandle && (mParserFlush(mParserHandle) != VBP_OK)) {
233         WTRACE("Failed to flush parser. Continue");
234     }
235 
236     // initialize surface buffer without resetting mapped/raw data
237     initSurfaceBuffer(false);
238 
239 }
240 
241 void VideoDecoderBase::freeSurfaceBuffers(void) {
242     if (mVAStarted == false) {
243         // no surface buffers to free at this stage
244         return;
245     }
246 
247     pthread_mutex_lock(&mLock);
248 
249     endDecodingFrame(true);
250 
251     // if VA is already started, terminate VA as graphic buffers are reallocated by omxcodec
252     terminateVA();
253 
254     pthread_mutex_unlock(&mLock);
255 }
256 
257 const VideoFormatInfo* VideoDecoderBase::getFormatInfo(void) {
258     return &mVideoFormatInfo;
259 }
260 
261 const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining, VideoErrorBuffer *outErrBuf) {
262     VAStatus vaStatus;
263     if (mVAStarted == false) {
264         return NULL;
265     }
266     bool useGraphicBuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER;
267 
268     if (draining) {
269         // complete decoding the last frame and ignore return
270         endDecodingFrame(false);
271     }
272 
273     if (mOutputHead == NULL) {
274         return NULL;
275     }
276 
277     // output by position (the first buffer)
278     VideoSurfaceBuffer *outputByPos = mOutputHead;
279 
280     if (mLowDelay) {
281         mOutputHead = mOutputHead->next;
282         if (mOutputHead == NULL) {
283             mOutputTail = NULL;
284         }
285         vaStatus = vaSetTimestampForSurface(mVADisplay, outputByPos->renderBuffer.surface, outputByPos->renderBuffer.timeStamp);
286         if (useGraphicBuffer && !mUseGEN) {
287             vaSyncSurface(mVADisplay, outputByPos->renderBuffer.surface);
288             fillDecodingErrors(&(outputByPos->renderBuffer));
289         }
290         if (draining && mOutputTail == NULL) {
291             outputByPos->renderBuffer.flag |= IS_EOS;
292         }
293         drainDecodingErrors(outErrBuf, &(outputByPos->renderBuffer));
294 
295         return &(outputByPos->renderBuffer);
296     }
297 
298     VideoSurfaceBuffer *output = NULL;
299     if (mOutputMethod == OUTPUT_BY_POC) {
300         output = findOutputByPoc(draining);
301     } else if (mOutputMethod == OUTPUT_BY_PCT) {
302         output = findOutputByPct(draining);
303     } else {
304         ETRACE("Invalid output method.");
305         return NULL;
306     }
307 
308     if (output == NULL) {
309         return NULL;
310     }
311 
312     if (output != outputByPos) {
313         // remove this output from middle or end of the list
314         VideoSurfaceBuffer *p = outputByPos;
315         while (p->next != output) {
316             p = p->next;
317         }
318         p->next = output->next;
319         if (mOutputTail == output) {
320             mOutputTail = p;
321         }
322     } else {
323         // remove this output from head of the list
324         mOutputHead = mOutputHead->next;
325         if (mOutputHead == NULL) {
326             mOutputTail = NULL;
327         }
328     }
329     //VTRACE("Output POC %d for display (pts = %.2f)", output->pictureOrder, output->renderBuffer.timeStamp/1E6);
330     vaStatus = vaSetTimestampForSurface(mVADisplay, output->renderBuffer.surface, output->renderBuffer.timeStamp);
331 
332     if (useGraphicBuffer && !mUseGEN) {
333         vaSyncSurface(mVADisplay, output->renderBuffer.surface);
334         fillDecodingErrors(&(output->renderBuffer));
335     }
336 
337     if (draining && mOutputTail == NULL) {
338         output->renderBuffer.flag |= IS_EOS;
339     }
340 
341     drainDecodingErrors(outErrBuf, &(output->renderBuffer));
342 
343     return &(output->renderBuffer);
344 }
345 
346 VideoSurfaceBuffer* VideoDecoderBase::findOutputByPts() {
347     // output by presentation time stamp - buffer with the smallest time stamp is output
348     VideoSurfaceBuffer *p = mOutputHead;
349     VideoSurfaceBuffer *outputByPts = NULL;
350     uint64_t pts = INVALID_PTS;
351     do {
352         if ((uint64_t)(p->renderBuffer.timeStamp) <= pts) {
353             // find buffer with the smallest PTS
354             pts = p->renderBuffer.timeStamp;
355             outputByPts = p;
356         }
357         p = p->next;
358     } while (p != NULL);
359 
360     return outputByPts;
361 }
362 
363 VideoSurfaceBuffer* VideoDecoderBase::findOutputByPct(bool draining) {
364     // output by picture coding type (PCT):
365     // if there is more than one reference frame, the first reference frame is output;
366     // otherwise, output a non-reference frame if there is any.
367 
368     VideoSurfaceBuffer *p = mOutputHead;
369     VideoSurfaceBuffer *outputByPct = NULL;
370     int32_t reference = 0;
371     do {
372         if (p->referenceFrame) {
373             reference++;
374             if (reference > 1) {
375                 // mOutputHead must be a reference frame
376                 outputByPct = mOutputHead;
377                 break;
378             }
379         } else {
380             // first non-reference frame
381             outputByPct = p;
382             break;
383         }
384         p = p->next;
385     } while (p != NULL);
386 
387     if (outputByPct == NULL && draining) {
388         outputByPct = mOutputHead;
389     }
390     return  outputByPct;
391 }
392 
393 #if 0
394 VideoSurfaceBuffer* VideoDecoderBase::findOutputByPoc(bool draining) {
395     // output by picture order count (POC)
396     // Output criteria:
397     // if there is IDR frame (POC == 0), all the frames before IDR must be output;
398     // Otherwise, if draining flag is set or list is full, frame with the least POC is output;
399     // Otherwise, NOTHING is output
400 
401     int32_t dpbFullness = 0;
402     for (int32_t i = 0; i < mNumSurfaces; i++) {
403         // count num of reference frames
404         if (mSurfaceBuffers[i].asReferernce) {
405             dpbFullness++;
406         }
407     }
408 
409     if (mAcquiredBuffer && mAcquiredBuffer->asReferernce) {
410         // frame is being decoded and is not ready for output yet
411         dpbFullness--;
412     }
413 
414     VideoSurfaceBuffer *p = mOutputHead;
415     while (p != NULL) {
416         // count dpbFullness with non-reference frame in the output queue
417         if (p->asReferernce == false) {
418             dpbFullness++;
419         }
420         p = p->next;
421     }
422 
423 Retry:
424     p = mOutputHead;
425     VideoSurfaceBuffer *outputByPoc = NULL;
426     int32_t count = 0;
427     int32_t poc = MAXIMUM_POC;
428 
429     do {
430         if (p->pictureOrder == 0) {
431             // output picture with the least POC before IDR
432             if (outputByPoc != NULL) {
433                 mNextOutputPOC = outputByPoc->pictureOrder + 1;
434                 return outputByPoc;
435             } else {
436                 mNextOutputPOC = MINIMUM_POC;
437             }
438         }
439 
440         // POC of  the output candidate must not be less than mNextOutputPOC
441         if (p->pictureOrder < mNextOutputPOC) {
442             break;
443         }
444 
445         if (p->pictureOrder < poc) {
446             // update the least POC.
447             poc = p->pictureOrder;
448             outputByPoc = p;
449         }
450         count++;
451         p = p->next;
452     } while (p != NULL && count < mOutputWindowSize);
453 
454     if (draining == false && dpbFullness < mOutputWindowSize) {
455         // list is not  full and we are not  in draining state
456         // if DPB is already full, one frame must be output
457         return NULL;
458     }
459 
460     if (outputByPoc == NULL) {
461         mNextOutputPOC = MINIMUM_POC;
462         goto Retry;
463     }
464 
465     // for debugging purpose
466     if (outputByPoc->pictureOrder != 0 && outputByPoc->pictureOrder < mNextOutputPOC) {
467         ETRACE("Output POC is not incremental, expected %d, actual %d", mNextOutputPOC, outputByPoc->pictureOrder);
468         //gaps_in_frame_num_value_allowed_flag is not currently supported
469     }
470 
471     mNextOutputPOC = outputByPoc->pictureOrder + 1;
472 
473     return outputByPoc;
474 }
475 #else
476 VideoSurfaceBuffer* VideoDecoderBase::findOutputByPoc(bool draining) {
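    // Output by picture order count (POC). Criteria (same as the disabled variant above):
    // frames queued before a new IDR (POC == 0) are output first; otherwise the frame with
    // the least POC not below mNextOutputPOC is output once that POC is found or the output
    // window is full; when draining, fall back to the least-POC frame in the queue.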
477     VideoSurfaceBuffer *output = NULL;
478     VideoSurfaceBuffer *p = mOutputHead;
479     int32_t count = 0;
480     int32_t poc = MAXIMUM_POC;
481     VideoSurfaceBuffer *outputleastpoc = mOutputHead;
482     do {
483         count++;
484         if (p->pictureOrder == 0) {
485             // any picture before this POC (new IDR) must be output
486             if (output == NULL) {
487                 mNextOutputPOC = MINIMUM_POC;
488                 // looking for any POC with negative value
489             } else {
490                 mNextOutputPOC = output->pictureOrder + 1;
491                 break;
492             }
493         }
494         if (p->pictureOrder < poc && p->pictureOrder >= mNextOutputPOC) {
495             // this POC meets the output criteria.
496             poc = p->pictureOrder;
497             output = p;
498             outputleastpoc = p;
499         }
500         if (poc == mNextOutputPOC || count == mOutputWindowSize) {
501             if (output != NULL) {
502                 // this indicates two cases:
503                 // 1) the next output POC is found.
504                 // 2) output queue is full and there is at least one buffer meeting the output criteria.
505                 mNextOutputPOC = output->pictureOrder + 1;
506                 break;
507             } else {
508                 // this indicates output queue is full and no buffer in the queue meets the output criteria
509                 // restart processing as queue is FULL and output criteria is changed. (next output POC is 0)
510                 mNextOutputPOC = MINIMUM_POC;
511                 count = 0;
512                 poc = MAXIMUM_POC;
513                 p = mOutputHead;
514                 continue;
515             }
516         }
517         if (p->next == NULL) {
518             output = NULL;
519         }
520 
521         p = p->next;
522     } while (p != NULL);
523 
524     if (draining == true && output == NULL) {
525         output = outputleastpoc;
526     }
527 
528     return output;
529 }
530 #endif
531 
532 bool VideoDecoderBase::checkBufferAvail(void) {
533     if (!mInitialized) {
534         if ((mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) == 0) {
535             return true;
536         }
537         for (int i = 0; i < MAX_GRAPHIC_BUFFER_NUM; i++) {
538             if (mSignalBufferPre[i] != NULL) {
539                 return true;
540             }
541         }
542         return false;
543     }
544     // check whether there is a buffer available for decoding
545     // TODO: check frame being referenced for frame skipping
546     VideoSurfaceBuffer *buffer = NULL;
547     for (int32_t i = 0; i < mNumSurfaces; i++) {
548         buffer = mSurfaceBuffers + i;
549 
550         if (buffer->asReferernce == false &&
551             buffer->renderBuffer.renderDone == true) {
552             querySurfaceRenderStatus(buffer);
553             if (buffer->renderBuffer.driverRenderDone == true)
554                 return true;
555         }
556      }
557     return false;
558 }
559 
560 Decode_Status VideoDecoderBase::acquireSurfaceBuffer(void) {
561     if (mVAStarted == false) {
562         return DECODE_FAIL;
563     }
564 
565     if (mAcquiredBuffer != NULL) {
566         ETRACE("mAcquiredBuffer is not NULL. Implementation bug.");
567         return DECODE_FAIL;
568     }
569 
570     int nextAcquire = mSurfaceAcquirePos;
571     VideoSurfaceBuffer *acquiredBuffer = NULL;
572     bool acquired = false;
573 
574     while (acquired == false) {
575         acquiredBuffer = mSurfaceBuffers + nextAcquire;
576 
577         querySurfaceRenderStatus(acquiredBuffer);
578 
579         if (acquiredBuffer->asReferernce == false && acquiredBuffer->renderBuffer.renderDone == true && acquiredBuffer->renderBuffer.driverRenderDone == true) {
580             // this is a potential buffer for acquisition; check whether it is referenced by another surface buffer (frame skipping)
581             VideoSurfaceBuffer *temp;
582             acquired = true;
583             for (int i = 0; i < mNumSurfaces; i++) {
584                 if (i == nextAcquire) {
585                     continue;
586                 }
587                 temp = mSurfaceBuffers + i;
588                 // use mSurfaces[nextAcquire] instead of acquiredBuffer->renderBuffer.surface as it's the actual surface to use.
589                 if (temp->renderBuffer.surface == mSurfaces[nextAcquire] &&
590                     temp->renderBuffer.renderDone == false) {
591                     ITRACE("Surface is referenced by other surface buffer.");
592                     acquired = false;
593                     break;
594                 }
595             }
596         }
597         if (acquired) {
598             break;
599         }
600         nextAcquire++;
601         if (nextAcquire == mNumSurfaces) {
602             nextAcquire = 0;
603         }
604         if (nextAcquire == mSurfaceAcquirePos) {
605             return DECODE_NO_SURFACE;
606         }
607     }
608 
609     if (acquired == false) {
610         return DECODE_NO_SURFACE;
611     }
612 
613     mAcquiredBuffer = acquiredBuffer;
614     mSurfaceAcquirePos = nextAcquire;
615 
616     // set the surface again as it may have been reset by a skipped frame.
617     // a skipped frame is a "non-coded frame": the decoder duplicates the previous reference frame as the output.
618     mAcquiredBuffer->renderBuffer.surface = mSurfaces[mSurfaceAcquirePos];
619     if (mSurfaceUserPtr && mAcquiredBuffer->mappedData) {
620         mAcquiredBuffer->mappedData->data = mSurfaceUserPtr[mSurfaceAcquirePos];
621     }
622     mAcquiredBuffer->renderBuffer.timeStamp = INVALID_PTS;
623     mAcquiredBuffer->renderBuffer.display = mVADisplay;
624     mAcquiredBuffer->renderBuffer.flag = 0;
625     mAcquiredBuffer->renderBuffer.renderDone = false;
626     mAcquiredBuffer->asReferernce = false;
627     mAcquiredBuffer->renderBuffer.errBuf.errorNumber = 0;
628     mAcquiredBuffer->renderBuffer.errBuf.timeStamp = INVALID_PTS;
629 
630     return DECODE_SUCCESS;
631 }
632 
633 Decode_Status VideoDecoderBase::outputSurfaceBuffer(void) {
634     Decode_Status status;
635     if (mAcquiredBuffer == NULL) {
636         ETRACE("mAcquiredBuffer is NULL. Implementation bug.");
637         return DECODE_FAIL;
638     }
639 
640     if (mRawOutput) {
641         status = getRawDataFromSurface();
642         CHECK_STATUS();
643     }
644 
645     // the frame is successfully decoded to the current surface; it is ready for output
646     if (mShowFrame) {
647         mAcquiredBuffer->renderBuffer.renderDone = false;
648     } else {
649         mAcquiredBuffer->renderBuffer.renderDone = true;
650     }
651 
652     // the decoder must set the "asReferernce" and "referenceFrame" flags properly
653 
654     // update reference frames
655     if (mAcquiredBuffer->referenceFrame) {
656         if (mManageReference) {
657             // managing reference for MPEG4/H.263/WMV.
658             // AVC should manage reference frame in a different way
659             if (mForwardReference != NULL) {
660                 // this forward reference is no longer needed
661                 mForwardReference->asReferernce = false;
662             }
663             // forward reference for either P or B frame prediction
664             mForwardReference = mLastReference;
665             mAcquiredBuffer->asReferernce = true;
666         }
667 
668         // the last reference frame.
669         mLastReference = mAcquiredBuffer;
670     }
671     // add to the output list
672     if (mShowFrame) {
673         if (mOutputHead == NULL) {
674             mOutputHead = mAcquiredBuffer;
675         } else {
676             mOutputTail->next = mAcquiredBuffer;
677         }
678         mOutputTail = mAcquiredBuffer;
679         mOutputTail->next = NULL;
680     }
681 
682     //VTRACE("Pushing POC %d to queue (pts = %.2f)", mAcquiredBuffer->pictureOrder, mAcquiredBuffer->renderBuffer.timeStamp/1E6);
683 
684     mAcquiredBuffer = NULL;
685     mSurfaceAcquirePos = (mSurfaceAcquirePos  + 1 ) % mNumSurfaces;
686     return DECODE_SUCCESS;
687 }
688 
689 Decode_Status VideoDecoderBase::releaseSurfaceBuffer(void) {
690     if (mAcquiredBuffer == NULL) {
691         // this is a harmless error
692         return DECODE_SUCCESS;
693     }
694 
695     // the frame was not decoded into the acquired buffer; the current surface is invalid and cannot be output.
696     mAcquiredBuffer->asReferernce = false;
697     mAcquiredBuffer->renderBuffer.renderDone = true;
698     mAcquiredBuffer = NULL;
699     return DECODE_SUCCESS;
700 }
701 
702 void VideoDecoderBase::flushSurfaceBuffers(void) {
703     endDecodingFrame(true);
704     VideoSurfaceBuffer *p = NULL;
705     while (mOutputHead) {
706         mOutputHead->renderBuffer.renderDone = true;
707         p = mOutputHead;
708         mOutputHead = mOutputHead->next;
709         p->next = NULL;
710     }
711     mOutputHead = NULL;
712     mOutputTail = NULL;
713 }
714 
715 Decode_Status VideoDecoderBase::endDecodingFrame(bool dropFrame) {
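    // Calls vaEndPicture() to finish the picture being decoded; on success the acquired
    // surface is queued for output via outputSurfaceBuffer(), unless dropFrame is set,
    // in which case the surface is synced and released.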
716     Decode_Status status = DECODE_SUCCESS;
717     VAStatus vaStatus;
718 
719     if (mDecodingFrame == false) {
720         if (mAcquiredBuffer != NULL) {
721             //ETRACE("mAcquiredBuffer is not NULL. Implementation bug.");
722             releaseSurfaceBuffer();
723             status = DECODE_FAIL;
724         }
725         return status;
726     }
727     // return through exit label to reset mDecodingFrame
728     if (mAcquiredBuffer == NULL) {
729         ETRACE("mAcquiredBuffer is NULL. Implementation bug.");
730         status = DECODE_FAIL;
731         goto exit;
732     }
733 
734     vaStatus = vaEndPicture(mVADisplay, mVAContext);
735     if (vaStatus != VA_STATUS_SUCCESS) {
736         releaseSurfaceBuffer();
737         ETRACE("vaEndPicture failed. vaStatus = %d", vaStatus);
738         status = DECODE_DRIVER_FAIL;
739         goto exit;
740     }
741 
742     if (dropFrame) {
743         // we are asked to drop this decoded picture
744         VTRACE("Frame dropped in endDecodingFrame");
745         vaStatus = vaSyncSurface(mVADisplay, mAcquiredBuffer->renderBuffer.surface);
746         releaseSurfaceBuffer();
747         goto exit;
748     }
749     status = outputSurfaceBuffer();
750     // fall through
751 exit:
752     mDecodingFrame = false;
753     return status;
754 }
755 
756 
757 Decode_Status VideoDecoderBase::setupVA(uint32_t numSurface, VAProfile profile, uint32_t numExtraSurface) {
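    // Creates the VA display, config and context, and allocates the decode surfaces.
    // When USE_NATIVE_GRAPHIC_BUFFER is set, the surfaces wrap the gralloc buffers
    // supplied in mConfigBuffer instead of being allocated by the driver.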
758     VAStatus vaStatus = VA_STATUS_SUCCESS;
759     Decode_Status status;
760     VAConfigAttrib attrib;
761 
762     if (mVAStarted) {
763         return DECODE_SUCCESS;
764     }
765 
766     mRotationDegrees = 0;
767     if (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER){
768 #ifdef TARGET_HAS_ISV
769         if (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber - mConfigBuffer.vppBufferNum)
770 #else
771         if (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber)
772 #endif
773             return DECODE_FORMAT_CHANGE;
774 
775         numSurface = mConfigBuffer.surfaceNumber;
776         // if the format has been changed in USE_NATIVE_GRAPHIC_BUFFER mode,
777         // we cannot set up VA here when the graphic buffer resolution is smaller than the resolution the decoder really needs
778         if (mSizeChanged) {
779             if (mVideoFormatInfo.surfaceWidth < mVideoFormatInfo.width || mVideoFormatInfo.surfaceHeight < mVideoFormatInfo.height) {
780                 mSizeChanged = false;
781                 return DECODE_FORMAT_CHANGE;
782             }
783         }
784     }
785 
786     // TODO: validate profile
787     if (numSurface == 0) {
788         return DECODE_FAIL;
789     }
790 
791     if (mConfigBuffer.flag & HAS_MINIMUM_SURFACE_NUMBER) {
792         if (numSurface < mConfigBuffer.surfaceNumber) {
793             WTRACE("surfaces to allocate %d is less than the minimum number required %d",
794                     numSurface, mConfigBuffer.surfaceNumber);
795             numSurface = mConfigBuffer.surfaceNumber;
796         }
797     }
798 
799     if (mVADisplay != NULL) {
800         ETRACE("VA is partially started.");
801         return DECODE_FAIL;
802     }
803 
804     // Display is defined as "unsigned int"
805 #ifndef USE_HYBRID_DRIVER
806     mDisplay = new Display;
807     *mDisplay = ANDROID_DISPLAY_HANDLE;
808 #else
809     if (profile >= VAProfileH264Baseline && profile <= VAProfileVC1Advanced) {
810         ITRACE("Using GEN driver");
811         mDisplay = "libva_driver_name=i965";
812         mUseGEN = true;
813     } else {
814         ITRACE("Using PVR driver");
815         mDisplay = "libva_driver_name=pvr";
816         mUseGEN = false;
817     }
818 
819 #endif
820     mVADisplay = vaGetDisplay(mDisplay);
821     if (mVADisplay == NULL) {
822         ETRACE("vaGetDisplay failed.");
823         return DECODE_DRIVER_FAIL;
824     }
825 
826     int majorVersion, minorVersion;
827     vaStatus = vaInitialize(mVADisplay, &majorVersion, &minorVersion);
828     CHECK_VA_STATUS("vaInitialize");
829 
830     if ((int32_t)profile != VAProfileSoftwareDecoding) {
831 
832         status = checkHardwareCapability();
833         CHECK_STATUS("checkHardwareCapability");
834 
835 #if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING)
836         status = getCodecSpecificConfigs(profile, &mVAConfig);
837         CHECK_STATUS("getCodecSpecificAttributes");
838 #else
839         //We are requesting RT attributes
840         attrib.type = VAConfigAttribRTFormat;
841         attrib.value = VA_RT_FORMAT_YUV420;
842 
843         vaStatus = vaCreateConfig(
844                 mVADisplay,
845                 profile,
846                 VAEntrypointVLD,
847                 &attrib,
848                 1,
849                 &mVAConfig);
850         CHECK_VA_STATUS("vaCreateConfig");
851 #endif
852     }
853 
854     mNumSurfaces = numSurface;
855     mNumExtraSurfaces = numExtraSurface;
856     mSurfaces = new VASurfaceID [mNumSurfaces + mNumExtraSurfaces];
857     mExtraSurfaces = mSurfaces + mNumSurfaces;
858     if (mSurfaces == NULL) {
859         return DECODE_MEMORY_FAIL;
860     }
861 
862     setRenderRect();
863 
864     int32_t format = VA_RT_FORMAT_YUV420;
865     if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION) {
866 #ifndef USE_AVC_SHORT_FORMAT
867         format |= VA_RT_FORMAT_PROTECTED;
868         WTRACE("Surface is protected.");
869 #endif
870     }
871     if (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) {
872         VASurfaceAttrib attribs[2];
873         mVASurfaceAttrib = new VASurfaceAttribExternalBuffers;
874         if (mVASurfaceAttrib == NULL) {
875             return DECODE_MEMORY_FAIL;
876         }
877 
878         mVASurfaceAttrib->buffers= (unsigned long *)malloc(sizeof(unsigned long)*mNumSurfaces);
879         if (mVASurfaceAttrib->buffers == NULL) {
880             return DECODE_MEMORY_FAIL;
881         }
882         mVASurfaceAttrib->num_buffers = mNumSurfaces;
883         mVASurfaceAttrib->pixel_format = VA_FOURCC_NV12;
884         mVASurfaceAttrib->width = mVideoFormatInfo.surfaceWidth;
885         mVASurfaceAttrib->height = mVideoFormatInfo.surfaceHeight;
886         mVASurfaceAttrib->data_size = mConfigBuffer.graphicBufferStride * mVideoFormatInfo.surfaceHeight * 1.5;
887         mVASurfaceAttrib->num_planes = 2;
888         mVASurfaceAttrib->pitches[0] = mConfigBuffer.graphicBufferStride;
889         mVASurfaceAttrib->pitches[1] = mConfigBuffer.graphicBufferStride;
890         mVASurfaceAttrib->pitches[2] = 0;
891         mVASurfaceAttrib->pitches[3] = 0;
892         mVASurfaceAttrib->offsets[0] = 0;
893         mVASurfaceAttrib->offsets[1] = mConfigBuffer.graphicBufferStride * mVideoFormatInfo.surfaceHeight;
894         mVASurfaceAttrib->offsets[2] = 0;
895         mVASurfaceAttrib->offsets[3] = 0;
896         mVASurfaceAttrib->private_data = (void *)mConfigBuffer.nativeWindow;
897         mVASurfaceAttrib->flags = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC;
898         if (mConfigBuffer.flag & USE_TILING_MEMORY)
899             mVASurfaceAttrib->flags |= VA_SURFACE_EXTBUF_DESC_ENABLE_TILING;
900 
901         for (int i = 0; i < mNumSurfaces; i++) {
902             mVASurfaceAttrib->buffers[i] = (unsigned long)mConfigBuffer.graphicBufferHandler[i];
903         }
904 
905         attribs[0].type = (VASurfaceAttribType)VASurfaceAttribMemoryType;
906         attribs[0].flags = VA_SURFACE_ATTRIB_SETTABLE;
907         attribs[0].value.type = VAGenericValueTypeInteger;
908         attribs[0].value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC;
909 
910         attribs[1].type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor;
911         attribs[1].flags = VA_SURFACE_ATTRIB_SETTABLE;
912         attribs[1].value.type = VAGenericValueTypePointer;
913         attribs[1].value.value.p = (void *)mVASurfaceAttrib;
914 
915         vaStatus = vaCreateSurfaces(
916             mVADisplay,
917             format,
918             mVideoFormatInfo.surfaceWidth,
919             mVideoFormatInfo.surfaceHeight,
920             mSurfaces,
921             mNumSurfaces,
922             attribs,
923             2);
924 
925     } else {
926         vaStatus = vaCreateSurfaces(
927             mVADisplay,
928             format,
929             mVideoFormatInfo.width,
930             mVideoFormatInfo.height,
931             mSurfaces,
932             mNumSurfaces,
933             NULL,
934             0);
935         mVideoFormatInfo.surfaceWidth = mVideoFormatInfo.width;
936         mVideoFormatInfo.surfaceHeight = mVideoFormatInfo.height;
937     }
938     CHECK_VA_STATUS("vaCreateSurfaces");
939 
940     if (mNumExtraSurfaces != 0) {
941         vaStatus = vaCreateSurfaces(
942             mVADisplay,
943             format,
944             mVideoFormatInfo.surfaceWidth,
945             mVideoFormatInfo.surfaceHeight,
946             mExtraSurfaces,
947             mNumExtraSurfaces,
948             NULL,
949             0);
950         CHECK_VA_STATUS("vaCreateSurfaces");
951     }
952 
953     mVideoFormatInfo.surfaceNumber = mNumSurfaces;
954     mVideoFormatInfo.ctxSurfaces = mSurfaces;
955 
956     if ((int32_t)profile != VAProfileSoftwareDecoding) {
957         vaStatus = vaCreateContext(
958                 mVADisplay,
959                 mVAConfig,
960                 mVideoFormatInfo.surfaceWidth,
961                 mVideoFormatInfo.surfaceHeight,
962                 0,
963                 mSurfaces,
964                 mNumSurfaces + mNumExtraSurfaces,
965                 &mVAContext);
966         CHECK_VA_STATUS("vaCreateContext");
967     }
968 
969     mSurfaceBuffers = new VideoSurfaceBuffer [mNumSurfaces];
970     if (mSurfaceBuffers == NULL) {
971         return DECODE_MEMORY_FAIL;
972     }
973     initSurfaceBuffer(true);
974 
975     if ((int32_t)profile == VAProfileSoftwareDecoding) {
976         // derive user pointer from surface for direct access
977         status = mapSurface();
978         CHECK_STATUS("mapSurface")
979     }
980 
981     setRotationDegrees(mConfigBuffer.rotationDegrees);
982 
983     mVAStarted = true;
984     return DECODE_SUCCESS;
985 }
986 
987 Decode_Status VideoDecoderBase::terminateVA(void) {
988     mSignalBufferSize = 0;
989     for (int i = 0; i < MAX_GRAPHIC_BUFFER_NUM; i++) {
990          mSignalBufferPre[i] = NULL;
991     }
992 
993     if (mVAStarted == false) {
994         // VA hasn't been started yet
995         return DECODE_SUCCESS;
996     }
997 
998     if (mSurfaceBuffers) {
999         for (int32_t i = 0; i < mNumSurfaces; i++) {
1000             if (mSurfaceBuffers[i].renderBuffer.rawData) {
1001                 if (mSurfaceBuffers[i].renderBuffer.rawData->data) {
1002                     delete [] mSurfaceBuffers[i].renderBuffer.rawData->data;
1003                 }
1004                 delete mSurfaceBuffers[i].renderBuffer.rawData;
1005             }
1006             if (mSurfaceBuffers[i].mappedData) {
1007                 // don't delete the data pointer as it is mapped from the surface
1008                 delete mSurfaceBuffers[i].mappedData;
1009             }
1010         }
1011         delete [] mSurfaceBuffers;
1012         mSurfaceBuffers = NULL;
1013     }
1014 
1015     if (mVASurfaceAttrib) {
1016         if (mVASurfaceAttrib->buffers) free(mVASurfaceAttrib->buffers);
1017         delete mVASurfaceAttrib;
1018         mVASurfaceAttrib = NULL;
1019     }
1020 
1021 
1022     if (mSurfaceUserPtr) {
1023         delete [] mSurfaceUserPtr;
1024         mSurfaceUserPtr = NULL;
1025     }
1026 
1027     if (mSurfaces)
1028     {
1029         vaDestroySurfaces(mVADisplay, mSurfaces, mNumSurfaces + mNumExtraSurfaces);
1030         delete [] mSurfaces;
1031         mSurfaces = NULL;
1032     }
1033 
1034     if (mVAContext != VA_INVALID_ID) {
1035          vaDestroyContext(mVADisplay, mVAContext);
1036          mVAContext = VA_INVALID_ID;
1037     }
1038 
1039     if (mVAConfig != VA_INVALID_ID) {
1040         vaDestroyConfig(mVADisplay, mVAConfig);
1041         mVAConfig = VA_INVALID_ID;
1042     }
1043 
1044     if (mVADisplay) {
1045         vaTerminate(mVADisplay);
1046         mVADisplay = NULL;
1047     }
1048 
1049     if (mDisplay) {
1050 #ifndef USE_HYBRID_DRIVER
1051         delete mDisplay;
1052 #endif
1053         mDisplay = NULL;
1054     }
1055 
1056     mVAStarted = false;
1057     mInitialized = false;
1058     mErrReportEnabled = false;
1059     return DECODE_SUCCESS;
1060 }
1061 
1062 Decode_Status VideoDecoderBase::parseBuffer(uint8_t *buffer, int32_t size, bool config, void** vbpData) {
1063      // DON'T check if mVAStarted == true
1064     if (mParserHandle == NULL) {
1065         return DECODE_NO_PARSER;
1066     }
1067 
1068     uint32_t vbpStatus;
1069     if (buffer == NULL || size <= 0) {
1070         return DECODE_INVALID_DATA;
1071     }
1072 
1073     uint8_t configFlag = config ? 1 : 0;
1074     vbpStatus = mParserParse(mParserHandle, buffer, size, configFlag);
1075     CHECK_VBP_STATUS("vbp_parse");
1076 
1077     vbpStatus = mParserQuery(mParserHandle, vbpData);
1078     CHECK_VBP_STATUS("vbp_query");
1079 
1080     return DECODE_SUCCESS;
1081 }
1082 
1083 
1084 
1085 Decode_Status VideoDecoderBase::mapSurface(void) {
1086     VAStatus vaStatus = VA_STATUS_SUCCESS;
1087     VAImage image;
1088     uint8_t *userPtr;
1089     mSurfaceUserPtr = new uint8_t* [mNumSurfaces];
1090     if (mSurfaceUserPtr == NULL) {
1091         return DECODE_MEMORY_FAIL;
1092     }
1093 
1094     for (int32_t i = 0; i< mNumSurfaces; i++) {
1095         vaStatus = vaDeriveImage(mVADisplay, mSurfaces[i], &image);
1096         CHECK_VA_STATUS("vaDeriveImage");
1097         vaStatus = vaMapBuffer(mVADisplay, image.buf, (void**)&userPtr);
1098         CHECK_VA_STATUS("vaMapBuffer");
1099         mSurfaceUserPtr[i] = userPtr;
1100         mSurfaceBuffers[i].mappedData = new VideoFrameRawData;
1101         if (mSurfaceBuffers[i].mappedData == NULL) {
1102             return DECODE_MEMORY_FAIL;
1103         }
1104         mSurfaceBuffers[i].mappedData->own = false; // derived from surface so can't be released
1105         mSurfaceBuffers[i].mappedData->data = NULL;  // specified during acquireSurfaceBuffer
1106         mSurfaceBuffers[i].mappedData->fourcc = image.format.fourcc;
1107         mSurfaceBuffers[i].mappedData->width = mVideoFormatInfo.width;
1108         mSurfaceBuffers[i].mappedData->height = mVideoFormatInfo.height;
1109         mSurfaceBuffers[i].mappedData->size = image.data_size;
1110         for (int pi = 0; pi < 3; pi++) {
1111             mSurfaceBuffers[i].mappedData->pitch[pi] = image.pitches[pi];
1112             mSurfaceBuffers[i].mappedData->offset[pi] = image.offsets[pi];
1113         }
1114         // debug information
1115         if (image.pitches[0] != image.pitches[1] ||
1116             image.width != mVideoFormatInfo.width ||
1117             image.height != mVideoFormatInfo.height ||
1118             image.offsets[0] != 0) {
1119             WTRACE("Unexpected VAImage format, w = %d, h = %d, offset = %d", image.width, image.height, image.offsets[0]);
1120         }
1121         // TODO: do we need to unmap buffer?
1122         //vaStatus = vaUnmapBuffer(mVADisplay, image.buf);
1123         //CHECK_VA_STATUS("vaMapBuffer");
1124         vaStatus = vaDestroyImage(mVADisplay,image.image_id);
1125         CHECK_VA_STATUS("vaDestroyImage");
1126 
1127     }
1128     return DECODE_SUCCESS;
1129 }
1130 
1131 Decode_Status VideoDecoderBase::getRawDataFromSurface(VideoRenderBuffer *renderBuffer, uint8_t *pRawData, uint32_t *pSize, bool internal) {
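    // When "internal" is true the source is the currently acquired buffer and the NV12 data
    // is copied into renderBuffer->rawData, which is allocated and owned by this library;
    // otherwise the caller supplies renderBuffer and pRawData, and *pSize receives the
    // cropped NV12 frame size.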
1132     if (internal) {
1133         if (mAcquiredBuffer == NULL) {
1134             return DECODE_FAIL;
1135         }
1136         renderBuffer = &(mAcquiredBuffer->renderBuffer);
1137     }
1138 
1139     VAStatus vaStatus;
1140     VAImageFormat imageFormat;
1141     VAImage vaImage;
1142     vaStatus = vaSyncSurface(renderBuffer->display, renderBuffer->surface);
1143     CHECK_VA_STATUS("vaSyncSurface");
1144 
1145     vaStatus = vaDeriveImage(renderBuffer->display, renderBuffer->surface, &vaImage);
1146     CHECK_VA_STATUS("vaDeriveImage");
1147 
1148     void *pBuf = NULL;
1149     vaStatus = vaMapBuffer(renderBuffer->display, vaImage.buf, &pBuf);
1150     CHECK_VA_STATUS("vaMapBuffer");
1151 
1152 
1153     // size in NV12 format
1154     uint32_t cropWidth = mVideoFormatInfo.width - (mVideoFormatInfo.cropLeft + mVideoFormatInfo.cropRight);
1155     uint32_t cropHeight = mVideoFormatInfo.height - (mVideoFormatInfo.cropBottom + mVideoFormatInfo.cropTop);
1156     int32_t size = cropWidth  * cropHeight * 3 / 2;
1157 
1158     if (internal) {
1159         VideoFrameRawData *rawData = NULL;
1160         if (renderBuffer->rawData == NULL) {
1161             rawData = new VideoFrameRawData;
1162             if (rawData == NULL) {
1163                 return DECODE_MEMORY_FAIL;
1164             }
1165             memset(rawData, 0, sizeof(VideoFrameRawData));
1166             renderBuffer->rawData = rawData;
1167         } else {
1168             rawData = renderBuffer->rawData;
1169         }
1170 
1171         if (rawData->data != NULL && rawData->size != size) {
1172             delete [] rawData->data;
1173             rawData->data = NULL;
1174             rawData->size = 0;
1175         }
1176         if (rawData->data == NULL) {
1177             rawData->data = new uint8_t [size];
1178             if (rawData->data == NULL) {
1179                 return DECODE_MEMORY_FAIL;
1180             }
1181         }
1182 
1183         rawData->own = true; // allocated by this library
1184         rawData->width = cropWidth;
1185         rawData->height = cropHeight;
1186         rawData->pitch[0] = cropWidth;
1187         rawData->pitch[1] = cropWidth;
1188         rawData->pitch[2] = 0;  // interleaved U/V, two planes
1189         rawData->offset[0] = 0;
1190         rawData->offset[1] = cropWidth * cropHeight;
1191         rawData->offset[2] = cropWidth * cropHeight * 3 / 2;
1192         rawData->size = size;
1193         rawData->fourcc = 'NV12';
1194 
1195         pRawData = rawData->data;
1196     } else {
1197         *pSize = size;
1198     }
1199 
1200     if (size == (int32_t)vaImage.data_size) {
1201 #ifdef  __SSE4_1__
1202         stream_memcpy(pRawData, pBuf, size);
1203 #else
1204         memcpy(pRawData, pBuf, size);
1205 #endif
1206     } else {
1207         // copy Y data
1208         uint8_t *src = (uint8_t*)pBuf;
1209         uint8_t *dst = pRawData;
1210         uint32_t row = 0;
1211         for (row = 0; row < cropHeight; row++) {
1212 #ifdef  __SSE4_1__
1213             stream_memcpy(dst, src, cropWidth);
1214 #else
1215             memcpy(dst, src, cropWidth);
1216 #endif
1217             dst += cropWidth;
1218             src += vaImage.pitches[0];
1219         }
1220         // copy the interleaved U/V data
1221         src = (uint8_t*)pBuf + vaImage.offsets[1];
1222         for (row = 0; row < cropHeight / 2; row++) {
1223 #ifdef  __SSE4_1__
1224             stream_memcpy(dst, src, cropWidth);
1225 #else
1226             memcpy(dst, src, cropWidth);
1227 #endif
1228             dst += cropWidth;
1229             src += vaImage.pitches[1];
1230         }
1231     }
1232 
1233     vaStatus = vaUnmapBuffer(renderBuffer->display, vaImage.buf);
1234     CHECK_VA_STATUS("vaUnmapBuffer");
1235 
1236     vaStatus = vaDestroyImage(renderBuffer->display, vaImage.image_id);
1237     CHECK_VA_STATUS("vaDestroyImage");
1238 
1239     return DECODE_SUCCESS;
1240 }
1241 
1242 void VideoDecoderBase::initSurfaceBuffer(bool reset) {
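    // reset == true: full initialization right after VA setup; clears rawData/mappedData,
    // binds the graphic buffer handles and applies any renderDone signals queued in
    // mSignalBufferPre. reset == false: called from flush(); reinitializes the queue state
    // without touching the mapped/raw data pointers.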
1243     bool useGraphicBuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER;
1244     if (useGraphicBuffer && reset) {
1245         pthread_mutex_lock(&mLock);
1246     }
1247     for (int32_t i = 0; i < mNumSurfaces; i++) {
1248         mSurfaceBuffers[i].renderBuffer.display = mVADisplay;
1249         mSurfaceBuffers[i].renderBuffer.surface = VA_INVALID_SURFACE;  // set in acquireSurfaceBuffer
1250         mSurfaceBuffers[i].renderBuffer.flag = 0;
1251         mSurfaceBuffers[i].renderBuffer.scanFormat = VA_FRAME_PICTURE;
1252         mSurfaceBuffers[i].renderBuffer.timeStamp = 0;
1253         mSurfaceBuffers[i].referenceFrame = false;
1254         mSurfaceBuffers[i].asReferernce= false;
1255         mSurfaceBuffers[i].pictureOrder = 0;
1256         mSurfaceBuffers[i].next = NULL;
1257         if (reset == true) {
1258             mSurfaceBuffers[i].renderBuffer.rawData = NULL;
1259             mSurfaceBuffers[i].mappedData = NULL;
1260         }
1261         if (useGraphicBuffer) {
1262             if (reset) {
1263                mSurfaceBuffers[i].renderBuffer.graphicBufferHandle = mConfigBuffer.graphicBufferHandler[i];
1264                mSurfaceBuffers[i].renderBuffer.renderDone = false; //default false
1265                for (uint32_t j = 0; j < mSignalBufferSize; j++) {
1266                    if(mSignalBufferPre[j] != NULL && mSignalBufferPre[j] == mSurfaceBuffers[i].renderBuffer.graphicBufferHandle) {
1267                       mSurfaceBuffers[i].renderBuffer.renderDone = true;
1268                       VTRACE("initSurfaceBuffer set renderDone = true index = %d", i);
1269                       mSignalBufferPre[j] = NULL;
1270                       break;
1271                    }
1272                }
1273             } else {
1274                mSurfaceBuffers[i].renderBuffer.renderDone = false;
1275             }
1276         } else {
1277             mSurfaceBuffers[i].renderBuffer.graphicBufferHandle = NULL;
1278             mSurfaceBuffers[i].renderBuffer.renderDone = true;
1279         }
1280         mSurfaceBuffers[i].renderBuffer.graphicBufferIndex = i;
1281     }
1282 
1283     if (useGraphicBuffer && reset) {
1284         mInitialized = true;
1285         mSignalBufferSize = 0;
1286         pthread_mutex_unlock(&mLock);
1287     }
1288 }
1289 
1290 Decode_Status VideoDecoderBase::signalRenderDone(void * graphichandler) {
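    // Before the surface buffers are initialized, remember the handle in mSignalBufferPre
    // so initSurfaceBuffer() can mark it as rendered; afterwards, set renderDone directly
    // on the surface buffer that wraps this graphic buffer handle.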
1291     if (graphichandler == NULL) {
1292         return DECODE_SUCCESS;
1293     }
1294     pthread_mutex_lock(&mLock);
1295     int i = 0;
1296     if (!mInitialized) {
1297         if (mSignalBufferSize >= MAX_GRAPHIC_BUFFER_NUM) {
1298             pthread_mutex_unlock(&mLock);
1299             return DECODE_INVALID_DATA;
1300         }
1301         mSignalBufferPre[mSignalBufferSize++] = graphichandler;
1302         VTRACE("SignalRenderDoneFlag mInitialized = false graphichandler = %p, mSignalBufferSize = %d", graphichandler, mSignalBufferSize);
1303     } else {
1304         if (!(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER)) {
1305             pthread_mutex_unlock(&mLock);
1306             return DECODE_SUCCESS;
1307         }
1308         for (i = 0; i < mNumSurfaces; i++) {
1309             if (mSurfaceBuffers[i].renderBuffer.graphicBufferHandle == graphichandler) {
1310                 mSurfaceBuffers[i].renderBuffer.renderDone = true;
1311                 VTRACE("SignalRenderDoneFlag mInitialized = true index = %d", i);
1312                break;
1313            }
1314         }
1315     }
1316     pthread_mutex_unlock(&mLock);
1317 
1318     return DECODE_SUCCESS;
1319 
1320 }
1321 
1322 void VideoDecoderBase::querySurfaceRenderStatus(VideoSurfaceBuffer* surface) {
1323     VASurfaceStatus surfStat = VASurfaceReady;
1324     VAStatus    vaStat = VA_STATUS_SUCCESS;
1325 
1326     if (!surface) {
1327         LOGW("SurfaceBuffer not ready yet");
1328         return;
1329     }
1330     surface->renderBuffer.driverRenderDone = true;
1331 
1332 #ifndef USE_GEN_HW
1333     if (surface->renderBuffer.surface != VA_INVALID_SURFACE &&
1334        (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER)) {
1335 
1336         vaStat = vaQuerySurfaceStatus(mVADisplay, surface->renderBuffer.surface, &surfStat);
1337 
1338         if ((vaStat == VA_STATUS_SUCCESS) && (surfStat != VASurfaceReady))
1339             surface->renderBuffer.driverRenderDone = false;
1340 
1341     }
1342 #endif
1343 
1344 }
1345 
1346 // This function should be called before start() to load a different type of parser
1347 #if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING)
1348 Decode_Status VideoDecoderBase::setParserType(_vbp_parser_type type) {
1349     if ((int32_t)type != VBP_INVALID) {
1350         ITRACE("Parser Type = %d", (int32_t)type);
1351         mParserType = type;
1352         return DECODE_SUCCESS;
1353     } else {
1354         ETRACE("Invalid parser type = %d", (int32_t)type);
1355         return DECODE_NO_PARSER;
1356     }
1357 }
1358 
1359 Decode_Status VideoDecoderBase::updateBuffer(uint8_t *buffer, int32_t size, void** vbpData) {
1360     if (mParserHandle == NULL) {
1361         return DECODE_NO_PARSER;
1362     }
1363 
1364     uint32_t vbpStatus;
1365     if (buffer == NULL || size <= 0) {
1366         return DECODE_INVALID_DATA;
1367     }
1368 
1369     vbpStatus = mParserUpdate(mParserHandle, buffer, size, vbpData);
1370     CHECK_VBP_STATUS("vbp_update");
1371 
1372     return DECODE_SUCCESS;
1373 }
1374 
1375 Decode_Status VideoDecoderBase::queryBuffer(void** vbpData) {
1376     if (mParserHandle == NULL) {
1377         return DECODE_NO_PARSER;
1378     }
1379 
1380     uint32_t vbpStatus;
1381     vbpStatus = mParserQuery(mParserHandle, vbpData);
1382     CHECK_VBP_STATUS("vbp_query");
1383 
1384     return DECODE_SUCCESS;
1385 }
1386 
1387 Decode_Status VideoDecoderBase::getCodecSpecificConfigs(VAProfile profile, VAConfigID *config) {
1388     VAStatus vaStatus;
1389     VAConfigAttrib attrib;
1390     attrib.type = VAConfigAttribRTFormat;
1391     attrib.value = VA_RT_FORMAT_YUV420;
1392 
1393     if (config == NULL) {
1394         ETRACE("Invalid parameter!");
1395         return DECODE_FAIL;
1396     }
1397 
1398     vaStatus = vaCreateConfig(
1399             mVADisplay,
1400             profile,
1401             VAEntrypointVLD,
1402             &attrib,
1403             1,
1404             config);
1405 
1406     CHECK_VA_STATUS("vaCreateConfig");
1407 
1408     return DECODE_SUCCESS;
1409 }
1410 #endif
1411 Decode_Status VideoDecoderBase::checkHardwareCapability() {
1412     return DECODE_SUCCESS;
1413 }
1414 
1415 void VideoDecoderBase::drainDecodingErrors(VideoErrorBuffer *outErrBuf, VideoRenderBuffer *currentSurface) {
1416     if (mErrReportEnabled && outErrBuf && currentSurface) {
1417         memcpy(outErrBuf, &(currentSurface->errBuf), sizeof(VideoErrorBuffer));
1418 
1419         currentSurface->errBuf.errorNumber = 0;
1420         currentSurface->errBuf.timeStamp = INVALID_PTS;
1421     }
1422     if (outErrBuf)
1423         VTRACE("%s: error number is %d", __FUNCTION__, outErrBuf->errorNumber);
1424 }
1425 
1426 void VideoDecoderBase::fillDecodingErrors(VideoRenderBuffer *currentSurface) {
1427     VAStatus ret;
1428 
1429     if (mErrReportEnabled) {
1430         currentSurface->errBuf.timeStamp = currentSurface->timeStamp;
1431         // TODO: is 10 a suitable number?
1432         VASurfaceDecodeMBErrors *err_drv_output = NULL;
1433         ret = vaQuerySurfaceError(mVADisplay, currentSurface->surface, VA_STATUS_ERROR_DECODING_ERROR, (void **)&err_drv_output);
1434         if (ret || !err_drv_output) {
1435             WTRACE("vaQuerySurfaceError failed.");
1436             return;
1437         }
1438 
1439         int offset = 0x1 & currentSurface->errBuf.errorNumber; // offset is either 0 or 1
1440         for (int i = 0; i < MAX_ERR_NUM - offset; i++) {
1441             if (err_drv_output[i].status != -1) {
1442                 currentSurface->errBuf.errorNumber++;
1443                 currentSurface->errBuf.errorArray[i + offset].type = DecodeMBError;
1444                 currentSurface->errBuf.errorArray[i + offset].error_data.mb_pos.start_mb = err_drv_output[i].start_mb;
1445                 currentSurface->errBuf.errorArray[i + offset].error_data.mb_pos.end_mb = err_drv_output[i].end_mb;
1446                 currentSurface->errBuf.errorArray[i + offset].num_mbs = err_drv_output[i].end_mb - err_drv_output[i].start_mb + 1;
1447                 ITRACE("Error Index[%d]: type = %d, start_mb = %d, end_mb = %d",
1448                     currentSurface->errBuf.errorNumber - 1,
1449                     currentSurface->errBuf.errorArray[i + offset].type,
1450                     currentSurface->errBuf.errorArray[i + offset].error_data.mb_pos.start_mb,
1451                     currentSurface->errBuf.errorArray[i + offset].error_data.mb_pos.end_mb);
1452             } else break;
1453         }
1454         ITRACE("%s: error number of current surface is %d, timestamp @%llu",
1455             __FUNCTION__, currentSurface->errBuf.errorNumber, currentSurface->timeStamp);
1456     }
1457 }
1458 
1459 void VideoDecoderBase::setRotationDegrees(int32_t rotationDegrees) {
1460     if (mRotationDegrees == rotationDegrees) {
1461         return;
1462     }
1463 
1464     ITRACE("set new rotation degree: %d", rotationDegrees);
1465     VADisplayAttribute rotate;
1466     rotate.type = VADisplayAttribRotation;
1467     rotate.value = VA_ROTATION_NONE;
1468     if (rotationDegrees == 0)
1469         rotate.value = VA_ROTATION_NONE;
1470     else if (rotationDegrees == 90)
1471         rotate.value = VA_ROTATION_90;
1472     else if (rotationDegrees == 180)
1473         rotate.value = VA_ROTATION_180;
1474     else if (rotationDegrees == 270)
1475         rotate.value = VA_ROTATION_270;
1476 
1477     VAStatus ret = vaSetDisplayAttributes(mVADisplay, &rotate, 1);
1478     if (ret) {
1479         ETRACE("Failed to set rotation degree.");
1480     }
1481     mRotationDegrees = rotationDegrees;
1482 }
1483 
1484 void VideoDecoderBase::setRenderRect() {
1485 
1486     if (!mVADisplay)
1487         return;
1488 
1489     VAStatus ret;
1490     VARectangle rect;
1491     rect.x = mVideoFormatInfo.cropLeft;
1492     rect.y = mVideoFormatInfo.cropTop;
1493     rect.width = mVideoFormatInfo.width - (mVideoFormatInfo.cropLeft + mVideoFormatInfo.cropRight);
1494     rect.height = mVideoFormatInfo.height - (mVideoFormatInfo.cropBottom + mVideoFormatInfo.cropTop);
1495 
1496     VADisplayAttribute render_rect;
1497     render_rect.type = VADisplayAttribRenderRect;
1498     render_rect.value = (long)&rect;
1499 
1500     ret = vaSetDisplayAttributes(mVADisplay, &render_rect, 1);
1501     if (ret) {
1502         ETRACE("Failed to set render rectangle.");
1503     }
1504 }
1505