1 /*
2 * Copyright (c) 2009-2011 Intel Corporation. All rights reserved.
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "VideoDecoderBase.h"
18 #include "VideoDecoderTrace.h"
19 #include <string.h>
20 #include <va/va_android.h>
21 #include <va/va_tpi.h>
#include <dlfcn.h> // dlopen/dlsym/dlclose are used below; assumed not already provided via VideoDecoderBase.h
22 #ifdef __SSE4_1__
23 #include "use_util_sse4.h"
24 #endif
25
26 #define INVALID_PTS ((uint64_t)-1)
27 #define MAXIMUM_POC 0x7FFFFFFF
28 #define MINIMUM_POC 0x80000000
29 #define ANDROID_DISPLAY_HANDLE 0x18C34078
30
31 VideoDecoderBase::VideoDecoderBase(const char *mimeType, _vbp_parser_type type)
32 : mInitialized(false),
33 mLowDelay(false),
34 mDisplay(NULL),
35 mVADisplay(NULL),
36 mVAContext(VA_INVALID_ID),
37 mVAConfig(VA_INVALID_ID),
38 mVAStarted(false),
39 mCurrentPTS(INVALID_PTS),
40 mAcquiredBuffer(NULL),
41 mLastReference(NULL),
42 mForwardReference(NULL),
43 mDecodingFrame(false),
44 mSizeChanged(false),
45 mShowFrame(true),
46 mOutputWindowSize(OUTPUT_WINDOW_SIZE),
47 mRotationDegrees(0),
48 mErrReportEnabled(false),
49 mWiDiOn(false),
50 mRawOutput(false),
51 mManageReference(true),
52 mOutputMethod(OUTPUT_BY_PCT),
53 mNumSurfaces(0),
54 mSurfaceBuffers(NULL),
55 mOutputHead(NULL),
56 mOutputTail(NULL),
57 mSurfaces(NULL),
58 mVASurfaceAttrib(NULL),
59 mSurfaceUserPtr(NULL),
60 mSurfaceAcquirePos(0),
61 mNextOutputPOC(MINIMUM_POC),
62 mParserType(type),
63 mParserHandle(NULL),
64 mSignalBufferSize(0) {
65
66 memset(&mVideoFormatInfo, 0, sizeof(VideoFormatInfo));
67 memset(&mConfigBuffer, 0, sizeof(mConfigBuffer));
68 for (int i = 0; i < MAX_GRAPHIC_BUFFER_NUM; i++) {
69 mSignalBufferPre[i] = NULL;
70 }
71 pthread_mutex_init(&mLock, NULL);
72 mVideoFormatInfo.mimeType = strdup(mimeType);
73 mUseGEN = false;
74 mLibHandle = NULL;
75 mParserOpen = NULL;
76 mParserClose = NULL;
77 mParserParse = NULL;
78 mParserQuery = NULL;
79 mParserFlush = NULL;
80 mParserUpdate = NULL;
81 }
82
83 VideoDecoderBase::~VideoDecoderBase() {
84 pthread_mutex_destroy(&mLock);
85 stop();
86 free(mVideoFormatInfo.mimeType);
87 }
88
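// start() loads the vbp parser library (libmixvbp.so) at runtime, resolves the parser entry
// points via dlsym(), opens a parser instance for mParserType, and keeps a metadata-only copy
// of the config buffer. Illustrative call sequence only (real call sites live in the OMX/codec
// layer; the "decoder" object and values below are hypothetical):
//
//     VideoConfigBuffer config;
//     memset(&config, 0, sizeof(config));
//     config.width = 1920;
//     config.height = 1080;
//     config.flag = WANT_LOW_DELAY;                 // optional flags
//     Decode_Status s = decoder->start(&config);    // "decoder" is a codec-specific subclass
//     // ... feed bitstream through the subclass decode() path ...
//     const VideoRenderBuffer *out = decoder->getOutput(false, NULL);
//     decoder->stop();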
89 Decode_Status VideoDecoderBase::start(VideoConfigBuffer *buffer) {
90 if (buffer == NULL) {
91 return DECODE_INVALID_DATA;
92 }
93
94 if (mParserHandle != NULL) {
95 WTRACE("Decoder has already started.");
96 return DECODE_SUCCESS;
97 }
98 mLibHandle = dlopen("libmixvbp.so", RTLD_NOW);
99 if (mLibHandle == NULL) {
100 return DECODE_NO_PARSER;
101 }
102 mParserOpen = (OpenFunc)dlsym(mLibHandle, "vbp_open");
103 mParserClose = (CloseFunc)dlsym(mLibHandle, "vbp_close");
104 mParserParse = (ParseFunc)dlsym(mLibHandle, "vbp_parse");
105 mParserQuery = (QueryFunc)dlsym(mLibHandle, "vbp_query");
106 mParserFlush = (FlushFunc)dlsym(mLibHandle, "vbp_flush");
107 if (mParserOpen == NULL || mParserClose == NULL || mParserParse == NULL
108 || mParserQuery == NULL || mParserFlush == NULL) {
109 return DECODE_NO_PARSER;
110 }
111 #if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING)
112 mParserUpdate = (UpdateFunc)dlsym(mLibHandle, "vbp_update");
113 if (mParserUpdate == NULL) {
114 return DECODE_NO_PARSER;
115 }
116 #endif
117 if ((int32_t)mParserType != VBP_INVALID) {
118 ITRACE("mParserType = %d", mParserType);
119 if (mParserOpen(mParserType, &mParserHandle) != VBP_OK) {
120 ETRACE("Failed to open VBP parser.");
121 return DECODE_NO_PARSER;
122 }
123 }
124 // keep a copy of the config buffer (metadata only); it can be used to override VA setup parameters.
125 mConfigBuffer = *buffer;
126 mConfigBuffer.data = NULL;
127 mConfigBuffer.size = 0;
128
129 mVideoFormatInfo.width = buffer->width;
130 mVideoFormatInfo.height = buffer->height;
131 if (buffer->flag & USE_NATIVE_GRAPHIC_BUFFER) {
132 mVideoFormatInfo.surfaceWidth = buffer->graphicBufferWidth;
133 mVideoFormatInfo.surfaceHeight = buffer->graphicBufferHeight;
134 }
135 mLowDelay = buffer->flag & WANT_LOW_DELAY;
136 mRawOutput = buffer->flag & WANT_RAW_OUTPUT;
137 if (mRawOutput) {
138 WTRACE("Output is raw data.");
139 }
140
141 return DECODE_SUCCESS;
142 }
143
144
145 Decode_Status VideoDecoderBase::reset(VideoConfigBuffer *buffer) {
146 if (buffer == NULL) {
147 return DECODE_INVALID_DATA;
148 }
149
150 // if VA is already started, terminate VA as graphic buffers are reallocated by omxcodec
151 terminateVA();
152
153 // reset mConfigBuffer so it can be passed to startVA.
154 mConfigBuffer = *buffer;
155 mConfigBuffer.data = NULL;
156 mConfigBuffer.size = 0;
157
158 mVideoFormatInfo.width = buffer->width;
159 mVideoFormatInfo.height = buffer->height;
160 if (buffer->flag & USE_NATIVE_GRAPHIC_BUFFER) {
161 mVideoFormatInfo.surfaceWidth = buffer->graphicBufferWidth;
162 mVideoFormatInfo.surfaceHeight = buffer->graphicBufferHeight;
163 }
164 mVideoFormatInfo.actualBufferNeeded = mConfigBuffer.surfaceNumber;
165 mLowDelay = buffer->flag & WANT_LOW_DELAY;
166 mRawOutput = buffer->flag & WANT_RAW_OUTPUT;
167 if (mRawOutput) {
168 WTRACE("Output is raw data.");
169 }
170 return DECODE_SUCCESS;
171 }
172
173
174
175 void VideoDecoderBase::stop(void) {
176 terminateVA();
177
178 mCurrentPTS = INVALID_PTS;
179 mAcquiredBuffer = NULL;
180 mLastReference = NULL;
181 mForwardReference = NULL;
182 mDecodingFrame = false;
183 mSizeChanged = false;
184
185 // private variables
186 mLowDelay = false;
187 mRawOutput = false;
188 mNumSurfaces = 0;
189 mSurfaceAcquirePos = 0;
190 mNextOutputPOC = MINIMUM_POC;
191 mVideoFormatInfo.valid = false;
192 if (mParserHandle){
193 mParserClose(mParserHandle);
194 mParserHandle = NULL;
195 }
196 if (mLibHandle) {
197 dlclose(mLibHandle);
198 mLibHandle = NULL;
199 }
200 }
201
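// flush() drops everything queued for output but keeps the VA context and surfaces alive:
// it force-ends any in-flight frame, scans the output queue for a pending resolution change
// (IS_RESOLUTION_CHANGE), resets the POC/PTS bookkeeping, flushes the vbp parser, and
// re-initializes the surface buffer array without touching mapped/raw data.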
202 void VideoDecoderBase::flush(void) {
203 if (mVAStarted == false) {
204 // nothing to flush at this stage
205 return;
206 }
207
208 endDecodingFrame(true);
209
210 VideoSurfaceBuffer *p = mOutputHead;
211 // check whether there is a buffer with the resolution-change (DRC) flag in the output queue
212 while (p) {
213 if (p->renderBuffer.flag & IS_RESOLUTION_CHANGE) {
214 mSizeChanged = true;
215 break;
216 }
217 p = p->next;
218 }
219 // avoid setting mSurfaceAcquirePos to 0 as it may cause tearing
220 // (surface is still being rendered)
221 mSurfaceAcquirePos = (mSurfaceAcquirePos + 1) % mNumSurfaces;
222 mNextOutputPOC = MINIMUM_POC;
223 mCurrentPTS = INVALID_PTS;
224 mAcquiredBuffer = NULL;
225 mLastReference = NULL;
226 mForwardReference = NULL;
227 mOutputHead = NULL;
228 mOutputTail = NULL;
229 mDecodingFrame = false;
230
231 // flush vbp parser
232 if (mParserHandle && (mParserFlush(mParserHandle) != VBP_OK)) {
233 WTRACE("Failed to flush parser. Continue");
234 }
235
236 // initialize surface buffer without resetting mapped/raw data
237 initSurfaceBuffer(false);
238
239 }
240
241 void VideoDecoderBase::freeSurfaceBuffers(void) {
242 if (mVAStarted == false) {
243 // no surface buffers to free at this stage
244 return;
245 }
246
247 pthread_mutex_lock(&mLock);
248
249 endDecodingFrame(true);
250
251 // if VA is already started, terminate VA as graphic buffers are reallocated by omxcodec
252 terminateVA();
253
254 pthread_mutex_unlock(&mLock);
255 }
256
257 const VideoFormatInfo* VideoDecoderBase::getFormatInfo(void) {
258 return &mVideoFormatInfo;
259 }
260
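// getOutput() picks the next buffer to hand to the renderer. In low-delay mode the head of
// the queue is returned immediately; otherwise the candidate is chosen by picture order count
// (OUTPUT_BY_POC) or picture coding type (OUTPUT_BY_PCT), and its timestamp is swapped with
// the smallest pending PTS so timestamps still leave in presentation order.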
261 const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining, VideoErrorBuffer *outErrBuf) {
262 VAStatus vaStatus;
263 if (mVAStarted == false) {
264 return NULL;
265 }
266 bool useGraphicBuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER;
267
268 if (draining) {
269 // complete decoding the last frame and ignore return
270 endDecodingFrame(false);
271 }
272
273 if (mOutputHead == NULL) {
274 return NULL;
275 }
276
277 // output by position (the first buffer)
278 VideoSurfaceBuffer *outputByPos = mOutputHead;
279
280 if (mLowDelay) {
281 mOutputHead = mOutputHead->next;
282 if (mOutputHead == NULL) {
283 mOutputTail = NULL;
284 }
285 vaStatus = vaSetTimestampForSurface(mVADisplay, outputByPos->renderBuffer.surface, outputByPos->renderBuffer.timeStamp);
286 if (useGraphicBuffer && !mUseGEN) {
287 vaSyncSurface(mVADisplay, outputByPos->renderBuffer.surface);
288 fillDecodingErrors(&(outputByPos->renderBuffer));
289 }
290 if (draining && mOutputTail == NULL) {
291 outputByPos->renderBuffer.flag |= IS_EOS;
292 }
293 drainDecodingErrors(outErrBuf, &(outputByPos->renderBuffer));
294
295 return &(outputByPos->renderBuffer);
296 }
297
298 // output by presentation time stamp (the smallest pts)
299 VideoSurfaceBuffer *outputByPts = findOutputByPts();
300
301 VideoSurfaceBuffer *output = NULL;
302 if (mOutputMethod == OUTPUT_BY_POC) {
303 output = findOutputByPoc(draining);
304 } else if (mOutputMethod == OUTPUT_BY_PCT) {
305 output = findOutputByPct(draining);
306 } else {
307 ETRACE("Invalid output method.");
308 return NULL;
309 }
310
311 if (output == NULL) {
312 return NULL;
313 }
314
315 if (output != outputByPts) {
316 // swap time stamp
317 uint64_t ts = output->renderBuffer.timeStamp;
318 output->renderBuffer.timeStamp = outputByPts->renderBuffer.timeStamp;
319 outputByPts->renderBuffer.timeStamp = ts;
320 }
321
322 if (output != outputByPos) {
323 // remove this output from middle or end of the list
324 VideoSurfaceBuffer *p = outputByPos;
325 while (p->next != output) {
326 p = p->next;
327 }
328 p->next = output->next;
329 if (mOutputTail == output) {
330 mOutputTail = p;
331 }
332 } else {
333 // remove this output from head of the list
334 mOutputHead = mOutputHead->next;
335 if (mOutputHead == NULL) {
336 mOutputTail = NULL;
337 }
338 }
339 //VTRACE("Output POC %d for display (pts = %.2f)", output->pictureOrder, output->renderBuffer.timeStamp/1E6);
340 vaStatus = vaSetTimestampForSurface(mVADisplay, output->renderBuffer.surface, output->renderBuffer.timeStamp);
341
342 if (useGraphicBuffer && !mUseGEN) {
343 vaSyncSurface(mVADisplay, output->renderBuffer.surface);
344 fillDecodingErrors(&(output->renderBuffer));
345 }
346
347 if (draining && mOutputTail == NULL) {
348 output->renderBuffer.flag |= IS_EOS;
349 }
350
351 drainDecodingErrors(outErrBuf, &(output->renderBuffer));
352
353 return &(output->renderBuffer);
354 }
355
356 VideoSurfaceBuffer* VideoDecoderBase::findOutputByPts() {
357 // output by presentation time stamp - buffer with the smallest time stamp is output
358 VideoSurfaceBuffer *p = mOutputHead;
359 VideoSurfaceBuffer *outputByPts = NULL;
360 uint64_t pts = INVALID_PTS;
361 do {
362 if ((uint64_t)(p->renderBuffer.timeStamp) <= pts) {
363 // find buffer with the smallest PTS
364 pts = p->renderBuffer.timeStamp;
365 outputByPts = p;
366 }
367 p = p->next;
368 } while (p != NULL);
369
370 return outputByPts;
371 }
372
373 VideoSurfaceBuffer* VideoDecoderBase::findOutputByPct(bool draining) {
374 // output by picture coding type (PCT)
375 // if there is more than one reference frame, the first reference frame is output; otherwise,
376 // output a non-reference frame if there is any.
377
378 VideoSurfaceBuffer *p = mOutputHead;
379 VideoSurfaceBuffer *outputByPct = NULL;
380 int32_t reference = 0;
381 do {
382 if (p->referenceFrame) {
383 reference++;
384 if (reference > 1) {
385 // mOutputHead must be a reference frame
386 outputByPct = mOutputHead;
387 break;
388 }
389 } else {
390 // first non-reference frame
391 outputByPct = p;
392 break;
393 }
394 p = p->next;
395 } while (p != NULL);
396
397 if (outputByPct == NULL && draining) {
398 outputByPct = mOutputHead;
399 }
400 return outputByPct;
401 }
402
403 #if 0
404 VideoSurfaceBuffer* VideoDecoderBase::findOutputByPoc(bool draining) {
405 // output by picture order count (POC)
406 // Output criteria:
407 // if there is IDR frame (POC == 0), all the frames before IDR must be output;
408 // Otherwise, if draining flag is set or list is full, frame with the least POC is output;
409 // Otherwise, NOTHING is output
410
411 int32_t dpbFullness = 0;
412 for (int32_t i = 0; i < mNumSurfaces; i++) {
413 // count num of reference frames
414 if (mSurfaceBuffers[i].asReferernce) {
415 dpbFullness++;
416 }
417 }
418
419 if (mAcquiredBuffer && mAcquiredBuffer->asReferernce) {
420 // frame is being decoded and is not ready for output yet
421 dpbFullness--;
422 }
423
424 VideoSurfaceBuffer *p = mOutputHead;
425 while (p != NULL) {
426 // count dpbFullness with non-reference frame in the output queue
427 if (p->asReferernce == false) {
428 dpbFullness++;
429 }
430 p = p->next;
431 }
432
433 Retry:
434 p = mOutputHead;
435 VideoSurfaceBuffer *outputByPoc = NULL;
436 int32_t count = 0;
437 int32_t poc = MAXIMUM_POC;
438
439 do {
440 if (p->pictureOrder == 0) {
441 // output picture with the least POC before IDR
442 if (outputByPoc != NULL) {
443 mNextOutputPOC = outputByPoc->pictureOrder + 1;
444 return outputByPoc;
445 } else {
446 mNextOutputPOC = MINIMUM_POC;
447 }
448 }
449
450 // POC of the output candidate must not be less than mNextOutputPOC
451 if (p->pictureOrder < mNextOutputPOC) {
452 break;
453 }
454
455 if (p->pictureOrder < poc) {
456 // update the least POC.
457 poc = p->pictureOrder;
458 outputByPoc = p;
459 }
460 count++;
461 p = p->next;
462 } while (p != NULL && count < mOutputWindowSize);
463
464 if (draining == false && dpbFullness < mOutputWindowSize) {
465 // list is not full and we are not in draining state
466 // if DPB is already full, one frame must be output
467 return NULL;
468 }
469
470 if (outputByPoc == NULL) {
471 mNextOutputPOC = MINIMUM_POC;
472 goto Retry;
473 }
474
475 // for debugging purpose
476 if (outputByPoc->pictureOrder != 0 && outputByPoc->pictureOrder < mNextOutputPOC) {
477 ETRACE("Output POC is not incremental, expected %d, actual %d", mNextOutputPOC, outputByPoc->pictureOrder);
478 //gaps_in_frame_num_value_allowed_flag is not currently supported
479 }
480
481 mNextOutputPOC = outputByPoc->pictureOrder + 1;
482
483 return outputByPoc;
484 }
485 #else
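// Active POC-based selection: walk up to mOutputWindowSize entries looking for the smallest
// POC that is >= mNextOutputPOC. A POC of 0 marks a new IDR, so anything queued before it must
// be output first. If the window fills without a candidate, the expected POC is reset to
// MINIMUM_POC and the scan restarts. When draining, fall back to the least-POC buffer seen.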
486 VideoSurfaceBuffer* VideoDecoderBase::findOutputByPoc(bool draining) {
487 VideoSurfaceBuffer *output = NULL;
488 VideoSurfaceBuffer *p = mOutputHead;
489 int32_t count = 0;
490 int32_t poc = MAXIMUM_POC;
491 VideoSurfaceBuffer *outputleastpoc = mOutputHead;
492 do {
493 count++;
494 if (p->pictureOrder == 0) {
495 // any picture before this POC (new IDR) must be output
496 if (output == NULL) {
497 mNextOutputPOC = MINIMUM_POC;
498 // looking for any POC with negative value
499 } else {
500 mNextOutputPOC = output->pictureOrder + 1;
501 break;
502 }
503 }
504 if (p->pictureOrder < poc && p->pictureOrder >= mNextOutputPOC) {
505 // this POC meets the output criteria.
506 poc = p->pictureOrder;
507 output = p;
508 outputleastpoc = p;
509 }
510 if (poc == mNextOutputPOC || count == mOutputWindowSize) {
511 if (output != NULL) {
512 // this indicates two cases:
513 // 1) the next output POC is found.
514 // 2) output queue is full and there is at least one buffer meeting the output criteria.
515 mNextOutputPOC = output->pictureOrder + 1;
516 break;
517 } else {
518 // this indicates output queue is full and no buffer in the queue meets the output criteria
519 // restart processing as queue is FULL and output criteria is changed. (next output POC is 0)
520 mNextOutputPOC = MINIMUM_POC;
521 count = 0;
522 poc = MAXIMUM_POC;
523 p = mOutputHead;
524 continue;
525 }
526 }
527 if (p->next == NULL) {
528 output = NULL;
529 }
530
531 p = p->next;
532 } while (p != NULL);
533
534 if (draining == true && output == NULL) {
535 output = outputleastpoc;
536 }
537
538 return output;
539 }
540 #endif
541
542 bool VideoDecoderBase::checkBufferAvail(void) {
543 if (!mInitialized) {
544 if ((mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) == 0) {
545 return true;
546 }
547 for (int i = 0; i < MAX_GRAPHIC_BUFFER_NUM; i++) {
548 if (mSignalBufferPre[i] != NULL) {
549 return true;
550 }
551 }
552 return false;
553 }
554 // check whether there is buffer available for decoding
555 // TODO: check frame being referenced for frame skipping
556 VideoSurfaceBuffer *buffer = NULL;
557 for (int32_t i = 0; i < mNumSurfaces; i++) {
558 buffer = mSurfaceBuffers + i;
559
560 if (buffer->asReferernce == false &&
561 buffer->renderBuffer.renderDone == true) {
562 querySurfaceRenderStatus(buffer);
563 if (buffer->renderBuffer.driverRenderDone == true)
564 return true;
565 }
566 }
567 return false;
568 }
569
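// acquireSurfaceBuffer() scans the surface pool round-robin starting at mSurfaceAcquirePos for
// a buffer that is not held as a reference, has been returned by the renderer (renderDone),
// is no longer busy in the driver (driverRenderDone), and whose underlying VA surface is not
// still referenced by another surface buffer (as happens with skipped frames).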
570 Decode_Status VideoDecoderBase::acquireSurfaceBuffer(void) {
571 if (mVAStarted == false) {
572 return DECODE_FAIL;
573 }
574
575 if (mAcquiredBuffer != NULL) {
576 ETRACE("mAcquiredBuffer is not NULL. Implementation bug.");
577 return DECODE_FAIL;
578 }
579
580 int nextAcquire = mSurfaceAcquirePos;
581 VideoSurfaceBuffer *acquiredBuffer = NULL;
582 bool acquired = false;
583
584 while (acquired == false) {
585 acquiredBuffer = mSurfaceBuffers + nextAcquire;
586
587 querySurfaceRenderStatus(acquiredBuffer);
588
589 if (acquiredBuffer->asReferernce == false && acquiredBuffer->renderBuffer.renderDone == true && acquiredBuffer->renderBuffer.driverRenderDone == true) {
590 // this is a potential buffer for acquisition; check whether it is referenced by another surface (frame skipping case)
591 VideoSurfaceBuffer *temp;
592 acquired = true;
593 for (int i = 0; i < mNumSurfaces; i++) {
594 if (i == nextAcquire) {
595 continue;
596 }
597 temp = mSurfaceBuffers + i;
598 // use mSurfaces[nextAcquire] instead of acquiredBuffer->renderBuffer.surface as it is the actual surface to use.
599 if (temp->renderBuffer.surface == mSurfaces[nextAcquire] &&
600 temp->renderBuffer.renderDone == false) {
601 ITRACE("Surface is referenced by other surface buffer.");
602 acquired = false;
603 break;
604 }
605 }
606 }
607 if (acquired) {
608 break;
609 }
610 nextAcquire++;
611 if (nextAcquire == mNumSurfaces) {
612 nextAcquire = 0;
613 }
614 if (nextAcquire == mSurfaceAcquirePos) {
615 return DECODE_NO_SURFACE;
616 }
617 }
618
619 if (acquired == false) {
620 return DECODE_NO_SURFACE;
621 }
622
623 mAcquiredBuffer = acquiredBuffer;
624 mSurfaceAcquirePos = nextAcquire;
625
626 // set the surface again as it may have been reset by a skipped frame.
627 // a skipped frame is a "non-coded frame": the decoder duplicates the previous reference frame as the output.
628 mAcquiredBuffer->renderBuffer.surface = mSurfaces[mSurfaceAcquirePos];
629 if (mSurfaceUserPtr && mAcquiredBuffer->mappedData) {
630 mAcquiredBuffer->mappedData->data = mSurfaceUserPtr[mSurfaceAcquirePos];
631 }
632 mAcquiredBuffer->renderBuffer.timeStamp = INVALID_PTS;
633 mAcquiredBuffer->renderBuffer.display = mVADisplay;
634 mAcquiredBuffer->renderBuffer.flag = 0;
635 mAcquiredBuffer->renderBuffer.renderDone = false;
636 mAcquiredBuffer->asReferernce = false;
637 mAcquiredBuffer->renderBuffer.errBuf.errorNumber = 0;
638 mAcquiredBuffer->renderBuffer.errBuf.timeStamp = INVALID_PTS;
639
640 return DECODE_SUCCESS;
641 }
642
643 Decode_Status VideoDecoderBase::outputSurfaceBuffer(void) {
644 Decode_Status status;
645 if (mAcquiredBuffer == NULL) {
646 ETRACE("mAcquiredBuffer is NULL. Implementation bug.");
647 return DECODE_FAIL;
648 }
649
650 if (mRawOutput) {
651 status = getRawDataFromSurface();
652 CHECK_STATUS();
653 }
654
655 // the frame was successfully decoded to the current surface; it is ready for output
656 if (mShowFrame) {
657 mAcquiredBuffer->renderBuffer.renderDone = false;
658 } else {
659 mAcquiredBuffer->renderBuffer.renderDone = true;
660 }
661
662 // the codec-specific decoder must set the asReferernce and referenceFrame flags properly
663
664 // update reference frames
665 if (mAcquiredBuffer->referenceFrame) {
666 if (mManageReference) {
667 // managing reference for MPEG4/H.263/WMV.
668 // AVC should manage reference frame in a different way
669 if (mForwardReference != NULL) {
670 // this forward reference is no longer needed
671 mForwardReference->asReferernce = false;
672 }
673 // forward reference for either P or B frame prediction
674 mForwardReference = mLastReference;
675 mAcquiredBuffer->asReferernce = true;
676 }
677
678 // the last reference frame.
679 mLastReference = mAcquiredBuffer;
680 }
681 // add to the output list
682 if (mShowFrame) {
683 if (mOutputHead == NULL) {
684 mOutputHead = mAcquiredBuffer;
685 } else {
686 mOutputTail->next = mAcquiredBuffer;
687 }
688 mOutputTail = mAcquiredBuffer;
689 mOutputTail->next = NULL;
690 }
691
692 //VTRACE("Pushing POC %d to queue (pts = %.2f)", mAcquiredBuffer->pictureOrder, mAcquiredBuffer->renderBuffer.timeStamp/1E6);
693
694 mAcquiredBuffer = NULL;
695 mSurfaceAcquirePos = (mSurfaceAcquirePos + 1 ) % mNumSurfaces;
696 return DECODE_SUCCESS;
697 }
698
699 Decode_Status VideoDecoderBase::releaseSurfaceBuffer(void) {
700 if (mAcquiredBuffer == NULL) {
701 // this is a harmless error
702 return DECODE_SUCCESS;
703 }
704
705 // the frame was not decoded into the acquired buffer; the current surface is invalid and can't be output.
706 mAcquiredBuffer->asReferernce = false;
707 mAcquiredBuffer->renderBuffer.renderDone = true;
708 mAcquiredBuffer = NULL;
709 return DECODE_SUCCESS;
710 }
711
712 void VideoDecoderBase::flushSurfaceBuffers(void) {
713 endDecodingFrame(true);
714 VideoSurfaceBuffer *p = NULL;
715 while (mOutputHead) {
716 mOutputHead->renderBuffer.renderDone = true;
717 p = mOutputHead;
718 mOutputHead = mOutputHead->next;
719 p->next = NULL;
720 }
721 mOutputHead = NULL;
722 mOutputTail = NULL;
723 }
724
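// endDecodingFrame() closes the current picture with vaEndPicture(). When dropFrame is set the
// surface is synced and released without being queued; otherwise the buffer is pushed to the
// output list via outputSurfaceBuffer(). mDecodingFrame is cleared on every exit path.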
725 Decode_Status VideoDecoderBase::endDecodingFrame(bool dropFrame) {
726 Decode_Status status = DECODE_SUCCESS;
727 VAStatus vaStatus;
728
729 if (mDecodingFrame == false) {
730 if (mAcquiredBuffer != NULL) {
731 //ETRACE("mAcquiredBuffer is not NULL. Implementation bug.");
732 releaseSurfaceBuffer();
733 status = DECODE_FAIL;
734 }
735 return status;
736 }
737 // return through exit label to reset mDecodingFrame
738 if (mAcquiredBuffer == NULL) {
739 ETRACE("mAcquiredBuffer is NULL. Implementation bug.");
740 status = DECODE_FAIL;
741 goto exit;
742 }
743
744 vaStatus = vaEndPicture(mVADisplay, mVAContext);
745 if (vaStatus != VA_STATUS_SUCCESS) {
746 releaseSurfaceBuffer();
747 ETRACE("vaEndPicture failed. vaStatus = %d", vaStatus);
748 status = DECODE_DRIVER_FAIL;
749 goto exit;
750 }
751
752 if (dropFrame) {
753 // we are asked to drop this decoded picture
754 VTRACE("Frame dropped in endDecodingFrame");
755 vaStatus = vaSyncSurface(mVADisplay, mAcquiredBuffer->renderBuffer.surface);
756 releaseSurfaceBuffer();
757 goto exit;
758 }
759 status = outputSurfaceBuffer();
760 // fall through
761 exit:
762 mDecodingFrame = false;
763 return status;
764 }
765
766
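// setupVA() builds the VA pipeline: display + vaInitialize(), a VLD config for the requested
// profile, the surface pool (client gralloc buffers or internally allocated surfaces, plus any
// extra surfaces), the decode context, and the VideoSurfaceBuffer bookkeeping array. It returns
// DECODE_FORMAT_CHANGE when the existing graphic buffers cannot hold the stream resolution.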
767 Decode_Status VideoDecoderBase::setupVA(uint32_t numSurface, VAProfile profile, uint32_t numExtraSurface) {
768 VAStatus vaStatus = VA_STATUS_SUCCESS;
769 Decode_Status status;
770 VAConfigAttrib attrib;
771
772 if (mVAStarted) {
773 return DECODE_SUCCESS;
774 }
775
776 mRotationDegrees = 0;
777 if (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER){
778 #ifdef TARGET_HAS_VPP
779 if (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber - mConfigBuffer.vppBufferNum)
780 #else
781 if (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber)
782 #endif
783 return DECODE_FORMAT_CHANGE;
784
785 numSurface = mConfigBuffer.surfaceNumber;
786 // if the format has changed in USE_NATIVE_GRAPHIC_BUFFER mode,
787 // we cannot set up VA here when the graphic buffer resolution is smaller than the resolution the decoder really needs
788 if (mSizeChanged) {
789 if (mVideoFormatInfo.surfaceWidth < mVideoFormatInfo.width || mVideoFormatInfo.surfaceHeight < mVideoFormatInfo.height) {
790 mSizeChanged = false;
791 return DECODE_FORMAT_CHANGE;
792 }
793 }
794 }
795
796 // TODO: validate profile
797 if (numSurface == 0) {
798 return DECODE_FAIL;
799 }
800
801 if (mConfigBuffer.flag & HAS_MINIMUM_SURFACE_NUMBER) {
802 if (numSurface < mConfigBuffer.surfaceNumber) {
803 WTRACE("surface to allocated %d is less than minimum number required %d",
804 numSurface, mConfigBuffer.surfaceNumber);
805 numSurface = mConfigBuffer.surfaceNumber;
806 }
807 }
808
809 if (mVADisplay != NULL) {
810 ETRACE("VA is partially started.");
811 return DECODE_FAIL;
812 }
813
814 // Display is defined as "unsigned int"
815 #ifndef USE_HYBRID_DRIVER
816 mDisplay = new Display;
817 *mDisplay = ANDROID_DISPLAY_HANDLE;
818 #else
819 if (profile >= VAProfileH264Baseline && profile <= VAProfileVC1Advanced) {
820 ITRACE("Using GEN driver");
821 mDisplay = "libva_driver_name=i965";
822 mUseGEN = true;
823 } else {
824 ITRACE("Using PVR driver");
825 mDisplay = "libva_driver_name=pvr";
826 mUseGEN = false;
827 }
828
829 #endif
830 mVADisplay = vaGetDisplay(mDisplay);
831 if (mVADisplay == NULL) {
832 ETRACE("vaGetDisplay failed.");
833 return DECODE_DRIVER_FAIL;
834 }
835
836 int majorVersion, minorVersion;
837 vaStatus = vaInitialize(mVADisplay, &majorVersion, &minorVersion);
838 CHECK_VA_STATUS("vaInitialize");
839
840 if ((int32_t)profile != VAProfileSoftwareDecoding) {
841
842 status = checkHardwareCapability();
843 CHECK_STATUS("checkHardwareCapability");
844
845 #if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING)
846 status = getCodecSpecificConfigs(profile, &mVAConfig);
847 CHECK_STATUS("getCodecSpecificAttributes");
848 #else
849 //We are requesting RT attributes
850 attrib.type = VAConfigAttribRTFormat;
851 attrib.value = VA_RT_FORMAT_YUV420;
852
853 vaStatus = vaCreateConfig(
854 mVADisplay,
855 profile,
856 VAEntrypointVLD,
857 &attrib,
858 1,
859 &mVAConfig);
860 CHECK_VA_STATUS("vaCreateConfig");
861 #endif
862 }
863
864 mNumSurfaces = numSurface;
865 mNumExtraSurfaces = numExtraSurface;
866 mSurfaces = new VASurfaceID [mNumSurfaces + mNumExtraSurfaces];
867 mExtraSurfaces = mSurfaces + mNumSurfaces;
868 if (mSurfaces == NULL) {
869 return DECODE_MEMORY_FAIL;
870 }
871
872 setRenderRect();
873
874 int32_t format = VA_RT_FORMAT_YUV420;
875 if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION) {
876 #ifndef USE_AVC_SHORT_FORMAT
877 format |= VA_RT_FORMAT_PROTECTED;
878 WTRACE("Surface is protected.");
879 #endif
880 }
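// In USE_NATIVE_GRAPHIC_BUFFER mode the decoder renders directly into gralloc buffers handed
// down by the client: the buffer handles are wrapped in a VASurfaceAttribExternalBuffers
// descriptor (NV12, client stride, optional tiling) and passed to vaCreateSurfaces() through
// the VASurfaceAttribMemoryType / VASurfaceAttribExternalBufferDescriptor attributes.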
881 if (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER) {
882 VASurfaceAttrib attribs[2];
883 mVASurfaceAttrib = new VASurfaceAttribExternalBuffers;
884 if (mVASurfaceAttrib == NULL) {
885 return DECODE_MEMORY_FAIL;
886 }
887
888 mVASurfaceAttrib->buffers= (unsigned long *)malloc(sizeof(unsigned long)*mNumSurfaces);
889 if (mVASurfaceAttrib->buffers == NULL) {
890 return DECODE_MEMORY_FAIL;
891 }
892 mVASurfaceAttrib->num_buffers = mNumSurfaces;
893 mVASurfaceAttrib->pixel_format = VA_FOURCC_NV12;
894 mVASurfaceAttrib->width = mVideoFormatInfo.surfaceWidth;
895 mVASurfaceAttrib->height = mVideoFormatInfo.surfaceHeight;
896 mVASurfaceAttrib->data_size = mConfigBuffer.graphicBufferStride * mVideoFormatInfo.surfaceHeight * 1.5;
897 mVASurfaceAttrib->num_planes = 2;
898 mVASurfaceAttrib->pitches[0] = mConfigBuffer.graphicBufferStride;
899 mVASurfaceAttrib->pitches[1] = mConfigBuffer.graphicBufferStride;
900 mVASurfaceAttrib->pitches[2] = 0;
901 mVASurfaceAttrib->pitches[3] = 0;
902 mVASurfaceAttrib->offsets[0] = 0;
903 mVASurfaceAttrib->offsets[1] = mConfigBuffer.graphicBufferStride * mVideoFormatInfo.surfaceHeight;
904 mVASurfaceAttrib->offsets[2] = 0;
905 mVASurfaceAttrib->offsets[3] = 0;
906 mVASurfaceAttrib->private_data = (void *)mConfigBuffer.nativeWindow;
907 mVASurfaceAttrib->flags = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC;
908 if (mConfigBuffer.flag & USE_TILING_MEMORY)
909 mVASurfaceAttrib->flags |= VA_SURFACE_EXTBUF_DESC_ENABLE_TILING;
910
911 for (int i = 0; i < mNumSurfaces; i++) {
912 mVASurfaceAttrib->buffers[i] = (unsigned long)mConfigBuffer.graphicBufferHandler[i];
913 }
914
915 attribs[0].type = (VASurfaceAttribType)VASurfaceAttribMemoryType;
916 attribs[0].flags = VA_SURFACE_ATTRIB_SETTABLE;
917 attribs[0].value.type = VAGenericValueTypeInteger;
918 attribs[0].value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC;
919
920 attribs[1].type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor;
921 attribs[1].flags = VA_SURFACE_ATTRIB_SETTABLE;
922 attribs[1].value.type = VAGenericValueTypePointer;
923 attribs[1].value.value.p = (void *)mVASurfaceAttrib;
924
925 vaStatus = vaCreateSurfaces(
926 mVADisplay,
927 format,
928 mVideoFormatInfo.surfaceWidth,
929 mVideoFormatInfo.surfaceHeight,
930 mSurfaces,
931 mNumSurfaces,
932 attribs,
933 2);
934
935 } else {
936 vaStatus = vaCreateSurfaces(
937 mVADisplay,
938 format,
939 mVideoFormatInfo.width,
940 mVideoFormatInfo.height,
941 mSurfaces,
942 mNumSurfaces,
943 NULL,
944 0);
945 mVideoFormatInfo.surfaceWidth = mVideoFormatInfo.width;
946 mVideoFormatInfo.surfaceHeight = mVideoFormatInfo.height;
947 }
948 CHECK_VA_STATUS("vaCreateSurfaces");
949
950 if (mNumExtraSurfaces != 0) {
951 vaStatus = vaCreateSurfaces(
952 mVADisplay,
953 format,
954 mVideoFormatInfo.surfaceWidth,
955 mVideoFormatInfo.surfaceHeight,
956 mExtraSurfaces,
957 mNumExtraSurfaces,
958 NULL,
959 0);
960 CHECK_VA_STATUS("vaCreateSurfaces");
961 }
962
963 mVideoFormatInfo.surfaceNumber = mNumSurfaces;
964 mVideoFormatInfo.ctxSurfaces = mSurfaces;
965
966 if ((int32_t)profile != VAProfileSoftwareDecoding) {
967 vaStatus = vaCreateContext(
968 mVADisplay,
969 mVAConfig,
970 mVideoFormatInfo.surfaceWidth,
971 mVideoFormatInfo.surfaceHeight,
972 0,
973 mSurfaces,
974 mNumSurfaces + mNumExtraSurfaces,
975 &mVAContext);
976 CHECK_VA_STATUS("vaCreateContext");
977 }
978
979 mSurfaceBuffers = new VideoSurfaceBuffer [mNumSurfaces];
980 if (mSurfaceBuffers == NULL) {
981 return DECODE_MEMORY_FAIL;
982 }
983 initSurfaceBuffer(true);
984
985 if ((int32_t)profile == VAProfileSoftwareDecoding) {
986 // derive user pointer from surface for direct access
987 status = mapSurface();
988 CHECK_STATUS("mapSurface")
989 }
990
991 setRotationDegrees(mConfigBuffer.rotationDegrees);
992
993 mVAStarted = true;
994 return DECODE_SUCCESS;
995 }
996
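// terminateVA() tears everything down in roughly the reverse order of setupVA(): raw/mapped
// data and the surface buffer array first, then the external-buffer descriptor, the VA
// surfaces, context, config, and finally the display. It is safe to call when VA was never
// started.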
997 Decode_Status VideoDecoderBase::terminateVA(void) {
998 mSignalBufferSize = 0;
999 for (int i = 0; i < MAX_GRAPHIC_BUFFER_NUM; i++) {
1000 mSignalBufferPre[i] = NULL;
1001 }
1002
1003 if (mVAStarted == false) {
1004 // VA hasn't been started yet
1005 return DECODE_SUCCESS;
1006 }
1007
1008 if (mSurfaceBuffers) {
1009 for (int32_t i = 0; i < mNumSurfaces; i++) {
1010 if (mSurfaceBuffers[i].renderBuffer.rawData) {
1011 if (mSurfaceBuffers[i].renderBuffer.rawData->data) {
1012 delete [] mSurfaceBuffers[i].renderBuffer.rawData->data;
1013 }
1014 delete mSurfaceBuffers[i].renderBuffer.rawData;
1015 }
1016 if (mSurfaceBuffers[i].mappedData) {
1017 // don't delete data pointer as it is mapped from surface
1018 delete mSurfaceBuffers[i].mappedData;
1019 }
1020 }
1021 delete [] mSurfaceBuffers;
1022 mSurfaceBuffers = NULL;
1023 }
1024
1025 if (mVASurfaceAttrib) {
1026 if (mVASurfaceAttrib->buffers) free(mVASurfaceAttrib->buffers);
1027 delete mVASurfaceAttrib;
1028 mVASurfaceAttrib = NULL;
1029 }
1030
1031
1032 if (mSurfaceUserPtr) {
1033 delete [] mSurfaceUserPtr;
1034 mSurfaceUserPtr = NULL;
1035 }
1036
1037 if (mSurfaces)
1038 {
1039 vaDestroySurfaces(mVADisplay, mSurfaces, mNumSurfaces + mNumExtraSurfaces);
1040 delete [] mSurfaces;
1041 mSurfaces = NULL;
1042 }
1043
1044 if (mVAContext != VA_INVALID_ID) {
1045 vaDestroyContext(mVADisplay, mVAContext);
1046 mVAContext = VA_INVALID_ID;
1047 }
1048
1049 if (mVAConfig != VA_INVALID_ID) {
1050 vaDestroyConfig(mVADisplay, mVAConfig);
1051 mVAConfig = VA_INVALID_ID;
1052 }
1053
1054 if (mVADisplay) {
1055 vaTerminate(mVADisplay);
1056 mVADisplay = NULL;
1057 }
1058
1059 if (mDisplay) {
1060 #ifndef USE_HYBRID_DRIVER
1061 delete mDisplay;
1062 #endif
1063 mDisplay = NULL;
1064 }
1065
1066 mVAStarted = false;
1067 mInitialized = false;
1068 mErrReportEnabled = false;
1069 return DECODE_SUCCESS;
1070 }
1071
1072 Decode_Status VideoDecoderBase::parseBuffer(uint8_t *buffer, int32_t size, bool config, void** vbpData) {
1073 // DON'T check if mVAStarted == true
1074 if (mParserHandle == NULL) {
1075 return DECODE_NO_PARSER;
1076 }
1077
1078 uint32_t vbpStatus;
1079 if (buffer == NULL || size <= 0) {
1080 return DECODE_INVALID_DATA;
1081 }
1082
1083 uint8_t configFlag = config ? 1 : 0;
1084 vbpStatus = mParserParse(mParserHandle, buffer, size, configFlag);
1085 CHECK_VBP_STATUS("vbp_parse");
1086
1087 vbpStatus = mParserQuery(mParserHandle, vbpData);
1088 CHECK_VBP_STATUS("vbp_query");
1089
1090 return DECODE_SUCCESS;
1091 }
1092
1093
1094
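// mapSurface() is only used for VAProfileSoftwareDecoding: each surface is exposed as a CPU
// pointer via vaDeriveImage()/vaMapBuffer() so the software decoder can write into it
// directly. The derived VAImage is destroyed again, but the mapping itself is kept for the
// lifetime of the surfaces.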
1095 Decode_Status VideoDecoderBase::mapSurface(void) {
1096 VAStatus vaStatus = VA_STATUS_SUCCESS;
1097 VAImage image;
1098 uint8_t *userPtr;
1099 mSurfaceUserPtr = new uint8_t* [mNumSurfaces];
1100 if (mSurfaceUserPtr == NULL) {
1101 return DECODE_MEMORY_FAIL;
1102 }
1103
1104 for (int32_t i = 0; i< mNumSurfaces; i++) {
1105 vaStatus = vaDeriveImage(mVADisplay, mSurfaces[i], &image);
1106 CHECK_VA_STATUS("vaDeriveImage");
1107 vaStatus = vaMapBuffer(mVADisplay, image.buf, (void**)&userPtr);
1108 CHECK_VA_STATUS("vaMapBuffer");
1109 mSurfaceUserPtr[i] = userPtr;
1110 mSurfaceBuffers[i].mappedData = new VideoFrameRawData;
1111 if (mSurfaceBuffers[i].mappedData == NULL) {
1112 return DECODE_MEMORY_FAIL;
1113 }
1114 mSurfaceBuffers[i].mappedData->own = false; // derived from surface so can't be released
1115 mSurfaceBuffers[i].mappedData->data = NULL; // specified during acquireSurfaceBuffer
1116 mSurfaceBuffers[i].mappedData->fourcc = image.format.fourcc;
1117 mSurfaceBuffers[i].mappedData->width = mVideoFormatInfo.width;
1118 mSurfaceBuffers[i].mappedData->height = mVideoFormatInfo.height;
1119 mSurfaceBuffers[i].mappedData->size = image.data_size;
1120 for (int pi = 0; pi < 3; pi++) {
1121 mSurfaceBuffers[i].mappedData->pitch[pi] = image.pitches[pi];
1122 mSurfaceBuffers[i].mappedData->offset[pi] = image.offsets[pi];
1123 }
1124 // debug information
1125 if (image.pitches[0] != image.pitches[1] ||
1126 image.width != mVideoFormatInfo.width ||
1127 image.height != mVideoFormatInfo.height ||
1128 image.offsets[0] != 0) {
1129 WTRACE("Unexpected VAImage format, w = %d, h = %d, offset = %d", image.width, image.height, image.offsets[0]);
1130 }
1131 // TODO: do we need to unmap buffer?
1132 //vaStatus = vaUnmapBuffer(mVADisplay, image.buf);
1133 //CHECK_VA_STATUS("vaMapBuffer");
1134 vaStatus = vaDestroyImage(mVADisplay,image.image_id);
1135 CHECK_VA_STATUS("vaDestroyImage");
1136
1137 }
1138 return DECODE_SUCCESS;
1139 }
1140
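// getRawDataFromSurface() copies the decoded picture out of the VA surface as cropped NV12.
// When the cropped size matches the VAImage data size a single copy is done; otherwise the Y
// plane and the interleaved UV plane are copied row by row to strip the stride padding. With
// SSE4.1 enabled, stream_memcpy (presumably using non-temporal stores) is used instead of memcpy.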
1141 Decode_Status VideoDecoderBase::getRawDataFromSurface(VideoRenderBuffer *renderBuffer, uint8_t *pRawData, uint32_t *pSize, bool internal) {
1142 if (internal) {
1143 if (mAcquiredBuffer == NULL) {
1144 return DECODE_FAIL;
1145 }
1146 renderBuffer = &(mAcquiredBuffer->renderBuffer);
1147 }
1148
1149 VAStatus vaStatus;
1150 VAImageFormat imageFormat;
1151 VAImage vaImage;
1152 vaStatus = vaSyncSurface(renderBuffer->display, renderBuffer->surface);
1153 CHECK_VA_STATUS("vaSyncSurface");
1154
1155 vaStatus = vaDeriveImage(renderBuffer->display, renderBuffer->surface, &vaImage);
1156 CHECK_VA_STATUS("vaDeriveImage");
1157
1158 void *pBuf = NULL;
1159 vaStatus = vaMapBuffer(renderBuffer->display, vaImage.buf, &pBuf);
1160 CHECK_VA_STATUS("vaMapBuffer");
1161
1162
1163 // size in NV12 format
1164 uint32_t cropWidth = mVideoFormatInfo.width - (mVideoFormatInfo.cropLeft + mVideoFormatInfo.cropRight);
1165 uint32_t cropHeight = mVideoFormatInfo.height - (mVideoFormatInfo.cropBottom + mVideoFormatInfo.cropTop);
1166 int32_t size = cropWidth * cropHeight * 3 / 2;
1167
1168 if (internal) {
1169 VideoFrameRawData *rawData = NULL;
1170 if (renderBuffer->rawData == NULL) {
1171 rawData = new VideoFrameRawData;
1172 if (rawData == NULL) {
1173 return DECODE_MEMORY_FAIL;
1174 }
1175 memset(rawData, 0, sizeof(VideoFrameRawData));
1176 renderBuffer->rawData = rawData;
1177 } else {
1178 rawData = renderBuffer->rawData;
1179 }
1180
1181 if (rawData->data != NULL && rawData->size != size) {
1182 delete [] rawData->data;
1183 rawData->data = NULL;
1184 rawData->size = 0;
1185 }
1186 if (rawData->data == NULL) {
1187 rawData->data = new uint8_t [size];
1188 if (rawData->data == NULL) {
1189 return DECODE_MEMORY_FAIL;
1190 }
1191 }
1192
1193 rawData->own = true; // allocated by this library
1194 rawData->width = cropWidth;
1195 rawData->height = cropHeight;
1196 rawData->pitch[0] = cropWidth;
1197 rawData->pitch[1] = cropWidth;
1198 rawData->pitch[2] = 0; // interleaved U/V, two planes
1199 rawData->offset[0] = 0;
1200 rawData->offset[1] = cropWidth * cropHeight;
1201 rawData->offset[2] = cropWidth * cropHeight * 3 / 2;
1202 rawData->size = size;
1203 rawData->fourcc = 'NV12';
1204
1205 pRawData = rawData->data;
1206 } else {
1207 *pSize = size;
1208 }
1209
1210 if (size == (int32_t)vaImage.data_size) {
1211 #ifdef __SSE4_1__
1212 stream_memcpy(pRawData, pBuf, size);
1213 #else
1214 memcpy(pRawData, pBuf, size);
1215 #endif
1216 } else {
1217 // copy Y data
1218 uint8_t *src = (uint8_t*)pBuf;
1219 uint8_t *dst = pRawData;
1220 uint32_t row = 0;
1221 for (row = 0; row < cropHeight; row++) {
1222 #ifdef __SSE4_1__
1223 stream_memcpy(dst, src, cropWidth);
1224 #else
1225 memcpy(dst, src, cropWidth);
1226 #endif
1227 dst += cropWidth;
1228 src += vaImage.pitches[0];
1229 }
1230 // copy interleaved V and U data
1231 src = (uint8_t*)pBuf + vaImage.offsets[1];
1232 for (row = 0; row < cropHeight / 2; row++) {
1233 #ifdef __SSE4_1__
1234 stream_memcpy(dst, src, cropWidth);
1235 #else
1236 memcpy(dst, src, cropWidth);
1237 #endif
1238 dst += cropWidth;
1239 src += vaImage.pitches[1];
1240 }
1241 }
1242
1243 vaStatus = vaUnmapBuffer(renderBuffer->display, vaImage.buf);
1244 CHECK_VA_STATUS("vaUnmapBuffer");
1245
1246 vaStatus = vaDestroyImage(renderBuffer->display, vaImage.image_id);
1247 CHECK_VA_STATUS("vaDestroyImage");
1248
1249 return DECODE_SUCCESS;
1250 }
1251
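// initSurfaceBuffer() (re)initializes the per-surface bookkeeping. In graphic-buffer mode a
// full reset also replays mSignalBufferPre[]: buffers the client already returned via
// signalRenderDone() before setup completed are marked renderDone so they are immediately
// available for decoding.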
1252 void VideoDecoderBase::initSurfaceBuffer(bool reset) {
1253 bool useGraphicBuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER;
1254 if (useGraphicBuffer && reset) {
1255 pthread_mutex_lock(&mLock);
1256 }
1257 for (int32_t i = 0; i < mNumSurfaces; i++) {
1258 mSurfaceBuffers[i].renderBuffer.display = mVADisplay;
1259 mSurfaceBuffers[i].renderBuffer.surface = VA_INVALID_SURFACE; // set in acquireSurfaceBuffer
1260 mSurfaceBuffers[i].renderBuffer.flag = 0;
1261 mSurfaceBuffers[i].renderBuffer.scanFormat = VA_FRAME_PICTURE;
1262 mSurfaceBuffers[i].renderBuffer.timeStamp = 0;
1263 mSurfaceBuffers[i].referenceFrame = false;
1264 mSurfaceBuffers[i].asReferernce= false;
1265 mSurfaceBuffers[i].pictureOrder = 0;
1266 mSurfaceBuffers[i].next = NULL;
1267 if (reset == true) {
1268 mSurfaceBuffers[i].renderBuffer.rawData = NULL;
1269 mSurfaceBuffers[i].mappedData = NULL;
1270 }
1271 if (useGraphicBuffer) {
1272 if (reset) {
1273 mSurfaceBuffers[i].renderBuffer.graphicBufferHandle = mConfigBuffer.graphicBufferHandler[i];
1274 mSurfaceBuffers[i].renderBuffer.renderDone = false; //default false
1275 for (uint32_t j = 0; j < mSignalBufferSize; j++) {
1276 if(mSignalBufferPre[j] != NULL && mSignalBufferPre[j] == mSurfaceBuffers[i].renderBuffer.graphicBufferHandle) {
1277 mSurfaceBuffers[i].renderBuffer.renderDone = true;
1278 VTRACE("initSurfaceBuffer set renderDone = true index = %d", i);
1279 mSignalBufferPre[j] = NULL;
1280 break;
1281 }
1282 }
1283 } else {
1284 mSurfaceBuffers[i].renderBuffer.renderDone = false;
1285 }
1286 } else {
1287 mSurfaceBuffers[i].renderBuffer.graphicBufferHandle = NULL;
1288 mSurfaceBuffers[i].renderBuffer.renderDone = true;
1289 }
1290 mSurfaceBuffers[i].renderBuffer.graphicBufferIndex = i;
1291 }
1292
1293 if (useGraphicBuffer && reset) {
1294 mInitialized = true;
1295 mSignalBufferSize = 0;
1296 pthread_mutex_unlock(&mLock);
1297 }
1298 }
1299
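// signalRenderDone() is called by the client when it has finished rendering a graphic buffer.
// Before the decoder is initialized the handle is parked in mSignalBufferPre[]; afterwards the
// matching surface buffer is flagged renderDone so it can be reacquired for decoding.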
1300 Decode_Status VideoDecoderBase::signalRenderDone(void * graphichandler) {
1301 if (graphichandler == NULL) {
1302 return DECODE_SUCCESS;
1303 }
1304 pthread_mutex_lock(&mLock);
1305 int i = 0;
1306 if (!mInitialized) {
1307 if (mSignalBufferSize >= MAX_GRAPHIC_BUFFER_NUM) {
1308 pthread_mutex_unlock(&mLock);
1309 return DECODE_INVALID_DATA;
1310 }
1311 mSignalBufferPre[mSignalBufferSize++] = graphichandler;
1312 VTRACE("SignalRenderDoneFlag mInitialized = false graphichandler = %p, mSignalBufferSize = %d", graphichandler, mSignalBufferSize);
1313 } else {
1314 if (!(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER)) {
1315 pthread_mutex_unlock(&mLock);
1316 return DECODE_SUCCESS;
1317 }
1318 for (i = 0; i < mNumSurfaces; i++) {
1319 if (mSurfaceBuffers[i].renderBuffer.graphicBufferHandle == graphichandler) {
1320 mSurfaceBuffers[i].renderBuffer.renderDone = true;
1321 VTRACE("SignalRenderDoneFlag mInitialized = true index = %d", i);
1322 break;
1323 }
1324 }
1325 }
1326 pthread_mutex_unlock(&mLock);
1327
1328 return DECODE_SUCCESS;
1329
1330 }
1331
1332 void VideoDecoderBase::querySurfaceRenderStatus(VideoSurfaceBuffer* surface) {
1333 VASurfaceStatus surfStat = VASurfaceReady;
1334 VAStatus vaStat = VA_STATUS_SUCCESS;
1335
1336 if (!surface) {
1337 LOGW("SurfaceBuffer not ready yet");
1338 return;
1339 }
1340 surface->renderBuffer.driverRenderDone = true;
1341
1342 #ifndef USE_GEN_HW
1343 if (surface->renderBuffer.surface != VA_INVALID_SURFACE &&
1344 (mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER)) {
1345
1346 vaStat = vaQuerySurfaceStatus(mVADisplay, surface->renderBuffer.surface, &surfStat);
1347
1348 if ((vaStat == VA_STATUS_SUCCESS) && (surfStat != VASurfaceReady))
1349 surface->renderBuffer.driverRenderDone = false;
1350
1351 }
1352 #endif
1353
1354 }
1355
1356 // This function should be called before start() to load different type of parsers
1357 #if (defined USE_AVC_SHORT_FORMAT || defined USE_SLICE_HEADER_PARSING)
1358 Decode_Status VideoDecoderBase::setParserType(_vbp_parser_type type) {
1359 if ((int32_t)type != VBP_INVALID) {
1360 ITRACE("Parser Type = %d", (int32_t)type);
1361 mParserType = type;
1362 return DECODE_SUCCESS;
1363 } else {
1364 ETRACE("Invalid parser type = %d", (int32_t)type);
1365 return DECODE_NO_PARSER;
1366 }
1367 }
1368
1369 Decode_Status VideoDecoderBase::updateBuffer(uint8_t *buffer, int32_t size, void** vbpData) {
1370 if (mParserHandle == NULL) {
1371 return DECODE_NO_PARSER;
1372 }
1373
1374 uint32_t vbpStatus;
1375 if (buffer == NULL || size <= 0) {
1376 return DECODE_INVALID_DATA;
1377 }
1378
1379 vbpStatus = mParserUpdate(mParserHandle, buffer, size, vbpData);
1380 CHECK_VBP_STATUS("vbp_update");
1381
1382 return DECODE_SUCCESS;
1383 }
1384
1385 Decode_Status VideoDecoderBase::queryBuffer(void** vbpData) {
1386 if (mParserHandle == NULL) {
1387 return DECODE_NO_PARSER;
1388 }
1389
1390 uint32_t vbpStatus;
1391 vbpStatus = mParserQuery(mParserHandle, vbpData);
1392 CHECK_VBP_STATUS("vbp_query");
1393
1394 return DECODE_SUCCESS;
1395 }
1396
1397 Decode_Status VideoDecoderBase::getCodecSpecificConfigs(VAProfile profile, VAConfigID *config) {
1398 VAStatus vaStatus;
1399 VAConfigAttrib attrib;
1400 attrib.type = VAConfigAttribRTFormat;
1401 attrib.value = VA_RT_FORMAT_YUV420;
1402
1403 if (config == NULL) {
1404 ETRACE("Invalid parameter!");
1405 return DECODE_FAIL;
1406 }
1407
1408 vaStatus = vaCreateConfig(
1409 mVADisplay,
1410 profile,
1411 VAEntrypointVLD,
1412 &attrib,
1413 1,
1414 config);
1415
1416 CHECK_VA_STATUS("vaCreateConfig");
1417
1418 return DECODE_SUCCESS;
1419 }
1420 #endif
1421 Decode_Status VideoDecoderBase::checkHardwareCapability() {
1422 return DECODE_SUCCESS;
1423 }
1424
1425 void VideoDecoderBase::drainDecodingErrors(VideoErrorBuffer *outErrBuf, VideoRenderBuffer *currentSurface) {
1426 if (mErrReportEnabled && outErrBuf && currentSurface) {
1427 memcpy(outErrBuf, &(currentSurface->errBuf), sizeof(VideoErrorBuffer));
1428
1429 currentSurface->errBuf.errorNumber = 0;
1430 currentSurface->errBuf.timeStamp = INVALID_PTS;
1431 }
1432 if (outErrBuf)
1433 VTRACE("%s: error number is %d", __FUNCTION__, outErrBuf->errorNumber);
1434 }
1435
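// fillDecodingErrors() asks the driver for macroblock-level decode errors on the given surface
// (vaQuerySurfaceError) and appends them to the buffer's errBuf so the client can retrieve
// them later through drainDecodingErrors().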
1436 void VideoDecoderBase::fillDecodingErrors(VideoRenderBuffer *currentSurface) {
1437 VAStatus ret;
1438
1439 if (mErrReportEnabled) {
1440 currentSurface->errBuf.timeStamp = currentSurface->timeStamp;
1441 // TODO: is 10 a suitable number?
1442 VASurfaceDecodeMBErrors *err_drv_output = NULL;
1443 ret = vaQuerySurfaceError(mVADisplay, currentSurface->surface, VA_STATUS_ERROR_DECODING_ERROR, (void **)&err_drv_output);
1444 if (ret || !err_drv_output) {
1445 WTRACE("vaQuerySurfaceError failed.");
1446 return;
1447 }
1448
1449 int offset = 0x1 & currentSurface->errBuf.errorNumber;// offset is either 0 or 1
1450 for (int i = 0; i < MAX_ERR_NUM - offset; i++) {
1451 if (err_drv_output[i].status != -1) {
1452 currentSurface->errBuf.errorNumber++;
1453 currentSurface->errBuf.errorArray[i + offset].type = DecodeMBError;
1454 currentSurface->errBuf.errorArray[i + offset].error_data.mb_pos.start_mb = err_drv_output[i].start_mb;
1455 currentSurface->errBuf.errorArray[i + offset].error_data.mb_pos.end_mb = err_drv_output[i].end_mb;
1456 currentSurface->errBuf.errorArray[i + offset].num_mbs = err_drv_output[i].end_mb - err_drv_output[i].start_mb + 1;
1457 ITRACE("Error Index[%d]: type = %d, start_mb = %d, end_mb = %d",
1458 currentSurface->errBuf.errorNumber - 1,
1459 currentSurface->errBuf.errorArray[i + offset].type,
1460 currentSurface->errBuf.errorArray[i + offset].error_data.mb_pos.start_mb,
1461 currentSurface->errBuf.errorArray[i + offset].error_data.mb_pos.end_mb);
1462 } else break;
1463 }
1464 ITRACE("%s: error number of current surface is %d, timestamp @%llu",
1465 __FUNCTION__, currentSurface->errBuf.errorNumber, currentSurface->timeStamp);
1466 }
1467 }
1468
1469 void VideoDecoderBase::setRotationDegrees(int32_t rotationDegrees) {
1470 if (mRotationDegrees == rotationDegrees) {
1471 return;
1472 }
1473
1474 ITRACE("set new rotation degree: %d", rotationDegrees);
1475 VADisplayAttribute rotate;
1476 rotate.type = VADisplayAttribRotation;
1477 rotate.value = VA_ROTATION_NONE;
1478 if (rotationDegrees == 0)
1479 rotate.value = VA_ROTATION_NONE;
1480 else if (rotationDegrees == 90)
1481 rotate.value = VA_ROTATION_90;
1482 else if (rotationDegrees == 180)
1483 rotate.value = VA_ROTATION_180;
1484 else if (rotationDegrees == 270)
1485 rotate.value = VA_ROTATION_270;
1486
1487 VAStatus ret = vaSetDisplayAttributes(mVADisplay, &rotate, 1);
1488 if (ret) {
1489 ETRACE("Failed to set rotation degree.");
1490 }
1491 mRotationDegrees = rotationDegrees;
1492 }
1493
1494 void VideoDecoderBase::setRenderRect() {
1495
1496 if (!mVADisplay)
1497 return;
1498
1499 VAStatus ret;
1500 VARectangle rect;
1501 rect.x = mVideoFormatInfo.cropLeft;
1502 rect.y = mVideoFormatInfo.cropTop;
1503 rect.width = mVideoFormatInfo.width - (mVideoFormatInfo.cropLeft + mVideoFormatInfo.cropRight);
1504 rect.height = mVideoFormatInfo.height - (mVideoFormatInfo.cropBottom + mVideoFormatInfo.cropTop);
1505
1506 VADisplayAttribute render_rect;
1507 render_rect.type = VADisplayAttribRenderRect;
1508 render_rect.value = (long)▭
1509
1510 ret = vaSetDisplayAttributes(mVADisplay, &render_rect, 1);
1511 if (ret) {
1512 ETRACE("Failed to set rotation degree.");
1513 }
1514 }
1515