/*
 * Copyright (c) 2009-2011 Intel Corporation.  All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "VideoDecoderAVC.h"
#include "VideoDecoderTrace.h"
#include <string.h>
#include <cutils/properties.h>

// Macros for calculating the actual number of buffers needed
#define WIDI_CONSUMED   6
#define HDMI_CONSUMED   2
#define NW_CONSUMED     2
#define POC_DEFAULT     0x7FFFFFFF
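// POC_DEFAULT marks an empty/invalid DPB entry (see invalidateDPB()).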

VideoDecoderAVC::VideoDecoderAVC(const char *mimeType)
    : VideoDecoderBase(mimeType, VBP_H264),
      mToggleDPB(0),
      mErrorConcealment(false) {

    invalidateDPB(0);
    invalidateDPB(1);
    mLastPictureFlags = VA_PICTURE_H264_INVALID;
}

VideoDecoderAVC::~VideoDecoderAVC() {
    stop();
}

Decode_Status VideoDecoderAVC::start(VideoConfigBuffer *buffer) {
    Decode_Status status;

    status = VideoDecoderBase::start(buffer);
    CHECK_STATUS("VideoDecoderBase::start");

    // We don't want the base class to manage references.
    VideoDecoderBase::ManageReference(false);
    // output by picture order count
    VideoDecoderBase::setOutputMethod(OUTPUT_BY_POC);

    mErrorConcealment = buffer->flag & WANT_ERROR_CONCEALMENT;
    if (buffer->data == NULL || buffer->size == 0) {
        WTRACE("No config data to start VA.");
        if ((buffer->flag & HAS_SURFACE_NUMBER) && (buffer->flag & HAS_VA_PROFILE)) {
            ITRACE("Use client-supplied profile and surface number to start VA.");
            return VideoDecoderBase::setupVA(buffer->surfaceNumber, buffer->profile);
        }
        return DECODE_SUCCESS;
    }

    vbp_data_h264 *data = NULL;
    status = VideoDecoderBase::parseBuffer(buffer->data, buffer->size, true, (void**)&data);
    CHECK_STATUS("VideoDecoderBase::parseBuffer");

    status = startVA(data);
    return status;
}

void VideoDecoderAVC::stop(void) {
    // drop the last frame and ignore return value
    endDecodingFrame(true);
    VideoDecoderBase::stop();
    invalidateDPB(0);
    invalidateDPB(1);
    mToggleDPB = 0;
    mErrorConcealment = false;
    mLastPictureFlags = VA_PICTURE_H264_INVALID;
}

void VideoDecoderAVC::flush(void) {
    // drop the frame and ignore return value
    VideoDecoderBase::flush();
    invalidateDPB(0);
    invalidateDPB(1);
    mToggleDPB = 0;
    mLastPictureFlags = VA_PICTURE_H264_INVALID;
}

Decode_Status VideoDecoderAVC::decode(VideoDecodeBuffer *buffer) {
    Decode_Status status;
    vbp_data_h264 *data = NULL;
    if (buffer == NULL) {
        return DECODE_INVALID_DATA;
    }
    status = VideoDecoderBase::parseBuffer(
            buffer->data,
            buffer->size,
            false,
            (void**)&data);
    CHECK_STATUS("VideoDecoderBase::parseBuffer");

    if (!mVAStarted) {
        if (data->has_sps && data->has_pps) {
            status = startVA(data);
            CHECK_STATUS("startVA");
        } else {
            WTRACE("Can't start VA as either SPS or PPS is still not available.");
            return DECODE_SUCCESS;
        }
    }

    VideoDecoderBase::setRotationDegrees(buffer->rotationDegrees);

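    // decodeFrame() returns DECODE_MULTIPLE_FRAME when the parser finds a second frame
    // packed into the same input buffer; the offset and timestamp of that second frame
    // are passed back to the caller through the extension buffer filled in below.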
    status = decodeFrame(buffer, data);
    if (status == DECODE_MULTIPLE_FRAME) {
        buffer->ext = &mExtensionBuffer;
        mExtensionBuffer.extType = PACKED_FRAME_TYPE;
        mExtensionBuffer.extSize = sizeof(mPackedFrame);
        mExtensionBuffer.extData = (uint8_t*)&mPackedFrame;
    }
    return status;
}

Decode_Status VideoDecoderAVC::decodeFrame(VideoDecodeBuffer *buffer, vbp_data_h264 *data) {
    Decode_Status status;
    if (data->has_sps == 0 || data->has_pps == 0) {
        return DECODE_NO_CONFIG;
    }

    mVideoFormatInfo.flags = 0;
    uint32_t fieldFlags = 0;
    for (unsigned int i = 0; i < data->num_pictures; i++) {
        VAPictureH264 &pic = data->pic_data[i].pic_parms->CurrPic;
        fieldFlags |= pic.flags;
        // Don't remove the following code; it can be enabled for debugging the DPB.
#if 0
        VTRACE("%d: decoding frame %.2f, poc top = %d, poc bottom = %d, flags = %d,  reference = %d",
                i,
                buffer->timeStamp/1E6,
                pic.TopFieldOrderCnt,
                pic.BottomFieldOrderCnt,
                pic.flags,
                (pic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
                (pic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE));
#endif
    }
    int32_t topField = fieldFlags & VA_PICTURE_H264_TOP_FIELD;
    int32_t botField = fieldFlags & VA_PICTURE_H264_BOTTOM_FIELD;
    if ((topField == 0 && botField != 0) || (topField != 0 && botField == 0)) {
        mVideoFormatInfo.flags |= IS_SINGLE_FIELD;
    }

    if (data->new_sps || data->new_pps) {
        status = handleNewSequence(data);
        CHECK_STATUS("handleNewSequence");
    }

    if (isWiDiStatusChanged()) {
        mSizeChanged = false;
        flushSurfaceBuffers();
        return DECODE_FORMAT_CHANGE;
    }

    // first pic_data always exists, check if any slice is parsed
    if (data->pic_data[0].num_slices == 0) {
        ITRACE("No slice available for decoding.");
        status = mSizeChanged ? DECODE_FORMAT_CHANGE : DECODE_SUCCESS;
        mSizeChanged = false;
        return status;
    }

    uint64_t lastPTS = mCurrentPTS;
    mCurrentPTS = buffer->timeStamp;
    //if (lastPTS != mCurrentPTS) {
    if (isNewFrame(data, lastPTS == mCurrentPTS)) {
        if (mLowDelay) {
            // start decoding a new frame
            status = beginDecodingFrame(data);
            if (status != DECODE_SUCCESS) {
                Decode_Status st = status;
                // finish decoding the last frame if an error is
                // encountered while starting the new frame
                status = endDecodingFrame(false);
                CHECK_STATUS("endDecodingFrame");
                return st;
            }
        }

        // finish decoding the last frame
        status = endDecodingFrame(false);
        CHECK_STATUS("endDecodingFrame");

        if (!mLowDelay) {
            // start decoding a new frame
            status = beginDecodingFrame(data);
            CHECK_STATUS("beginDecodingFrame");
        }
    } else {
        status = continueDecodingFrame(data);
        CHECK_STATUS("continueDecodingFrame");
    }

    // HAS_COMPLETE_FRAME is not reliable as it may indicate the end of a field
#if 0
    if (buffer->flag & HAS_COMPLETE_FRAME) {
        // finish decoding current frame
        status = endDecodingFrame(false);
        CHECK_STATUS("endDecodingFrame");
    }
#endif
    return DECODE_SUCCESS;
}

Decode_Status VideoDecoderAVC::beginDecodingFrame(vbp_data_h264 *data) {
    Decode_Status status;

    status = acquireSurfaceBuffer();
    CHECK_STATUS("acquireSurfaceBuffer");
    VAPictureH264 *picture = &(data->pic_data[0].pic_parms->CurrPic);
    if ((picture->flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
        (picture->flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) {
        mAcquiredBuffer->referenceFrame = true;
    } else {
        mAcquiredBuffer->referenceFrame = false;
    }
    // set asReference in updateDPB

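    // A top-field flag indicates interlaced content: mark the render buffer as
    // field-based; otherwise treat the picture as a progressive frame.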
    if (picture->flags & VA_PICTURE_H264_TOP_FIELD) {
        mAcquiredBuffer->renderBuffer.scanFormat = VA_BOTTOM_FIELD | VA_TOP_FIELD;
    } else {
        mAcquiredBuffer->renderBuffer.scanFormat = VA_FRAME_PICTURE;
    }

    // TODO: Set the discontinuity flag
    mAcquiredBuffer->renderBuffer.flag = 0;
    mAcquiredBuffer->renderBuffer.timeStamp = mCurrentPTS;
    mAcquiredBuffer->pictureOrder = getPOC(picture);

    if (mSizeChanged) {
        mAcquiredBuffer->renderBuffer.flag |= IS_RESOLUTION_CHANGE;
        mSizeChanged = false;
    }

    status = continueDecodingFrame(data);
    // surface buffer is released if decode fails
    return status;
}


Decode_Status VideoDecoderAVC::continueDecodingFrame(vbp_data_h264 *data) {
    Decode_Status status;
    vbp_picture_data_h264 *picData = data->pic_data;

    // TODO: remove this debugging code
    if (mAcquiredBuffer == NULL || mAcquiredBuffer->renderBuffer.surface == VA_INVALID_SURFACE) {
        ETRACE("mAcquiredBuffer is NULL. Implementation bug.");
        return DECODE_FAIL;
    }
    for (uint32_t picIndex = 0; picIndex < data->num_pictures; picIndex++, picData++) {
        // sanity check
        if (picData == NULL || picData->pic_parms == NULL || picData->slc_data == NULL || picData->num_slices == 0) {
            return DECODE_PARSER_FAIL;
        }

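        // A second picture in the same buffer with neither field flag set means the
        // parser found a packed frame: two complete frames back to back in one input
        // buffer. Report the offset of the second frame so the caller can resubmit it.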
        if (picIndex > 0 &&
            (picData->pic_parms->CurrPic.flags & (VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD)) == 0) {
            // it is a packed frame buffer
            vbp_picture_data_h264 *lastPic = &data->pic_data[picIndex - 1];
            vbp_slice_data_h264 *sliceData = &(lastPic->slc_data[lastPic->num_slices - 1]);
            mPackedFrame.offSet = sliceData->slice_size + sliceData->slice_offset;
            mPackedFrame.timestamp = mCurrentPTS; // use the current time stamp for the packed frame
            ITRACE("slice data offset = %d, size = %d", sliceData->slice_offset, sliceData->slice_size);
            return DECODE_MULTIPLE_FRAME;
        }

        for (uint32_t sliceIndex = 0; sliceIndex < picData->num_slices; sliceIndex++) {
            status = decodeSlice(data, picIndex, sliceIndex);
            if (status != DECODE_SUCCESS) {
                endDecodingFrame(true);
                // TODO: this is new code
                // remove current frame from DPB as it can't be decoded.
                removeReferenceFromDPB(picData->pic_parms);
                return status;
            }
        }
    }
    return DECODE_SUCCESS;
}

Decode_Status VideoDecoderAVC::decodeSlice(vbp_data_h264 *data, uint32_t picIndex, uint32_t sliceIndex) {
    Decode_Status status;
    VAStatus vaStatus;
    uint32_t bufferIDCount = 0;
    // a maximum of 4 buffers are needed to render a slice: picture parameter, IQ matrix, slice parameter, slice data
    VABufferID bufferIDs[4];

    vbp_picture_data_h264 *picData = &(data->pic_data[picIndex]);
    vbp_slice_data_h264 *sliceData = &(picData->slc_data[sliceIndex]);
    VAPictureParameterBufferH264 *picParam = picData->pic_parms;
    VASliceParameterBufferH264 *sliceParam = &(sliceData->slc_parms);

    if (sliceParam->first_mb_in_slice == 0 || mDecodingFrame == false) {
        // either condition indicates the start of a new frame
        if (sliceParam->first_mb_in_slice != 0) {
            WTRACE("The first slice is lost.");
            // TODO: handle the lost first slice
        }
        if (mDecodingFrame) {
            // interlaced content: complete decoding of the first field
            vaStatus = vaEndPicture(mVADisplay, mVAContext);
            CHECK_VA_STATUS("vaEndPicture");

            // for interlaced content, the smaller field POC may only be known after the second field is parsed
            int32_t poc = getPOC(&(picParam->CurrPic));
            if (poc < mAcquiredBuffer->pictureOrder) {
                mAcquiredBuffer->pictureOrder = poc;
            }
        }

        // Check that there is no reference frame loss before decoding a frame

        // Update the reference frames and surface IDs for the DPB and the current frame
        status = updateDPB(picParam);
        CHECK_STATUS("updateDPB");

#ifndef USE_AVC_SHORT_FORMAT
        // We have to provide a hacked DPB rather than the complete DPB to libva as a workaround
        status = updateReferenceFrames(picData);
        CHECK_STATUS("updateReferenceFrames");
#endif
        vaStatus = vaBeginPicture(mVADisplay, mVAContext, mAcquiredBuffer->renderBuffer.surface);
        CHECK_VA_STATUS("vaBeginPicture");

        // start decoding a frame
        mDecodingFrame = true;

        vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VAPictureParameterBufferType,
            sizeof(VAPictureParameterBufferH264),
            1,
            picParam,
            &bufferIDs[bufferIDCount]);
        CHECK_VA_STATUS("vaCreatePictureParameterBuffer");
        bufferIDCount++;

        vaStatus = vaCreateBuffer(
            mVADisplay,
            mVAContext,
            VAIQMatrixBufferType,
            sizeof(VAIQMatrixBufferH264),
            1,
            data->IQ_matrix_buf,
            &bufferIDs[bufferIDCount]);
        CHECK_VA_STATUS("vaCreateIQMatrixBuffer");
        bufferIDCount++;
    }

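    // When USE_AVC_SHORT_FORMAT is defined, the driver consumes the short (base) slice
    // parameter format (VASliceParameterBufferH264Base) and builds the reference lists
    // itself, so setReference() and the long-format slice parameters are not used.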
#ifndef USE_AVC_SHORT_FORMAT

    status = setReference(sliceParam);
    CHECK_STATUS("setReference");

    vaStatus = vaCreateBuffer(
        mVADisplay,
        mVAContext,
        VASliceParameterBufferType,
        sizeof(VASliceParameterBufferH264),
        1,
        sliceParam,
        &bufferIDs[bufferIDCount]);
#else
    vaStatus = vaCreateBuffer(
        mVADisplay,
        mVAContext,
        VASliceParameterBufferType,
        sizeof(VASliceParameterBufferH264Base),
        1,
        sliceParam,
        &bufferIDs[bufferIDCount]);
#endif
    CHECK_VA_STATUS("vaCreateSliceParameterBuffer");
    bufferIDCount++;

    vaStatus = vaCreateBuffer(
        mVADisplay,
        mVAContext,
        VASliceDataBufferType,
        sliceData->slice_size, //size
        1,        //num_elements
        sliceData->buffer_addr + sliceData->slice_offset,
        &bufferIDs[bufferIDCount]);
    CHECK_VA_STATUS("vaCreateSliceDataBuffer");
    bufferIDCount++;

    vaStatus = vaRenderPicture(
        mVADisplay,
        mVAContext,
        bufferIDs,
        bufferIDCount);
    CHECK_VA_STATUS("vaRenderPicture");

    return DECODE_SUCCESS;
}

Decode_Status VideoDecoderAVC::setReference(VASliceParameterBufferH264 *sliceParam) {
    int32_t numList = 1;
    // TODO: set numList to 0 if it is I slice
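    // slice_type per H.264 Table 7-6: 0/5 = P, 1/6 = B, 2/7 = I, 3/8 = SP, 4/9 = SI;
    // B slices use both reference lists (RefPicList0 and RefPicList1).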
    if (sliceParam->slice_type == 1 || sliceParam->slice_type == 6) {
        // B slice
        numList = 2;
    }

    int32_t activeMinus1 = sliceParam->num_ref_idx_l0_active_minus1;
    VAPictureH264 *ref = sliceParam->RefPicList0;

    for (int32_t i = 0; i < numList; i++) {
        if (activeMinus1 >= REF_LIST_SIZE) {
            ETRACE("Invalid activeMinus1 (%d)", activeMinus1);
            return DECODE_PARSER_FAIL;
        }
        for (int32_t j = 0; j <= activeMinus1; j++, ref++) {
            if (!(ref->flags & VA_PICTURE_H264_INVALID)) {
                ref->picture_id = findSurface(ref);
                if (ref->picture_id == VA_INVALID_SURFACE) {
                    // the DecodeRefMissing error is counted once even if multiple references are missing
                    mAcquiredBuffer->renderBuffer.errBuf.errorNumber = 1;
                    mAcquiredBuffer->renderBuffer.errBuf.errorArray[0].type = DecodeRefMissing;

                    if (mLastReference) {
                        WTRACE("Reference frame %d is missing. Use last reference", getPOC(ref));
                        ref->picture_id = mLastReference->renderBuffer.surface;
                    } else {
                        ETRACE("Reference frame %d is missing. Stop decoding.", getPOC(ref));
                        return DECODE_NO_REFERENCE;
                    }
                }
            }
        }
        activeMinus1 = sliceParam->num_ref_idx_l1_active_minus1;
        ref = sliceParam->RefPicList1;
    }
    return DECODE_SUCCESS;
}

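// The decoder keeps two DPB arrays selected by mToggleDPB: the new DPB is built in the
// inactive copy from picParam->ReferenceFrames, the old copy is then invalidated, and
// mToggleDPB is flipped, so the previous frame's DPB stays intact while the new one is built.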
Decode_Status VideoDecoderAVC::updateDPB(VAPictureParameterBufferH264 *picParam) {
    clearAsReference(mToggleDPB);
    // pointer to the toggled (new) DPB
    DecodedPictureBuffer *dpb = mDPBs[!mToggleDPB];
    VAPictureH264 *ref = picParam->ReferenceFrames;

    // update the current picture ID
    picParam->CurrPic.picture_id = mAcquiredBuffer->renderBuffer.surface;

    // build the new DPB
    for (int32_t i = 0; i < MAX_REF_NUMBER; i++, ref++) {
        if (ref->flags & VA_PICTURE_H264_INVALID) {
            continue;
        }
#ifdef USE_AVC_SHORT_FORMAT
        ref->picture_id = findSurface(ref);
#endif
        dpb->poc = getPOC(ref);
        // look for the latest ref frame in the DPB with the specified POC, in case frames have the same POC
        dpb->surfaceBuffer = findRefSurfaceBuffer(ref);
        if (dpb->surfaceBuffer == NULL) {
            ETRACE("Reference frame %d is missing for current frame %d", dpb->poc, getPOC(&(picParam->CurrPic)));
            // the DecodeRefMissing error is counted once even if multiple references are missing
            mAcquiredBuffer->renderBuffer.errBuf.errorNumber = 1;
            mAcquiredBuffer->renderBuffer.errBuf.errorArray[0].type = DecodeRefMissing;
            if (dpb->poc == getPOC(&(picParam->CurrPic))) {
                WTRACE("updateDPB: Using the current picture for missing reference.");
                dpb->surfaceBuffer = mAcquiredBuffer;
            } else if (mLastReference) {
                WTRACE("updateDPB: Use last reference frame %d for missing reference.", mLastReference->pictureOrder);
                // TODO: this is new code for error resilience
                dpb->surfaceBuffer = mLastReference;
            } else {
                WTRACE("updateDPB: Unable to recover the missing reference frame.");
                // Continue building the DPB without updating the dpb pointer, as this
                // reference may not actually be used; this can happen especially after
                // seeking to a non-IDR I frame.
                continue;
                //return DECODE_NO_REFERENCE;
            }
        }
        if (dpb->surfaceBuffer) {
            // this surface is used as a reference
            dpb->surfaceBuffer->asReferernce = true;
        }
        dpb++;
    }

    // add the current frame to the DPB if it is a reference frame
    if ((picParam->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
        (picParam->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) {
        dpb->poc = getPOC(&(picParam->CurrPic));
        dpb->surfaceBuffer = mAcquiredBuffer;
        dpb->surfaceBuffer->asReferernce = true;
    }
    // invalidate the currently used DPB
    invalidateDPB(mToggleDPB);
    mToggleDPB = !mToggleDPB;
    return DECODE_SUCCESS;
}

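// Rebuild picParam->ReferenceFrames from the per-slice reference lists so that it only
// contains pictures that are actually referenced; complementary top/bottom fields of the
// same frame are merged into a single frame entry.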
Decode_Status VideoDecoderAVC::updateReferenceFrames(vbp_picture_data_h264 *picData) {
    bool found = false;
    uint32_t flags = 0;
    VAPictureParameterBufferH264 *picParam = picData->pic_parms;
    VASliceParameterBufferH264 *sliceParam = NULL;
    uint8_t activeMinus1 = 0;
    VAPictureH264 *refList = NULL;
    VAPictureH264 *dpb = picParam->ReferenceFrames;
    VAPictureH264 *refFrame = NULL;

    // invalidate the DPB in the picture parameter buffer
    memset(picParam->ReferenceFrames, 0xFF, sizeof(picParam->ReferenceFrames));
    picParam->num_ref_frames = 0;

    // update the DPB from the reference list in each slice
    for (uint32_t slice = 0; slice < picData->num_slices; slice++) {
        sliceParam = &(picData->slc_data[slice].slc_parms);

        for (int32_t list = 0; list < 2; list++) {
            refList = (list == 0) ? sliceParam->RefPicList0 :
                                    sliceParam->RefPicList1;
            activeMinus1 = (list == 0) ? sliceParam->num_ref_idx_l0_active_minus1 :
                                         sliceParam->num_ref_idx_l1_active_minus1;
            if (activeMinus1 >= REF_LIST_SIZE) {
                return DECODE_PARSER_FAIL;
            }
            for (uint8_t item = 0; item < (uint8_t)(activeMinus1 + 1); item++, refList++) {
                if (refList->flags & VA_PICTURE_H264_INVALID) {
                    break;
                }
                found = false;
                refFrame = picParam->ReferenceFrames;
                for (uint8_t frame = 0; frame < picParam->num_ref_frames; frame++, refFrame++) {
                    if (refFrame->TopFieldOrderCnt == refList->TopFieldOrderCnt) {
                        // check for a complementary field
                        flags = refFrame->flags | refList->flags;
                        // if both TOP and BOTTOM are set, clear the field flags and keep a frame reference
                        if ((flags & VA_PICTURE_H264_TOP_FIELD) &&
                            (flags & VA_PICTURE_H264_BOTTOM_FIELD)) {
                            refFrame->flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
                        }
                        found = true;  // already in the DPB; will not add this one
                        break;
                    }
                }
                if (found == false) {
                    // add a new reference to the DPB
                    dpb->picture_id = findSurface(refList);
                    if (dpb->picture_id == VA_INVALID_SURFACE) {
                        if (mLastReference != NULL) {
                            dpb->picture_id = mLastReference->renderBuffer.surface;
                        } else {
                            ETRACE("Reference frame %d is missing. Stop updating reference frames.", getPOC(refList));
                            return DECODE_NO_REFERENCE;
                        }
                    }
                    dpb->flags = refList->flags;
                    // If the bottom-field flag is set for this DPB entry, its top field must also
                    // be in the DPB, so clear the bottom flag; otherwise the VED would be confused
                    // when addressing the top field.
                    if (dpb->flags & VA_PICTURE_H264_BOTTOM_FIELD)
                        dpb->flags &= (~VA_PICTURE_H264_BOTTOM_FIELD);
                    dpb->frame_idx = refList->frame_idx;
                    dpb->TopFieldOrderCnt = refList->TopFieldOrderCnt;
                    dpb->BottomFieldOrderCnt = refList->BottomFieldOrderCnt;
                    dpb++;
                    picParam->num_ref_frames++;
                }
            }
        }
    }
    return DECODE_SUCCESS;
}

void VideoDecoderAVC::removeReferenceFromDPB(VAPictureParameterBufferH264 *picParam) {
    // remove the current frame from DPB as it can't be decoded.
    if ((picParam->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) ||
        (picParam->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE)) {
        DecodedPictureBuffer *dpb = mDPBs[mToggleDPB];
        int32_t poc = getPOC(&(picParam->CurrPic));
        for (int32_t i = 0; i < DPB_SIZE; i++, dpb++) {
            if (poc == dpb->poc) {
                dpb->poc = (int32_t)POC_DEFAULT;
                if (dpb->surfaceBuffer) {
                    dpb->surfaceBuffer->asReferernce = false;
                }
                dpb->surfaceBuffer = NULL;
                break;
            }
        }
    }
}

int32_t VideoDecoderAVC::getPOC(VAPictureH264 *pic) {
    if (pic->flags & VA_PICTURE_H264_BOTTOM_FIELD) {
        return pic->BottomFieldOrderCnt;
    }
    return pic->TopFieldOrderCnt;
}

VASurfaceID VideoDecoderAVC::findSurface(VAPictureH264 *pic) {
    VideoSurfaceBuffer *p = findSurfaceBuffer(pic);
    if (p == NULL) {
        ETRACE("Could not find surface for poc %d", getPOC(pic));
        return VA_INVALID_SURFACE;
    }
    return p->renderBuffer.surface;
}

VideoSurfaceBuffer* VideoDecoderAVC::findSurfaceBuffer(VAPictureH264 *pic) {
    DecodedPictureBuffer *dpb = mDPBs[mToggleDPB];
    for (int32_t i = 0; i < DPB_SIZE; i++, dpb++) {
        if (dpb->poc == pic->BottomFieldOrderCnt ||
            dpb->poc == pic->TopFieldOrderCnt) {
            // TODO: remove this debugging code
            if (dpb->surfaceBuffer == NULL) {
                ETRACE("Invalid surface buffer in the DPB for poc %d.", getPOC(pic));
            }
            return dpb->surfaceBuffer;
        }
    }
    // ETRACE("Unable to find surface for poc %d", getPOC(pic));
    return NULL;
}

VideoSurfaceBuffer* VideoDecoderAVC::findRefSurfaceBuffer(VAPictureH264 *pic) {
    DecodedPictureBuffer *dpb = mDPBs[mToggleDPB];
    // always look for the latest entry in the DPB, in case reference frames have the same POC
    dpb += (DPB_SIZE - 1);
    for (int32_t i = DPB_SIZE; i > 0; i--, dpb--) {
        if (dpb->poc == pic->BottomFieldOrderCnt ||
            dpb->poc == pic->TopFieldOrderCnt) {
            // TODO: remove this debugging code
            if (dpb->surfaceBuffer == NULL) {
                ETRACE("Invalid surface buffer in the DPB for poc %d.", getPOC(pic));
            }
            return dpb->surfaceBuffer;
        }
    }
    ETRACE("Unable to find surface for poc %d", getPOC(pic));
    return NULL;
}

void VideoDecoderAVC::invalidateDPB(int toggle) {
    DecodedPictureBuffer* p = mDPBs[toggle];
    for (int i = 0; i < DPB_SIZE; i++) {
        p->poc = (int32_t) POC_DEFAULT;
        p->surfaceBuffer = NULL;
        p++;
    }
}

void VideoDecoderAVC::clearAsReference(int toggle) {
    DecodedPictureBuffer* p = mDPBs[toggle];
    for (int i = 0; i < DPB_SIZE; i++) {
        if (p->surfaceBuffer) {
            p->surfaceBuffer->asReferernce = false;
        }
        p++;
    }
}

Decode_Status VideoDecoderAVC::startVA(vbp_data_h264 *data) {
    int32_t DPBSize = getDPBSize(data);

    // Use High profile for all H.264 profiles (Baseline, Main, and High) except Constrained Baseline.
    VAProfile vaProfile = VAProfileH264High;

    // TODO: determine when to use VAProfileH264ConstrainedBaseline; set it only if we are told to do so
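    // Constrained Baseline: the stream is Baseline (profile_idc 66 or constraint_set0_flag
    // set) and also obeys the Main-profile constraints (constraint_set1_flag set).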
    if ((data->codec_data->profile_idc == 66 || data->codec_data->constraint_set0_flag == 1) &&
        data->codec_data->constraint_set1_flag == 1) {
        if (mErrorConcealment) {
            vaProfile = VAProfileH264ConstrainedBaseline;
        }
    }

    VideoDecoderBase::setOutputWindowSize(mConfigBuffer.flag & WANT_ADAPTIVE_PLAYBACK ? OUTPUT_WINDOW_SIZE : DPBSize);
    updateFormatInfo(data);

    // For 1080p, limit the total number of surfaces to 19 according to the hardware limitation.
    // (An earlier workaround that reduced the maximum from 19 to 10 for memory shortage has been removed.)
    if (mVideoFormatInfo.height == 1088 && DPBSize + AVC_EXTRA_SURFACE_NUMBER > 19) {
        DPBSize = 19 - AVC_EXTRA_SURFACE_NUMBER;
    }

    if (mConfigBuffer.flag & WANT_ADAPTIVE_PLAYBACK) {
        // When adaptive playback is enabled, turn off low delay mode.
        // Otherwise there may be a 240 ms stutter if the output mode is changed from LowDelay to Delay.
        enableLowDelayMode(false);
    } else {
        // for the Baseline profile, enable low delay mode automatically
        enableLowDelayMode(data->codec_data->profile_idc == 66);
    }

    return VideoDecoderBase::setupVA(DPBSize + AVC_EXTRA_SURFACE_NUMBER, vaProfile);
}

void VideoDecoderAVC::updateFormatInfo(vbp_data_h264 *data) {
    // new video size
    uint32_t width = (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16;
    uint32_t height = (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16;
    ITRACE("updateFormatInfo: current size: %d x %d, new size: %d x %d",
        mVideoFormatInfo.width, mVideoFormatInfo.height, width, height);

    if ((mVideoFormatInfo.width != width ||
        mVideoFormatInfo.height != height) &&
        width && height) {
        if (VideoDecoderBase::alignMB(mVideoFormatInfo.width) != width ||
            VideoDecoderBase::alignMB(mVideoFormatInfo.height) != height) {
            mSizeChanged = true;
            ITRACE("Video size is changed.");
        }
        mVideoFormatInfo.width = width;
        mVideoFormatInfo.height = height;
    }

    // video_range has a default value of 0.
    mVideoFormatInfo.videoRange = data->codec_data->video_full_range_flag;

    switch (data->codec_data->matrix_coefficients) {
        case 1:
            mVideoFormatInfo.colorMatrix = VA_SRC_BT709;
            break;

        // ITU-R Recommendation BT.470-6 System B, G (MP4), same as
        // SMPTE 170M/BT601
        case 5:
        case 6:
            mVideoFormatInfo.colorMatrix = VA_SRC_BT601;
            break;

        default:
            // unknown color matrix; set to 0 so the color space flag will not be set.
            mVideoFormatInfo.colorMatrix = 0;
            break;
    }
    mVideoFormatInfo.aspectX = data->codec_data->sar_width;
    mVideoFormatInfo.aspectY = data->codec_data->sar_height;
    mVideoFormatInfo.bitrate = data->codec_data->bit_rate;
    mVideoFormatInfo.cropLeft = data->codec_data->crop_left;
    mVideoFormatInfo.cropRight = data->codec_data->crop_right;
    mVideoFormatInfo.cropTop = data->codec_data->crop_top;
    mVideoFormatInfo.cropBottom = data->codec_data->crop_bottom;

    ITRACE("Cropping: left = %d, top = %d, right = %d, bottom = %d",
        data->codec_data->crop_left,
        data->codec_data->crop_top,
        data->codec_data->crop_right,
        data->codec_data->crop_bottom);

    if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION) {
        mVideoFormatInfo.actualBufferNeeded = mConfigBuffer.surfaceNumber;
    } else {
        // The actual number of buffers needed is
        // outputQueue + nativewindow_owned + num_ref_frames + widi_need_max + 1 (available buffer),
        // where outputQueue = (DPB < 8) ? DPB : 8
        mVideoFormatInfo.actualBufferNeeded = mOutputWindowSize + NW_CONSUMED /* Owned by native window */
                                              + data->codec_data->num_ref_frames
#ifndef USE_GEN_HW
                                              + HDMI_CONSUMED /* Two extra buffers are needed for native window buffer cycling */
                                              + (mWiDiOn ? WIDI_CONSUMED : 0) /* WiDi maximum needs */
#endif
                                              + 1;
    }
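    // Illustrative example (assumed values): with mOutputWindowSize = 8 and
    // num_ref_frames = 4, the non-protected, non-WiDi path needs
    // 8 + 2 (native window) + 4 + 2 (HDMI) + 1 = 17 buffers.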

    ITRACE("actualBufferNeeded = %d", mVideoFormatInfo.actualBufferNeeded);

    mVideoFormatInfo.valid = true;

    setRenderRect();
}

bool VideoDecoderAVC::isWiDiStatusChanged() {
#ifndef USE_GEN_HW
    if (mWiDiOn)
        return false;

    if (mConfigBuffer.flag & WANT_SURFACE_PROTECTION)
        return false;

    if (!(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER))
        return false;

    char prop[PROPERTY_VALUE_MAX];
    bool widi_on = (property_get("media.widi.enabled", prop, NULL) > 0) &&
                    (!strcmp(prop, "1") || !strcasecmp(prop, "true"));
    if (widi_on) {
        mVideoFormatInfo.actualBufferNeeded += WIDI_CONSUMED;
        mWiDiOn = true;
        ITRACE("WiDi is enabled, actual buffer needed is %d", mVideoFormatInfo.actualBufferNeeded);
        return true;
    }
    return false;
#else
    return false;
#endif
}

Decode_Status VideoDecoderAVC::handleNewSequence(vbp_data_h264 *data) {
    updateFormatInfo(data);
    bool needFlush = false;
    bool rawDataMode = !(mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER);

    if (!rawDataMode) {
        needFlush = (mVideoFormatInfo.width > mVideoFormatInfo.surfaceWidth)
                || (mVideoFormatInfo.height > mVideoFormatInfo.surfaceHeight)
                || isWiDiStatusChanged()
                || (mVideoFormatInfo.actualBufferNeeded > mConfigBuffer.surfaceNumber);
    }

    if (needFlush || (rawDataMode && mSizeChanged)) {
        mSizeChanged = false;
        flushSurfaceBuffers();
        return DECODE_FORMAT_CHANGE;
    } else
        return DECODE_SUCCESS;
}

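// A buffer starts a new frame unless it either continues a partial frame
// (first_mb_in_slice != 0 with an unchanged PTS) or carries the opposite field of the
// previous field picture.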
bool VideoDecoderAVC::isNewFrame(vbp_data_h264 *data, bool equalPTS) {
    if (data->num_pictures == 0) {
        ETRACE("num_pictures == 0");
        return true;
    }

    vbp_picture_data_h264* picData = data->pic_data;
    if (picData->num_slices == 0) {
        ETRACE("num_slices == 0");
        return true;
    }

    bool newFrame = false;
    uint32_t fieldFlags = VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD;

    if (picData->slc_data[0].slc_parms.first_mb_in_slice != 0) {
        // not the first slice; assume it is the continuation of a partial frame
        // TODO: check whether it is a new frame boundary, as the first slice may get lost in the streaming case.
        WTRACE("first_mb_in_slice != 0");
        if (!equalPTS) {
            // return true if the timestamp differs; this is a workaround for a streaming case
            WTRACE("different PTS, treat it as a new frame");
            return true;
        }
    } else {
        if ((picData->pic_parms->CurrPic.flags & fieldFlags) == fieldFlags) {
            ETRACE("Current picture has both odd field and even field.");
        }
        // The current picture is a field or a frame and the buffer contains the first slice;
        // check whether the current picture and the last picture form an opposite field pair.
        if (((mLastPictureFlags | picData->pic_parms->CurrPic.flags) & fieldFlags) == fieldFlags) {
            // opposite field
            newFrame = false;
            WTRACE("current picture is not at frame boundary.");
            mLastPictureFlags = 0;
        } else {
            newFrame = true;
            mLastPictureFlags = 0;
            for (uint32_t i = 0; i < data->num_pictures; i++) {
                mLastPictureFlags |= data->pic_data[i].pic_parms->CurrPic.flags;
            }
            if ((mLastPictureFlags & fieldFlags) == fieldFlags) {
                // the current buffer contains both the odd field and the even field.
                mLastPictureFlags = 0;
            }
        }
    }

    return newFrame;
}

int32_t VideoDecoderAVC::getDPBSize(vbp_data_h264 *data) {
    // maxDPBSize = min(1024 * MaxDPB / (PicWidthInMbs * FrameHeightInMbs * 384), 16), per H.264 Annex A
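    // Worked example (illustrative): a 1920x1088 level-4.1 stream has PicWidthInMbs = 120
    // and FrameHeightInMbs = 68, so maxDPBSize = 12288 * 1024 / (120 * 68 * 384) = 4 frames.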
    struct DPBTable {
        int32_t level;
        float maxDPB;
    } dpbTable[] = {
        {9,  148.5},
        {10, 148.5},
        {11, 337.5},
        {12, 891.0},
        {13, 891.0},
        {20, 891.0},
        {21, 1782.0},
        {22, 3037.5},
        {30, 3037.5},
        {31, 6750.0},
        {32, 7680.0},
        {40, 12288.0},
        {41, 12288.0},
        {42, 13056.0},
        {50, 41400.0},
        {51, 69120.0}
    };

    int32_t count = sizeof(dpbTable)/sizeof(DPBTable);
    float maxDPB = 0;
    for (int32_t i = 0; i < count; i++)
    {
        if (dpbTable[i].level == data->codec_data->level_idc) {
            maxDPB = dpbTable[i].maxDPB;
            break;
        }
    }

    int32_t maxDPBSize = maxDPB * 1024 / (
        (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) *
        (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) *
        384);

    if (maxDPBSize > 16) {
        maxDPBSize = 16;
    } else if (maxDPBSize == 0) {
        maxDPBSize = 3;
    }
    if (maxDPBSize < data->codec_data->num_ref_frames) {
        maxDPBSize = data->codec_data->num_ref_frames;
    }

    // add one extra frame for current frame.
    maxDPBSize += 1;
    ITRACE("maxDPBSize = %d, num_ref_frame = %d", maxDPBSize, data->codec_data->num_ref_frames);
    return maxDPBSize;
}

Decode_Status VideoDecoderAVC::checkHardwareCapability() {
#ifndef USE_GEN_HW
    VAStatus vaStatus;
    VAConfigAttrib cfgAttribs[2];
    cfgAttribs[0].type = VAConfigAttribMaxPictureWidth;
    cfgAttribs[1].type = VAConfigAttribMaxPictureHeight;
    vaStatus = vaGetConfigAttributes(mVADisplay, VAProfileH264High,
            VAEntrypointVLD, cfgAttribs, 2);
    CHECK_VA_STATUS("vaGetConfigAttributes");
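    // Note: this compares the product of the driver's maximum width and height against
    // the clip's pixel count rather than checking each dimension individually.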
    if (cfgAttribs[0].value * cfgAttribs[1].value < (uint32_t)mVideoFormatInfo.width * (uint32_t)mVideoFormatInfo.height) {
        ETRACE("The maximum supported resolution %d x %d is smaller than the clip resolution %d x %d",
                cfgAttribs[0].value, cfgAttribs[1].value, mVideoFormatInfo.width, mVideoFormatInfo.height);
        return DECODE_DRIVER_FAIL;
    }
#endif
    return DECODE_SUCCESS;
}

#ifdef USE_AVC_SHORT_FORMAT
Decode_Status VideoDecoderAVC::getCodecSpecificConfigs(
    VAProfile profile, VAConfigID *config)
{
    VAStatus vaStatus;
    VAConfigAttrib attrib[2];

    if (config == NULL) {
        ETRACE("Invalid parameter!");
        return DECODE_FAIL;
    }

    attrib[0].type = VAConfigAttribRTFormat;
    attrib[0].value = VA_RT_FORMAT_YUV420;
    attrib[1].type = VAConfigAttribDecSliceMode;
    attrib[1].value = VA_DEC_SLICE_MODE_NORMAL;

    vaStatus = vaGetConfigAttributes(mVADisplay, profile, VAEntrypointVLD, &attrib[1], 1);

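    // VA_DEC_SLICE_MODE_BASE means the driver accepts the short (base) slice parameter
    // format used with VASliceParameterBufferH264Base above; VA_DEC_SLICE_MODE_NORMAL
    // requires the full long-format slice parameters.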
    if (attrib[1].value & VA_DEC_SLICE_MODE_BASE) {
        ITRACE("AVC short format used");
        attrib[1].value = VA_DEC_SLICE_MODE_BASE;
    } else if (attrib[1].value & VA_DEC_SLICE_MODE_NORMAL) {
        ITRACE("AVC long format used");
        attrib[1].value = VA_DEC_SLICE_MODE_NORMAL;
    } else {
        ETRACE("Unsupported Decode Slice Mode!");
        return DECODE_FAIL;
    }

    vaStatus = vaCreateConfig(
            mVADisplay,
            profile,
            VAEntrypointVLD,
            &attrib[0],
            2,
            config);
    CHECK_VA_STATUS("vaCreateConfig");

    return DECODE_SUCCESS;
}
#endif