/*
 * Copyright (C) 2021 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.mediapc.cts;

import static android.mediapc.cts.FrameDropTestBase.DECODE_31S;

import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.util.Pair;
import android.view.Surface;

import java.io.File;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

/**
 * The following class calculates the frame drops for the given array of testFiles playback.
 * It will do playback for at least 30 seconds worth of input data or for at most 31 seconds.
 * If input reaches EOS, it will rewind the input to the start position.
37 */ 38 public class PlaybackFrameDrop extends CodecDecoderTestBase { 39 private static final int AV1_INITIAL_DELAY = 8; 40 private final String mDecoderName; 41 private final String[] mTestFiles; 42 private final long mEachFrameTimeIntervalUs; 43 private final boolean mIsAsync; 44 45 private int mFrameDropCount; 46 private ByteBuffer mBuffer; 47 private ArrayList<MediaCodec.BufferInfo> mBufferInfos; 48 49 private long mInputMaxPtsUs; 50 private long mRenderStartTimeUs; 51 private long mBasePts; 52 private long mMaxPts; 53 private long mDecodeStartTimeMs; 54 private int mSampleIndex; 55 private int mMaxNumFrames; 56 private int mInitialDelay; 57 58 private OutputHandler mOutputHandler; 59 private Thread mThread; 60 61 class OutputHandler implements Runnable { 62 class BufferData { 63 public final int frameCount; // total count of full frames up to this point 64 public final int bufferIndex; 65 public final MediaCodec.BufferInfo info; 66 BufferData(int frameCount, int bufferIndex, MediaCodec.BufferInfo info)67 public BufferData(int frameCount, int bufferIndex, MediaCodec.BufferInfo info) { 68 this.frameCount = frameCount; 69 this.bufferIndex = bufferIndex; 70 this.info = info; 71 } 72 }; 73 74 private final ArrayList<BufferData> mQueue = new ArrayList<>(); 75 private boolean mStop = false; 76 private final Lock mLock = new ReentrantLock(); 77 private final Condition mCondition = mLock.newCondition(); 78 getOutput()79 private BufferData getOutput() throws InterruptedException { 80 BufferData output = null; 81 mLock.lock(); 82 try { 83 while (!mStop) { 84 if (mQueue.isEmpty()) { 85 mCondition.await(); 86 } else { 87 output = mQueue.remove(0); 88 break; 89 } 90 } 91 } finally { 92 mLock.unlock(); 93 } 94 return output; 95 } 96 97 @Override run()98 public void run() { 99 try { 100 while (true) { 101 BufferData output = getOutput(); 102 if (output != null) { 103 delayedReleaseOutput(output.frameCount, output.bufferIndex, output.info); 104 } else { 105 break; 106 } 107 } 
108 } catch (InterruptedException e) { 109 // ignore 110 } 111 } 112 add(int outputCount, int bufferIndex, MediaCodec.BufferInfo info)113 public void add(int outputCount, int bufferIndex, MediaCodec.BufferInfo info) { 114 mLock.lock(); 115 try { 116 mQueue.add(new BufferData(outputCount, bufferIndex, info)); 117 mCondition.signal(); 118 } finally { 119 mLock.unlock(); 120 } 121 } 122 stop()123 public void stop() throws Exception { 124 mLock.lock(); 125 try { 126 mStop = true; 127 mCondition.signal(); 128 } finally { 129 mLock.unlock(); 130 } 131 } 132 } 133 PlaybackFrameDrop(String mime, String decoderName, String[] testFiles, Surface surface, int frameRate, boolean isAsync)134 PlaybackFrameDrop(String mime, String decoderName, String[] testFiles, Surface surface, 135 int frameRate, boolean isAsync) { 136 super(mime, null); 137 mDecoderName = decoderName; 138 mTestFiles = testFiles; 139 mSurface = surface; 140 mEachFrameTimeIntervalUs = 1000000 / frameRate; 141 mIsAsync = isAsync; 142 mInputMaxPtsUs = 0; 143 mBasePts = 0; 144 mMaxPts = 0; 145 mSampleIndex = 0; 146 mFrameDropCount = 0; 147 mBufferInfos = new ArrayList<>(); 148 // When testing AV1, because of super frames, we allow initial few frames to be delayed. 149 mInitialDelay = mime.equals(MediaFormat.MIMETYPE_VIDEO_AV1) ? 
AV1_INITIAL_DELAY : 0; 150 // Decode for 30 seconds 151 mMaxNumFrames = frameRate * 30 + mInitialDelay + 1; 152 mOutputHandler = new OutputHandler(); 153 mThread = new Thread(mOutputHandler); 154 } 155 createInputList(MediaFormat format, ByteBuffer buffer, ArrayList<MediaCodec.BufferInfo> list, int offset, long ptsOffset)156 private MediaFormat createInputList(MediaFormat format, ByteBuffer buffer, 157 ArrayList<MediaCodec.BufferInfo> list, int offset, long ptsOffset) { 158 int csdBuffersSize = 0; 159 if (hasCSD(format)) { 160 MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo(); 161 bufferInfo.offset = offset; 162 bufferInfo.size = 0; 163 bufferInfo.presentationTimeUs = 0; 164 bufferInfo.flags = MediaCodec.BUFFER_FLAG_CODEC_CONFIG; 165 for (int i = 0; ; i++) { 166 String csdKey = "csd-" + i; 167 if (format.containsKey(csdKey)) { 168 ByteBuffer csdBuffer = format.getByteBuffer(csdKey); 169 bufferInfo.size += csdBuffer.limit(); 170 buffer.put(csdBuffer); 171 format.removeKey(csdKey); 172 } else break; 173 } 174 list.add(bufferInfo); 175 offset += bufferInfo.size; 176 } 177 while (true) { 178 MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo(); 179 bufferInfo.size = mExtractor.readSampleData(buffer, offset); 180 if (bufferInfo.size < 0) { 181 break; 182 } 183 bufferInfo.offset = offset; 184 bufferInfo.presentationTimeUs = ptsOffset + mExtractor.getSampleTime(); 185 mInputMaxPtsUs = Math.max(mInputMaxPtsUs, bufferInfo.presentationTimeUs); 186 int flags = mExtractor.getSampleFlags(); 187 bufferInfo.flags = 0; 188 if ((flags & MediaExtractor.SAMPLE_FLAG_SYNC) != 0) { 189 bufferInfo.flags |= MediaCodec.BUFFER_FLAG_KEY_FRAME; 190 } 191 list.add(bufferInfo); 192 mExtractor.advance(); 193 offset += bufferInfo.size; 194 } 195 buffer.clear(); 196 buffer.position(offset); 197 return format; 198 } 199 setUpSourceFiles()200 public ArrayList<MediaFormat> setUpSourceFiles() throws Exception { 201 ArrayList<MediaFormat> formats = new ArrayList<>(); 202 for 
(String file : mTestFiles) { 203 formats.add(setUpSource(file)); 204 mExtractor.release(); 205 } 206 int totalSize = 0; 207 for (String srcFile : mTestFiles) { 208 File file = new File(mInpPrefix + srcFile); 209 totalSize += (int) file.length(); 210 } 211 totalSize <<= 1; 212 long ptsOffset = 0; 213 int buffOffset = 0; 214 mBuffer = ByteBuffer.allocate(totalSize); 215 for (String file : mTestFiles) { 216 formats.add(createInputList(setUpSource(file), mBuffer, mBufferInfos, buffOffset, 217 ptsOffset)); 218 mExtractor.release(); 219 ptsOffset = mInputMaxPtsUs + 1000000L; 220 buffOffset = (mBufferInfos.get(mBufferInfos.size() - 1).offset) + 221 (mBufferInfos.get(mBufferInfos.size() - 1).size); 222 } 223 return formats; 224 } 225 getFrameDropCount()226 public int getFrameDropCount() throws Exception { 227 ArrayList<MediaFormat> formats = setUpSourceFiles(); 228 229 // If the decoder doesn't support the formats, then return Integer.MAX_VALUE to indicate 230 // that all frames were dropped 231 if (!areFormatsSupported(mDecoderName, formats)) { 232 return Integer.MAX_VALUE; 233 } 234 235 mCodec = MediaCodec.createByCodecName(mDecoderName); 236 configureCodec(formats.get(0), mIsAsync, false, false); 237 mThread.start(); 238 mCodec.start(); 239 mDecodeStartTimeMs = System.currentTimeMillis(); 240 doWork(Integer.MAX_VALUE); 241 queueEOS(); 242 waitForAllOutputs(); 243 mOutputHandler.stop(); 244 mThread.join(); 245 mCodec.stop(); 246 mCodec.release(); 247 return mFrameDropCount; 248 } 249 250 @Override enqueueInput(int bufferIndex)251 void enqueueInput(int bufferIndex) { 252 if (mSampleIndex >= mBufferInfos.size() || 253 // Decode for mMaxNumFrames samples or for utmost 31 seconds 254 mInputCount >= mMaxNumFrames || 255 (System.currentTimeMillis() - mDecodeStartTimeMs > DECODE_31S)) { 256 enqueueEOS(bufferIndex); 257 } else { 258 MediaCodec.BufferInfo info = mBufferInfos.get(mSampleIndex++); 259 if (info.size > 0) { 260 ByteBuffer dstBuf = mCodec.getInputBuffer(bufferIndex); 
261 dstBuf.put(mBuffer.array(), info.offset, info.size); 262 mInputCount++; 263 } 264 if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { 265 mSawInputEOS = true; 266 } 267 long pts = info.presentationTimeUs; 268 mMaxPts = Math.max(mMaxPts, mBasePts + pts); 269 mCodec.queueInputBuffer(bufferIndex, 0, info.size, mBasePts + pts, info.flags); 270 // If input reaches the end of samples, rewind to start position. 271 if (mSampleIndex == mBufferInfos.size()) { 272 mSampleIndex = 0; 273 mBasePts = mMaxPts + 1000000L; 274 } 275 } 276 } 277 getRenderTimeUs(int frameIndex)278 private long getRenderTimeUs(int frameIndex) { 279 return mRenderStartTimeUs + frameIndex * mEachFrameTimeIntervalUs; 280 } 281 282 @Override releaseOutput(int outputCount, int bufferIndex, MediaCodec.BufferInfo info)283 protected void releaseOutput(int outputCount, int bufferIndex, MediaCodec.BufferInfo info) { 284 mOutputHandler.add(outputCount, bufferIndex, info); 285 } 286 delayedReleaseOutput(int outputCount, int bufferIndex, MediaCodec.BufferInfo info)287 void delayedReleaseOutput(int outputCount, int bufferIndex, MediaCodec.BufferInfo info) { 288 // We will limit the playback to 60 fps using the system timestamps. 289 long nowUs = System.nanoTime() / 1000; 290 291 if (outputCount == 0) { 292 // delay rendering the first frame by the specific delay 293 mRenderStartTimeUs = nowUs + mInitialDelay * mEachFrameTimeIntervalUs; 294 } 295 296 if (nowUs > getRenderTimeUs(outputCount + 1)) { 297 // If the current sample timeStamp is greater than the actual presentation timeStamp 298 // of the next sample, we will consider it as a frame drop and don't render. 299 mFrameDropCount++; 300 mCodec.releaseOutputBuffer(bufferIndex, false); 301 } else if (nowUs > getRenderTimeUs(outputCount)) { 302 // If the current sample timeStamp is greater than the actual presentation timeStamp 303 // of the current sample, we can render it. 
304 mCodec.releaseOutputBuffer(bufferIndex, true); 305 } else { 306 // If the current sample timestamp is less than the actual presentation timeStamp, 307 // We are okay with directly rendering the sample if we are less by not more than 308 // half of one sample duration. Otherwise we sleep for how much more we are less 309 // than the half of one sample duration. 310 if ((getRenderTimeUs(outputCount) - nowUs) > (mEachFrameTimeIntervalUs / 2)) { 311 try { 312 Thread.sleep(((getRenderTimeUs(outputCount) - nowUs) - 313 (mEachFrameTimeIntervalUs / 2)) / 1000); 314 } catch (InterruptedException e) { 315 // Do nothing. 316 } 317 } 318 mCodec.releaseOutputBuffer(bufferIndex, true); 319 } 320 } 321 } 322