1 /* 2 * Copyright (C) 2023 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 package android.mediav2.common.cts; 18 19 import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface; 20 import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible; 21 import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUVP010; 22 import static android.mediav2.common.cts.CodecEncoderTestBase.colorFormatToString; 23 import static android.mediav2.common.cts.CodecTestBase.BOARD_SDK_IS_BEFORE_U; 24 import static android.mediav2.common.cts.CodecTestBase.PROFILE_HLG_MAP; 25 import static android.mediav2.common.cts.CodecTestBase.VNDK_IS_BEFORE_U; 26 import static android.mediav2.common.cts.CodecTestBase.hasSupportForColorFormat; 27 import static android.mediav2.common.cts.CodecTestBase.isDefaultCodec; 28 import static android.mediav2.common.cts.CodecTestBase.isHardwareAcceleratedCodec; 29 import static android.mediav2.common.cts.CodecTestBase.isSoftwareCodec; 30 import static android.mediav2.common.cts.CodecTestBase.isVendorCodec; 31 import static android.mediav2.common.cts.MuxerUtils.getMuxerFormatForMediaType; 32 33 import static org.junit.Assert.assertEquals; 34 import static org.junit.Assert.assertFalse; 35 import static org.junit.Assert.assertNotEquals; 36 import static org.junit.Assert.assertTrue; 37 import static org.junit.Assert.fail; 38 import static org.junit.Assume.assumeTrue; 39 40 import android.annotation.TargetApi; 41 import android.media.MediaCodec; 42 import android.media.MediaExtractor; 43 import android.media.MediaFormat; 44 import android.media.MediaMuxer; 45 import android.util.Log; 46 import android.util.Pair; 47 import android.view.Surface; 48 49 import com.android.compatibility.common.util.Preconditions; 50 51 import org.junit.After; 52 import org.junit.Assume; 53 import org.junit.Before; 54 import org.junit.Rule; 55 import org.junit.rules.TestName; 56 57 import java.io.IOException; 58 import java.nio.ByteBuffer; 59 import java.util.ArrayList; 60 import java.util.List; 61 import java.util.Objects; 62 import java.util.stream.IntStream; 63 64 /** 65 * Wrapper class for trying and testing encoder components in surface mode. 66 */ 67 public class CodecEncoderSurfaceTestBase { 68 private static final String LOG_TAG = CodecEncoderSurfaceTestBase.class.getSimpleName(); 69 private static final boolean ENABLE_LOGS = false; 70 71 protected final String mEncoderName; 72 protected final String mEncMediaType; 73 protected final String mDecoderName; 74 protected final String mTestFileMediaType; 75 protected final String mTestFile; 76 protected final EncoderConfigParams mEncCfgParams; 77 protected final int mDecColorFormat; 78 protected final boolean mIsOutputToneMapped; 79 protected final boolean mUsePersistentSurface; 80 protected final String mTestArgs; 81 82 protected MediaExtractor mExtractor; 83 protected MediaCodec mEncoder; 84 protected MediaFormat mEncoderFormat; 85 protected final CodecAsyncHandler mAsyncHandleEncoder = new CodecAsyncHandler(); 86 protected MediaCodec mDecoder; 87 protected MediaFormat mDecoderFormat; 88 protected final CodecAsyncHandler mAsyncHandleDecoder = new CodecAsyncHandler(); 89 protected boolean mIsCodecInAsyncMode; 90 protected boolean mSignalEOSWithLastFrame; 91 protected boolean mSawDecInputEOS; 92 protected boolean mSawDecOutputEOS; 93 protected boolean mSawEncOutputEOS; 94 protected int mDecInputCount; 95 protected int mDecOutputCount; 96 protected int mEncOutputCount; 97 protected int mLatency; 98 protected boolean mReviseLatency; 99 100 protected final StringBuilder mTestConfig = new StringBuilder(); 101 protected final StringBuilder mTestEnv = new StringBuilder(); 102 103 protected boolean mSaveToMem; 104 protected OutputManager mOutputBuff; 105 106 protected Surface mSurface; 107 108 protected MediaMuxer mMuxer; 109 protected int mTrackID = -1; 110 CodecEncoderSurfaceTestBase(String encoder, String mediaType, String decoder, String testFileMediaType, String testFile, EncoderConfigParams encCfgParams, int decColorFormat, boolean isOutputToneMapped, boolean usePersistentSurface, String allTestParams)111 public CodecEncoderSurfaceTestBase(String encoder, String mediaType, String decoder, 112 String testFileMediaType, String testFile, EncoderConfigParams encCfgParams, 113 int decColorFormat, boolean isOutputToneMapped, boolean usePersistentSurface, 114 String allTestParams) { 115 mEncoderName = encoder; 116 mEncMediaType = mediaType; 117 mDecoderName = decoder; 118 mTestFileMediaType = testFileMediaType; 119 mTestFile = testFile; 120 mEncCfgParams = encCfgParams; 121 mDecColorFormat = decColorFormat; 122 mIsOutputToneMapped = isOutputToneMapped; 123 mUsePersistentSurface = usePersistentSurface; 124 mTestArgs = allTestParams; 125 mLatency = mEncCfgParams.mMaxBFrames; 126 mReviseLatency = false; 127 } 128 129 @Rule 130 public TestName mTestName = new TestName(); 131 132 @Before setUpCodecEncoderSurfaceTestBase()133 public void setUpCodecEncoderSurfaceTestBase() throws IOException, CloneNotSupportedException { 134 mTestConfig.setLength(0); 135 mTestConfig.append("\n################## Test Details ####################\n"); 136 mTestConfig.append("Test Name :- ").append(mTestName.getMethodName()).append("\n"); 137 mTestConfig.append("Test Parameters :- ").append(mTestArgs).append("\n"); 138 if (mEncoderName.startsWith(CodecTestBase.INVALID_CODEC) || mDecoderName.startsWith( 139 CodecTestBase.INVALID_CODEC)) { 140 fail("no valid component available for current test. \n" + mTestConfig); 141 } 142 mDecoderFormat = setUpSource(mTestFile); 143 mExtractor.release(); 144 ArrayList<MediaFormat> decoderFormatList = new ArrayList<>(); 145 decoderFormatList.add(mDecoderFormat); 146 Assume.assumeTrue("Decoder: " + mDecoderName + " doesn't support format: " + mDecoderFormat, 147 CodecTestBase.areFormatsSupported(mDecoderName, mTestFileMediaType, 148 decoderFormatList)); 149 if (CodecTestBase.doesAnyFormatHaveHDRProfile(mTestFileMediaType, decoderFormatList) 150 || mTestFile.contains("10bit")) { 151 // Check if encoder is capable of supporting HDR profiles. 152 // Previous check doesn't verify this as profile isn't set in the format 153 Assume.assumeTrue(mEncoderName + " doesn't support HDR encoding", 154 CodecTestBase.doesCodecSupportHDRProfile(mEncoderName, mEncMediaType)); 155 } 156 157 if (mDecColorFormat == COLOR_FormatSurface) { 158 // TODO(b/253492870) Remove the following assumption check once this is supported 159 Assume.assumeFalse(mDecoderName + "is hardware accelerated and " + mEncoderName 160 + "is software only.", 161 isHardwareAcceleratedCodec(mDecoderName) && isSoftwareCodec(mEncoderName)); 162 } else { 163 // findDecoderForFormat() ignores color-format and decoder returned may not be 164 // supporting the color format set in mDecoderFormat. Following check will 165 // skip the test if decoder doesn't support the color format that is set. 166 boolean decoderSupportsColorFormat = 167 hasSupportForColorFormat(mDecoderName, mTestFileMediaType, mDecColorFormat); 168 if (mDecColorFormat == COLOR_FormatYUVP010) { 169 assumeTrue(mDecoderName + " doesn't support P010 output.", 170 decoderSupportsColorFormat); 171 } else { 172 assertTrue(mDecoderName + " doesn't support 420p 888 flexible output.", 173 decoderSupportsColorFormat); 174 } 175 } 176 EncoderConfigParams.Builder foreman = mEncCfgParams.getBuilder() 177 .setWidth(mDecoderFormat.getInteger(MediaFormat.KEY_WIDTH)) 178 .setHeight(mDecoderFormat.getInteger(MediaFormat.KEY_HEIGHT)); 179 mEncoderFormat = foreman.build().getFormat(); 180 } 181 182 @After tearDownCodecEncoderSurfaceTestBase()183 public void tearDownCodecEncoderSurfaceTestBase() { 184 if (mDecoder != null) { 185 mDecoder.release(); 186 mDecoder = null; 187 } 188 if (mSurface != null) { 189 mSurface.release(); 190 mSurface = null; 191 } 192 if (mEncoder != null) { 193 mEncoder.release(); 194 mEncoder = null; 195 } 196 if (mExtractor != null) { 197 mExtractor.release(); 198 mExtractor = null; 199 } 200 if (mMuxer != null) { 201 mMuxer.release(); 202 mMuxer = null; 203 } 204 } 205 getVideoEncoderCfgParams(String mediaType, int bitRate, int frameRate, int bitDepth, int maxBFrames)206 private static EncoderConfigParams getVideoEncoderCfgParams(String mediaType, int bitRate, 207 int frameRate, int bitDepth, int maxBFrames) { 208 EncoderConfigParams.Builder foreman = new EncoderConfigParams.Builder(mediaType) 209 .setBitRate(bitRate) 210 .setFrameRate(frameRate) 211 .setColorFormat(COLOR_FormatSurface) 212 .setInputBitDepth(bitDepth) 213 .setMaxBFrames(maxBFrames); 214 if (bitDepth == 10) { 215 foreman.setProfile(Objects.requireNonNull(PROFILE_HLG_MAP.get(mediaType))[0]); 216 } 217 return foreman.build(); 218 } 219 prepareParamsList(List<Object[]> args, List<Object[]> argsHighBitDepth, int[] maxBFrames, boolean[] usePersistentSurfaceStates)220 public static List<Object[]> prepareParamsList(List<Object[]> args, 221 List<Object[]> argsHighBitDepth, int[] maxBFrames, boolean[] usePersistentSurfaceStates) 222 throws IOException { 223 final boolean isEncoder = true; 224 final boolean needAudio = false; 225 final boolean needVideo = true; 226 final List<Object[]> exhaustiveArgsList = new ArrayList<>(); 227 228 int[] colorFormats = {COLOR_FormatSurface, COLOR_FormatYUV420Flexible}; 229 for (Object[] arg : args) { 230 final String mediaType = (String) arg[0]; 231 final int br = (int) arg[3]; 232 final int fps = (int) arg[4]; 233 for (int colorFormat : colorFormats) { 234 for (boolean usePersistentSurface : usePersistentSurfaceStates) { 235 for (int maxBFrame : maxBFrames) { 236 if (!mediaType.equals(MediaFormat.MIMETYPE_VIDEO_AVC) 237 && !mediaType.equals(MediaFormat.MIMETYPE_VIDEO_HEVC) 238 && maxBFrame != 0) { 239 continue; 240 } 241 Object[] testArgs = new Object[8]; 242 testArgs[0] = arg[0]; // encoder mediaType 243 testArgs[1] = arg[1]; // test file mediaType 244 testArgs[2] = arg[2]; // test file 245 testArgs[3] = getVideoEncoderCfgParams(mediaType, br, fps, 8, maxBFrame); 246 testArgs[4] = colorFormat; // color format 247 testArgs[5] = arg[5]; // tone map 248 testArgs[6] = usePersistentSurface; 249 testArgs[7] = String.format("%dkbps_%dfps_%s_%s", br / 1000, fps, 250 colorFormatToString(colorFormat, 8), 251 usePersistentSurface ? "persistentsurface" : "surface"); 252 exhaustiveArgsList.add(testArgs); 253 } 254 } 255 } 256 } 257 // P010 support was added in Android T and on some devices with vendor 258 // partition older than T these tests are failing hence limit the 259 // following tests to vndk Android T and above 260 if (CodecTestBase.VNDK_IS_AT_LEAST_T) { 261 int[] colorFormatsHbd = {COLOR_FormatSurface, COLOR_FormatYUVP010}; 262 for (Object[] arg : argsHighBitDepth) { 263 final String mediaType = (String) arg[0]; 264 final int br = (int) arg[3]; 265 final int fps = (int) arg[4]; 266 final boolean toneMap = (boolean) arg[5]; 267 for (int colorFormat : colorFormatsHbd) { 268 for (boolean usePersistentSurface : usePersistentSurfaceStates) { 269 for (int maxBFrame : maxBFrames) { 270 if (!mediaType.equals(MediaFormat.MIMETYPE_VIDEO_AVC) 271 && !mediaType.equals(MediaFormat.MIMETYPE_VIDEO_HEVC) 272 && maxBFrame != 0) { 273 continue; 274 } 275 Object[] testArgs = new Object[8]; 276 testArgs[0] = arg[0]; // encoder mediaType 277 testArgs[1] = arg[1]; // test file mediaType 278 testArgs[2] = arg[2]; // test file 279 testArgs[3] = 280 getVideoEncoderCfgParams(mediaType, br, fps, toneMap ? 8 : 10, 281 maxBFrame); 282 if (toneMap && (colorFormat == COLOR_FormatYUVP010)) { 283 colorFormat = COLOR_FormatYUV420Flexible; 284 } 285 testArgs[4] = colorFormat; // color format 286 testArgs[5] = arg[5]; // tone map 287 testArgs[6] = usePersistentSurface; 288 testArgs[7] = String.format("%dkbps_%dfps_%s_%s_%s", br / 1000, fps, 289 colorFormatToString(colorFormat, toneMap ? 8 : 10), 290 toneMap ? "tonemapyes" : "tonemapno", 291 usePersistentSurface ? "persistentsurface" : "surface"); 292 exhaustiveArgsList.add(testArgs); 293 } 294 } 295 } 296 } 297 } 298 final List<Object[]> argsList = new ArrayList<>(); 299 for (Object[] arg : exhaustiveArgsList) { 300 ArrayList<String> decoderList = 301 CodecTestBase.selectCodecs((String) arg[1], null, null, false); 302 for (String decoderName : decoderList) { 303 int argLength = exhaustiveArgsList.get(0).length; 304 Object[] testArg = new Object[argLength + 1]; 305 testArg[0] = arg[0]; // encoder mediaType 306 testArg[1] = decoderName; // decoder name 307 System.arraycopy(arg, 1, testArg, 2, argLength - 1); 308 argsList.add(testArg); 309 } 310 } 311 312 final List<Object[]> expandedArgsList = 313 CodecTestBase.prepareParamList(argsList, isEncoder, needAudio, needVideo, true); 314 315 // Prior to Android U, this test was not testing persistent surface. While this has 316 // been expected behavior for a long time, we only started testing it in Android U, so 317 // some older devices might not pass this test in persistent surface mode for some 318 // combination of codecs. These may show up as failures when running MTS tests for s/w 319 // encoders with h/w decoders in such cases. 320 321 // Prior to Android U, this test was using the first decoder for a given mediaType. 322 // In Android U, this was updated to test the encoders with all decoders for the 323 // given mediaType. There are some vendor encoders in older versions of Android 324 // and few OMX encoders which do not work as expected with the surface from s/w decoder. 325 // If the device is has vendor partition older than Android U or if the encoder is 326 // an OMX encoder, then limit the tests to first decoder like it was being done prior 327 // to Androd U 328 final List<Object[]> finalArgsList = new ArrayList<>(); 329 for (Object[] arg : expandedArgsList) { 330 String encoderName = (String) arg[0]; 331 String decoderName = (String) arg[2]; 332 String decoderMediaType = (String) arg[3]; 333 if ((BOARD_SDK_IS_BEFORE_U || VNDK_IS_BEFORE_U || encoderName.toUpperCase() 334 .startsWith("OMX")) && isVendorCodec(encoderName)) { 335 if (!isDefaultCodec(decoderName, decoderMediaType, /* isEncoder */false)) { 336 continue; 337 } 338 } 339 finalArgsList.add(arg); 340 } 341 return finalArgsList; 342 } 343 hasSeenError()344 protected boolean hasSeenError() { 345 return mAsyncHandleDecoder.hasSeenError() || mAsyncHandleEncoder.hasSeenError(); 346 } 347 348 @TargetApi(33) setUpSource(String srcFile)349 protected MediaFormat setUpSource(String srcFile) throws IOException { 350 Preconditions.assertTestFileExists(srcFile); 351 mExtractor = new MediaExtractor(); 352 mExtractor.setDataSource(srcFile); 353 for (int trackID = 0; trackID < mExtractor.getTrackCount(); trackID++) { 354 MediaFormat format = mExtractor.getTrackFormat(trackID); 355 String mediaType = format.getString(MediaFormat.KEY_MIME); 356 if (mediaType.equals(mTestFileMediaType)) { 357 mExtractor.selectTrack(trackID); 358 format.setInteger(MediaFormat.KEY_COLOR_FORMAT, mDecColorFormat); 359 if (mIsOutputToneMapped) { 360 format.setInteger(MediaFormat.KEY_COLOR_TRANSFER_REQUEST, 361 MediaFormat.COLOR_TRANSFER_SDR_VIDEO); 362 } 363 return format; 364 } 365 } 366 mExtractor.release(); 367 fail("No video track found in file: " + srcFile + ". \n" + mTestConfig + mTestEnv); 368 return null; 369 } 370 resetContext(boolean isAsync, boolean signalEOSWithLastFrame)371 protected void resetContext(boolean isAsync, boolean signalEOSWithLastFrame) { 372 mAsyncHandleDecoder.resetContext(); 373 mAsyncHandleEncoder.resetContext(); 374 mIsCodecInAsyncMode = isAsync; 375 mSignalEOSWithLastFrame = signalEOSWithLastFrame; 376 mSawDecInputEOS = false; 377 mSawDecOutputEOS = false; 378 mSawEncOutputEOS = false; 379 mDecInputCount = 0; 380 mDecOutputCount = 0; 381 mEncOutputCount = 0; 382 } 383 configureCodec(MediaFormat decFormat, MediaFormat encFormat, boolean isAsync, boolean signalEOSWithLastFrame)384 protected void configureCodec(MediaFormat decFormat, MediaFormat encFormat, boolean isAsync, 385 boolean signalEOSWithLastFrame) { 386 resetContext(isAsync, signalEOSWithLastFrame); 387 mAsyncHandleEncoder.setCallBack(mEncoder, isAsync); 388 mEncoder.configure(encFormat, null, MediaCodec.CONFIGURE_FLAG_ENCODE, null); 389 if (mEncoder.getInputFormat().containsKey(MediaFormat.KEY_LATENCY)) { 390 mReviseLatency = true; 391 mLatency = mEncoder.getInputFormat().getInteger(MediaFormat.KEY_LATENCY); 392 } 393 if (mUsePersistentSurface) { 394 mSurface = MediaCodec.createPersistentInputSurface(); 395 mEncoder.setInputSurface(mSurface); 396 } else { 397 mSurface = mEncoder.createInputSurface(); 398 } 399 assertTrue("Surface is not valid", mSurface.isValid()); 400 mAsyncHandleDecoder.setCallBack(mDecoder, isAsync); 401 mDecoder.configure(decFormat, mSurface, null, 0); 402 mTestEnv.setLength(0); 403 mTestEnv.append("################### Test Environment #####################\n"); 404 mTestEnv.append(String.format("Encoder under test :- %s \n", mEncoderName)); 405 mTestEnv.append(String.format("Format under test :- %s \n", encFormat)); 406 mTestEnv.append(String.format("Encoder is fed with output of :- %s \n", mDecoderName)); 407 mTestEnv.append(String.format("Format of Decoder Input :- %s", decFormat)); 408 mTestEnv.append(String.format("Encoder and Decoder are operating in :- %s mode \n", 409 (isAsync ? "asynchronous" : "synchronous"))); 410 mTestEnv.append(String.format("Components received input eos :- %s \n", 411 (signalEOSWithLastFrame ? "with full buffer" : "with empty buffer"))); 412 if (ENABLE_LOGS) { 413 Log.v(LOG_TAG, "codec configured"); 414 } 415 } 416 enqueueDecoderEOS(int bufferIndex)417 protected void enqueueDecoderEOS(int bufferIndex) { 418 if (!mSawDecInputEOS) { 419 mDecoder.queueInputBuffer(bufferIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM); 420 mSawDecInputEOS = true; 421 if (ENABLE_LOGS) { 422 Log.v(LOG_TAG, "Queued End of Stream"); 423 } 424 } 425 } 426 enqueueDecoderInput(int bufferIndex)427 protected void enqueueDecoderInput(int bufferIndex) { 428 if (mExtractor.getSampleSize() < 0) { 429 enqueueDecoderEOS(bufferIndex); 430 } else { 431 ByteBuffer inputBuffer = mDecoder.getInputBuffer(bufferIndex); 432 mExtractor.readSampleData(inputBuffer, 0); 433 int size = (int) mExtractor.getSampleSize(); 434 long pts = mExtractor.getSampleTime(); 435 int extractorFlags = mExtractor.getSampleFlags(); 436 int codecFlags = 0; 437 if ((extractorFlags & MediaExtractor.SAMPLE_FLAG_SYNC) != 0) { 438 codecFlags |= MediaCodec.BUFFER_FLAG_KEY_FRAME; 439 } 440 if ((extractorFlags & MediaExtractor.SAMPLE_FLAG_PARTIAL_FRAME) != 0) { 441 codecFlags |= MediaCodec.BUFFER_FLAG_PARTIAL_FRAME; 442 } 443 if (!mExtractor.advance() && mSignalEOSWithLastFrame) { 444 codecFlags |= MediaCodec.BUFFER_FLAG_END_OF_STREAM; 445 mSawDecInputEOS = true; 446 } 447 if (ENABLE_LOGS) { 448 Log.v(LOG_TAG, "input: id: " + bufferIndex + " size: " + size + " pts: " + pts 449 + " flags: " + codecFlags); 450 } 451 mDecoder.queueInputBuffer(bufferIndex, 0, size, pts, codecFlags); 452 if (size > 0 && (codecFlags & (MediaCodec.BUFFER_FLAG_CODEC_CONFIG 453 | MediaCodec.BUFFER_FLAG_PARTIAL_FRAME)) == 0) { 454 mOutputBuff.saveInPTS(pts); 455 mDecInputCount++; 456 } 457 } 458 } 459 dequeueDecoderOutput(int bufferIndex, MediaCodec.BufferInfo info)460 protected void dequeueDecoderOutput(int bufferIndex, MediaCodec.BufferInfo info) { 461 if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { 462 mSawDecOutputEOS = true; 463 } 464 if (ENABLE_LOGS) { 465 Log.v(LOG_TAG, "output: id: " + bufferIndex + " flags: " + info.flags + " size: " 466 + info.size + " timestamp: " + info.presentationTimeUs); 467 } 468 if (info.size > 0 && (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) { 469 mDecOutputCount++; 470 } 471 mDecoder.releaseOutputBuffer(bufferIndex, mSurface != null); 472 } 473 dequeueEncoderOutput(int bufferIndex, MediaCodec.BufferInfo info)474 protected void dequeueEncoderOutput(int bufferIndex, MediaCodec.BufferInfo info) { 475 if (ENABLE_LOGS) { 476 Log.v(LOG_TAG, "encoder output: id: " + bufferIndex + " flags: " + info.flags 477 + " size: " + info.size + " timestamp: " + info.presentationTimeUs); 478 } 479 if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { 480 mSawEncOutputEOS = true; 481 } 482 if (info.size > 0) { 483 ByteBuffer buf = mEncoder.getOutputBuffer(bufferIndex); 484 if (mSaveToMem) { 485 mOutputBuff.saveToMemory(buf, info); 486 } 487 if (mMuxer != null) { 488 if (mTrackID == -1) { 489 mTrackID = mMuxer.addTrack(mEncoder.getOutputFormat()); 490 mMuxer.start(); 491 } 492 if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) { 493 mMuxer.writeSampleData(mTrackID, buf, info); 494 } 495 } 496 if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) { 497 mOutputBuff.saveOutPTS(info.presentationTimeUs); 498 mEncOutputCount++; 499 } 500 } 501 mEncoder.releaseOutputBuffer(bufferIndex, false); 502 } 503 tryEncoderOutput(long timeOutUs)504 protected void tryEncoderOutput(long timeOutUs) throws InterruptedException { 505 if (mIsCodecInAsyncMode) { 506 if (!hasSeenError() && !mSawEncOutputEOS) { 507 while (mReviseLatency) { 508 mAsyncHandleEncoder.waitOnFormatChange(); 509 mReviseLatency = false; 510 int actualLatency = mAsyncHandleEncoder.getOutputFormat() 511 .getInteger(MediaFormat.KEY_LATENCY, mLatency); 512 if (mLatency < actualLatency) { 513 mLatency = actualLatency; 514 return; 515 } 516 } 517 Pair<Integer, MediaCodec.BufferInfo> element = mAsyncHandleEncoder.getOutput(); 518 if (element != null) { 519 dequeueEncoderOutput(element.first, element.second); 520 } 521 } 522 } else { 523 MediaCodec.BufferInfo outInfo = new MediaCodec.BufferInfo(); 524 if (!mSawEncOutputEOS) { 525 int outputBufferId = mEncoder.dequeueOutputBuffer(outInfo, timeOutUs); 526 if (outputBufferId >= 0) { 527 dequeueEncoderOutput(outputBufferId, outInfo); 528 } else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { 529 mLatency = mEncoder.getOutputFormat() 530 .getInteger(MediaFormat.KEY_LATENCY, mLatency); 531 } 532 } 533 } 534 } 535 queueEOS()536 protected void queueEOS() throws InterruptedException { 537 if (mIsCodecInAsyncMode) { 538 while (!mAsyncHandleDecoder.hasSeenError() && !mSawDecInputEOS) { 539 Pair<Integer, MediaCodec.BufferInfo> element = mAsyncHandleDecoder.getWork(); 540 if (element != null) { 541 int bufferID = element.first; 542 MediaCodec.BufferInfo info = element.second; 543 if (info != null) { 544 dequeueDecoderOutput(bufferID, info); 545 } else { 546 enqueueDecoderEOS(element.first); 547 } 548 } 549 } 550 } else { 551 MediaCodec.BufferInfo outInfo = new MediaCodec.BufferInfo(); 552 while (!mSawDecInputEOS) { 553 int outputBufferId = 554 mDecoder.dequeueOutputBuffer(outInfo, CodecTestBase.Q_DEQ_TIMEOUT_US); 555 if (outputBufferId >= 0) { 556 dequeueDecoderOutput(outputBufferId, outInfo); 557 } 558 int inputBufferId = mDecoder.dequeueInputBuffer(CodecTestBase.Q_DEQ_TIMEOUT_US); 559 if (inputBufferId != -1) { 560 enqueueDecoderEOS(inputBufferId); 561 } 562 } 563 } 564 if (mIsCodecInAsyncMode) { 565 while (!hasSeenError() && !mSawDecOutputEOS) { 566 Pair<Integer, MediaCodec.BufferInfo> decOp = mAsyncHandleDecoder.getOutput(); 567 if (decOp != null) dequeueDecoderOutput(decOp.first, decOp.second); 568 if (mSawDecOutputEOS) mEncoder.signalEndOfInputStream(); 569 if (mDecOutputCount - mEncOutputCount > mLatency) { 570 tryEncoderOutput(-1); 571 } 572 } 573 } else { 574 MediaCodec.BufferInfo outInfo = new MediaCodec.BufferInfo(); 575 while (!mSawDecOutputEOS) { 576 int outputBufferId = 577 mDecoder.dequeueOutputBuffer(outInfo, CodecTestBase.Q_DEQ_TIMEOUT_US); 578 if (outputBufferId >= 0) { 579 dequeueDecoderOutput(outputBufferId, outInfo); 580 } 581 if (mSawDecOutputEOS) mEncoder.signalEndOfInputStream(); 582 if (mDecOutputCount - mEncOutputCount > mLatency) { 583 tryEncoderOutput(-1); 584 } 585 } 586 } 587 } 588 doWork(int frameLimit)589 protected void doWork(int frameLimit) throws InterruptedException { 590 int frameCnt = 0; 591 if (mIsCodecInAsyncMode) { 592 // dequeue output after inputEOS is expected to be done in waitForAllOutputs() 593 while (!hasSeenError() && !mSawDecInputEOS && frameCnt < frameLimit) { 594 Pair<Integer, MediaCodec.BufferInfo> element = mAsyncHandleDecoder.getWork(); 595 if (element != null) { 596 int bufferID = element.first; 597 MediaCodec.BufferInfo info = element.second; 598 if (info != null) { 599 // <id, info> corresponds to output callback. Handle it accordingly 600 dequeueDecoderOutput(bufferID, info); 601 } else { 602 // <id, null> corresponds to input callback. Handle it accordingly 603 enqueueDecoderInput(bufferID); 604 frameCnt++; 605 } 606 } 607 // check decoder EOS 608 if (mSawDecOutputEOS) mEncoder.signalEndOfInputStream(); 609 // encoder output 610 if (mDecOutputCount - mEncOutputCount > mLatency) { 611 tryEncoderOutput(-1); 612 } 613 } 614 } else { 615 MediaCodec.BufferInfo outInfo = new MediaCodec.BufferInfo(); 616 while (!mSawDecInputEOS && frameCnt < frameLimit) { 617 // decoder input 618 int inputBufferId = mDecoder.dequeueInputBuffer(CodecTestBase.Q_DEQ_TIMEOUT_US); 619 if (inputBufferId != -1) { 620 enqueueDecoderInput(inputBufferId); 621 frameCnt++; 622 } 623 // decoder output 624 int outputBufferId = 625 mDecoder.dequeueOutputBuffer(outInfo, CodecTestBase.Q_DEQ_TIMEOUT_US); 626 if (outputBufferId >= 0) { 627 dequeueDecoderOutput(outputBufferId, outInfo); 628 } 629 // check decoder EOS 630 if (mSawDecOutputEOS) mEncoder.signalEndOfInputStream(); 631 // encoder output 632 if (mDecOutputCount - mEncOutputCount > mLatency) { 633 tryEncoderOutput(-1); 634 } 635 } 636 } 637 } 638 waitForAllEncoderOutputs()639 protected void waitForAllEncoderOutputs() throws InterruptedException { 640 if (mIsCodecInAsyncMode) { 641 while (!hasSeenError() && !mSawEncOutputEOS) { 642 tryEncoderOutput(CodecTestBase.Q_DEQ_TIMEOUT_US); 643 } 644 } else { 645 while (!mSawEncOutputEOS) { 646 tryEncoderOutput(CodecTestBase.Q_DEQ_TIMEOUT_US); 647 } 648 } 649 validateTestState(); 650 } 651 validateTestState()652 private void validateTestState() { 653 assertFalse("Decoder has encountered error in async mode. \n" 654 + mTestConfig + mTestEnv + mAsyncHandleDecoder.getErrMsg(), 655 mAsyncHandleDecoder.hasSeenError()); 656 assertFalse("Encoder has encountered error in async mode. \n" 657 + mTestConfig + mTestEnv + mAsyncHandleEncoder.getErrMsg(), 658 mAsyncHandleEncoder.hasSeenError()); 659 assertTrue("Decoder has not received any input \n" + mTestConfig + mTestEnv, 660 0 != mDecInputCount); 661 assertTrue("Decoder has not sent any output \n" + mTestConfig + mTestEnv, 662 0 != mDecOutputCount); 663 assertTrue("Encoder has not sent any output \n" + mTestConfig + mTestEnv, 664 0 != mEncOutputCount); 665 assertEquals("Decoder output count is not equal to decoder input count \n" 666 + mTestConfig + mTestEnv, mDecInputCount, mDecOutputCount); 667 assertEquals("Encoder output count is not equal to Decoder input count \n" 668 + mTestConfig + mTestEnv, mDecInputCount, mEncOutputCount); 669 if (!mOutputBuff.isOutPtsListIdenticalToInpPtsList((mEncCfgParams.mMaxBFrames != 0))) { 670 fail("Input pts list and Output pts list are not identical \n" + mTestConfig 671 + mTestEnv + mOutputBuff.getErrMsg()); 672 } 673 if (mEncCfgParams.mMaxBFrames == 0 && !mOutputBuff.isPtsStrictlyIncreasing( 674 Long.MIN_VALUE)) { 675 fail("Output timestamps are not strictly increasing \n" + mTestConfig + mTestEnv 676 + mOutputBuff.getErrMsg()); 677 } 678 } 679 validateToneMappedFormat(MediaFormat format, String descriptor)680 protected void validateToneMappedFormat(MediaFormat format, String descriptor) { 681 assertEquals("unexpected color transfer in " + descriptor + " after tone mapping", 682 MediaFormat.COLOR_TRANSFER_SDR_VIDEO, 683 format.getInteger(MediaFormat.KEY_COLOR_TRANSFER, 0)); 684 assertNotEquals("unexpected color standard in " + descriptor + " after tone mapping", 685 MediaFormat.COLOR_STANDARD_BT2020, 686 format.getInteger(MediaFormat.KEY_COLOR_STANDARD, 0)); 687 688 int profile = format.getInteger(MediaFormat.KEY_PROFILE, -1); 689 int[] profileArray = CodecTestBase.PROFILE_HDR_MAP.get(mEncMediaType); 690 assertFalse(descriptor + " must not contain HDR profile after tone mapping", 691 IntStream.of(profileArray).anyMatch(x -> x == profile)); 692 } 693 694 @TargetApi(33) encodeToMemory(boolean isAsync, boolean signalEOSWithLastFrame, boolean saveToMem, OutputManager outBuff, boolean muxOutput, String outPath)695 protected void encodeToMemory(boolean isAsync, boolean signalEOSWithLastFrame, 696 boolean saveToMem, OutputManager outBuff, boolean muxOutput, String outPath) 697 throws IOException, InterruptedException { 698 encodeToMemory(isAsync, signalEOSWithLastFrame, saveToMem, outBuff, muxOutput, outPath, 699 Integer.MAX_VALUE); 700 } 701 702 @TargetApi(33) encodeToMemory(boolean isAsync, boolean signalEOSWithLastFrame, boolean saveToMem, OutputManager outBuff, boolean muxOutput, String outPath, int frameLimit)703 protected void encodeToMemory(boolean isAsync, boolean signalEOSWithLastFrame, 704 boolean saveToMem, OutputManager outBuff, boolean muxOutput, String outPath, 705 int frameLimit) throws IOException, InterruptedException { 706 mSaveToMem = saveToMem; 707 mOutputBuff = outBuff; 708 mOutputBuff.reset(); 709 if (muxOutput) { 710 int muxerFormat = getMuxerFormatForMediaType(mEncMediaType); 711 mMuxer = new MediaMuxer(outPath, muxerFormat); 712 } 713 setUpSource(mTestFile); 714 mDecoder = MediaCodec.createByCodecName(mDecoderName); 715 mEncoder = MediaCodec.createByCodecName(mEncoderName); 716 configureCodec(mDecoderFormat, mEncoderFormat, isAsync, signalEOSWithLastFrame); 717 if (mIsOutputToneMapped) { 718 MediaFormat inpFormat = mDecoder.getInputFormat(); 719 int transferRequest = inpFormat.getInteger(MediaFormat.KEY_COLOR_TRANSFER_REQUEST, 0); 720 assumeTrue(mDecoderName + " does not support HDR to SDR tone mapping", 721 0 != transferRequest); 722 } 723 mEncoder.start(); 724 mDecoder.start(); 725 doWork(frameLimit); 726 queueEOS(); 727 waitForAllEncoderOutputs(); 728 if (muxOutput) { 729 if (mTrackID != -1) { 730 mMuxer.stop(); 731 mTrackID = -1; 732 } 733 if (mMuxer != null) { 734 mMuxer.release(); 735 mMuxer = null; 736 } 737 } 738 if (mIsOutputToneMapped) { 739 MediaFormat encoderOutputFormat = mEncoder.getOutputFormat(); 740 MediaFormat decoderOutputFormat = mDecoder.getOutputFormat(); 741 validateToneMappedFormat(decoderOutputFormat, "decoder output format"); 742 validateToneMappedFormat(encoderOutputFormat, "encoder output format"); 743 if (outPath != null) { 744 MediaExtractor extractor = new MediaExtractor(); 745 extractor.setDataSource(outPath); 746 MediaFormat extractorFormat = extractor.getTrackFormat(0); 747 extractor.release(); 748 validateToneMappedFormat(extractorFormat, "extractor format"); 749 } 750 } 751 mDecoder.reset(); 752 mEncoder.reset(); 753 mSurface.release(); 754 mSurface = null; 755 mDecoder.release(); 756 mEncoder.release(); 757 mExtractor.release(); 758 mSaveToMem = false; 759 } 760 } 761