/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.codec.cts;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.junit.Assume.assumeTrue;

import android.graphics.ImageFormat;
import android.media.Image;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.media.cts.InputSurface;
import android.media.cts.InputSurfaceInterface;
import android.media.cts.MediaCodecWrapper;
import android.media.cts.NdkMediaCodec;
import android.media.cts.OutputSurface;
import android.media.cts.SdkMediaCodec;
import android.media.cts.TestArgs;
import android.opengl.GLES20;
import android.os.Build;
import android.platform.test.annotations.PlatinumTest;
import android.platform.test.annotations.Presubmit;
import android.platform.test.annotations.RequiresDevice;
import android.util.Log;

import androidx.test.filters.SmallTest;

import com.android.compatibility.common.util.ApiLevelUtil;
import com.android.compatibility.common.util.ApiTest;
import com.android.compatibility.common.util.MediaUtils;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

import javax.microedition.khronos.opengles.GL10;

/**
 * Generates a series of video frames, encodes them, decodes them, and tests for significant
 * divergence from the original.
 * <p>
 * We copy the data from the encoder's output buffers to the decoder's input buffers, running
 * them in parallel.  The first buffer output for video/avc contains codec configuration data,
 * which we must carefully forward to the decoder.
 * <p>
 * An alternative approach would be to save the output of the encoder as an mpeg4 video
 * file, and read it back in from disk.  The data we're generating is just an elementary
 * stream, so we'd need to perform additional steps to make that happen.
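 * <p>
 * A minimal sketch of the copy step, assuming both codecs are already started (index
 * checks, timeouts, EOS, and codec-config handling are all omitted here; see
 * doEncodeDecodeVideoFromBuffer for the real thing):
 * <pre>
 *     int outIndex = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
 *     ByteBuffer encoded = encoder.getOutputBuffer(outIndex);
 *     int inIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
 *     decoder.getInputBuffer(inIndex).put(encoded);
 *     decoder.queueInputBuffer(inIndex, 0, info.size, info.presentationTimeUs, info.flags);
 *     encoder.releaseOutputBuffer(outIndex, false);
 * </pre>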
 */
@Presubmit
@SmallTest
@RequiresDevice
@PlatinumTest(focusArea = "media")
@RunWith(Parameterized.class)
public class EncodeDecodeTest {
    private static final String TAG = "EncodeDecodeTest";
    private static final boolean VERBOSE = false;           // lots of logging
    private static final boolean DEBUG_SAVE_FILE = false;   // save copy of encoded movie
    private static final String DEBUG_FILE_NAME_BASE = "/sdcard/test.";
    //TODO(b/248315681) Remove codenameEquals() check once devices return correct version for U
    private static final boolean IS_AFTER_T = ApiLevelUtil.isAfter(Build.VERSION_CODES.TIRAMISU)
            || ApiLevelUtil.codenameEquals("UpsideDownCake");

    // parameters for the encoder
    private static final int FRAME_RATE = 15;               // 15fps
    private static final int IFRAME_INTERVAL = 10;          // 10 seconds between I-frames

    // movie length, in frames
    private static final int NUM_FRAMES = 30;               // two seconds of video

    private static final int TEST_Y = 120;                  // YUV values for colored rect
    private static final int TEST_U = 160;
    private static final int TEST_V = 200;
    private static final int TEST_R0 = 0;                   // RGB equivalent of {0,0,0} (BT.601)
    private static final int TEST_G0 = 136;
    private static final int TEST_B0 = 0;
    private static final int TEST_R1 = 236;                 // RGB equivalent of {120,160,200} (BT.601)
    private static final int TEST_G1 = 50;
    private static final int TEST_B1 = 186;
    private static final int TEST_R0_BT709 = 0;             // RGB equivalent of {0,0,0} (BT.709)
    private static final int TEST_G0_BT709 = 77;
    private static final int TEST_B0_BT709 = 0;
    private static final int TEST_R1_BT709 = 250;           // RGB equivalent of {120,160,200} (BT.709)
    private static final int TEST_G1_BT709 = 76;
    private static final int TEST_B1_BT709 = 189;
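    // For reference, the BT.601 values above follow from the usual limited-range
    // YUV-to-RGB conversion (coefficients rounded; exact values vary slightly by source):
    //   R = 1.164*(Y-16) + 1.596*(V-128)
    //   G = 1.164*(Y-16) - 0.813*(V-128) - 0.391*(U-128)
    //   B = 1.164*(Y-16) + 2.018*(U-128)
    // e.g. {120,160,200}: R = 1.164*104 + 1.596*72 ~= 236, matching TEST_R1.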
    private static final boolean USE_NDK = true;

    // component names
    private final String mEncoderName;
    private final String mDecoderName;
    // mime
    private final String mMimeType;
    // size of a frame, in pixels
    private final int mWidth;
    private final int mHeight;
    // bit rate, in bits per second
    private final int mBitRate;
    // validate YUV->RGB decoded frames against BT.601 and/or BT.709
    private boolean mAllowBT601 = true;
    private boolean mAllowBT709 = false;

    // largest color component delta seen (i.e. actual vs. expected)
    private int mLargestColorDelta;

    private static List<Object[]> prepareParamList(List<Object[]> exhaustiveArgsList) {
        final List<Object[]> argsList = new ArrayList<>();
        int argLength = exhaustiveArgsList.get(0).length;
        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        for (Object[] arg : exhaustiveArgsList) {
            String mediaType = (String)arg[0];
            if (TestArgs.shouldSkipMediaType(mediaType)) {
                continue;
            }

            MediaFormat format = MediaFormat.createVideoFormat(mediaType, (Integer)arg[1],
                    (Integer)arg[2]);

            String[] encoderNames = MediaUtils.getEncoderNamesForMime(mediaType);
            String[] decoderNames = MediaUtils.getDecoderNamesForMime(mediaType);
            // The first encoder/decoder pair that supports the given format is chosen.
            outerLoop:
            for (String decoder : decoderNames) {
                if (TestArgs.shouldSkipCodec(decoder)) {
                    continue;
                }
                for (String encoder : encoderNames) {
                    if (TestArgs.shouldSkipCodec(encoder)) {
                        continue;
                    }
                    if (MediaUtils.supports(encoder, format) &&
                            MediaUtils.supports(decoder, format)) {
                        Object[] testArgs = new Object[argLength + 2];
                        testArgs[0] = encoder;
                        testArgs[1] = decoder;
                        System.arraycopy(arg, 0, testArgs, 2, argLength);
                        argsList.add(testArgs);
                        // Test only the first codec pair that supports the given format.
                        // Remove the following break statement to test all codecs on the device.
                        break outerLoop;
                    }
                }
            }
        }
        return argsList;
    }

    @Before
    public void shouldSkip() {
        MediaFormat format = MediaFormat.createVideoFormat(mMimeType, mWidth, mHeight);
        format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        assumeTrue(MediaUtils.supports(mEncoderName, format));
        assumeTrue(MediaUtils.supports(mDecoderName, format));
    }

    @Parameterized.Parameters(name = "{index}_{0}_{1}")
    public static Collection<Object[]> input() {
        final List<Object[]> exhaustiveArgsList = Arrays.asList(new Object[][]{
                // Mime, width, height, bit-rate, allow bt601, allow bt709
                {MediaFormat.MIMETYPE_VIDEO_AVC, 176, 144, 1000000, true, false},
                {MediaFormat.MIMETYPE_VIDEO_AVC, 320, 240, 2000000, true, false},
                {MediaFormat.MIMETYPE_VIDEO_AVC, 1280, 720, 6000000, true, true},
                {MediaFormat.MIMETYPE_VIDEO_VP8, 176, 144, 1000000, true, false},
                {MediaFormat.MIMETYPE_VIDEO_VP8, 320, 240, 2000000, true, false},
                {MediaFormat.MIMETYPE_VIDEO_VP8, 1280, 720, 6000000, true, true},
        });
        return prepareParamList(exhaustiveArgsList);
    }
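    // prepareParamList() expands each row above to (encoderName, decoderName, mime, width,
    // height, bitRate, allowBT601, allowBT709).  A hypothetical extra row such as
    //   {MediaFormat.MIMETYPE_VIDEO_AVC, 640, 480, 4000000, true, false}
    // would be picked up automatically, provided a supporting codec pair exists.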
    public EncodeDecodeTest(String encoder, String decoder, String mimeType, int width, int height,
            int bitRate, boolean allowBT601, boolean allowBT709) {
        if ((width % 16) != 0 || (height % 16) != 0) {
            Log.w(TAG, "WARNING: width or height not multiple of 16");
        }
        mEncoderName = encoder;
        mDecoderName = decoder;
        mMimeType = mimeType;
        mWidth = width;
        mHeight = height;
        mBitRate = bitRate;
        mAllowBT601 = allowBT601;
        mAllowBT709 = allowBT709;
    }

    /** Wraps testEncodeDecodeVideoFromBuffer(true) */
    private static class BufferToSurfaceWrapper implements Runnable {
        private Throwable mThrowable;
        private EncodeDecodeTest mTest;

        private BufferToSurfaceWrapper(EncodeDecodeTest test) {
            mTest = test;
        }

        @Override
        public void run() {
            try {
                mTest.encodeDecodeVideoFromBuffer(true);
            } catch (Throwable th) {
                mThrowable = th;
            }
        }

        /**
         * Entry point.
         */
        public static void runTest(EncodeDecodeTest obj) throws Throwable {
            BufferToSurfaceWrapper wrapper = new BufferToSurfaceWrapper(obj);
            Thread th = new Thread(wrapper, "codec test");
            th.start();
            th.join();
            if (wrapper.mThrowable != null) {
                throw wrapper.mThrowable;
            }
        }
    }

    /** Wraps testEncodeDecodeVideoFromSurfaceToSurface() */
    private static class SurfaceToSurfaceWrapper implements Runnable {
        private Throwable mThrowable;
        private EncodeDecodeTest mTest;
        private boolean mUsePersistentInput;
        private boolean mUseNdk;

        private SurfaceToSurfaceWrapper(EncodeDecodeTest test, boolean persistent, boolean useNdk) {
            mTest = test;
            mUsePersistentInput = persistent;
            mUseNdk = useNdk;
        }

        @Override
        public void run() {
            InputSurfaceInterface inputSurface = null;
            try {
                if (!mUsePersistentInput) {
                    mTest.encodeDecodeVideoFromSurfaceToSurface(null, mUseNdk);
                } else {
                    Log.d(TAG, "creating persistent surface");
                    if (mUseNdk) {
                        inputSurface = NdkMediaCodec.createPersistentInputSurface();
                    } else {
                        inputSurface = new InputSurface(MediaCodec.createPersistentInputSurface());
                    }

                    for (int i = 0; i < 3; i++) {
                        Log.d(TAG, "test persistent surface - round " + i);
                        mTest.encodeDecodeVideoFromSurfaceToSurface(inputSurface, mUseNdk);
                    }
                }
            } catch (Throwable th) {
                mThrowable = th;
            } finally {
                if (inputSurface != null) {
                    inputSurface.release();
                }
            }
        }

        /**
         * Entry point.
         */
        public static void runTest(EncodeDecodeTest obj, boolean persistent, boolean useNdk)
                throws Throwable {
            // A few cuttlefish-specific color conversion issues were fixed after Android T.
            if (MediaUtils.onCuttlefish()) {
                assumeTrue("Color conversion related tests are not valid on cuttlefish releases "
                        + "through android T", IS_AFTER_T);
            }
            SurfaceToSurfaceWrapper wrapper =
                    new SurfaceToSurfaceWrapper(obj, persistent, useNdk);
            Thread th = new Thread(wrapper, "codec test");
            th.start();
            th.join();
            if (wrapper.mThrowable != null) {
                throw wrapper.mThrowable;
            }
        }
    }
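    // Both wrappers follow the same pattern: run the test body on a plain Thread (one
    // without a Looper, which the SurfaceTexture frame-available callback requires -- see
    // the note on testEncodeDecodeVideoFromBufferToSurface() below), capture any Throwable,
    // and rethrow it on the JUnit thread so failures are reported normally.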
    /**
     * Tests encoding and subsequently decoding video from frames generated into a buffer.
     * <p>
     * We encode several frames of a video test pattern using MediaCodec, then decode the
     * output with MediaCodec and do some simple checks.
     * <p>
     * See http://b.android.com/37769 for a discussion of input format pitfalls.
     */
    private void encodeDecodeVideoFromBuffer(boolean toSurface) throws Exception {
        MediaCodec encoder = null;
        MediaCodec decoder = null;

        mLargestColorDelta = -1;

        try {
            // We avoid the device-specific limitations on width and height by using values
            // that are multiples of 16, which all tested devices seem to be able to handle.
            MediaFormat format = MediaFormat.createVideoFormat(mMimeType, mWidth, mHeight);

            // Create a MediaCodec for the desired codec, then configure it as an encoder with
            // our desired properties.
            encoder = MediaCodec.createByCodecName(mEncoderName);

            int colorFormat = selectColorFormat(encoder.getCodecInfo(), mMimeType);
            if (VERBOSE) Log.d(TAG, "found colorFormat: " + colorFormat);

            // Set some properties.  Failing to specify some of these can cause the MediaCodec
            // configure() call to throw an unhelpful exception.
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
            format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
            if (VERBOSE) Log.d(TAG, "format: " + format);

            encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            encoder.start();

            // Create a MediaCodec for the decoder, just based on the MIME type.  The various
            // format details will be passed through the csd-0 meta-data later on.
            decoder = MediaCodec.createByCodecName(mDecoderName);
            if (VERBOSE) Log.d(TAG, "got decoder: " + decoder.getName());

            doEncodeDecodeVideoFromBuffer(encoder, colorFormat, decoder, toSurface);
        } finally {
            if (VERBOSE) Log.d(TAG, "releasing codecs");
            if (encoder != null) {
                encoder.stop();
                encoder.release();
            }
            if (decoder != null) {
                decoder.stop();
                decoder.release();
            }

            Log.i(TAG, "Largest color delta: " + mLargestColorDelta);
        }
    }
    /**
     * Tests encoding and subsequently decoding video from frames generated into a buffer.
     * <p>
     * We encode several frames of a video test pattern using MediaCodec, then decode the
     * output with MediaCodec and do some simple checks.
     */
    private void encodeDecodeVideoFromSurfaceToSurface(InputSurfaceInterface inSurf,
            boolean useNdk) throws Exception {
        MediaCodecWrapper encoder = null;
        MediaCodec decoder = null;
        InputSurfaceInterface inputSurface = inSurf;
        OutputSurface outputSurface = null;

        mLargestColorDelta = -1;

        try {
            // We avoid the device-specific limitations on width and height by using values
            // that are multiples of 16, which all tested devices seem to be able to handle.
            MediaFormat format = MediaFormat.createVideoFormat(mMimeType, mWidth, mHeight);

            int colorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface;

            // Set some properties.  Failing to specify some of these can cause the MediaCodec
            // configure() call to throw an unhelpful exception.
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
            format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);

            // Set color parameters
            format.setInteger(MediaFormat.KEY_COLOR_RANGE, MediaFormat.COLOR_RANGE_LIMITED);
            format.setInteger(MediaFormat.KEY_COLOR_STANDARD, MediaFormat.COLOR_STANDARD_BT601_PAL);
            format.setInteger(MediaFormat.KEY_COLOR_TRANSFER, MediaFormat.COLOR_TRANSFER_SDR_VIDEO);

            if (VERBOSE) Log.d(TAG, "format: " + format);

            // Create the output surface.
            outputSurface = new OutputSurface(mWidth, mHeight);

            decoder = MediaCodec.createByCodecName(mDecoderName);
            if (VERBOSE) Log.d(TAG, "got decoder: " + decoder.getName());
            decoder.configure(format, outputSurface.getSurface(), null, 0);
            decoder.start();

            // Create a MediaCodec for the desired codec, then configure it as an encoder with
            // our desired properties.  Request a Surface to use for input.
            if (useNdk) {
                encoder = new NdkMediaCodec(mEncoderName);
            } else {
                encoder = new SdkMediaCodec(MediaCodec.createByCodecName(mEncoderName));
            }
            encoder.configure(format, MediaCodec.CONFIGURE_FLAG_ENCODE);
            if (inSurf != null) {
                Log.d(TAG, "using persistent surface");
                encoder.setInputSurface(inputSurface);
                inputSurface.updateSize(mWidth, mHeight);
            } else {
                inputSurface = encoder.createInputSurface();
            }
            encoder.start();

            doEncodeDecodeVideoFromSurfaceToSurface(encoder, inputSurface, decoder, outputSurface);
        } finally {
            if (VERBOSE) Log.d(TAG, "releasing codecs");
            if (inSurf == null && inputSurface != null) {
                inputSurface.release();
            }
            if (outputSurface != null) {
                outputSurface.release();
            }
            if (encoder != null) {
                encoder.stop();
                encoder.release();
            }
            if (decoder != null) {
                decoder.stop();
                decoder.release();
            }

            Log.i(TAG, "Largest color delta: " + mLargestColorDelta);
        }
    }

    /**
     * Returns a color format that is supported by the codec and by this test code.  If no
     * match is found, this throws a test failure -- the set of formats known to the test
     * should be expanded for new platforms.
     */
    private static int selectColorFormat(MediaCodecInfo codecInfo, String mimeType) {
        MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType);
        for (int i = 0; i < capabilities.colorFormats.length; i++) {
            int colorFormat = capabilities.colorFormats[i];
            if (isRecognizedFormat(colorFormat)) {
                return colorFormat;
            }
        }
        fail("couldn't find a good color format for " + codecInfo.getName() + " / " + mimeType);
        return 0;   // not reached
    }
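    // In practice selectColorFormat() tends to return COLOR_FormatYUV420Planar (19) or
    // COLOR_FormatYUV420SemiPlanar (21) on current devices; the vendor-specific TI format
    // is generally only seen on older hardware.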
    /**
     * Returns true if this is a color format that this test code understands (i.e. we know
     * how to read and generate frames in this format).
     */
    private static boolean isRecognizedFormat(int colorFormat) {
        switch (colorFormat) {
            // these are the formats we know how to handle for this test
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
                return true;
            default:
                return false;
        }
    }

    /**
     * Returns true if the specified color format is semi-planar YUV.  Throws an exception
     * if the color format is not recognized (e.g. not YUV).
     */
    private static boolean isSemiPlanarYUV(int colorFormat) {
        switch (colorFormat) {
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
                return false;
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
                return true;
            default:
                throw new RuntimeException("unknown format " + colorFormat);
        }
    }

    /**
     * Does the actual work for encoding frames from buffers of byte[].
     */
    private void doEncodeDecodeVideoFromBuffer(MediaCodec encoder, int encoderColorFormat,
            MediaCodec decoder, boolean toSurface) {
        final int TIMEOUT_USEC = 10000;
        ByteBuffer[] encoderInputBuffers = encoder.getInputBuffers();
        ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
        ByteBuffer[] decoderInputBuffers = null;
        ByteBuffer[] decoderOutputBuffers = null;
        MediaCodec.BufferInfo decoderInfo = new MediaCodec.BufferInfo();
        MediaCodec.BufferInfo encoderInfo = new MediaCodec.BufferInfo();
        MediaFormat decoderOutputFormat = null;
        int generateIndex = 0;
        int checkIndex = 0;
        int badFrames = 0;
        boolean decoderConfigured = false;
        OutputSurface outputSurface = null;

        // The size of a frame of video data, in the formats we handle, is stride*sliceHeight
        // for Y, and (stride/2)*(sliceHeight/2) for each of the Cb and Cr channels.  Application
        // of algebra and assuming that stride==width and sliceHeight==height yields:
        byte[] frameData = new byte[mWidth * mHeight * 3 / 2];
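        // e.g. for 176x144 that is 176*144*3/2 = 38016 bytes: 25344 bytes of Y followed by
        // 6336 bytes each of chroma.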
        // Just out of curiosity.
        long rawSize = 0;
        long encodedSize = 0;

        // Save a copy to disk.  Useful for debugging the test.  Note this is a raw elementary
        // stream, not a .mp4 file, so not all players will know what to do with it.
        FileOutputStream outputStream = null;
        if (DEBUG_SAVE_FILE) {
            String fileName = DEBUG_FILE_NAME_BASE + mWidth + "x" + mHeight + ".mp4";
            try {
                outputStream = new FileOutputStream(fileName);
                Log.d(TAG, "encoded output will be saved as " + fileName);
            } catch (IOException ioe) {
                Log.w(TAG, "Unable to create debug output file " + fileName);
                throw new RuntimeException(ioe);
            }
        }

        if (toSurface) {
            outputSurface = new OutputSurface(mWidth, mHeight);
        }

        // Loop until the output side is done.
        boolean inputDone = false;
        boolean encoderDone = false;
        boolean outputDone = false;
        int encoderStatus = -1;
        while (!outputDone) {
            if (VERBOSE) Log.d(TAG, "loop");

            // If we're not done submitting frames, generate a new one and submit it.  By
            // doing this on every loop we're working to ensure that the encoder always has
            // work to do.
            //
            // We don't really want a timeout here, but sometimes there's a delay opening
            // the encoder device, so a short timeout can keep us from spinning hard.
            if (!inputDone) {
                int inputBufIndex = encoder.dequeueInputBuffer(TIMEOUT_USEC);
                if (VERBOSE) Log.d(TAG, "inputBufIndex=" + inputBufIndex);
                if (inputBufIndex >= 0) {
                    long ptsUsec = computePresentationTime(generateIndex);
                    if (generateIndex == NUM_FRAMES) {
                        // Send an empty frame with the end-of-stream flag set.  If we set EOS
                        // on a frame with data, that frame data will be ignored, and the
                        // output will be short one frame.
                        encoder.queueInputBuffer(inputBufIndex, 0, 0, ptsUsec,
                                MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        inputDone = true;
                        if (VERBOSE) Log.d(TAG, "sent input EOS (with zero-length frame)");
                    } else {
                        generateFrame(generateIndex, encoderColorFormat, frameData);

                        ByteBuffer inputBuf = encoder.getInputBuffer(inputBufIndex);
                        // the buffer should be sized to hold one full frame
                        assertTrue(inputBuf.capacity() >= frameData.length);
                        inputBuf.clear();
                        inputBuf.put(frameData);

                        encoder.queueInputBuffer(inputBufIndex, 0, frameData.length, ptsUsec, 0);
                        if (VERBOSE) Log.d(TAG, "submitted frame " + generateIndex + " to enc");
                    }
                    generateIndex++;
                } else {
                    // either all in use, or we timed out during initial setup
                    if (VERBOSE) Log.d(TAG, "input buffer not available");
                }
            }

            // Check for output from the encoder.  If there's no output yet, we either need to
            // provide more input, or we need to wait for the encoder to work its magic.  We
            // can't actually tell which is the case, so if we can't get an output buffer right
            // away we loop around and see if it wants more input.
            //
            // Once we get EOS from the encoder, we don't need to do this anymore.
            if (!encoderDone) {
                MediaCodec.BufferInfo info = encoderInfo;
                if (encoderStatus < 0) {
                    encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                }
                if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (VERBOSE) Log.d(TAG, "no output from encoder available");
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    // not expected for an encoder
                    encoderOutputBuffers = encoder.getOutputBuffers();
                    if (VERBOSE) Log.d(TAG, "encoder output buffers changed");
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // expected on API 18+
                    MediaFormat newFormat = encoder.getOutputFormat();
                    if (VERBOSE) Log.d(TAG, "encoder output format changed: " + newFormat);
                } else if (encoderStatus < 0) {
                    fail("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
                } else { // encoderStatus >= 0
                    ByteBuffer encodedData = encoder.getOutputBuffer(encoderStatus);
                    if (encodedData == null) {
                        fail("encoderOutputBuffer " + encoderStatus + " was null");
                    }

                    // It's usually necessary to adjust the ByteBuffer values to match BufferInfo.
                    encodedData.position(info.offset);
                    encodedData.limit(info.offset + info.size);

                    boolean releaseBuffer = false;
                    if (!decoderConfigured) {
                        // Codec config info.  Only expected on first packet.  One way to
                        // handle this is to manually stuff the data into the MediaFormat
                        // and pass that to configure().  We do that here to exercise the API.
                        // For codecs that don't have codec config data (such as VP8),
                        // initialize the decoder before trying to decode the first packet.
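                        // (The alternative, not exercised here, would be to configure() the
                        // decoder without csd-0 and queue this buffer to it as-is, with
                        // BUFFER_FLAG_CODEC_CONFIG still set in the flags.)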
                        assertTrue((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0 ||
                                mMimeType.equals(MediaFormat.MIMETYPE_VIDEO_VP8));
                        MediaFormat format =
                                MediaFormat.createVideoFormat(mMimeType, mWidth, mHeight);
                        if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                            format.setByteBuffer("csd-0", encodedData);
                        }
                        decoder.configure(format, toSurface ? outputSurface.getSurface() : null,
                                null, 0);
                        decoder.start();
                        decoderInputBuffers = decoder.getInputBuffers();
                        decoderOutputBuffers = decoder.getOutputBuffers();
                        decoderConfigured = true;
                        if (VERBOSE) Log.d(TAG, "decoder configured (" + info.size + " bytes)");
                    }
                    if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
                        // Get a decoder input buffer
                        assertTrue(decoderConfigured);
                        int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
                        if (inputBufIndex >= 0) {
                            ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                            inputBuf.clear();
                            inputBuf.put(encodedData);
                            decoder.queueInputBuffer(inputBufIndex, 0, info.size,
                                    info.presentationTimeUs, info.flags);

                            encoderDone = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
                            if (VERBOSE) Log.d(TAG, "passed " + info.size + " bytes to decoder"
                                    + (encoderDone ? " (EOS)" : ""));
                            releaseBuffer = true;
                        }
                    } else {
                        releaseBuffer = true;
                    }
                    if (releaseBuffer) {
                        encodedSize += info.size;
                        if (outputStream != null) {
                            byte[] data = new byte[info.size];
                            encodedData.position(info.offset);
                            encodedData.get(data);
                            try {
                                outputStream.write(data);
                            } catch (IOException ioe) {
                                Log.w(TAG, "failed writing debug data to file");
                                throw new RuntimeException(ioe);
                            }
                        }
                        encoder.releaseOutputBuffer(encoderStatus, false);
                        encoderStatus = -1;
                    }
                }
            }

            // Check for output from the decoder.  We want to do this on every loop to avoid
            // the possibility of stalling the pipeline.  We use a short timeout to avoid
            // burning CPU if the decoder is hard at work but the next frame isn't quite ready.
            //
            // If we're decoding to a Surface, we'll get notified here as usual but the
            // ByteBuffer references will be null.  The data is sent to Surface instead.
            if (decoderConfigured) {
                MediaCodec.BufferInfo info = decoderInfo;
                int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (VERBOSE) Log.d(TAG, "no output from decoder available");
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    // The storage associated with the direct ByteBuffer may already be unmapped,
                    // so attempting to access data through the old output buffer array could
                    // lead to a native crash.
                    if (VERBOSE) Log.d(TAG, "decoder output buffers changed");
                    decoderOutputBuffers = decoder.getOutputBuffers();
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // this happens before the first frame is returned
                    decoderOutputFormat = decoder.getOutputFormat();
                    if (VERBOSE) Log.d(TAG, "decoder output format changed: " +
                            decoderOutputFormat);
                } else if (decoderStatus < 0) {
                    fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
                } else { // decoderStatus >= 0
                    if (!toSurface) {
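                        // Alternate between the ByteBuffer and the Image path from frame
                        // to frame so that both decoder output APIs get exercised.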
                        ByteBuffer outputFrame = decoderOutputBuffers[decoderStatus];
                        Image outputImage = (checkIndex % 2 == 0) ?
                                null : decoder.getOutputImage(decoderStatus);

                        outputFrame.position(info.offset);
                        outputFrame.limit(info.offset + info.size);

                        rawSize += info.size;
                        if (info.size == 0) {
                            if (VERBOSE) Log.d(TAG, "got empty frame");
                        } else {
                            if (VERBOSE) Log.d(TAG, "decoded, checking frame " + checkIndex);
                            assertEquals("Wrong time stamp", computePresentationTime(checkIndex),
                                    info.presentationTimeUs);
                            if (!checkFrame(checkIndex++, decoderOutputFormat, outputFrame,
                                    outputImage)) {
                                badFrames++;
                            }
                        }
                        if (outputImage != null) {
                            outputImage.close();
                        }

                        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            if (VERBOSE) Log.d(TAG, "output EOS");
                            outputDone = true;
                        }
                        decoder.releaseOutputBuffer(decoderStatus, false /*render*/);
                    } else {
                        if (VERBOSE) Log.d(TAG, "surface decoder given buffer " + decoderStatus +
                                " (size=" + info.size + ")");
                        rawSize += info.size;
                        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            if (VERBOSE) Log.d(TAG, "output EOS");
                            outputDone = true;
                        }

                        boolean doRender = (info.size != 0);

                        // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                        // to SurfaceTexture to convert to a texture.  The API doesn't guarantee
                        // that the texture will be available before the call returns, so we
                        // need to wait for the onFrameAvailable callback to fire.
                        decoder.releaseOutputBuffer(decoderStatus, doRender);
                        if (doRender) {
                            if (VERBOSE) Log.d(TAG, "awaiting frame " + checkIndex);
                            assertEquals("Wrong time stamp", computePresentationTime(checkIndex),
                                    info.presentationTimeUs);
                            outputSurface.awaitNewImage();
                            outputSurface.drawImage();
                            if (!checkSurfaceFrame(checkIndex++)) {
                                badFrames++;
                            }
                        }
                    }
                }
            }
        }

        if (VERBOSE) Log.d(TAG, "decoded " + checkIndex + " frames at "
                + mWidth + "x" + mHeight + ": raw=" + rawSize + ", enc=" + encodedSize);
        if (outputStream != null) {
            try {
                outputStream.close();
            } catch (IOException ioe) {
                Log.w(TAG, "failed closing debug file");
                throw new RuntimeException(ioe);
            }
        }

        if (outputSurface != null) {
            outputSurface.release();
        }

        if (checkIndex != NUM_FRAMES) {
            fail("expected " + NUM_FRAMES + " frames, only decoded " + checkIndex);
        }
        if (badFrames != 0) {
            fail("Found " + badFrames + " bad frames");
        }
    }

    /**
     * Does the actual work for encoding and decoding from Surface to Surface.
     */
    private void doEncodeDecodeVideoFromSurfaceToSurface(MediaCodecWrapper encoder,
            InputSurfaceInterface inputSurface, MediaCodec decoder,
            OutputSurface outputSurface) {
        final int TIMEOUT_USEC = 10000;
        ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
        ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int generateIndex = 0;
        int checkIndex = 0;
        int badFrames = 0;

        // Save a copy to disk.  Useful for debugging the test.  Note this is a raw elementary
        // stream, not a .mp4 file, so not all players will know what to do with it.
        FileOutputStream outputStream = null;
        if (DEBUG_SAVE_FILE) {
            String fileName = DEBUG_FILE_NAME_BASE + mWidth + "x" + mHeight + ".mp4";
            try {
                outputStream = new FileOutputStream(fileName);
                Log.d(TAG, "encoded output will be saved as " + fileName);
            } catch (IOException ioe) {
                Log.w(TAG, "Unable to create debug output file " + fileName);
                throw new RuntimeException(ioe);
            }
        }

        // Loop until the output side is done.
        boolean inputDone = false;
        boolean encoderDone = false;
        boolean outputDone = false;
        while (!outputDone) {
            if (VERBOSE) Log.d(TAG, "loop");

            // If we're not done submitting frames, generate a new one and submit it.  The
            // eglSwapBuffers call will block if the input is full.
            if (!inputDone) {
                if (generateIndex == NUM_FRAMES) {
                    // Send an empty frame with the end-of-stream flag set.
                    if (VERBOSE) Log.d(TAG, "signaling input EOS");
                    encoder.signalEndOfInputStream();
                    inputDone = true;
                } else {
                    inputSurface.makeCurrent();
                    generateSurfaceFrame(generateIndex);
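                    // setPresentationTime() takes nanoseconds, while computePresentationTime()
                    // returns microseconds, hence the * 1000.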
                    inputSurface.setPresentationTime(computePresentationTime(generateIndex) * 1000);
                    if (VERBOSE) Log.d(TAG, "inputSurface swapBuffers");
                    inputSurface.swapBuffers();
                }
                generateIndex++;
            }

            // Assume output is available.  Loop until both assumptions are false.
            boolean decoderOutputAvailable = true;
            boolean encoderOutputAvailable = !encoderDone;
            while (decoderOutputAvailable || encoderOutputAvailable) {
                // Start by draining any pending output from the decoder.  It's important to
                // do this before we try to stuff any more data in.
                int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (VERBOSE) Log.d(TAG, "no output from decoder available");
                    decoderOutputAvailable = false;
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    if (VERBOSE) Log.d(TAG, "decoder output buffers changed (but we don't care)");
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // this happens before the first frame is returned
                    MediaFormat decoderOutputFormat = decoder.getOutputFormat();
                    if (VERBOSE) Log.d(TAG, "decoder output format changed: " +
                            decoderOutputFormat);
                } else if (decoderStatus < 0) {
                    fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
                } else { // decoderStatus >= 0
                    if (VERBOSE) Log.d(TAG, "surface decoder given buffer " + decoderStatus +
                            " (size=" + info.size + ")");
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        if (VERBOSE) Log.d(TAG, "output EOS");
                        outputDone = true;
                    }

                    // The ByteBuffers are null references, but we still get a nonzero size for
                    // the decoded data.
                    boolean doRender = (info.size != 0);

                    // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                    // to SurfaceTexture to convert to a texture.  The API doesn't guarantee
                    // that the texture will be available before the call returns, so we
                    // need to wait for the onFrameAvailable callback to fire.  If we don't
                    // wait, we risk dropping frames.
                    outputSurface.makeCurrent();
                    decoder.releaseOutputBuffer(decoderStatus, doRender);
                    if (doRender) {
                        assertEquals("Wrong time stamp", computePresentationTime(checkIndex),
                                info.presentationTimeUs);
                        if (VERBOSE) Log.d(TAG, "awaiting frame " + checkIndex);
                        outputSurface.awaitNewImage();
                        outputSurface.drawImage();
                        if (!checkSurfaceFrame(checkIndex++)) {
                            badFrames++;
                        }
                    }
                }
                if (decoderStatus != MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // Continue attempts to drain output.
                    continue;
                }

                // Decoder is drained, check to see if we've got a new buffer of output from
                // the encoder.
                if (!encoderDone) {
                    int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                    if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                        // no output available yet
                        if (VERBOSE) Log.d(TAG, "no output from encoder available");
                        encoderOutputAvailable = false;
                    } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                        // not expected for an encoder
                        encoderOutputBuffers = encoder.getOutputBuffers();
                        if (VERBOSE) Log.d(TAG, "encoder output buffers changed");
                    } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                        // expected on API 18+
                        String newFormat = encoder.getOutputFormatString();
                        if (VERBOSE) Log.d(TAG, "encoder output format changed: " + newFormat);
                    } else if (encoderStatus < 0) {
                        fail("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
                    } else { // encoderStatus >= 0
                        ByteBuffer encodedData = encoder.getOutputBuffer(encoderStatus);
                        if (encodedData == null) {
                            fail("encoderOutputBuffer " + encoderStatus + " was null");
                        }

                        // It's usually necessary to adjust the ByteBuffer values to match BufferInfo.
                        encodedData.position(info.offset);
                        encodedData.limit(info.offset + info.size);

                        if (outputStream != null) {
                            byte[] data = new byte[info.size];
                            encodedData.get(data);
                            encodedData.position(info.offset);
                            try {
                                outputStream.write(data);
                            } catch (IOException ioe) {
                                Log.w(TAG, "failed writing debug data to file");
                                throw new RuntimeException(ioe);
                            }
                        }

                        // Get a decoder input buffer, blocking until it's available.  We just
                        // drained the decoder output, so we expect there to be a free input
                        // buffer now or in the near future (i.e. this should never deadlock
                        // if the codec is meeting requirements).
                        //
                        // The first buffer of data we get will have the BUFFER_FLAG_CODEC_CONFIG
                        // flag set; the decoder will see this and finish configuring itself.
                        int inputBufIndex = decoder.dequeueInputBuffer(-1);
                        ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                        inputBuf.clear();
                        inputBuf.put(encodedData);
                        decoder.queueInputBuffer(inputBufIndex, 0, info.size,
                                info.presentationTimeUs, info.flags);

                        // If everything from the encoder has been passed to the decoder, we
                        // can stop polling the encoder output.  (This is just an optimization.)
                        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            encoderDone = true;
                            encoderOutputAvailable = false;
                        }
                        if (VERBOSE) Log.d(TAG, "passed " + info.size + " bytes to decoder"
                                + (encoderDone ? " (EOS)" : ""));

                        encoder.releaseOutputBuffer(encoderStatus, false);
                    }
                }
            }
        }

        if (outputStream != null) {
            try {
                outputStream.close();
            } catch (IOException ioe) {
                Log.w(TAG, "failed closing debug file");
                throw new RuntimeException(ioe);
            }
        }

        if (checkIndex != NUM_FRAMES) {
            fail("expected " + NUM_FRAMES + " frames, only decoded " + checkIndex);
        }
        if (badFrames != 0) {
            fail("Found " + badFrames + " bad frames");
        }
    }

    /**
     * Generates data for frame N into the supplied buffer.  We have an 8-frame animation
     * sequence that wraps around.  It looks like this:
     * <pre>
     *   0 1 2 3
     *   7 6 5 4
     * </pre>
     * We draw one of the eight rectangles and leave the rest set to the zero-fill color.
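     * <p>
     * For example, at 320x240 frame 2 fills the 80x120 rectangle whose top-left corner is
     * at (160, 0), and frame 6 fills the one at (80, 120).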
     */
    private void generateFrame(int frameIndex, int colorFormat, byte[] frameData) {
        final int HALF_WIDTH = mWidth / 2;
        boolean semiPlanar = isSemiPlanarYUV(colorFormat);

        // Set to zero.  In YUV this is a dull green.
        Arrays.fill(frameData, (byte) 0);

        int startX, startY;

        frameIndex %= 8;
        //frameIndex = (frameIndex / 8) % 8;    // use this instead for debug -- easier to see
        if (frameIndex < 4) {
            startX = frameIndex * (mWidth / 4);
            startY = 0;
        } else {
            startX = (7 - frameIndex) * (mWidth / 4);
            startY = mHeight / 2;
        }
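        // Buffer layout reminder for the two families handled below (W=mWidth, H=mHeight):
        //   planar:      [Y: W*H bytes] [U: (W/2)*(H/2)] [V: (W/2)*(H/2)]
        //   semi-planar: [Y: W*H bytes] [interleaved UV pairs: W*H/2]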
        for (int y = startY + (mHeight/2) - 1; y >= startY; --y) {
            for (int x = startX + (mWidth/4) - 1; x >= startX; --x) {
                if (semiPlanar) {
                    // full-size Y, followed by UV pairs at half resolution
                    // e.g. Nexus 4 OMX.qcom.video.encoder.avc COLOR_FormatYUV420SemiPlanar
                    // e.g. Galaxy Nexus OMX.TI.DUCATI1.VIDEO.H264E
                    //        OMX_TI_COLOR_FormatYUV420PackedSemiPlanar
                    frameData[y * mWidth + x] = (byte) TEST_Y;
                    if ((x & 0x01) == 0 && (y & 0x01) == 0) {
                        frameData[mWidth*mHeight + y * HALF_WIDTH + x] = (byte) TEST_U;
                        frameData[mWidth*mHeight + y * HALF_WIDTH + x + 1] = (byte) TEST_V;
                    }
                } else {
                    // full-size Y, followed by quarter-size U and quarter-size V
                    // e.g. Nexus 10 OMX.Exynos.AVC.Encoder COLOR_FormatYUV420Planar
                    // e.g. Nexus 7 OMX.Nvidia.h264.encoder COLOR_FormatYUV420Planar
                    frameData[y * mWidth + x] = (byte) TEST_Y;
                    if ((x & 0x01) == 0 && (y & 0x01) == 0) {
                        frameData[mWidth*mHeight + (y/2) * HALF_WIDTH + (x/2)] = (byte) TEST_U;
                        frameData[mWidth*mHeight + HALF_WIDTH * (mHeight / 2) +
                                  (y/2) * HALF_WIDTH + (x/2)] = (byte) TEST_V;
                    }
                }
            }
        }
    }

    /**
     * Performs a simple check to see if the frame is more or less right.
     * <p>
     * See {@link #generateFrame} for a description of the layout.  The idea is to sample
     * one pixel from the middle of the 8 regions, and verify that the correct one has
     * the non-background color.  We can't know exactly what the video encoder has done
     * with our frames, so we just check to see if it looks like more or less the right thing.
     *
     * @return true if the frame looks good
     */
    private boolean checkFrame(int frameIndex, MediaFormat format, ByteBuffer frameData,
            Image image) {
        // Check for color formats we don't understand.  There is no requirement for video
        // decoders to use a "mundane" format, so we just give a pass on proprietary formats.
        // e.g. Nexus 4 0x7FA30C03 OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar64x32Tile2m8ka
        int colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
        if (!isRecognizedFormat(colorFormat)) {
            Log.d(TAG, "unable to check frame contents for colorFormat=" +
                    Integer.toHexString(colorFormat));
            return true;
        }

        boolean frameFailed = false;
        boolean semiPlanar = isSemiPlanarYUV(colorFormat);
        int width = format.getInteger(MediaFormat.KEY_STRIDE,
                format.getInteger(MediaFormat.KEY_WIDTH));
        int height = format.getInteger(MediaFormat.KEY_SLICE_HEIGHT,
                format.getInteger(MediaFormat.KEY_HEIGHT));
        int halfWidth = width / 2;
        int cropLeft = format.getInteger("crop-left");
        int cropRight = format.getInteger("crop-right");
        int cropTop = format.getInteger("crop-top");
        int cropBottom = format.getInteger("crop-bottom");
        if (image != null) {
            cropLeft = image.getCropRect().left;
            cropRight = image.getCropRect().right - 1;
            cropTop = image.getCropRect().top;
            cropBottom = image.getCropRect().bottom - 1;
        }
        int cropWidth = cropRight - cropLeft + 1;
        int cropHeight = cropBottom - cropTop + 1;

        assertEquals(mWidth, cropWidth);
        assertEquals(mHeight, cropHeight);

        for (int i = 0; i < 8; i++) {
            int x, y;
            if (i < 4) {
                x = i * (mWidth / 4) + (mWidth / 8);
                y = mHeight / 4;
            } else {
                x = (7 - i) * (mWidth / 4) + (mWidth / 8);
                y = (mHeight * 3) / 4;
            }

            y += cropTop;
            x += cropLeft;

            int testY, testU, testV;
            if (image != null) {
                Image.Plane[] planes = image.getPlanes();
                if (planes.length == 3 && image.getFormat() == ImageFormat.YUV_420_888) {
                    testY = planes[0].getBuffer().get(y * planes[0].getRowStride()
                            + x * planes[0].getPixelStride()) & 0xff;
                    testU = planes[1].getBuffer().get((y/2) * planes[1].getRowStride()
                            + (x/2) * planes[1].getPixelStride()) & 0xff;
                    testV = planes[2].getBuffer().get((y/2) * planes[2].getRowStride()
                            + (x/2) * planes[2].getPixelStride()) & 0xff;
                } else {
                    testY = testU = testV = 0;
                }
            } else {
                int off = frameData.position();
                if (semiPlanar) {
                    // Galaxy Nexus uses OMX_TI_COLOR_FormatYUV420PackedSemiPlanar
                    testY = frameData.get(off + y * width + x) & 0xff;
                    testU = frameData.get(off + width*height + 2*(y/2) * halfWidth + 2*(x/2)) & 0xff;
                    testV = frameData.get(off + width*height + 2*(y/2) * halfWidth + 2*(x/2) + 1) & 0xff;
                } else {
                    // Nexus 10, Nexus 7 use COLOR_FormatYUV420Planar
                    testY = frameData.get(off + y * width + x) & 0xff;
                    testU = frameData.get(off + width*height + (y/2) * halfWidth + (x/2)) & 0xff;
                    testV = frameData.get(off + width*height + halfWidth * (height / 2) +
                            (y/2) * halfWidth + (x/2)) & 0xff;
                }
            }

            int expY, expU, expV;
            if (i == frameIndex % 8) {
                // colored rect
                expY = TEST_Y;
                expU = TEST_U;
                expV = TEST_V;
            } else {
                // should be our zeroed-out buffer
                expY = expU = expV = 0;
            }
            if (!isColorClose(testY, expY) ||
                    !isColorClose(testU, expU) ||
                    !isColorClose(testV, expV)) {
                Log.w(TAG, "Bad frame " + frameIndex + " (rect=" + i + ": yuv=" + testY +
                        "," + testU + "," + testV + " vs. expected " + expY + "," + expU +
                        "," + expV + ")");
                frameFailed = true;
            }
        }

        return !frameFailed;
    }

    /**
     * Generates a frame of data using GL commands.
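     * <p>
     * The whole buffer is first cleared to the background color; GL_SCISSOR_TEST then
     * confines a second clear to the active rectangle, so no geometry needs to be drawn.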
     */
    private void generateSurfaceFrame(int frameIndex) {
        frameIndex %= 8;

        int startX, startY;
        if (frameIndex < 4) {
            // (0,0) is bottom-left in GL
            startX = frameIndex * (mWidth / 4);
            startY = mHeight / 2;
        } else {
            startX = (7 - frameIndex) * (mWidth / 4);
            startY = 0;
        }

        GLES20.glDisable(GLES20.GL_SCISSOR_TEST);
        GLES20.glClearColor(TEST_R0 / 255.0f, TEST_G0 / 255.0f, TEST_B0 / 255.0f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        GLES20.glEnable(GLES20.GL_SCISSOR_TEST);
        GLES20.glScissor(startX, startY, mWidth / 4, mHeight / 2);
        GLES20.glClearColor(TEST_R1 / 255.0f, TEST_G1 / 255.0f, TEST_B1 / 255.0f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    }

    /**
     * Checks the frame for correctness.  Similar to {@link #checkFrame}, but uses GL to
     * read pixels from the current surface.
     *
     * @return true if the frame looks good
     */
    private boolean checkSurfaceFrame(int frameIndex) {
        ByteBuffer pixelBuf = ByteBuffer.allocateDirect(4); // TODO - reuse this
        boolean frameFailed = false;

        for (int i = 0; i < 8; i++) {
            // Note the coordinates are inverted on the Y-axis in GL.
            int x, y;
            if (i < 4) {
                x = i * (mWidth / 4) + (mWidth / 8);
                y = (mHeight * 3) / 4;
            } else {
                x = (7 - i) * (mWidth / 4) + (mWidth / 8);
                y = mHeight / 4;
            }

            GLES20.glReadPixels(x, y, 1, 1, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, pixelBuf);
            int r = pixelBuf.get(0) & 0xff;
            int g = pixelBuf.get(1) & 0xff;
            int b = pixelBuf.get(2) & 0xff;
            //Log.d(TAG, "GOT(" + frameIndex + "/" + i + "): r=" + r + " g=" + g + " b=" + b);

            int expR, expG, expB, expR_bt709, expG_bt709, expB_bt709;
            if (i == frameIndex % 8) {
                // colored rect
                expR = TEST_R1;
                expG = TEST_G1;
                expB = TEST_B1;
                expR_bt709 = TEST_R1_BT709;
                expG_bt709 = TEST_G1_BT709;
                expB_bt709 = TEST_B1_BT709;
            } else {
                // zero background color
                expR = TEST_R0;
                expG = TEST_G0;
                expB = TEST_B0;
                expR_bt709 = TEST_R0_BT709;
                expG_bt709 = TEST_G0_BT709;
                expB_bt709 = TEST_B0_BT709;
            }

            // Some decoders use BT.709 when converting HD (i.e. >= 720p)
            // frames from YUV to RGB, so check against both BT.601 and BT.709
            if (mAllowBT601 &&
                    isColorClose(r, expR) &&
                    isColorClose(g, expG) &&
                    isColorClose(b, expB)) {
                // frame OK on BT.601
                mAllowBT709 = false;
            } else if (mAllowBT709 &&
                    isColorClose(r, expR_bt709) &&
                    isColorClose(g, expG_bt709) &&
                    isColorClose(b, expB_bt709)) {
                // frame OK on BT.709
                mAllowBT601 = false;
            } else {
                Log.w(TAG, "Bad frame " + frameIndex + " (rect=" + i + " @ " + x + " " + y +
                        ": rgb=" + r + "," + g + "," + b + " vs. expected " + expR + "," +
                        expG + "," + expB + ")");
                frameFailed = true;
            }
        }

        return !frameFailed;
    }

    /**
     * Returns true if the actual color value is close to the expected color value.  Updates
     * mLargestColorDelta.
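     * <p>
     * The tolerance of 8 per component is an empirical allowance for encoder quantization
     * and rounding in the YUV/RGB conversions.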
     */
    boolean isColorClose(int actual, int expected) {
        final int MAX_DELTA = 8;
        int delta = Math.abs(actual - expected);
        if (delta > mLargestColorDelta) {
            mLargestColorDelta = delta;
        }
        return (delta <= MAX_DELTA);
    }

    /**
     * Generates the presentation time for frame N, in microseconds.
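     * e.g. at FRAME_RATE = 15, frame 0 maps to 132 us and frame 15 to 1000132 us; the
     * small constant offset simply makes the timestamps start at a nonzero value.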
     */
    private static long computePresentationTime(int frameIndex) {
        return 132 + frameIndex * 1000000 / FRAME_RATE;
    }

    /**
     * Tests streaming of video through the encoder and decoder.  Data is encoded from
     * a series of byte[] buffers and decoded into ByteBuffers.  The output is checked for
     * validity.
     */
    @ApiTest(apis = {"android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatYUV420Planar",
            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatYUV420PackedPlanar",
            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatYUV420SemiPlanar",
            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatYUV420PackedSemiPlanar",
            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_TI_FormatYUV420PackedSemiPlanar"})
    @Test
    public void testEncodeDecodeVideoFromBufferToBuffer() throws Exception {
        encodeDecodeVideoFromBuffer(false);
    }

    /**
     * Tests streaming of video through the encoder and decoder.  Data is encoded from
     * a series of byte[] buffers and decoded into Surfaces.  The output is checked for
     * validity.
     * <p>
     * Because of the way SurfaceTexture.OnFrameAvailableListener works, we need to run this
     * test on a thread that doesn't have a Looper configured.  If we don't, the test will
     * pass, but we won't actually test the output because we'll never receive the "frame
     * available" notifications.  The CTS test framework seems to be configuring a Looper on
     * the test thread, so we have to hand control off to a new thread for the duration of
     * the test.
     */
    @ApiTest(apis = {"android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatYUV420Planar",
            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatYUV420PackedPlanar",
            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatYUV420SemiPlanar",
            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatYUV420PackedSemiPlanar",
            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_TI_FormatYUV420PackedSemiPlanar",
            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatSurface",
            "android.opengl.GLES20#glReadPixels",
            "android.media.MediaFormat#KEY_COLOR_RANGE",
            "android.media.MediaFormat#KEY_COLOR_STANDARD",
            "android.media.MediaFormat#KEY_COLOR_TRANSFER"})
    @Test
    public void testEncodeDecodeVideoFromBufferToSurface() throws Throwable {
        BufferToSurfaceWrapper.runTest(this);
    }

    /**
     * Tests streaming of video through the encoder and decoder.  Data is provided through
     * a Surface and decoded onto a Surface.  The output is checked for validity.
     */
    @ApiTest(apis = {"android.media.MediaCodec#createInputSurface",
            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatSurface",
            "android.opengl.GLES20#glReadPixels",
            "android.media.MediaFormat#KEY_COLOR_RANGE",
            "android.media.MediaFormat#KEY_COLOR_STANDARD",
            "android.media.MediaFormat#KEY_COLOR_TRANSFER"})
    @Test
    public void testEncodeDecodeVideoFromSurfaceToSurface() throws Throwable {
        SurfaceToSurfaceWrapper.runTest(this, false, false);
    }

    @ApiTest(apis = {"AMediaCodec_createInputSurface",
            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatSurface",
            "android.opengl.GLES20#glReadPixels",
            "android.media.MediaFormat#KEY_COLOR_RANGE",
            "android.media.MediaFormat#KEY_COLOR_STANDARD",
            "android.media.MediaFormat#KEY_COLOR_TRANSFER"})
    @Test
    public void testEncodeDecodeVideoFromSurfaceToSurfaceNdk() throws Throwable {
        SurfaceToSurfaceWrapper.runTest(this, false, USE_NDK);
    }

    /**
     * Tests streaming of video through the encoder and decoder.  Data is provided through
     * a PersistentSurface and decoded onto a Surface.  The output is checked for validity.
     */
    @ApiTest(apis = {"android.media.MediaCodec#createPersistentInputSurface",
            "android.media.MediaCodec#setInputSurface",
            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatSurface",
            "android.opengl.GLES20#glReadPixels",
            "android.media.MediaFormat#KEY_COLOR_RANGE",
            "android.media.MediaFormat#KEY_COLOR_STANDARD",
            "android.media.MediaFormat#KEY_COLOR_TRANSFER"})
    @Test
    public void testEncodeDecodeVideoFromSurfaceToPersistentSurface() throws Throwable {
        SurfaceToSurfaceWrapper.runTest(this, true, false);
    }

    @ApiTest(apis = {"AMediaCodec_createPersistentInputSurface",
            "AMediaCodec_setInputSurface",
            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatSurface",
            "android.opengl.GLES20#glReadPixels",
            "android.media.MediaFormat#KEY_COLOR_RANGE",
            "android.media.MediaFormat#KEY_COLOR_STANDARD",
            "android.media.MediaFormat#KEY_COLOR_TRANSFER"})
    @Test
    public void testEncodeDecodeVideoFromSurfaceToPersistentSurfaceNdk() throws Throwable {
        SurfaceToSurfaceWrapper.runTest(this, true, USE_NDK);
    }
}