/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.video.cts;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import android.graphics.ImageFormat;
import android.graphics.Point;
import android.media.Image;
import android.media.Image.Plane;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaFormat;
import android.media.cts.CodecImage;
import android.media.cts.CodecUtils;
import android.media.cts.TestArgs;
import android.media.cts.TestUtils;
import android.media.cts.YUVImage;
import android.os.Build;
import android.util.Log;
import android.util.Pair;

import androidx.test.platform.app.InstrumentationRegistry;

import com.android.compatibility.common.util.ApiTest;
import com.android.compatibility.common.util.DeviceReportLog;
import com.android.compatibility.common.util.MediaPerfUtils;
import com.android.compatibility.common.util.MediaUtils;
import com.android.compatibility.common.util.ResultType;
import com.android.compatibility.common.util.ResultUnit;
import com.android.compatibility.common.util.Stat;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
import java.util.Random;

/**
 * Tests video encoder / decoder performance by running encoding / decoding without displaying
 * the raw data. To make things simpler, the encoder encodes synthetic data and the decoder
 * decodes the resulting bitstream; the approach therefore cannot cover codecs that are only
 * available as decoders. The performance index is the total time taken to encode and decode
 * all frames. To prevent quality from being sacrificed for faster encoding / decoding,
 * randomly selected pixels of the decoded output are compared with the original image. As the
 * pixel comparison can slow down the decoding process, only a limited number of randomly
 * selected pixels is checked per frame. As there can be only one performance index, an RMS
 * pixel error above a certain threshold is treated as a test failure.
 */
@RunWith(Parameterized.class)
public class VideoEncoderDecoderTest {
    private static final String TAG = "VideoEncoderDecoderTest";
    private static final String REPORT_LOG_NAME = "CtsVideoTestCases";
    // this wait time affects fps as too big a value will act as a blocker if device fps
    // is not very high.
    private static final long VIDEO_CODEC_WAIT_TIME_US = 1000;
    private static final boolean VERBOSE = false;
    private static final int MAX_FPS = 30; // measure performance at 30fps, this is relevant for
                                           // the meaning of bitrate

    private static final String AVC = MediaFormat.MIMETYPE_VIDEO_AVC;
    private static final String H263 = MediaFormat.MIMETYPE_VIDEO_H263;
    private static final String HEVC = MediaFormat.MIMETYPE_VIDEO_HEVC;
    private static final String MPEG2 = MediaFormat.MIMETYPE_VIDEO_MPEG2;
    private static final String MPEG4 = MediaFormat.MIMETYPE_VIDEO_MPEG4;
    private static final String VP8 = MediaFormat.MIMETYPE_VIDEO_VP8;
    private static final String VP9 = MediaFormat.MIMETYPE_VIDEO_VP9;
    private static final String AV1 = MediaFormat.MIMETYPE_VIDEO_AV1;

    // test results:

    private int mCurrentTestRound = 0;
    private double[][] mEncoderFrameTimeUsDiff;
    private double[] mEncoderFpsResults;

    private double[][] mDecoderFrameTimeUsDiff;
    private double[] mDecoderFpsResults;
    private double[] mTotalFpsResults;
    private double[] mDecoderRmsErrorResults;

    // I-frame interval for the encoder
    private static final int KEY_I_FRAME_INTERVAL = 5;
    private static final int MAX_TEST_TIMEOUT_MS = 300000; // 5 minutes

    private static final int Y_CLAMP_MIN = 16;
    private static final int Y_CLAMP_MAX = 235;
    private static final int YUV_PLANE_ADDITIONAL_LENGTH = 200;
    private ByteBuffer mYBuffer, mYDirectBuffer;
    private ByteBuffer mUVBuffer, mUVDirectBuffer;
    private int mSrcColorFormat;
    private int mDstColorFormat;
    private int mBufferWidth;
    private int mBufferHeight;
    private int mVideoWidth;
    private int mVideoHeight;
    private int mVideoStride;
    private int mVideoVStride;
    private int mFrameRate;

    private MediaFormat mEncConfigFormat;
    private MediaFormat mEncInputFormat;
    private MediaFormat mEncOutputFormat;
    private MediaFormat mDecOutputFormat;

    private LinkedList<Pair<ByteBuffer, BufferInfo>> mEncodedOutputBuffer;
    // check this many pixels per each decoded frame
    // checking too many points decreases decoder frame rates a lot.
    private static final int PIXEL_CHECK_PER_FRAME = 1000;
    // RMS error in pixel values above this will be treated as error.
    private static final double PIXEL_RMS_ERROR_MARGIN = 20.0;
    // offset legitimate timestamps away from 0, so that we'll never confuse them
    // with a missing or otherwise erroneous timestamp.
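    // computePresentationTime() / computeFrameIndex() at the end of this class rely on
    // this offset to map output timestamps back to frame indices.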
    private static final int TIMESTAMP_OFFSET = 132;
    private double mRmsErrorMargin;
    private Random mRandom;

    private boolean mUpdatedSwCodec = false;

    private String mMediaType;
    private int mWidth;
    private int mHeight;
    private String mEncoderName;
    private int mMaxBFrames;

    private class TestConfig {
        public boolean mTestPixels = true;
        public boolean mReportFrameTime = false;
        public int mTotalFrames = 300;
        public int mMinNumFrames = 300;
        public int mMaxTimeMs = 120000;  // 2 minutes
        public int mMinTimeMs = 10000;   // 10 seconds
        public int mNumberOfRepeat = 10;

        public void initPerfTest() {
            mTestPixels = false;
            mTotalFrames = 30000;
            mMinNumFrames = 3000;
            mNumberOfRepeat = 2;
        }
    }

    private TestConfig mTestConfig;

    private static boolean isPreferredAbi() {
        boolean prefers64Bit = false;
        if (Build.SUPPORTED_64_BIT_ABIS.length > 0 &&
                Build.SUPPORTED_ABIS.length > 0 &&
                Build.SUPPORTED_ABIS[0].equals(Build.SUPPORTED_64_BIT_ABIS[0])) {
            prefers64Bit = true;
        }
        return android.os.Process.is64Bit() ? prefers64Bit : !prefers64Bit;
    }

    @Before
    public void setUp() throws Exception {
        mEncodedOutputBuffer = new LinkedList<Pair<ByteBuffer, BufferInfo>>();
        mUpdatedSwCodec =
                !TestUtils.isMainlineModuleFactoryVersion("com.google.android.media.swcodec");
        // Use time as a seed, hoping to prevent checking pixels in the same pattern
        long now = System.currentTimeMillis();
        mRandom = new Random(now);
        mTestConfig = new TestConfig();
    }

    @After
    public void tearDown() throws Exception {
        mEncodedOutputBuffer.clear();
        mEncodedOutputBuffer = null;
        mYBuffer = null;
        mUVBuffer = null;
        mYDirectBuffer = null;
        mUVDirectBuffer = null;
        mRandom = null;
        mTestConfig = null;
    }

    /** run performance test. */
    private void perf(String mimeType, int w, int h, String encoder, int maxBFrames)
            throws Exception {
        doTest(mimeType, w, h, true /* isPerf */, encoder, maxBFrames);
    }

    /** run quality test. */
    private void qual(String mimeType, int w, int h, String encoder, int maxBFrames)
            throws Exception {
        qual(mimeType, w, h, encoder, maxBFrames, PIXEL_RMS_ERROR_MARGIN);
    }

    /** run quality test with configurable error. */
    private void qual(String mimeType, int w, int h, String encoder, int maxBFrames, double margin)
            throws Exception {
        mRmsErrorMargin = margin;
        doTest(mimeType, w, h, false /* isPerf */, encoder, maxBFrames);
    }

    static void prepareParamsList(List<Object[]> testParams, String mediaType, int[] widths,
            int[] heights) {
        String[] encoderNames = MediaUtils.getEncoderNamesForMime(mediaType);
        int[] maxBFrames = {0, 2};
        for (int i = 0; i < widths.length; i++) {
            MediaFormat format =
                    MediaFormat.createVideoFormat(mediaType, widths[i], heights[i]);
            for (String encoder : encoderNames) {
                if (TestArgs.shouldSkipCodec(encoder)) {
                    continue;
                }
                if (MediaUtils.supports(encoder, format)) {
                    for (int maxBFrame : maxBFrames) {
                        // a non-zero B-frame count is only exercised for AVC and HEVC
                        if (!mediaType.equals(MediaFormat.MIMETYPE_VIDEO_AVC)
                                && !mediaType.equals(MediaFormat.MIMETYPE_VIDEO_HEVC)
                                && maxBFrame != 0) {
                            continue;
                        }
                        testParams.add(
                                new Object[]{mediaType, widths[i], heights[i], encoder, maxBFrame});
                    }
                }
            }
        }
    }

    @Parameterized.Parameters(name = "{0}_{3}_{1}x{2}_{4}")
    public static Collection<Object[]> input() throws IOException {
        final List<Object[]> testParams = new ArrayList<>();
        final String[] mediaTypes = {AVC, HEVC, MPEG2, MPEG4, VP8, VP9, H263, AV1};
        for (String mediaType : mediaTypes) {
            if (mediaType.equals(AVC)) {
                int[] widths = {320, 720, 1280, 1920};
                int[] heights = {240, 480, 720, 1080};
                prepareParamsList(testParams, mediaType, widths, heights);
            } else if (mediaType.equals(H263)) {
                int[] widths = {176, 352, 704, 1408};
                int[] heights = {144, 288, 576, 1152};
                prepareParamsList(testParams, mediaType, widths, heights);
            } else if (mediaType.equals(HEVC)) {
                int[] widths = {320, 720, 1280, 1920, 3840};
                int[] heights = {240, 480, 720, 1080, 2160};
                prepareParamsList(testParams, mediaType, widths, heights);
            } else if (mediaType.equals(MPEG2)) {
                int[] widths = {176, 352, 640, 1280, 1920};
                int[] heights = {144, 288, 480, 720, 1080};
                prepareParamsList(testParams, mediaType, widths, heights);
            } else if (mediaType.equals(MPEG4)) {
                int[] widths = {176, 352, 640, 1280};
                int[] heights = {144, 288, 480, 720};
                prepareParamsList(testParams, mediaType, widths, heights);
            } else if (mediaType.equals(VP8)) {
                int[] widths = {320, 640, 1280, 1920};
                int[] heights = {180, 360, 720, 1080};
                prepareParamsList(testParams, mediaType, widths, heights);
            } else if (mediaType.equals(VP9)) {
                int[] widths = {320, 640, 1280, 1920, 3840};
                int[] heights = {180, 360, 720, 1080, 2160};
                prepareParamsList(testParams, mediaType, widths, heights);
            } else if (mediaType.equals(AV1)) {
                int[] widths = {320, 720, 1280, 1920};
                int[] heights = {240, 480, 720, 1080};
                prepareParamsList(testParams, mediaType, widths, heights);
            }
        }
        return testParams;
    }

    public VideoEncoderDecoderTest(String mediaType, int width, int height,
            String encoderName, int maxBFrames) {
        this.mMediaType = mediaType;
        this.mWidth = width;
        this.mHeight = height;
        this.mEncoderName = encoderName;
        this.mMaxBFrames = maxBFrames;
    }

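    // With the name pattern above, a generated test name reads, for example (illustrative
    // only): testQual[video/avc_c2.android.avc.encoder_1280x720_0]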
{"VideoCapabilities#getSupportedWidths", 303 "VideoCapabilities#getSupportedHeightsFor", 304 "VideoCapabilities#getSupportedFrameRatesFor", 305 "VideoCapabilities#getBitrateRange", 306 "VideoCapabilities#getAchievableFrameRatesFor", 307 "CodecCapabilities#COLOR_FormatYUV420SemiPlanar", 308 "CodecCapabilities#COLOR_FormatYUV420Planar", 309 "CodecCapabilities#COLOR_FormatYUV420Flexible", 310 "android.media.MediaFormat#KEY_MAX_B_FRAMES"}) 311 @Test testQual()312 public void testQual() throws Exception { 313 if (mMediaType == H263 && (mWidth == 704 || mWidth == 1408)) { 314 qual(mMediaType, mWidth, mHeight, mEncoderName, mMaxBFrames, 25); 315 } else { 316 qual(mMediaType, mWidth, mHeight, mEncoderName, mMaxBFrames); 317 } 318 } 319 320 @ApiTest(apis = {"VideoCapabilities#getSupportedWidths", 321 "VideoCapabilities#getSupportedHeightsFor", 322 "VideoCapabilities#getSupportedFrameRatesFor", 323 "VideoCapabilities#getBitrateRange", 324 "VideoCapabilities#getAchievableFrameRatesFor", 325 "CodecCapabilities#COLOR_FormatYUV420SemiPlanar", 326 "CodecCapabilities#COLOR_FormatYUV420Planar", 327 "CodecCapabilities#COLOR_FormatYUV420Flexible", 328 "android.media.MediaFormat#KEY_MAX_B_FRAMES"}) 329 @Test testPerf()330 public void testPerf() throws Exception { 331 perf(mMediaType, mWidth, mHeight, mEncoderName, mMaxBFrames); 332 } 333 isSrcSemiPlanar()334 private boolean isSrcSemiPlanar() { 335 return mSrcColorFormat == CodecCapabilities.COLOR_FormatYUV420SemiPlanar; 336 } 337 isSrcFlexYUV()338 private boolean isSrcFlexYUV() { 339 return mSrcColorFormat == CodecCapabilities.COLOR_FormatYUV420Flexible; 340 } 341 isDstSemiPlanar()342 private boolean isDstSemiPlanar() { 343 return mDstColorFormat == CodecCapabilities.COLOR_FormatYUV420SemiPlanar; 344 } 345 isDstFlexYUV()346 private boolean isDstFlexYUV() { 347 return mDstColorFormat == CodecCapabilities.COLOR_FormatYUV420Flexible; 348 } 349 getColorFormat(CodecInfo info)350 private static int getColorFormat(CodecInfo info) { 351 if (info.mSupportSemiPlanar) { 352 return CodecCapabilities.COLOR_FormatYUV420SemiPlanar; 353 } else if (info.mSupportPlanar) { 354 return CodecCapabilities.COLOR_FormatYUV420Planar; 355 } else { 356 // FlexYUV must be supported 357 return CodecCapabilities.COLOR_FormatYUV420Flexible; 358 } 359 } 360 361 private static class RunResult { 362 public final int mNumFrames; 363 public final double mDurationMs; 364 public final double mRmsError; 365 RunResult()366 RunResult() { 367 mNumFrames = 0; 368 mDurationMs = Double.NaN; 369 mRmsError = Double.NaN; 370 } 371 RunResult(int numFrames, double durationMs)372 RunResult(int numFrames, double durationMs) { 373 mNumFrames = numFrames; 374 mDurationMs = durationMs; 375 mRmsError = Double.NaN; 376 } 377 RunResult(int numFrames, double durationMs, double rmsError)378 RunResult(int numFrames, double durationMs, double rmsError) { 379 mNumFrames = numFrames; 380 mDurationMs = durationMs; 381 mRmsError = rmsError; 382 } 383 } 384 doTest(String mimeType, int w, int h, boolean isPerf, String encoderName, int maxBFrames)385 private void doTest(String mimeType, int w, int h, boolean isPerf, String encoderName, 386 int maxBFrames) throws Exception { 387 if (TestArgs.shouldSkipMediaType(mimeType)) { 388 return; 389 } 390 MediaFormat format = MediaFormat.createVideoFormat(mimeType, w, h); 391 392 if (isPerf) { 393 mTestConfig.initPerfTest(); 394 } 395 396 if (TestArgs.shouldSkipCodec(encoderName)) { 397 return; 398 } 399 CodecInfo infoEnc = CodecInfo.getSupportedFormatInfo(encoderName, mimeType, w, h, 
        CodecInfo infoEnc =
                CodecInfo.getSupportedFormatInfo(encoderName, mimeType, w, h, MAX_FPS);
        assertNotNull(infoEnc);

        // Skip decoding pass for performance tests as bitstream complexity is not representative
        String[] decoderNames = null; // no decoding pass required by default
        int codingPasses = 1; // used for time limit. 1 for encoding pass
        int numRuns = mTestConfig.mNumberOfRepeat; // used for result array sizing
        if (!isPerf) {
            // consider all decoders for quality tests
            decoderNames = MediaUtils.getDecoderNames(format);
            if (decoderNames.length == 0) {
                MediaUtils.skipTest("No decoders for " + format);
                return;
            }
            numRuns *= decoderNames.length; // combine each decoder with the encoder
            codingPasses += decoderNames.length;
        }

        // be a bit conservative
        mTestConfig.mMaxTimeMs = Math.min(
                mTestConfig.mMaxTimeMs, MAX_TEST_TIMEOUT_MS / 5 * 4 / codingPasses
                        / mTestConfig.mNumberOfRepeat);
        // reduce test-run on non-real devices
        if (MediaUtils.onFrankenDevice()) {
            mTestConfig.mMaxTimeMs /= 10;
        }
        Log.i(TAG, "current ABI is " + (isPreferredAbi() ? "" : "not ") + "a preferred one");

        mVideoWidth = w;
        mVideoHeight = h;
        mSrcColorFormat = getColorFormat(infoEnc);
        Log.i(TAG, "Testing video resolution " + w + "x" + h + ": enc format " + mSrcColorFormat);

        initYUVPlane(w + YUV_PLANE_ADDITIONAL_LENGTH, h + YUV_PLANE_ADDITIONAL_LENGTH);

        // Adjust total number of frames to prevent OOM.
        Runtime rt = Runtime.getRuntime();
        long usedMemory = rt.totalMemory() - rt.freeMemory();
        mTestConfig.mTotalFrames = Math.min(mTestConfig.mTotalFrames,
                (int) (rt.maxMemory() - usedMemory) / 4 * 3 /
                        (infoEnc.mBitRate / 8 / infoEnc.mFps + 1));
        Log.i(TAG, "Total testing frames " + mTestConfig.mTotalFrames);

        mEncoderFrameTimeUsDiff = new double[numRuns][mTestConfig.mTotalFrames - 1];
        mEncoderFpsResults = new double[numRuns];

        if (decoderNames != null) {
            mDecoderFrameTimeUsDiff = new double[numRuns][mTestConfig.mTotalFrames - 1];
            mDecoderFpsResults = new double[numRuns];
            mTotalFpsResults = new double[numRuns];
            mDecoderRmsErrorResults = new double[numRuns];
        }

        boolean success = true;
        int runIx = 0;
        for (int i = 0; i < mTestConfig.mNumberOfRepeat && success; i++) {
            mCurrentTestRound = runIx;
            format = new MediaFormat();
            format.setString(MediaFormat.KEY_MIME, mimeType);
            format.setInteger(MediaFormat.KEY_BIT_RATE, infoEnc.mBitRate);
            format.setInteger(MediaFormat.KEY_BITRATE_MODE,
                    MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_VBR);
            format.setInteger(MediaFormat.KEY_WIDTH, w);
            format.setInteger(MediaFormat.KEY_HEIGHT, h);
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT, mSrcColorFormat);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, infoEnc.mFps);
            mFrameRate = infoEnc.mFps;
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, KEY_I_FRAME_INTERVAL);
            format.setInteger(MediaFormat.KEY_MAX_B_FRAMES, maxBFrames);

            RunResult encodingResult =
                    runEncoder(encoderName, format, mTestConfig.mTotalFrames, i);
            double encodingTime = encodingResult.mDurationMs;
            int framesEncoded = encodingResult.mNumFrames;

            if (decoderNames != null && decoderNames.length > 0) {
                for (String decoderName : decoderNames) {
                    if (TestArgs.shouldSkipCodec(decoderName)) {
                        continue;
                    }
                    CodecInfo infoDec =
                            CodecInfo.getSupportedFormatInfo(decoderName, mimeType, w, h, MAX_FPS);
                    assertNotNull(infoDec);
                    mDstColorFormat = getColorFormat(infoDec);

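                    // The decoder consumes the bitstream buffered in mEncodedOutputBuffer
                    // during the encoding pass above.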
                    // re-initialize format for decoder
                    format = new MediaFormat();
                    format.setString(MediaFormat.KEY_MIME, mimeType);
                    format.setInteger(MediaFormat.KEY_WIDTH, w);
                    format.setInteger(MediaFormat.KEY_HEIGHT, h);
                    format.setInteger(MediaFormat.KEY_COLOR_FORMAT, mDstColorFormat);
                    RunResult decoderResult = runDecoder(decoderName, format, i);
                    if (decoderResult == null) {
                        success = false;
                    } else {
                        double decodingTime = decoderResult.mDurationMs;
                        mDecoderRmsErrorResults[runIx] = decoderResult.mRmsError;
                        mEncoderFpsResults[runIx] = framesEncoded / encodingTime;
                        int framesDecoded = decoderResult.mNumFrames;
                        mDecoderFpsResults[runIx] = framesDecoded / decodingTime;
                        if (framesDecoded == framesEncoded) {
                            mTotalFpsResults[runIx] =
                                    framesEncoded / (encodingTime + decodingTime);
                        }
                    }
                    ++runIx;
                }
            } else {
                mEncoderFpsResults[runIx] = mTestConfig.mTotalFrames / encodingTime;
                ++runIx;
            }

            // clear things for re-start
            mEncodedOutputBuffer.clear();
            // it will be good to clean everything to make every run the same.
            System.gc();
        }

        // log results before verification
        double[] measuredFps = new double[numRuns];
        if (isPerf) {
            for (int i = 0; i < numRuns; i++) {
                measuredFps[i] = logPerformanceResults(encoderName, i);
            }
        }
        if (mTestConfig.mTestPixels && decoderNames != null) {
            logQualityResults(mimeType, encoderName, decoderNames);
            for (int i = 0; i < numRuns; i++) {
                // make sure that rms error is not too big for all runs
                if (mDecoderRmsErrorResults[i] >= mRmsErrorMargin) {
                    fail("rms error is bigger than the limit "
                            + Arrays.toString(mDecoderRmsErrorResults) + " vs " + mRmsErrorMargin);
                }
            }
        }

        if (isPerf) {
            // allow improvements in mainline-updated google-supplied software codecs.
            boolean fasterIsOk = mUpdatedSwCodec && encoderName.startsWith("c2.android.");
            String error = MediaPerfUtils.verifyAchievableFrameRates(
                    encoderName, mimeType, w, h, fasterIsOk, maxBFrames > 0, measuredFps);
            // Performance numbers only make sense on real devices, so skip on non-real devices
            //
            // Also ignore verification on non-preferred ABIs due to the possibility of
            // this being emulated. On some CPUs 32-bit mode is emulated using big cores,
            // which results in the SW codecs also running much faster (perhaps they are
            // scheduled for the big cores as well)
            // TODO: still verify lower bound.
            if ((MediaUtils.onFrankenDevice() || (infoEnc.mIsSoftware && !isPreferredAbi()))
                    && error != null) {
                // ensure there is data, but don't insist that it is correct
                assertFalse(error, error.startsWith("Failed to get "));
            } else {
                assertNull(error, error);
            }
        }
        assertTrue(success);
    }

    private void logQualityResults(String mimeType, String encoderName, String[] decoderNames) {
        String streamName = "video_encoder_decoder_quality";
        DeviceReportLog log = new DeviceReportLog(REPORT_LOG_NAME, streamName);
        log.addValue("encoder_name", encoderName, ResultType.NEUTRAL, ResultUnit.NONE);
        log.addValues("decoder_names", Arrays.asList(decoderNames), ResultType.NEUTRAL,
                ResultUnit.NONE);
        log.addValue("mime_type", mimeType, ResultType.NEUTRAL, ResultUnit.NONE);
        log.addValue("width", mVideoWidth, ResultType.NEUTRAL, ResultUnit.NONE);
        log.addValue("height", mVideoHeight, ResultType.NEUTRAL, ResultUnit.NONE);
        log.addValues("encoder_fps", mEncoderFpsResults, ResultType.HIGHER_BETTER,
                ResultUnit.FPS);
        log.addValues("rms_error", mDecoderRmsErrorResults, ResultType.LOWER_BETTER,
                ResultUnit.NONE);
        log.addValues("decoder_fps", mDecoderFpsResults, ResultType.HIGHER_BETTER,
                ResultUnit.FPS);
        log.addValues("encoder_decoder_fps", mTotalFpsResults, ResultType.HIGHER_BETTER,
                ResultUnit.FPS);
        log.addValue("encoder_average_fps", Stat.getAverage(mEncoderFpsResults),
                ResultType.HIGHER_BETTER, ResultUnit.FPS);
        log.addValue("decoder_average_fps", Stat.getAverage(mDecoderFpsResults),
                ResultType.HIGHER_BETTER, ResultUnit.FPS);
        log.setSummary("encoder_decoder_average_fps", Stat.getAverage(mTotalFpsResults),
                ResultType.HIGHER_BETTER, ResultUnit.FPS);
        log.submit(InstrumentationRegistry.getInstrumentation());
    }

    private double logPerformanceResults(String encoderName, int round) {
        String streamName = "video_encoder_performance";
        DeviceReportLog log = new DeviceReportLog(REPORT_LOG_NAME, streamName);
        String message = MediaPerfUtils.addPerformanceHeadersToLog(
                log, "encoder stats:", round, encoderName,
                mEncConfigFormat, mEncInputFormat, mEncOutputFormat);
        double[] frameTimeUsDiff = mEncoderFrameTimeUsDiff[round];
        double fps = MediaPerfUtils.addPerformanceStatsToLog(
                log, new MediaUtils.Stats(frameTimeUsDiff), message);

        if (mTestConfig.mReportFrameTime) {
            // convert the accumulated microsecond diffs into millisecond diffs for reporting
            double[] msDiff = new double[frameTimeUsDiff.length];
            double nowUs = 0, lastMs = 0;
            for (int i = 0; i < frameTimeUsDiff.length; ++i) {
                nowUs += frameTimeUsDiff[i];
                double nowMs = Math.round(nowUs) / 1000.;
                msDiff[i] = Math.round((nowMs - lastMs) * 1000) / 1000.;
                lastMs = nowMs;
            }
            log.addValues("encoder_raw_diff", msDiff, ResultType.NEUTRAL, ResultUnit.MS);
        }

        log.submit(InstrumentationRegistry.getInstrumentation());
        return fps;
    }

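    // The encode loop below drives the codec synchronously with short (1 ms,
    // VIDEO_CODEC_WAIT_TIME_US) dequeue timeouts; the interval between successive output
    // buffers is recorded into mEncoderFrameTimeUsDiff and fed to the perf stats above.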
    /**
     * run encoder benchmarking
     * @param encoderName encoder name
     * @param format format of media to encode
     * @param totalFrames total number of frames to encode
     * @return the number of frames encoded and the time taken to encode them.
     *         This does not include initialization time.
     */
    private RunResult runEncoder(
            String encoderName, MediaFormat format, int totalFrames, int runId) {
        MediaCodec codec = null;
        try {
            codec = MediaCodec.createByCodecName(encoderName);
            mEncConfigFormat = format;
            codec.configure(
                    format,
                    null /* surface */,
                    null /* crypto */,
                    MediaCodec.CONFIGURE_FLAG_ENCODE);
        } catch (IllegalStateException e) {
            Log.e(TAG, "codec '" + encoderName + "' failed configuration.");
            codec.release();
            fail("codec '" + encoderName + "' failed configuration.");
        } catch (IOException | NullPointerException e) {
            Log.i(TAG, "could not find codec for " + format);
            return new RunResult();
        }
        codec.start();
        mEncInputFormat = codec.getInputFormat();
        ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
        MediaFormat inputFormat = codec.getInputFormat();
        mVideoStride = inputFormat.containsKey(MediaFormat.KEY_STRIDE)
                ? inputFormat.getInteger(MediaFormat.KEY_STRIDE)
                : inputFormat.getInteger(MediaFormat.KEY_WIDTH);
        mVideoVStride = inputFormat.containsKey(MediaFormat.KEY_SLICE_HEIGHT)
                ? inputFormat.getInteger(MediaFormat.KEY_SLICE_HEIGHT)
                : inputFormat.getInteger(MediaFormat.KEY_HEIGHT);

        int numBytesSubmitted = 0;
        int numBytesDequeued = 0;
        int inFramesCount = 0;
        int outFramesCount = 0;
        long lastOutputTimeUs = 0;
        long start = System.currentTimeMillis();
        while (true) {
            int index;

            if (inFramesCount < totalFrames) {
                index = codec.dequeueInputBuffer(VIDEO_CODEC_WAIT_TIME_US /* timeoutUs */);
                if (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
                    int size;
                    long elapsedMs = System.currentTimeMillis() - start;
                    boolean eos = (inFramesCount == totalFrames - 1
                            || elapsedMs > mTestConfig.mMaxTimeMs
                            || (elapsedMs > mTestConfig.mMinTimeMs
                                    && inFramesCount > mTestConfig.mMinNumFrames));

                    // when encoder only supports flexYUV, use Image only; otherwise,
                    // use ByteBuffer & Image each on half of the frames to test both
                    if (isSrcFlexYUV() || inFramesCount % 2 == 0) {
                        Image image = codec.getInputImage(index);
                        // image should always be available
                        assertNotNull(image);
                        size = queueInputImageEncoder(
                                codec, image, index, inFramesCount,
                                eos ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0, runId);
                    } else {
                        ByteBuffer buffer = codec.getInputBuffer(index);
                        size = queueInputBufferEncoder(
                                codec, buffer, index, inFramesCount,
                                eos ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0, runId);
                    }
                    inFramesCount++;
                    numBytesSubmitted += size;
                    if (VERBOSE) {
                        Log.d(TAG, "queued " + size + " bytes of input data, frame "
                                + (inFramesCount - 1));
                    }
                }
            }
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            index = codec.dequeueOutputBuffer(info, VIDEO_CODEC_WAIT_TIME_US /* timeoutUs */);
            if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
            } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                mEncOutputFormat = codec.getOutputFormat();
            } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                codecOutputBuffers = codec.getOutputBuffers();
            } else if (index >= 0) {
                long nowUs = (System.nanoTime() + 500) / 1000;
                dequeueOutputBufferEncoder(codec, codecOutputBuffers, index, info);
                if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
                    int pos = outFramesCount - 1;
                    if (pos >= 0 && pos < mEncoderFrameTimeUsDiff[mCurrentTestRound].length) {
                        mEncoderFrameTimeUsDiff[mCurrentTestRound][pos] = nowUs - lastOutputTimeUs;
                    }
                    lastOutputTimeUs = nowUs;

                    numBytesDequeued += info.size;
                    ++outFramesCount;
                }
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    if (VERBOSE) {
                        Log.d(TAG, "dequeued output EOS.");
                    }
                    break;
                }
                if (VERBOSE) {
                    Log.d(TAG, "dequeued " + info.size + " bytes of output data.");
                }
            }
        }
        long finish = System.currentTimeMillis();
        int validDataNum = Math.min(mEncodedOutputBuffer.size() - 1,
                mEncoderFrameTimeUsDiff[mCurrentTestRound].length);
        mEncoderFrameTimeUsDiff[mCurrentTestRound] =
                Arrays.copyOf(mEncoderFrameTimeUsDiff[mCurrentTestRound], validDataNum);
        if (VERBOSE) {
            Log.d(TAG, "queued a total of " + numBytesSubmitted + " bytes, "
                    + "dequeued " + numBytesDequeued + " bytes.");
        }
        codec.stop();
        codec.release();
        codec = null;

        mEncOutputFormat.setInteger(MediaFormat.KEY_BIT_RATE,
                format.getInteger(MediaFormat.KEY_BIT_RATE));
        mEncOutputFormat.setInteger(MediaFormat.KEY_FRAME_RATE,
                format.getInteger(MediaFormat.KEY_FRAME_RATE));
        if (outFramesCount > 0) {
            mEncOutputFormat.setInteger(
                    "actual-bitrate",
                    (int) (numBytesDequeued * 8. * format.getInteger(MediaFormat.KEY_FRAME_RATE)
                            / outFramesCount));
        }
        return new RunResult(outFramesCount, (finish - start) / 1000.);
    }

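    // The two queueInput*Encoder() helpers below fill encoder input from the reference YUV
    // planes: the ByteBuffer variant writes rows directly, honoring the encoder's reported
    // KEY_STRIDE / KEY_SLICE_HEIGHT, while the Image variant copies through a flexible-YUV
    // wrapper (YUVImage / CodecUtils.copyFlexYUVImage).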
    /**
     * Fills input buffer for encoder from YUV buffers.
     * @return size of enqueued data.
     */
    private int queueInputBufferEncoder(
            MediaCodec codec, ByteBuffer buffer, int index, int frameCount, int flags, int runId) {
        buffer.clear();

        Point origin = getOrigin(frameCount, runId);
        // Y color first
        int srcOffsetY = origin.x + origin.y * mBufferWidth;
        final byte[] yBuffer = mYBuffer.array();
        for (int i = 0; i < mVideoHeight; i++) {
            buffer.position(i * mVideoStride);
            buffer.put(yBuffer, srcOffsetY, mVideoWidth);
            srcOffsetY += mBufferWidth;
        }
        if (isSrcSemiPlanar()) {
            int srcOffsetU = origin.y / 2 * mBufferWidth + origin.x / 2 * 2;
            final byte[] uvBuffer = mUVBuffer.array();
            for (int i = 0; i < mVideoHeight / 2; i++) {
                buffer.position(mVideoVStride * mVideoStride + i * mVideoStride);
                buffer.put(uvBuffer, srcOffsetU, mVideoWidth);
                srcOffsetU += mBufferWidth;
            }
        } else {
            int srcOffsetU = origin.y / 2 * mBufferWidth / 2 + origin.x / 2;
            int srcOffsetV = srcOffsetU + mBufferWidth / 2 * mBufferHeight / 2;
            final byte[] uvBuffer = mUVBuffer.array();
            for (int i = 0; i < mVideoHeight / 2; i++) { // U only
                buffer.position(mVideoVStride * mVideoStride + i * mVideoStride / 2);
                buffer.put(uvBuffer, srcOffsetU, mVideoWidth / 2);
                srcOffsetU += mBufferWidth / 2;
            }
            for (int i = 0; i < mVideoHeight / 2; i++) { // V only
                buffer.position(mVideoVStride * mVideoStride * 5 / 4 + i * mVideoStride / 2);
                buffer.put(uvBuffer, srcOffsetV, mVideoWidth / 2);
                srcOffsetV += mBufferWidth / 2;
            }
        }
        // submit till end of the data
        int size = buffer.position();
        long ptsUsec = computePresentationTime(frameCount);

        codec.queueInputBuffer(index, 0 /* offset */, size, ptsUsec /* timeUs */, flags);
        if (VERBOSE && (frameCount == 0)) {
            printByteArray("Y ", mYBuffer.array(), 0, 20);
            printByteArray("UV ", mUVBuffer.array(), 0, 20);
            printByteArray("UV ", mUVBuffer.array(), mBufferWidth * 60, 20);
        }
        return size;
    }

    /**
     * Fills input image for encoder from YUV buffers.
     * @return size of enqueued data.
     */
    private int queueInputImageEncoder(
            MediaCodec codec, Image image, int index, int frameCount, int flags, int runId) {
        assertTrue(image.getFormat() == ImageFormat.YUV_420_888);

        Point origin = getOrigin(frameCount, runId);

        // Y color first
        CodecImage srcImage = new YUVImage(
                origin,
                mVideoWidth, mVideoHeight,
                mBufferWidth, mBufferHeight,
                isSrcSemiPlanar(),
                mYDirectBuffer, mUVDirectBuffer);

        CodecUtils.copyFlexYUVImage(image, srcImage);

        int size = mVideoHeight * mVideoWidth * 3 / 2;
        long ptsUsec = computePresentationTime(frameCount);

        codec.queueInputBuffer(index, 0 /* offset */, size, ptsUsec /* timeUs */, flags);
        if (VERBOSE && (frameCount == 0)) {
            printByteArray("Y ", mYBuffer.array(), 0, 20);
            printByteArray("UV ", mUVBuffer.array(), 0, 20);
            printByteArray("UV ", mUVBuffer.array(), mBufferWidth * 60, 20);
        }
        return size;
    }

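    // Encoded output is deep-copied before the codec buffer is released below, since the
    // decoding pass re-reads the stored bitstream after the encoder has been torn down.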
    /**
     * Dequeue encoded data from output buffer and store for later usage.
     */
    private void dequeueOutputBufferEncoder(
            MediaCodec codec, ByteBuffer[] outputBuffers,
            int index, MediaCodec.BufferInfo info) {
        ByteBuffer output = outputBuffers[index];
        int l = info.size;
        ByteBuffer copied = ByteBuffer.allocate(l);
        output.get(copied.array(), 0, l);
        BufferInfo savedInfo = new BufferInfo();
        savedInfo.set(0, l, info.presentationTimeUs, info.flags);
        mEncodedOutputBuffer.addLast(Pair.create(copied, savedInfo));
        codec.releaseOutputBuffer(index, false /* render */);
    }

    /**
     * run decoder benchmarking with encoded stream stored from encoding phase
     * @param decoderName decoder name
     * @param format format of media to decode
     * @return a RunResult with the number of decoded frames, the decoding time, and the
     *         RMS error of the checked pixels, or null if the decoder could not be created
     */
    private RunResult runDecoder(String decoderName, MediaFormat format, int runId) {
        MediaCodec codec = null;
        try {
            codec = MediaCodec.createByCodecName(decoderName);
        } catch (IOException | NullPointerException e) {
            Log.i(TAG, "could not find decoder for " + format);
            return null;
        }
        codec.configure(format, null /* surface */, null /* crypto */, 0 /* flags */);
        codec.start();
        ByteBuffer[] codecInputBuffers = codec.getInputBuffers();

        double totalErrorSquared = 0;

        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        boolean sawOutputEOS = false;
        int inputLeft = mEncodedOutputBuffer.size();
        int inputBufferCount = 0;
        int outFrameCount = 0;
        YUVValue expected = new YUVValue();
        YUVValue decoded = new YUVValue();
        long lastOutputTimeUs = 0;
        long start = System.currentTimeMillis();
        while (!sawOutputEOS) {
            if (inputLeft > 0) {
                int inputBufIndex = codec.dequeueInputBuffer(VIDEO_CODEC_WAIT_TIME_US);

                if (inputBufIndex >= 0) {
                    ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
                    dstBuf.clear();
                    ByteBuffer src = mEncodedOutputBuffer.get(inputBufferCount).first;
                    BufferInfo srcInfo = mEncodedOutputBuffer.get(inputBufferCount).second;
                    int writeSize = src.capacity();
                    dstBuf.put(src.array(), 0, writeSize);

                    int flags = srcInfo.flags;
                    if ((System.currentTimeMillis() - start) > mTestConfig.mMaxTimeMs) {
                        flags |= MediaCodec.BUFFER_FLAG_END_OF_STREAM;
                    }

                    codec.queueInputBuffer(
                            inputBufIndex,
                            0 /* offset */,
                            writeSize,
                            srcInfo.presentationTimeUs,
                            flags);
                    inputLeft--;
                    inputBufferCount++;
                }
            }

            int res = codec.dequeueOutputBuffer(info, VIDEO_CODEC_WAIT_TIME_US);
            if (res >= 0) {
                int outputBufIndex = res;

                // only do the YUV compare when the buffer size is non-zero
                // (the EOS frame can be empty)
                if (info.size > 0) {
                    long nowUs = (System.nanoTime() + 500) / 1000;
                    int pos = outFrameCount - 1;
                    if (pos >= 0 && pos < mDecoderFrameTimeUsDiff[mCurrentTestRound].length) {
                        mDecoderFrameTimeUsDiff[mCurrentTestRound][pos] = nowUs - lastOutputTimeUs;
                    }
                    lastOutputTimeUs = nowUs;

                    if (mTestConfig.mTestPixels) {
                        Point origin = getOrigin(computeFrameIndex(info.presentationTimeUs), runId);
                        int i;

                        // if decoder supports planar or semiplanar, check output with
                        // ByteBuffer & Image each on half of the points
                        int pixelCheckPerFrame = PIXEL_CHECK_PER_FRAME;
                        if (!isDstFlexYUV()) {
                            pixelCheckPerFrame /= 2;
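                            // each of the two check paths then samples half the points, so
                            // the per-frame total stays at PIXEL_CHECK_PER_FRAME, matching
                            // the RMS divisor at the end of this method.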
                            ByteBuffer buf = codec.getOutputBuffer(outputBufIndex);
                            if (VERBOSE && (outFrameCount == 0)) {
                                printByteBuffer("Y ", buf, 0, 20);
                                printByteBuffer("UV ", buf, mVideoWidth * mVideoHeight, 20);
                                printByteBuffer("UV ", buf,
                                        mVideoWidth * mVideoHeight + mVideoWidth * 60, 20);
                            }
                            for (i = 0; i < pixelCheckPerFrame; i++) {
                                int w = mRandom.nextInt(mVideoWidth);
                                int h = mRandom.nextInt(mVideoHeight);
                                getPixelValuesFromYUVBuffers(origin.x, origin.y, w, h, expected);
                                getPixelValuesFromOutputBuffer(buf, w, h, decoded);
                                if (VERBOSE) {
                                    Log.i(TAG, outFrameCount + "-" + i + "-th round: ByteBuffer:"
                                            + " expected "
                                            + expected.mY + "," + expected.mU + "," + expected.mV
                                            + " decoded "
                                            + decoded.mY + "," + decoded.mU + "," + decoded.mV);
                                }
                                totalErrorSquared += expected.calcErrorSquared(decoded);
                            }
                        }

                        Image image = codec.getOutputImage(outputBufIndex);
                        assertNotNull(image);
                        for (i = 0; i < pixelCheckPerFrame; i++) {
                            int w = mRandom.nextInt(mVideoWidth);
                            int h = mRandom.nextInt(mVideoHeight);
                            getPixelValuesFromYUVBuffers(origin.x, origin.y, w, h, expected);
                            getPixelValuesFromImage(image, w, h, decoded);
                            if (VERBOSE) {
                                Log.i(TAG, outFrameCount + "-" + i + "-th round: FlexYUV:"
                                        + " expected "
                                        + expected.mY + "," + expected.mU + "," + expected.mV
                                        + " decoded "
                                        + decoded.mY + "," + decoded.mU + "," + decoded.mV);
                            }
                            totalErrorSquared += expected.calcErrorSquared(decoded);
                        }
                    }
                    outFrameCount++;
                }
                codec.releaseOutputBuffer(outputBufIndex, false /* render */);
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    Log.d(TAG, "saw output EOS.");
                    sawOutputEOS = true;
                }
            } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                mDecOutputFormat = codec.getOutputFormat();
                Log.d(TAG, "output format has changed to " + mDecOutputFormat);
                int colorFormat = mDecOutputFormat.getInteger(MediaFormat.KEY_COLOR_FORMAT);
                if (colorFormat == CodecCapabilities.COLOR_FormatYUV420SemiPlanar
                        || colorFormat == CodecCapabilities.COLOR_FormatYUV420Planar) {
                    mDstColorFormat = colorFormat;
                } else {
                    mDstColorFormat = CodecCapabilities.COLOR_FormatYUV420Flexible;
                    Log.w(TAG, "output format changed to unsupported one "
                            + Integer.toHexString(colorFormat) + ", using FlexYUV");
                }
                mVideoStride = mDecOutputFormat.containsKey(MediaFormat.KEY_STRIDE)
                        ? mDecOutputFormat.getInteger(MediaFormat.KEY_STRIDE)
                        : mDecOutputFormat.getInteger(MediaFormat.KEY_WIDTH);
                mVideoVStride = mDecOutputFormat.containsKey(MediaFormat.KEY_SLICE_HEIGHT)
                        ? mDecOutputFormat.getInteger(MediaFormat.KEY_SLICE_HEIGHT)
                        : mDecOutputFormat.getInteger(MediaFormat.KEY_HEIGHT);
            }
        }
        long finish = System.currentTimeMillis();
        int validDataNum = Math.min(outFrameCount - 1,
                mDecoderFrameTimeUsDiff[mCurrentTestRound].length);
        mDecoderFrameTimeUsDiff[mCurrentTestRound] =
                Arrays.copyOf(mDecoderFrameTimeUsDiff[mCurrentTestRound], validDataNum);
        codec.stop();
        codec.release();
        codec = null;

        // divide by 3 as sum is done for Y, U, V.
        double errorRms = Math.sqrt(totalErrorSquared / PIXEL_CHECK_PER_FRAME / outFrameCount / 3);
        return new RunResult(outFrameCount, (finish - start) / 1000., errorRms);
    }

    /**
     * returns origin in the absolute frame for given frame count.
     * The video scene is moving by moving origin per each frame.
     */
    private Point getOrigin(int frameCount, int runId) {
        // Translation is basically:
        //     x = A * sin(B * t) + C * t
        //     y = D * cos(E * t) + F * t
        // 'bouncing' in a [0, length] region (constrained to [0, length] by mirroring at 0
        // and length.)
        double x = (1 - Math.sin(frameCount / (7. + (runId % 2)))) * 0.1 + frameCount * 0.005;
        double y = (1 - Math.cos(frameCount / (10. + (runId & ~1))))
                + frameCount * (0.01 + runId / 1000.);

        // At every 32nd frame, and at the 13th frame of every 32, an additional varying
        // offset is added to produce a jerk.
        if (frameCount % 32 == 0) {
            x += ((frameCount % 64) / 32) + 0.3 + y;
        }
        if (frameCount % 32 == 13) {
            y += ((frameCount % 64) / 32) + 0.6 + x;
        }

        // constrain to region
        int xi = (int) ((x % 2) * YUV_PLANE_ADDITIONAL_LENGTH);
        int yi = (int) ((y % 2) * YUV_PLANE_ADDITIONAL_LENGTH);
        if (xi > YUV_PLANE_ADDITIONAL_LENGTH) {
            xi = 2 * YUV_PLANE_ADDITIONAL_LENGTH - xi;
        }
        if (yi > YUV_PLANE_ADDITIONAL_LENGTH) {
            yi = 2 * YUV_PLANE_ADDITIONAL_LENGTH - yi;
        }
        return new Point(xi, yi);
    }

    /**
     * initialize reference YUV plane
     * @param w This should be YUV_PLANE_ADDITIONAL_LENGTH pixels bigger than video resolution
     *          to allow movements
     * @param h This should be YUV_PLANE_ADDITIONAL_LENGTH pixels bigger than video resolution
     *          to allow movements
     */
    private void initYUVPlane(int w, int h) {
        int bufferSizeY = w * h;
        mYBuffer = ByteBuffer.allocate(bufferSizeY);
        mUVBuffer = ByteBuffer.allocate(bufferSizeY / 2);
        mYDirectBuffer = ByteBuffer.allocateDirect(bufferSizeY);
        mUVDirectBuffer = ByteBuffer.allocateDirect(bufferSizeY / 2);
        mBufferWidth = w;
        mBufferHeight = h;
        final byte[] yArray = mYBuffer.array();
        final byte[] uvArray = mUVBuffer.array();
        for (int i = 0; i < h; i++) {
            for (int j = 0; j < w; j++) {
                yArray[i * w + j] = clampY((i + j) & 0xff);
            }
        }
        if (isSrcSemiPlanar()) {
            for (int i = 0; i < h / 2; i++) {
                for (int j = 0; j < w / 2; j++) {
                    uvArray[i * w + 2 * j] = (byte) (i & 0xff);
                    uvArray[i * w + 2 * j + 1] = (byte) (j & 0xff);
                }
            }
        } else { // planar, U first, then V
            int vOffset = bufferSizeY / 4;
            for (int i = 0; i < h / 2; i++) {
                for (int j = 0; j < w / 2; j++) {
                    uvArray[i * w / 2 + j] = (byte) (i & 0xff);
                    uvArray[i * w / 2 + vOffset + j] = (byte) (j & 0xff);
                }
            }
        }
        mYDirectBuffer.put(yArray);
        mUVDirectBuffer.put(uvArray);
        mYDirectBuffer.rewind();
        mUVDirectBuffer.rewind();
    }

    /**
     * class to store pixel values in YUV
     */
    public class YUVValue {
        public byte mY;
        public byte mU;
        public byte mV;

        public YUVValue() {
        }

        public boolean equalTo(YUVValue other) {
            return (mY == other.mY) && (mU == other.mU) && (mV == other.mV);
        }

        public double calcErrorSquared(YUVValue other) {
            // Java's byte is signed but here we want to calculate difference in unsigned bytes.
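            // e.g. (byte) 0xff vs (byte) 0x01 must give a delta of 254, not -1 - 1 = -2.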
            double yDelta = (mY & 0xFF) - (other.mY & 0xFF);
            double uDelta = (mU & 0xFF) - (other.mU & 0xFF);
            double vDelta = (mV & 0xFF) - (other.mV & 0xFF);
            return yDelta * yDelta + uDelta * uDelta + vDelta * vDelta;
        }
    }

    /**
     * Read YUV values from given position (x,y) for given origin (originX, originY).
     * The whole data is already available from YBuffer and UVBuffer.
     * @param result pass the result via this. This is for avoiding creating / destroying too many
     *               instances
     */
    private void getPixelValuesFromYUVBuffers(int originX, int originY, int x, int y,
            YUVValue result) {
        result.mY = mYBuffer.get((originY + y) * mBufferWidth + (originX + x));
        if (isSrcSemiPlanar()) {
            int index = (originY + y) / 2 * mBufferWidth + (originX + x) / 2 * 2;
            //Log.d(TAG, "YUV " + originX + "," + originY + "," + x + "," + y + "," + index);
            result.mU = mUVBuffer.get(index);
            result.mV = mUVBuffer.get(index + 1);
        } else {
            int vOffset = mBufferWidth * mBufferHeight / 4;
            int index = (originY + y) / 2 * mBufferWidth / 2 + (originX + x) / 2;
            result.mU = mUVBuffer.get(index);
            result.mV = mUVBuffer.get(vOffset + index);
        }
    }

    /**
     * Read YUV pixels from decoded output buffer for given (x, y) position.
     * Output buffer is composed of Y parts followed by U/V.
     * @param result pass the result via this. This is for avoiding creating / destroying too many
     *               instances
     */
    private void getPixelValuesFromOutputBuffer(ByteBuffer buffer, int x, int y, YUVValue result) {
        result.mY = buffer.get(y * mVideoStride + x);
        if (isDstSemiPlanar()) {
            int index = mVideoStride * mVideoVStride + y / 2 * mVideoStride + x / 2 * 2;
            //Log.d(TAG, "Decoded " + x + "," + y + "," + index);
            result.mU = buffer.get(index);
            result.mV = buffer.get(index + 1);
        } else {
            int vOffset = mVideoStride * mVideoVStride / 4;
            int index = mVideoStride * mVideoVStride + y / 2 * mVideoStride / 2 + x / 2;
            result.mU = buffer.get(index);
            result.mV = buffer.get(index + vOffset);
        }
    }

    private void getPixelValuesFromImage(Image image, int x, int y, YUVValue result) {
        assertTrue(image.getFormat() == ImageFormat.YUV_420_888);

        Plane[] planes = image.getPlanes();
        assertTrue(planes.length == 3);

        result.mY = getPixelFromPlane(planes[0], x, y);
        result.mU = getPixelFromPlane(planes[1], x / 2, y / 2);
        result.mV = getPixelFromPlane(planes[2], x / 2, y / 2);
    }

    private byte getPixelFromPlane(Plane plane, int x, int y) {
        ByteBuffer buf = plane.getBuffer();
        return buf.get(y * plane.getRowStride() + x * plane.getPixelStride());
    }

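    // Limited-range ("studio swing") video keeps luma within [Y_CLAMP_MIN, Y_CLAMP_MAX],
    // i.e. [16, 235]; clampY() below keeps the synthetic plane inside that range.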
    /**
     * Y cannot have full range. clamp it to prevent invalid value.
     */
    private byte clampY(int y) {
        if (y < Y_CLAMP_MIN) {
            y = Y_CLAMP_MIN;
        } else if (y > Y_CLAMP_MAX) {
            y = Y_CLAMP_MAX;
        }
        return (byte) (y & 0xff);
    }

    // for debugging
    private void printByteArray(String msg, byte[] data, int offset, int len) {
        StringBuilder builder = new StringBuilder();
        builder.append(msg);
        builder.append(":");
        for (int i = offset; i < offset + len; i++) {
            builder.append(Integer.toHexString(data[i]));
            builder.append(",");
        }
        builder.deleteCharAt(builder.length() - 1);
        Log.i(TAG, builder.toString());
    }

    // for debugging
    private void printByteBuffer(String msg, ByteBuffer data, int offset, int len) {
        StringBuilder builder = new StringBuilder();
        builder.append(msg);
        builder.append(":");
        for (int i = offset; i < offset + len; i++) {
            builder.append(Integer.toHexString(data.get(i)));
            builder.append(",");
        }
        builder.deleteCharAt(builder.length() - 1);
        Log.i(TAG, builder.toString());
    }

    /**
     * Generates the presentation time for frame N, in microseconds.
     */
    private long computePresentationTime(int frameIndex) {
        return TIMESTAMP_OFFSET + frameIndex * 1000000L / mFrameRate;
    }

    /**
     * Generates the frame index from a presentation time.
     */
    private int computeFrameIndex(long ptsUsec) {
        assertTrue("value for PtsUsec too low: " + ptsUsec, ptsUsec >= TIMESTAMP_OFFSET);
        return (int) ((ptsUsec - TIMESTAMP_OFFSET) * mFrameRate / 1000000.0 + 0.5);
    }
}