/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.cts;

import com.android.cts.media.R;

import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecInfo.CodecProfileLevel;
import android.media.MediaCodecList;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.util.Log;
import android.view.Surface;

import android.opengl.GLES20;
import javax.microedition.khronos.opengles.GL10;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Locale;
import java.util.zip.CRC32;

public class AdaptivePlaybackTest extends MediaPlayerTestBase {
    private static final String TAG = "AdaptivePlaybackTest";
    private boolean sanity = false;
    private static final int MIN_FRAMES_BEFORE_DRC = 2;

    public Iterable<Codec> H264(CodecFactory factory) {
        return factory.createCodecList(
                mContext,
                "video/avc",
                "OMX.google.h264.decoder",
                R.raw.video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz,
                R.raw.video_1280x720_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz);
    }

    public Iterable<Codec> HEVC(CodecFactory factory) {
        return factory.createCodecList(
                mContext,
                "video/hevc",
                "OMX.google.hevc.decoder",
                R.raw.video_640x360_mp4_hevc_450kbps_30fps_aac_stereo_128kbps_48000hz,
                R.raw.video_1280x720_mp4_hevc_1150kbps_30fps_aac_stereo_128kbps_48000hz);
    }

    public Iterable<Codec> H263(CodecFactory factory) {
        return factory.createCodecList(
                mContext,
                "video/3gpp",
                "OMX.google.h263.decoder",
                R.raw.video_176x144_3gp_h263_300kbps_12fps_aac_stereo_128kbps_22050hz,
                R.raw.video_352x288_3gp_h263_300kbps_12fps_aac_stereo_128kbps_22050hz);
    }

    public Iterable<Codec> Mpeg4(CodecFactory factory) {
        return factory.createCodecList(
                mContext,
                "video/mp4v-es",
                "OMX.google.mpeg4.decoder",
                R.raw.video_1280x720_mp4_mpeg4_1000kbps_25fps_aac_stereo_128kbps_44100hz,
                R.raw.video_480x360_mp4_mpeg4_860kbps_25fps_aac_stereo_128kbps_44100hz);
    }

    public Iterable<Codec> VP8(CodecFactory factory) {
        return factory.createCodecList(
                mContext,
                "video/x-vnd.on2.vp8",
                "OMX.google.vp8.decoder",
                R.raw.video_480x360_webm_vp8_333kbps_25fps_vorbis_stereo_128kbps_44100hz,
                R.raw.video_1280x720_webm_vp8_333kbps_25fps_vorbis_stereo_128kbps_44100hz);
    }

    public Iterable<Codec> VP9(CodecFactory factory) {
        return factory.createCodecList(
                mContext,
                "video/x-vnd.on2.vp9",
                "OMX.google.vp9.decoder",
                R.raw.video_480x360_webm_vp9_333kbps_25fps_vorbis_stereo_128kbps_44100hz,
                R.raw.video_1280x720_webm_vp9_309kbps_25fps_vorbis_stereo_128kbps_44100hz);
    }

    CodecFactory ALL = new CodecFactory();
    CodecFactory SW = new SWCodecFactory();
    CodecFactory HW = new HWCodecFactory();

    public Iterable<Codec> H264() { return H264(ALL); }
    public Iterable<Codec> HEVC() { return HEVC(ALL); }
    public Iterable<Codec> VP8() { return VP8(ALL); }
    public Iterable<Codec> VP9() { return VP9(ALL); }
    public Iterable<Codec> Mpeg4() { return Mpeg4(ALL); }
    public Iterable<Codec> H263() { return H263(ALL); }

    public Iterable<Codec> AllCodecs() {
        return chain(H264(ALL), HEVC(ALL), VP8(ALL), VP9(ALL), Mpeg4(ALL), H263(ALL));
    }

    public Iterable<Codec> SWCodecs() {
        return chain(H264(SW), HEVC(SW), VP8(SW), VP9(SW), Mpeg4(SW), H263(SW));
    }

    public Iterable<Codec> HWCodecs() {
        return chain(H264(HW), HEVC(HW), VP8(HW), VP9(HW), Mpeg4(HW), H263(HW));
    }

    /* tests for adaptive codecs */
    Test adaptiveEarlyEos = new EarlyEosTest().adaptive();
    Test adaptiveEosFlushSeek = new EosFlushSeekTest().adaptive();
    Test adaptiveSkipAhead = new AdaptiveSkipTest(true /* forward */);
    Test adaptiveSkipBack = new AdaptiveSkipTest(false /* forward */);

    /* DRC tests for adaptive codecs */
    Test adaptiveReconfigDrc = new ReconfigDrcTest().adaptive();
    Test adaptiveSmallReconfigDrc = new ReconfigDrcTest().adaptiveSmall();
    Test adaptiveDrc = new AdaptiveDrcTest(); /* adaptive */
    Test adaptiveSmallDrc = new AdaptiveDrcTest().adaptiveSmall();

    /* tests for regular codecs */
    Test earlyEos = new EarlyEosTest();
    Test eosFlushSeek = new EosFlushSeekTest();
    Test flushConfigureDrc = new ReconfigDrcTest();

    Test[] allTests = {
        adaptiveEarlyEos,
        adaptiveEosFlushSeek,
        adaptiveSkipAhead,
        adaptiveSkipBack,
        adaptiveReconfigDrc,
        adaptiveSmallReconfigDrc,
        adaptiveDrc,
        adaptiveSmallDrc,
        earlyEos,
        eosFlushSeek,
        flushConfigureDrc,
    };

    /* helpers to run sets of tests */
    public void runEOS() { ex(AllCodecs(), new Test[] {
        adaptiveEarlyEos,
        adaptiveEosFlushSeek,
        adaptiveReconfigDrc,
        adaptiveSmallReconfigDrc,
        earlyEos,
        eosFlushSeek,
        flushConfigureDrc,
    }); }

    public void runAll() { ex(AllCodecs(), allTests); }
    public void runSW() { ex(SWCodecs(), allTests); }
    public void runHW() { ex(HWCodecs(), allTests); }

    public void sanityAll() { sanity = true; try { runAll(); } finally { sanity = false; } }
    public void sanitySW() { sanity = true; try { runSW(); } finally { sanity = false; } }
    public void sanityHW() { sanity = true; try { runHW(); } finally { sanity = false; } }

    public void runH264() { ex(H264(), allTests); }
    public void runHEVC() { ex(HEVC(), allTests); }
    public void runVP8() { ex(VP8(), allTests); }
    public void runVP9() { ex(VP9(), allTests); }
    public void runMpeg4() { ex(Mpeg4(), allTests); }
    public void runH263() { ex(H263(), allTests); }

    public void onlyH264HW() { ex(H264(HW), allTests); }
    public void onlyHEVCHW() { ex(HEVC(HW), allTests); }
    public void onlyVP8HW() { ex(VP8(HW), allTests); }
    public void onlyVP9HW() { ex(VP9(HW), allTests); }
    public void onlyMpeg4HW() { ex(Mpeg4(HW), allTests); }
    public void onlyH263HW() { ex(H263(HW), allTests); }

    public void onlyH264SW() { ex(H264(SW), allTests); }
    public void onlyHEVCSW() { ex(HEVC(SW), allTests); }
    public void onlyVP8SW() { ex(VP8(SW), allTests); }
    public void onlyVP9SW() { ex(VP9(SW), allTests); }
    public void onlyMpeg4SW() { ex(Mpeg4(SW), allTests); }
    public void onlyH263SW() { ex(H263(SW), allTests); }

    public void bytebuffer() { ex(H264(SW), new EarlyEosTest().byteBuffer()); }
    public void texture() { ex(H264(HW), new EarlyEosTest().texture()); }

    /* individual tests */
    public void testH264_adaptiveEarlyEos() { ex(H264(), adaptiveEarlyEos); }
    public void testHEVC_adaptiveEarlyEos() { ex(HEVC(), adaptiveEarlyEos); }
    public void testVP8_adaptiveEarlyEos() { ex(VP8(), adaptiveEarlyEos); }
    public void testVP9_adaptiveEarlyEos() { ex(VP9(), adaptiveEarlyEos); }
    public void testMpeg4_adaptiveEarlyEos() { ex(Mpeg4(), adaptiveEarlyEos); }
    public void testH263_adaptiveEarlyEos() { ex(H263(), adaptiveEarlyEos); }

    public void testH264_adaptiveEosFlushSeek() { ex(H264(), adaptiveEosFlushSeek); }
    public void testHEVC_adaptiveEosFlushSeek() { ex(HEVC(), adaptiveEosFlushSeek); }
    public void testVP8_adaptiveEosFlushSeek() { ex(VP8(), adaptiveEosFlushSeek); }
    public void testVP9_adaptiveEosFlushSeek() { ex(VP9(), adaptiveEosFlushSeek); }
    public void testMpeg4_adaptiveEosFlushSeek() { ex(Mpeg4(), adaptiveEosFlushSeek); }
    public void testH263_adaptiveEosFlushSeek() { ex(H263(), adaptiveEosFlushSeek); }

    public void testH264_adaptiveSkipAhead() { ex(H264(), adaptiveSkipAhead); }
    public void testHEVC_adaptiveSkipAhead() { ex(HEVC(), adaptiveSkipAhead); }
    public void testVP8_adaptiveSkipAhead() { ex(VP8(), adaptiveSkipAhead); }
    public void testVP9_adaptiveSkipAhead() { ex(VP9(), adaptiveSkipAhead); }
    public void testMpeg4_adaptiveSkipAhead() { ex(Mpeg4(), adaptiveSkipAhead); }
    public void testH263_adaptiveSkipAhead() { ex(H263(), adaptiveSkipAhead); }

    public void testH264_adaptiveSkipBack() { ex(H264(), adaptiveSkipBack); }
    public void testHEVC_adaptiveSkipBack() { ex(HEVC(), adaptiveSkipBack); }
    public void testVP8_adaptiveSkipBack() { ex(VP8(), adaptiveSkipBack); }
    public void testVP9_adaptiveSkipBack() { ex(VP9(), adaptiveSkipBack); }
    public void testMpeg4_adaptiveSkipBack() { ex(Mpeg4(), adaptiveSkipBack); }
    public void testH263_adaptiveSkipBack() { ex(H263(), adaptiveSkipBack); }

    public void testH264_adaptiveReconfigDrc() { ex(H264(), adaptiveReconfigDrc); }
    public void testHEVC_adaptiveReconfigDrc() { ex(HEVC(), adaptiveReconfigDrc); }
    public void testVP8_adaptiveReconfigDrc() { ex(VP8(), adaptiveReconfigDrc); }
    public void testVP9_adaptiveReconfigDrc() { ex(VP9(), adaptiveReconfigDrc); }
    public void testMpeg4_adaptiveReconfigDrc() { ex(Mpeg4(), adaptiveReconfigDrc); }
    public void testH263_adaptiveReconfigDrc() { ex(H263(), adaptiveReconfigDrc); }

    public void testH264_adaptiveSmallReconfigDrc() { ex(H264(), adaptiveSmallReconfigDrc); }
    public void testHEVC_adaptiveSmallReconfigDrc() { ex(HEVC(), adaptiveSmallReconfigDrc); }
    public void testVP8_adaptiveSmallReconfigDrc() { ex(VP8(), adaptiveSmallReconfigDrc); }
    public void testVP9_adaptiveSmallReconfigDrc() { ex(VP9(), adaptiveSmallReconfigDrc); }
    public void testMpeg4_adaptiveSmallReconfigDrc() { ex(Mpeg4(), adaptiveSmallReconfigDrc); }
    public void testH263_adaptiveSmallReconfigDrc() { ex(H263(), adaptiveSmallReconfigDrc); }

    public void testH264_adaptiveDrc() { ex(H264(), adaptiveDrc); }
    public void testHEVC_adaptiveDrc() { ex(HEVC(), adaptiveDrc); }
    public void testVP8_adaptiveDrc() { ex(VP8(), adaptiveDrc); }
    public void testVP9_adaptiveDrc() { ex(VP9(), adaptiveDrc); }
    public void testMpeg4_adaptiveDrc() { ex(Mpeg4(), adaptiveDrc); }
    public void testH263_adaptiveDrc() { ex(H263(), adaptiveDrc); }

    public void testH264_adaptiveDrcEarlyEos() { ex(H264(), new AdaptiveDrcEarlyEosTest()); }
    public void testHEVC_adaptiveDrcEarlyEos() { ex(HEVC(), new AdaptiveDrcEarlyEosTest()); }
    public void testVP8_adaptiveDrcEarlyEos() { ex(VP8(), new AdaptiveDrcEarlyEosTest()); }
    public void testVP9_adaptiveDrcEarlyEos() { ex(VP9(), new AdaptiveDrcEarlyEosTest()); }

    public void testH264_adaptiveSmallDrc() { ex(H264(), adaptiveSmallDrc); }
    public void testHEVC_adaptiveSmallDrc() { ex(HEVC(), adaptiveSmallDrc); }
    public void testVP8_adaptiveSmallDrc() { ex(VP8(), adaptiveSmallDrc); }
    public void testVP9_adaptiveSmallDrc() { ex(VP9(), adaptiveSmallDrc); }

    public void testH264_earlyEos() { ex(H264(), earlyEos); }
    public void testHEVC_earlyEos() { ex(HEVC(), earlyEos); }
    public void testVP8_earlyEos() { ex(VP8(), earlyEos); }
    public void testVP9_earlyEos() { ex(VP9(), earlyEos); }
    public void testMpeg4_earlyEos() { ex(Mpeg4(), earlyEos); }
    public void testH263_earlyEos() { ex(H263(), earlyEos); }

    public void testH264_eosFlushSeek() { ex(H264(), eosFlushSeek); }
    public void testHEVC_eosFlushSeek() { ex(HEVC(), eosFlushSeek); }
    public void testVP8_eosFlushSeek() { ex(VP8(), eosFlushSeek); }
    public void testVP9_eosFlushSeek() { ex(VP9(), eosFlushSeek); }
    public void testMpeg4_eosFlushSeek() { ex(Mpeg4(), eosFlushSeek); }
    public void testH263_eosFlushSeek() { ex(H263(), eosFlushSeek); }

    public void testH264_flushConfigureDrc() { ex(H264(), flushConfigureDrc); }
    public void testHEVC_flushConfigureDrc() { ex(HEVC(), flushConfigureDrc); }
    public void testVP8_flushConfigureDrc() { ex(VP8(), flushConfigureDrc); }
    public void testVP9_flushConfigureDrc() { ex(VP9(), flushConfigureDrc); }
    public void testMpeg4_flushConfigureDrc() { ex(Mpeg4(), flushConfigureDrc); }
    public void testH263_flushConfigureDrc() { ex(H263(), flushConfigureDrc); }

    /* only use unchecked exceptions to allow brief test methods */
    private void ex(Iterable<Codec> codecList, Test test) {
        ex(codecList, new Test[] { test });
    }

    private void ex(Iterable<Codec> codecList, Test[] testList) {
        TestList tests = new TestList();
        for (Codec c : codecList) {
            for (Test test : testList) {
                if (test.isValid(c)) {
                    test.addTests(tests, c);
                }
            }
        }
        try {
            tests.run();
        } catch (Throwable t) {
            throw new RuntimeException(t);
        }
    }

    /* need an inner class to have access to the activity */
    abstract class ActivityTest extends Test {
        TestSurface mNullSurface = new ActivitySurface(null);
        protected TestSurface getSurface() {
            if (mUseSurface) {
                return new ActivitySurface(getActivity().getSurfaceHolder().getSurface());
            } else if (mUseSurfaceTexture) {
                return new DecoderSurface(1280, 720, mCRC);
            }
            return mNullSurface;
        }
    }

    static final int NUM_FRAMES = 50;

    /**
     * Queue some frames with an EOS on the last one. Test that we have decoded as many
     * frames as we queued. This tests the EOS handling of the codec to see if all queued
     * (and out-of-order) frames are actually decoded and returned.
     */
    class EarlyEosTest extends ActivityTest {
        public boolean isValid(Codec c) {
            return getFormat(c) != null;
        }
        public void addTests(TestList tests, final Codec c) {
            for (int i = NUM_FRAMES / 2; i > 0; i--) {
                final int queuedFrames = i;
                tests.add(
                    new Step("testing early EOS at " + queuedFrames, this, c) {
                        public void run() {
                            Decoder decoder = new Decoder(c.name);
                            try {
                                decoder.configureAndStart(stepFormat(), stepSurface());
                                int decodedFrames = -decoder.queueInputBufferRange(
                                        stepMedia(),
                                        0 /* startFrame */,
                                        queuedFrames,
                                        true /* sendEos */,
                                        true /* waitForEos */);
                                if (decodedFrames <= 0) {
                                    Log.w(TAG, "Did not receive EOS -- negating frame count");
                                }
                                decoder.stop();
                                if (decodedFrames != queuedFrames) {
                                    warn("decoded " + decodedFrames + " frames out of " +
                                            queuedFrames + " queued");
                                }
                            } finally {
                                warn(decoder.getWarnings());
                                decoder.releaseQuietly();
                            }
                        }
                    });
                if (sanity) {
                    i >>= 1;
                }
            }
        }
    };

    /**
     * Similar to EarlyEosTest, but we keep the component alive and running in between the steps.
     * This is how seeking should be done if all frames must be output. This also tests that
     * PTS can be repeated after flush.
     */
    class EosFlushSeekTest extends ActivityTest {
        Decoder mDecoder; // test state
        public boolean isValid(Codec c) {
            return getFormat(c) != null;
        }
        public void addTests(TestList tests, final Codec c) {
            tests.add(
                new Step("testing EOS & flush before seek - init", this, c) {
                    public void run() {
                        mDecoder = new Decoder(c.name);
                        mDecoder.configureAndStart(stepFormat(), stepSurface());
                    }});

            for (int i = NUM_FRAMES; i > 0; i--) {
                final int queuedFrames = i;
                tests.add(
                    new Step("testing EOS & flush before seeking after " + queuedFrames +
                            " frames", this, c) {
                        public void run() {
                            int decodedFrames = -mDecoder.queueInputBufferRange(
                                    stepMedia(),
                                    0 /* startFrame */,
                                    queuedFrames,
                                    true /* sendEos */,
                                    true /* waitForEos */);
                            if (decodedFrames != queuedFrames) {
                                warn("decoded " + decodedFrames + " frames out of " +
                                        queuedFrames + " queued");
                            }
                            warn(mDecoder.getWarnings());
                            mDecoder.clearWarnings();
                            mDecoder.flush();
                        }
                    });
                if (sanity) {
                    i >>= 1;
                }
            }

            tests.add(
                new Step("testing EOS & flush before seek - finally", this, c) {
                    public void run() {
                        try {
                            mDecoder.stop();
                        } finally {
                            mDecoder.release();
                        }
                    }});
        }
    };

    /**
     * Similar to EosFlushSeekTest, but we change the media size between the steps.
     * This is how dynamic resolution switching can be done on codecs that do not support
     * adaptive playback.
     */
    class ReconfigDrcTest extends ActivityTest {
        Decoder mDecoder; // test state
        public boolean isValid(Codec c) {
            return getFormat(c) != null && c.mediaList.length > 1;
        }
        public void addTests(TestList tests, final Codec c) {
            tests.add(
                new Step("testing DRC with reconfigure - init", this, c) {
                    public void run() {
                        mDecoder = new Decoder(c.name);
                    }});

            for (int i = NUM_FRAMES, ix = 0; i > 0; i--, ix++) {
                final int queuedFrames = i;
                final int mediaIx = ix % c.mediaList.length;
                tests.add(
                    new Step("testing DRC with reconfigure after " + queuedFrames + " frames",
                            this, c, mediaIx) {
                        public void run() {
                            try {
                                mDecoder.configureAndStart(stepFormat(), stepSurface());
                                int decodedFrames = -mDecoder.queueInputBufferRange(
                                        stepMedia(),
                                        0 /* startFrame */,
                                        queuedFrames,
                                        true /* sendEos */,
                                        true /* waitForEos */);
                                if (decodedFrames != queuedFrames) {
                                    warn("decoded " + decodedFrames + " frames out of " +
                                            queuedFrames + " queued");
                                }
                                warn(mDecoder.getWarnings());
                                mDecoder.clearWarnings();
                                mDecoder.flush();
                            } finally {
                                mDecoder.stop();
                            }
                        }
                    });
                if (sanity) {
                    i >>= 1;
                }
            }
            tests.add(
                new Step("testing DRC with reconfigure - finally", this, c) {
                    public void run() {
                        mDecoder.release();
                    }});
        }
    };

    /* ADAPTIVE-ONLY TESTS - only run on codecs that support adaptive playback */

    /**
     * Test dynamic resolution change support. Queue various sized media segments
     * with different resolutions, verify that all queued frames were decoded. Here
     * PTS will grow between segments.
     */
    class AdaptiveDrcTest extends ActivityTest {
        Decoder mDecoder;
        int mAdjustTimeUs;
        int mDecodedFrames;
        int mQueuedFrames;

        public AdaptiveDrcTest() {
            super();
            adaptive();
        }
        public boolean isValid(Codec c) {
            checkAdaptiveFormat();
            return c.adaptive && c.mediaList.length > 1;
        }
        public void addTests(TestList tests, final Codec c) {
            tests.add(
                new Step("testing DRC with no reconfigure - init", this, c) {
                    public void run() throws Throwable {
                        // FIXME wait 2 seconds to allow system to free up previous codecs
                        try {
                            Thread.sleep(2000);
                        } catch (InterruptedException e) {}
                        mDecoder = new Decoder(c.name);
                        mDecoder.configureAndStart(stepFormat(), stepSurface());
                        mAdjustTimeUs = 0;
                        mDecodedFrames = 0;
                        mQueuedFrames = 0;
                    }});

            for (int i = NUM_FRAMES, ix = 0; i >= MIN_FRAMES_BEFORE_DRC; i--, ix++) {
                final int mediaIx = ix % c.mediaList.length;
                final int segmentSize = i;
                tests.add(
                    new Step("testing DRC with no reconfigure after " + i + " frames",
                            this, c, mediaIx) {
                        public void run() throws Throwable {
                            mQueuedFrames += segmentSize;
                            boolean lastSequence = segmentSize == MIN_FRAMES_BEFORE_DRC;
                            if (sanity) {
                                lastSequence = (segmentSize >> 1) <= MIN_FRAMES_BEFORE_DRC;
                            }
                            int frames = mDecoder.queueInputBufferRange(
                                    stepMedia(),
                                    0 /* startFrame */,
                                    segmentSize,
                                    lastSequence /* sendEos */,
                                    lastSequence /* expectEos */,
                                    mAdjustTimeUs);
                            if (lastSequence && frames >= 0) {
                                warn("did not receive EOS, received " + frames + " frames");
                            } else if (!lastSequence && frames < 0) {
                                warn("received EOS, received " + (-frames) + " frames");
                            }
                            warn(mDecoder.getWarnings());
                            mDecoder.clearWarnings();

                            mDecodedFrames += Math.abs(frames);
                            mAdjustTimeUs += 1 + stepMedia().getTimestampRangeValue(
                                    0, segmentSize, Media.RANGE_END);
                        }});
                if (sanity) {
                    i >>= 1;
                }
            }
            tests.add(
                new Step("testing DRC with no reconfigure - finally", this, c) {
                    public void run() throws Throwable {
                        if (mDecodedFrames != mQueuedFrames) {
                            warn("decoded " + mDecodedFrames + " frames out of " +
                                    mQueuedFrames + " queued");
                        }
                        try {
                            mDecoder.stop();
                        } finally {
                            mDecoder.release();
                        }
                    }
                });
        }
    };

    /**
     * Queue EOS shortly after a dynamic resolution change. Test that all frames were
     * decoded.
     */
    class AdaptiveDrcEarlyEosTest extends ActivityTest {
        public AdaptiveDrcEarlyEosTest() {
            super();
            adaptive();
        }
        public boolean isValid(Codec c) {
            checkAdaptiveFormat();
            return c.adaptive && c.mediaList.length > 1;
        }
        public Step testStep(final Codec c, final int framesBeforeDrc,
                final int framesBeforeEos) {
            return new Step("testing DRC with no reconfigure after " + framesBeforeDrc +
                    " frames and subsequent EOS after " + framesBeforeEos + " frames",
                    this, c) {
                public void run() throws Throwable {
                    Decoder decoder = new Decoder(c.name);
                    int queuedFrames = framesBeforeDrc + framesBeforeEos;
                    int framesA = 0;
                    int framesB = 0;
                    try {
                        decoder.configureAndStart(stepFormat(), stepSurface());
                        Media media = c.mediaList[0];

                        framesA = decoder.queueInputBufferRange(
                                media,
                                0 /* startFrame */,
                                framesBeforeDrc,
                                false /* sendEos */,
                                false /* expectEos */);
                        if (framesA < 0) {
                            warn("received unexpected EOS, received " + (-framesA) + " frames");
                        }
                        long adjustTimeUs = 1 + media.getTimestampRangeValue(
                                0, framesBeforeDrc, Media.RANGE_END);

                        media = c.mediaList[1];
                        framesB = decoder.queueInputBufferRange(
                                media,
                                0 /* startFrame */,
                                framesBeforeEos,
                                true /* sendEos */,
                                true /* expectEos */,
                                adjustTimeUs);
                        if (framesB >= 0) {
                            warn("did not receive EOS, received " + framesB + " frames");
                        }
                        decoder.stop();
                        warn(decoder.getWarnings());
                    } finally {
                        int decodedFrames = Math.abs(framesA) + Math.abs(framesB);
                        if (decodedFrames != queuedFrames) {
                            warn("decoded " + decodedFrames + " frames out of " + queuedFrames +
                                    " queued");
                        }
                        decoder.release();
                    }
                }
            };
        }
        public void addTests(TestList tests, Codec c) {
            for (int drcFrame = 6; drcFrame >= MIN_FRAMES_BEFORE_DRC; drcFrame--) {
                for (int eosFrame = 6; eosFrame >= 1; eosFrame--) {
                    tests.add(testStep(c, drcFrame, eosFrame));
                }
            }
        }
    };

    /**
     * Similar to AdaptiveDrcTest, but tests that PTS can change at adaptive boundaries both
     * forward and backward without the need to flush.
     */
    class AdaptiveSkipTest extends ActivityTest {
        boolean forward;
        public AdaptiveSkipTest(boolean fwd) {
            forward = fwd;
            adaptive();
        }
        public boolean isValid(Codec c) {
            checkAdaptiveFormat();
            return c.adaptive;
        }
        Decoder mDecoder;
        int mAdjustTimeUs = 0;
        int mDecodedFrames = 0;
        int mQueuedFrames = 0;
        public void addTests(TestList tests, final Codec c) {
            tests.add(
                new Step("testing flushless skipping - init", this, c) {
                    public void run() throws Throwable {
                        mDecoder = new Decoder(c.name);
                        mDecoder.configureAndStart(stepFormat(), stepSurface());
                        mAdjustTimeUs = 0;
                        mDecodedFrames = 0;
                        mQueuedFrames = 0;
                    }});

            for (int i = 2, ix = 0; i <= NUM_FRAMES; i++, ix++) {
                final int mediaIx = ix % c.mediaList.length;
                final int segmentSize = i;
                final boolean lastSequence;
                if (sanity) {
                    lastSequence = (segmentSize << 1) + 1 > NUM_FRAMES;
                } else {
                    lastSequence = segmentSize >= NUM_FRAMES;
                }
                tests.add(
                    new Step("testing flushless skipping " + (forward ? "forward" : "backward") +
                            " after " + i + " frames", this, c) {
                        public void run() throws Throwable {
                            int frames = mDecoder.queueInputBufferRange(
                                    stepMedia(),
                                    0 /* startFrame */,
                                    segmentSize,
                                    lastSequence /* sendEos */,
                                    lastSequence /* expectEos */,
                                    mAdjustTimeUs);
                            if (lastSequence && frames >= 0) {
                                warn("did not receive EOS, received " + frames + " frames");
                            } else if (!lastSequence && frames < 0) {
                                warn("received unexpected EOS, received " + (-frames) + " frames");
                            }
                            warn(mDecoder.getWarnings());
                            mDecoder.clearWarnings();

                            mQueuedFrames += segmentSize;
                            mDecodedFrames += Math.abs(frames);
                            if (forward) {
                                mAdjustTimeUs += 10000000 + stepMedia().getTimestampRangeValue(
                                        0, segmentSize, Media.RANGE_DURATION);
                            }
                        }});
                if (sanity) {
                    i <<= 1;
                }
            }

            tests.add(
                new Step("testing flushless skipping - finally", this, c) {
                    public void run() throws Throwable {
                        if (mDecodedFrames != mQueuedFrames) {
                            warn("decoded " + mDecodedFrames + " frames out of " + mQueuedFrames +
                                    " queued");
                        }
                        try {
                            mDecoder.stop();
                        } finally {
                            mDecoder.release();
                        }
                    }});
        }
    };

    // not yet used
    static long checksum(ByteBuffer buf, int size, CRC32 crc) {
        assertTrue(size >= 0);
        assertTrue(size <= buf.capacity());
        crc.reset();
        if (buf.hasArray()) {
            crc.update(buf.array(), buf.arrayOffset(), size);
        } else {
            int pos = buf.position();
            buf.rewind();
            final int rdsize = Math.min(4096, size);
            byte bb[] = new byte[rdsize];
            int chk;
            for (int i = 0; i < size; i += chk) {
                chk = Math.min(rdsize, size - i);
                buf.get(bb, 0, chk);
                crc.update(bb, 0, chk);
            }
            buf.position(pos);
        }
        return crc.getValue();
    }

    CRC32 mCRC;

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        mCRC = new CRC32();
    }

    /* ====================================================================== */
    /* UTILITY FUNCTIONS                                                      */
    /* ====================================================================== */
    public static String collectionString(Collection<?> c) {
        StringBuilder res = new StringBuilder("[");
        boolean subsequent = false;
        for (Object o: c) {
            if (subsequent) {
                res.append(", ");
            }
            res.append(o);
            subsequent = true;
        }
        return res.append("]").toString();
    }
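    /* helper for logging: formats up to len bytes of a buffer as hex pairs, e.g. "{00,00,01,...}" */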
    static String byteBufferToString(ByteBuffer buf, int start, int len) {
        int oldPosition = buf.position();
        buf.position(start);
        int strlen = 2; // {}
        boolean ellipsis = len < buf.limit();
        if (ellipsis) {
            strlen += 3; // ...
        } else {
            len = buf.limit();
        }
        strlen += 3 * len - (len > 0 ? 1 : 0); // XX,XX
        char[] res = new char[strlen];
        res[0] = '{';
        res[strlen - 1] = '}';
        if (ellipsis) {
            res[strlen - 2] = res[strlen - 3] = res[strlen - 4] = '.';
        }
        for (int i = 1; i < len; i++) {
            res[i * 3] = ',';
        }
        for (int i = 0; i < len; i++) {
            byte b = buf.get();
            int d = (b >> 4) & 15;
            res[i * 3 + 1] = (char)(d + (d > 9 ? 'a' - 10 : '0'));
            d = (b & 15);
            res[i * 3 + 2] = (char)(d + (d > 9 ? 'a' - 10 : '0'));
        }
        buf.position(oldPosition);
        return new String(res);
    }

    static <E> Iterable<E> chain(Iterable<E> ... iterables) {
        /* simple chainer using ArrayList */
        ArrayList<E> items = new ArrayList<E>();
        for (Iterable<E> it: iterables) {
            for (E el: it) {
                items.add(el);
            }
        }
        return items;
    }

    class Decoder {
        private final static String TAG = "AdaptiveDecoder";
        final long kTimeOutUs = 5000;
        MediaCodec mCodec;
        ByteBuffer[] mInputBuffers;
        ByteBuffer[] mOutputBuffers;
        TestSurface mSurface;
        boolean mDoChecksum;
        boolean mQueuedEos;
        ArrayList<Long> mTimeStamps;
        ArrayList<String> mWarnings;

        public Decoder(String codecName) {
            MediaCodec codec = null;
            try {
                codec = MediaCodec.createByCodecName(codecName);
            } catch (Exception e) {
                throw new RuntimeException("couldn't create codec " + codecName, e);
            }
            Log.i(TAG, "using codec: " + codec.getName());
            mCodec = codec;
            mDoChecksum = false;
            mQueuedEos = false;
            mTimeStamps = new ArrayList<Long>();
            mWarnings = new ArrayList<String>();
        }

        public String getName() {
            return mCodec.getName();
        }

        public Iterable<String> getWarnings() {
            return mWarnings;
        }

        private void warn(String warning) {
            mWarnings.add(warning);
            Log.w(TAG, warning);
        }

        public void clearWarnings() {
            mWarnings.clear();
        }

        public void configureAndStart(MediaFormat format, TestSurface surface) {
            mSurface = surface;
            Log.i(TAG, "configure(" + format + ", " + mSurface.getSurface() + ")");
            mCodec.configure(format, mSurface.getSurface(), null /* crypto */, 0 /* flags */);
            Log.i(TAG, "start");
            mCodec.start();
            mInputBuffers = mCodec.getInputBuffers();
            mOutputBuffers = mCodec.getOutputBuffers();
            Log.i(TAG, "configured " + mInputBuffers.length + " input[" +
                    mInputBuffers[0].capacity() + "] and " +
                    mOutputBuffers.length + "output[" +
                    (mOutputBuffers[0] == null ? null : mOutputBuffers[0].capacity()) + "]");
            mQueuedEos = false;
        }

        public void stop() {
            Log.i(TAG, "stop");
            mCodec.stop();
        }

        public void flush() {
            Log.i(TAG, "flush");
            mCodec.flush();
            mQueuedEos = false;
            mTimeStamps.clear();
        }

        public String dequeueAndReleaseOutputBuffer(MediaCodec.BufferInfo info) {
            int ix = mCodec.dequeueOutputBuffer(info, kTimeOutUs);
            if (ix == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                mOutputBuffers = mCodec.getOutputBuffers();
                Log.d(TAG, "output buffers have changed.");
                return null;
            } else if (ix == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                MediaFormat format = mCodec.getOutputFormat();
                Log.d(TAG, "output format has changed to " + format);
                int colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
                mDoChecksum = isRecognizedFormat(colorFormat);
                return null;
            } else if (ix < 0) {
                Log.v(TAG, "no output");
                return null;
            }
            /* create checksum */
            long sum = 0;

            Log.v(TAG, "dequeue #" + ix + " => { [" + info.size + "] flags=" + info.flags +
                    " @" + info.presentationTimeUs + "}");

            // we get a nonzero size for valid decoded frames
            boolean doRender = (info.size != 0);
            if (mSurface.getSurface() == null) {
                if (mDoChecksum) {
                    sum = checksum(mOutputBuffers[ix], info.size, mCRC);
                }
                mCodec.releaseOutputBuffer(ix, doRender);
            } else if (doRender) {
                // If using SurfaceTexture, as soon as we call releaseOutputBuffer, the
                // buffer will be forwarded to SurfaceTexture to convert to a texture.
                // The API doesn't guarantee that the texture will be available before
                // the call returns, so we need to wait for the onFrameAvailable callback
                // to fire.  If we don't wait, we risk dropping frames.
                mSurface.prepare();
                mCodec.releaseOutputBuffer(ix, doRender);
                mSurface.waitForDraw();
                if (mDoChecksum) {
                    sum = mSurface.checksum();
                }
            } else {
                mCodec.releaseOutputBuffer(ix, doRender);
            }

            if (doRender) {
                if (!mTimeStamps.remove(info.presentationTimeUs)) {
                    warn("invalid timestamp " + info.presentationTimeUs + ", queued " +
                            collectionString(mTimeStamps));
                }
            }

            return String.format(Locale.US, "{pts=%d, flags=%x, data=0x%x}",
                    info.presentationTimeUs, info.flags, sum);
        }

        /* returns true iff queued a frame */
        public boolean queueInputBuffer(Media media, int frameIx, boolean EOS) {
            return queueInputBuffer(media, frameIx, EOS, 0);
        }

        public boolean queueInputBuffer(Media media, int frameIx, boolean EOS, long adjustTimeUs) {
            if (mQueuedEos) {
                return false;
            }

            int ix = mCodec.dequeueInputBuffer(kTimeOutUs);

            if (ix < 0) {
                return false;
            }

            ByteBuffer buf = mInputBuffers[ix];
            Media.Frame frame = media.getFrame(frameIx);
            buf.clear();

            long presentationTimeUs = adjustTimeUs;
            int flags = 0;
            if (frame != null) {
                buf.put((ByteBuffer)frame.buf.clear());
                presentationTimeUs += frame.presentationTimeUs;
                flags = frame.flags;
            }

            if (EOS) {
                flags |= MediaCodec.BUFFER_FLAG_END_OF_STREAM;
                mQueuedEos = true;
            }

            mTimeStamps.add(presentationTimeUs);
            Log.v(TAG, "queue { [" + buf.position() + "]=" + byteBufferToString(buf, 0, 16) +
                    " flags=" + flags + " @" + presentationTimeUs + "} => #" + ix);
            mCodec.queueInputBuffer(
                    ix, 0 /* offset */, buf.position(), presentationTimeUs, flags);
            return true;
        }

        /* returns number of frames received multiplied by -1 if received EOS, 1 otherwise */
        public int queueInputBufferRange(
                Media media, int frameStartIx, int frameEndIx, boolean sendEosAtEnd,
                boolean waitForEos) {
            return queueInputBufferRange(
                    media, frameStartIx, frameEndIx, sendEosAtEnd, waitForEos, 0);
        }

        public int queueInputBufferRange(
                Media media, int frameStartIx, int frameEndIx, boolean sendEosAtEnd,
                boolean waitForEos, long adjustTimeUs) {
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            int frameIx = frameStartIx;
            int numFramesDecoded = 0;
            boolean sawOutputEos = false;
            int deadDecoderCounter = 0;
            ArrayList<String> frames = new ArrayList<String>();
            while ((waitForEos && !sawOutputEos) || frameIx < frameEndIx) {
                if (frameIx < frameEndIx) {
                    if (queueInputBuffer(
                            media,
                            frameIx,
                            sendEosAtEnd && (frameIx + 1 == frameEndIx),
                            adjustTimeUs)) {
                        frameIx++;
                    }
                }

                String buf = dequeueAndReleaseOutputBuffer(info);
                if (buf != null) {
                    // Some decoders output a 0-sized buffer at the end. Disregard those.
                    if (info.size > 0) {
                        deadDecoderCounter = 0;
                        numFramesDecoded++;
                    }

                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        Log.d(TAG, "saw output EOS.");
                        sawOutputEos = true;
                    }
                }
                if (++deadDecoderCounter >= 100) {
                    warn("have not received an output frame for a while");
                    break;
                }
            }

            if (numFramesDecoded < frameEndIx - frameStartIx - 16) {
                fail("Queued " + (frameEndIx - frameStartIx) + " frames but only received " +
                        numFramesDecoded);
            }
            return (sawOutputEos ? -1 : 1) * numFramesDecoded;
        }

        void release() {
            Log.i(TAG, "release");
            mCodec.release();
            mSurface.release();
            mInputBuffers = null;
            mOutputBuffers = null;
            mCodec = null;
            mSurface = null;
        }

        // don't fail on exceptions in release()
        void releaseQuietly() {
            try {
                Log.i(TAG, "release");
                mCodec.release();
            } catch (Throwable e) {
                Log.e(TAG, "Exception while releasing codec", e);
            }
            mSurface.release();
            mInputBuffers = null;
            mOutputBuffers = null;
            mCodec = null;
            mSurface = null;
        }
    };

    /* from EncodeDecodeTest */
    private static boolean isRecognizedFormat(int colorFormat) {
        switch (colorFormat) {
            // these are the formats we know how to handle for this test
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
                return true;
            default:
                return false;
        }
    }

    private int countFrames(
            String codecName, MediaCodecInfo codecInfo, Media media, int eosframe, TestSurface s)
            throws Exception {
        Decoder codec = new Decoder(codecName);
        codec.configureAndStart(media.getFormat(), s /* surface */);

        int numframes = codec.queueInputBufferRange(
                media, 0, eosframe, true /* sendEos */, true /* waitForEos */);
        if (numframes >= 0) {
            Log.w(TAG, "Did not receive EOS");
        } else {
            numframes *= -1;
        }

        codec.stop();
        codec.release();
        return numframes;
    }
}

/* ====================================================================== */
/* Video Media Asset                                                      */
/* ====================================================================== */
class Media {
    private final static String TAG = "AdaptiveMedia";
    private MediaFormat mFormat;
    private MediaFormat mAdaptiveFormat;
    static class Frame {
        long presentationTimeUs;
        int flags;
        ByteBuffer buf;
        public Frame(long _pts, int _flags, ByteBuffer _buf) {
            presentationTimeUs = _pts;
            flags = _flags;
            buf = _buf;
        }
    };
    private Frame[] mFrames;

    public Frame getFrame(int ix) {
        /* this works even on short sample as frame is allocated as null */
        if (ix >= 0 && ix < mFrames.length) {
            return mFrames[ix];
        }
        return null;
    }
    private Media(MediaFormat format, MediaFormat adaptiveFormat, int numFrames) {
        /* need separate copies of format as once we add adaptive flags to
           MediaFormat, we cannot remove them */
        mFormat = format;
        mAdaptiveFormat = adaptiveFormat;
        mFrames = new Frame[numFrames];
    }

    public MediaFormat getFormat() {
        return mFormat;
    }

    public MediaFormat getAdaptiveFormat(int width, int height) {
        mAdaptiveFormat.setInteger(MediaFormat.KEY_MAX_WIDTH, width);
        mAdaptiveFormat.setInteger(MediaFormat.KEY_MAX_HEIGHT, height);
        return mAdaptiveFormat;
    }

    public String getMime() {
        return mFormat.getString(MediaFormat.KEY_MIME);
    }

    public int getWidth() {
        return mFormat.getInteger(MediaFormat.KEY_WIDTH);
    }

    public int getHeight() {
        return mFormat.getInteger(MediaFormat.KEY_HEIGHT);
    }

    public final static int RANGE_START = 0;
    public final static int RANGE_END = 1;
    public final static int RANGE_DURATION = 2;

    public long getTimestampRangeValue(int frameStartIx, int frameEndIx, int kind) {
        long min = Long.MAX_VALUE, max = Long.MIN_VALUE;
        for (int frameIx = frameStartIx; frameIx < frameEndIx; frameIx++) {
            Frame frame = getFrame(frameIx);
            if (frame != null) {
                if (min > frame.presentationTimeUs) {
                    min = frame.presentationTimeUs;
                }
                if (max < frame.presentationTimeUs) {
                    max = frame.presentationTimeUs;
                }
            }
        }
        if (kind == RANGE_START) {
            return min;
        } else if (kind == RANGE_END) {
            return max;
        } else if (kind == RANGE_DURATION) {
            return max - min;
        } else {
            throw new IllegalArgumentException("kind is not valid: " + kind);
        }
    }

    public static Media read(Context context, int video, int numFrames)
            throws java.io.IOException {
        MediaExtractor extractor = new MediaExtractor();
        AssetFileDescriptor testFd = context.getResources().openRawResourceFd(video);
        extractor.setDataSource(testFd.getFileDescriptor(), testFd.getStartOffset(),
                testFd.getLength());

        Media media = new Media(
                extractor.getTrackFormat(0), extractor.getTrackFormat(0), numFrames);
        extractor.selectTrack(0);

        Log.i(TAG, "format=" + media.getFormat());
        ArrayList<ByteBuffer> csds = new ArrayList<ByteBuffer>();
        for (String tag: new String[] { "csd-0", "csd-1" }) {
            if (media.getFormat().containsKey(tag)) {
                ByteBuffer csd = media.getFormat().getByteBuffer(tag);
                Log.i(TAG, tag + "=" + AdaptivePlaybackTest.byteBufferToString(csd, 0, 16));
                csds.add(csd);
            }
        }

        ByteBuffer readBuf = ByteBuffer.allocate(200000);
        for (int ix = 0; ix < numFrames; ix++) {
            int sampleSize = extractor.readSampleData(readBuf, 0 /* offset */);

            if (sampleSize < 0) {
                throw new IllegalArgumentException("media is too short at " + ix + " frames");
            } else {
                readBuf.position(0).limit(sampleSize);
                for (ByteBuffer csd: csds) {
                    sampleSize += csd.capacity();
                }
                ByteBuffer buf = ByteBuffer.allocate(sampleSize);
                for (ByteBuffer csd: csds) {
                    csd.clear();
                    buf.put(csd);
                    csd.clear();
                    Log.i(TAG, "csd[" + csd.capacity() + "]");
                }
                Log.i(TAG, "frame-" + ix + "[" + sampleSize + "]");
                csds.clear();
                buf.put(readBuf);
                media.mFrames[ix] = new Frame(
                        extractor.getSampleTime(),
                        extractor.getSampleFlags(),
                        buf);
                extractor.advance();
            }
        }
        extractor.release();
        testFd.close();
        return media;
    }
}

/* ====================================================================== */
/* Codec, CodecList and CodecFactory                                      */
/* ====================================================================== */
class Codec {
    private final static String TAG = "AdaptiveCodec";

    public String name;
    public CodecCapabilities capabilities;
    public Media[] mediaList;
    public boolean adaptive;
    public Codec(String n, CodecCapabilities c, Media[] m) {
        name = n;
        capabilities = c;
        mediaList = m;

        if (capabilities == null) {
            adaptive = false;
        } else {
            Log.w(TAG, "checking capabilities of " + name + " for " + mediaList[0].getMime());
            adaptive = capabilities.isFeatureSupported(CodecCapabilities.FEATURE_AdaptivePlayback);
        }
    }
}

class CodecList extends ArrayList<Codec> { };

/* all codecs of mime, plus the named codec if it exists */
class CodecFamily extends CodecList {
    private final static String TAG = "AdaptiveCodecFamily";
    private static final int NUM_FRAMES = AdaptivePlaybackTest.NUM_FRAMES;

    public CodecFamily(Context context, String mime, String explicitCodecName, int ... resources) {
        try {
            /* read all media */
            Media[] mediaList = new Media[resources.length];
            for (int i = 0; i < resources.length; i++) {
                Log.v(TAG, "reading media " + resources[i]);
                Media media = Media.read(context, resources[i], NUM_FRAMES);
                assert media.getMime().equals(mime):
                        "test stream " + resources[i] + " has " + media.getMime() +
                        " mime type instead of " + mime;

                /* assuming the first timestamp is the smallest */
                long firstPTS = media.getFrame(0).presentationTimeUs;
                long smallestPTS = media.getTimestampRangeValue(0, NUM_FRAMES, Media.RANGE_START);

                assert firstPTS == smallestPTS:
                        "first frame timestamp (" + firstPTS + ") is not smallest (" +
                        smallestPTS + ")";

                mediaList[i] = media;
            }

            /* enumerate codecs */
            int codecCount = MediaCodecList.getCodecCount();
            for (int codecIx = 0; codecIx < codecCount; codecIx++) {
                MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(codecIx);
                if (codecInfo.isEncoder()) {
                    continue;
                }
                for (String type : codecInfo.getSupportedTypes()) {
                    if (type.equals(mime)) {
                        /* mark the explicitly named codec as included */
                        if (codecInfo.getName().equals(explicitCodecName)) {
                            explicitCodecName = null;
                        }
                        add(new Codec(
                                codecInfo.getName(),
                                codecInfo.getCapabilitiesForType(mime),
                                mediaList));
                        break;
                    }
                }
            }

            /* test if the explicitly named codec is present on the system */
            if (explicitCodecName != null) {
                MediaCodec codec = MediaCodec.createByCodecName(explicitCodecName);
                if (codec != null) {
                    codec.release();
                    add(new Codec(explicitCodecName, null, mediaList));
                }
            }
        } catch (Throwable t) {
            Log.wtf("Constructor failed", t);
            throw new RuntimeException("constructor failed", t);
        }
    }
}

/* the named codec if it exists */
class CodecByName extends CodecList {
    public CodecByName(Context context, String mime, String codecName, int ... resources) {
        for (Codec c: new CodecFamily(context, mime, codecName, resources)) {
            if (c.name.equals(codecName)) {
                add(c);
            }
        }
    }
}

/* all codecs of mime, except the named codec if it exists */
class CodecFamilyExcept extends CodecList {
    public CodecFamilyExcept(
            Context context, String mime, String exceptCodecName, int ... resources) {
        for (Codec c: new CodecFamily(context, mime, null, resources)) {
            if (!c.name.equals(exceptCodecName)) {
                add(c);
            }
        }
    }
}

class CodecFactory {
    public CodecList createCodecList(
            Context context, String mime, String googleCodecName, int ...resources) {
        return new CodecFamily(context, mime, googleCodecName, resources);
    }
}

class SWCodecFactory extends CodecFactory {
    public CodecList createCodecList(
            Context context, String mime, String googleCodecName, int ...resources) {
        return new CodecByName(context, mime, googleCodecName, resources);
    }
}

class HWCodecFactory extends CodecFactory {
    public CodecList createCodecList(
            Context context, String mime, String googleCodecName, int ...resources) {
        return new CodecFamilyExcept(context, mime, googleCodecName, resources);
    }
}

/* ====================================================================== */
/* Test Steps, Test (Case)s, and Test List                                */
/* ====================================================================== */
class StepRunner implements Runnable {
    public StepRunner(Step s) {
        mStep = s;
        mThrowed = null;
    }
    public void run() {
        try {
            mStep.run();
        } catch (Throwable e) {
            mThrowed = e;
        }
    }
    public void throwThrowed() throws Throwable {
        if (mThrowed != null) {
            throw mThrowed;
        }
    }
    private Throwable mThrowed;
    private Step mStep;
}

class TestList extends ArrayList<Step> {
    private final static String TAG = "AdaptiveTestList";
    public void run() throws Throwable {
        Throwable res = null;
        for (Step step: this) {
            try {
                Log.i(TAG, step.getDescription());
                if (step.stepSurface().needsToRunInSeparateThread()) {
                    StepRunner runner = new StepRunner(step);
                    Thread th = new Thread(runner, "stepWrapper");
                    th.start();
                    th.join();
                    runner.throwThrowed();
                } else {
                    step.run();
                }
            } catch (Throwable e) {
                Log.e(TAG, "while " + step.getDescription(), e);
                res = e;
                mFailedSteps++;
            } finally {
                mWarnings += step.getWarnings();
            }
        }
        if (res != null) {
            throw new RuntimeException(
                    mFailedSteps + " failed steps, " + mWarnings + " warnings",
                    res);
        }
    }
    public int getWarnings() {
        return mWarnings;
    }
    public int getFailures() {
        return mFailedSteps;
    }
    private int mFailedSteps;
    private int mWarnings;
}

abstract class Test {
    public static final int FORMAT_ADAPTIVE_LARGEST = 1;
    public static final int FORMAT_ADAPTIVE_FIRST = 2;
    public static final int FORMAT_REGULAR = 3;

    protected int mFormatType;
    protected boolean mUseSurface;
    protected boolean mUseSurfaceTexture;

    public Test() {
        mFormatType = FORMAT_REGULAR;
        mUseSurface = true;
        mUseSurfaceTexture = false;
    }

    public Test adaptive() {
        mFormatType = FORMAT_ADAPTIVE_LARGEST;
        return this;
    }

    public Test adaptiveSmall() {
        mFormatType = FORMAT_ADAPTIVE_FIRST;
        return this;
    }

    public Test byteBuffer() {
        mUseSurface = false;
        mUseSurfaceTexture = false;
        return this;
    }

    public Test texture() {
        mUseSurface = false;
        mUseSurfaceTexture = true;
        return this;
    }

    public void checkAdaptiveFormat() {
        assert mFormatType != FORMAT_REGULAR:
                "must be used with adaptive format";
    }

    abstract protected TestSurface getSurface();

    /* TRICKY: format is updated in each test run as we are actually reusing the
       same 2 MediaFormat objects returned from MediaExtractor.  Therefore,
       format must be explicitly obtained in each test step.

       returns null if codec does not support the format.
     */
    protected MediaFormat getFormat(Codec c) {
        return getFormat(c, 0);
    }

    protected MediaFormat getFormat(Codec c, int i) {
        MediaFormat format = null;
        if (mFormatType == FORMAT_REGULAR) {
            format = c.mediaList[i].getFormat();
        } else if (mFormatType == FORMAT_ADAPTIVE_FIRST && c.adaptive) {
            format = c.mediaList[i].getAdaptiveFormat(
                    c.mediaList[i].getWidth(), c.mediaList[i].getHeight());
        } else if (mFormatType == FORMAT_ADAPTIVE_LARGEST && c.adaptive) {
            /* update adaptive format to max size used */
            format = c.mediaList[i].getAdaptiveFormat(0, 0);
            for (Media media : c.mediaList) {
                /* get the largest width, and the largest height independently */
                if (media.getWidth() > format.getInteger(MediaFormat.KEY_MAX_WIDTH)) {
                    format.setInteger(MediaFormat.KEY_MAX_WIDTH, media.getWidth());
                }
                if (media.getHeight() > format.getInteger(MediaFormat.KEY_MAX_HEIGHT)) {
                    format.setInteger(MediaFormat.KEY_MAX_HEIGHT, media.getHeight());
                }
            }
        }
        return format;
    }

    public boolean isValid(Codec c) { return true; }
    public abstract void addTests(TestList tests, Codec c);
}

abstract class Step {
    private static final String TAG = "AdaptiveStep";

    public Step(String title, Test instance, Codec codec, Media media) {
        mTest = instance;
        mCodec = codec;
        mMedia = media;
        mDescription = title + " on " + stepSurface().getSurface() + " using " +
                mCodec.name + " and " + stepFormat();
    }
    public Step(String title, Test instance, Codec codec, int mediaIx) {
        this(title, instance, codec, codec.mediaList[mediaIx]);
    }
    public Step(String title, Test instance, Codec codec) {
        this(title, instance, codec, 0);
    }
    public Step(String description) {
        mDescription = description;
    }
    public Step() { }

    public abstract void run() throws Throwable;

    private String mDescription;
    private Test mTest;
    private Codec mCodec;
    private Media mMedia;
    private int mWarnings;

    /* TRICKY: use non-standard getter names so that we don't conflict with the getters
       in the Test classes, as most test Steps are defined as anonymous classes inside
       the test classes. */
    public MediaFormat stepFormat() {
        int ix = Arrays.asList(mCodec.mediaList).indexOf(mMedia);
        return mTest.getFormat(mCodec, ix);
    }

    public TestSurface stepSurface() {
        return mTest.getSurface();
    }

    public Media stepMedia() { return mMedia; }

    public String getDescription() { return mDescription; }
    public int getWarnings() { return mWarnings; }

    public void warn(String message) {
        Log.e(TAG, "WARNING: " + message + " in " + getDescription());
        mWarnings++;
    }
    public void warn(String message, Throwable t) {
        Log.e(TAG, "WARNING: " + message + " in " + getDescription(), t);
        mWarnings++;
    }
    public void warn(Iterable<String> warnings) {
        for (String warning: warnings) {
            warn(warning);
        }
    }
}

interface TestSurface {
    public Surface getSurface();
    public long checksum();
    public void release();
    public void prepare();         // prepare surface prior to render
    public void waitForDraw();     // wait for rendering to take place
    public boolean needsToRunInSeparateThread();
}

class DecoderSurface extends OutputSurface implements TestSurface {
    private ByteBuffer mBuf;
    int mWidth;
    int mHeight;
    CRC32 mCRC;

    public DecoderSurface(int width, int height, CRC32 crc) {
        super(width, height);
        mWidth = width;
        mHeight = height;
        mCRC = crc;
        mBuf = ByteBuffer.allocateDirect(4 * width * height);
    }

    public void prepare() {
        makeCurrent();
    }

    public void waitForDraw() {
        awaitNewImage();
        drawImage();
    }

    public long checksum() {
        mBuf.position(0);
        GLES20.glReadPixels(0, 0, mWidth, mHeight, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, mBuf);
        mBuf.position(0);
        return AdaptivePlaybackTest.checksum(mBuf, mBuf.capacity(), mCRC);
    }

    public void release() {
        super.release();
        mBuf = null;
    }

    public boolean needsToRunInSeparateThread() {
        return true;
    }
}

class ActivitySurface implements TestSurface {
    private Surface mSurface;
    public ActivitySurface(Surface s) {
        mSurface = s;
    }
    public Surface getSurface() {
        return mSurface;
    }
    public void prepare() { }
    public void waitForDraw() { }
    public long checksum() {
        return 0;
    }
    public void release() {
        // don't release activity surface, as it is reusable
    }
    public boolean needsToRunInSeparateThread() {
        return false;
    }
}