/*
 * Copyright 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package android.media.cts;

import android.media.cts.R;

import android.content.Context;
import android.content.pm.PackageManager;
import android.content.res.AssetFileDescriptor;
import android.content.res.Resources;
import android.cts.util.MediaUtils;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.MediaSync;
import android.media.MediaTimestamp;
import android.media.PlaybackParams;
import android.media.SyncParams;
import android.os.Handler;
import android.os.HandlerThread;
import android.test.ActivityInstrumentationTestCase2;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;

import java.io.IOException;
import java.lang.Long;
import java.lang.Math;
import java.nio.ByteBuffer;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.List;
import java.util.LinkedList;

/**
 * Tests for the MediaSync API and local video/audio playback.
 *
 * <p>The files in res/raw used by all tests are (c) copyright 2008,
 * Blender Foundation / www.bigbuckbunny.org, and are licensed under the Creative Commons
 * Attribution 3.0 License at http://creativecommons.org/licenses/by/3.0/us/.
 */
public class MediaSyncTest extends ActivityInstrumentationTestCase2<MediaStubActivity> {
    private static final String LOG_TAG = "MediaSyncTest";

    private final long NO_TIMESTAMP = -1;
    private final float FLOAT_PLAYBACK_RATE_TOLERANCE = .02f;
    private final long TIME_MEASUREMENT_TOLERANCE_US = 20000;
    final int INPUT_RESOURCE_ID =
            R.raw.video_480x360_mp4_h264_1350kbps_30fps_aac_stereo_192kbps_44100hz;
    private final int APPLICATION_AUDIO_PERIOD_MS = 200;
    private final int TEST_MAX_SPEED = 2;
    private static final float FLOAT_TOLERANCE = .00001f;

    private Context mContext;
    private Resources mResources;

    private MediaStubActivity mActivity;

    private MediaSync mMediaSync = null;
    private Surface mSurface = null;

    private Decoder mDecoderVideo = null;
    private Decoder mDecoderAudio = null;
    private boolean mHasAudio = false;
    private boolean mHasVideo = false;
    private boolean mEosAudio = false;
    private boolean mEosVideo = false;
    private int mTaggedAudioBufferIndex = -1;
    private final Object mConditionEos = new Object();
    private final Object mConditionEosAudio = new Object();
    private final Object mConditionTaggedAudioBufferIndex = new Object();

    private int mNumBuffersReturned = 0;

    public MediaSyncTest() {
        super(MediaStubActivity.class);
    }

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        mActivity = getActivity();
        getInstrumentation().waitForIdleSync();
        try {
            runTestOnUiThread(new Runnable() {
                public void run() {
                    mMediaSync = new MediaSync();
                }
            });
        } catch (Throwable e) {
            e.printStackTrace();
            fail();
        }
        mContext = getInstrumentation().getTargetContext();
        mResources = mContext.getResources();
        mDecoderVideo = new Decoder(this, mMediaSync, false);
        mDecoderAudio = new Decoder(this, mMediaSync, true);
    }

    @Override
    protected void tearDown() throws Exception {
        if (mMediaSync != null) {
            mMediaSync.release();
            mMediaSync = null;
        }
        if (mDecoderAudio != null) {
            mDecoderAudio.release();
            mDecoderAudio = null;
        }
        if (mDecoderVideo != null) {
            mDecoderVideo.release();
            mDecoderVideo = null;
        }
        if (mSurface != null) {
            mSurface.release();
            mSurface = null;
        }
        mActivity = null;
        mHasAudio = false;
        mHasVideo = false;
        mEosAudio = false;
        mEosVideo = false;
        mTaggedAudioBufferIndex = -1;
        super.tearDown();
    }

    private boolean reachedEos_l() {
        return ((!mHasVideo || mEosVideo) && (!mHasAudio || mEosAudio));
    }

    public void onTaggedAudioBufferIndex(Decoder decoder, int index) {
        synchronized (mConditionTaggedAudioBufferIndex) {
            if (decoder == mDecoderAudio) {
                mTaggedAudioBufferIndex = index;
            }
        }
    }

    public void onEos(Decoder decoder) {
        synchronized (mConditionEosAudio) {
            if (decoder == mDecoderAudio) {
                mEosAudio = true;
                mConditionEosAudio.notify();
            }
        }

        synchronized (mConditionEos) {
            if (decoder == mDecoderVideo) {
                mEosVideo = true;
            }
            if (reachedEos_l()) {
                mConditionEos.notify();
            }
        }
    }

    private boolean hasAudioOutput() {
        return mActivity.getPackageManager()
                .hasSystemFeature(PackageManager.FEATURE_AUDIO_OUTPUT);
    }

    /**
     * Tests that setPlaybackParams is handled correctly for a wrong (negative) rate.
     */
    public void testSetPlaybackParamsFail() throws InterruptedException {
        final float rate = -1.0f;
        try {
            mMediaSync.setPlaybackParams(new PlaybackParams().setSpeed(rate));
            fail("playback rate " + rate + " is not handled correctly");
        } catch (IllegalArgumentException e) {
        }

        assertTrue("The stream in the test file cannot be decoded",
                mDecoderAudio.setup(INPUT_RESOURCE_ID, null, Long.MAX_VALUE, NO_TIMESTAMP));

        // get audio track.
        mMediaSync.setAudioTrack(mDecoderAudio.getAudioTrack());

        try {
            mMediaSync.setPlaybackParams(new PlaybackParams().setSpeed(rate));
            fail("With audio track set, playback rate " + rate
                    + " is not handled correctly");
        } catch (IllegalArgumentException e) {
        }
    }

    /**
     * Tests that setPlaybackParams is handled correctly for a good rate without an audio
     * track set. The case for a good rate with an audio track set is tested in
     * testPlaybackRate*.
     */
    public void testSetPlaybackParamsSucceed() throws InterruptedException {
        final float rate = (float) TEST_MAX_SPEED;
        try {
            mMediaSync.setPlaybackParams(new PlaybackParams().setSpeed(rate));
            PlaybackParams pbp = mMediaSync.getPlaybackParams();
            assertEquals(rate, pbp.getSpeed(), FLOAT_TOLERANCE);
        } catch (IllegalArgumentException e) {
            fail("playback rate " + rate + " is not handled correctly");
        }
    }

    /**
     * Tests returning audio buffers correctly.
     */
    public void testAudioBufferReturn() throws InterruptedException {
        final int timeOutMs = 10000;
        boolean completed = runCheckAudioBuffer(INPUT_RESOURCE_ID, timeOutMs);
        if (!completed) {
            throw new RuntimeException("timed out waiting for audio buffer return");
        }
    }

    private PlaybackParams PAUSED_RATE = new PlaybackParams().setSpeed(0.f);
    private PlaybackParams NORMAL_RATE = new PlaybackParams().setSpeed(1.f);

    private boolean runCheckAudioBuffer(int inputResourceId, int timeOutMs) {
        final int NUM_LOOPS = 10;
        final Object condition = new Object();

        mHasAudio = true;
        if (!mDecoderAudio.setup(inputResourceId, null, Long.MAX_VALUE, NO_TIMESTAMP)) {
            return true;
        }

        // get audio track.
        mMediaSync.setAudioTrack(mDecoderAudio.getAudioTrack());

        mMediaSync.setCallback(new MediaSync.Callback() {
            @Override
            public void onAudioBufferConsumed(
                    MediaSync sync, ByteBuffer byteBuffer, int bufferIndex) {
                Decoder decoderAudio = mDecoderAudio;
                if (decoderAudio != null) {
                    decoderAudio.checkReturnedAudioBuffer(byteBuffer, bufferIndex);
                    decoderAudio.releaseOutputBuffer(bufferIndex, NO_TIMESTAMP);
                    synchronized (condition) {
                        ++mNumBuffersReturned;
                        if (mNumBuffersReturned >= NUM_LOOPS) {
                            condition.notify();
                        }
                    }
                }
            }
        }, null);

        mMediaSync.setPlaybackParams(NORMAL_RATE);

        synchronized (condition) {
            mDecoderAudio.start();

            try {
                condition.wait(timeOutMs);
            } catch (InterruptedException e) {
            }
            return (mNumBuffersReturned >= NUM_LOOPS);
        }
    }

    /**
     * Tests flush.
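     * A buffer queued before flush() is expected to be dropped; only the buffer queued
     * after the flush should be returned through the onAudioBufferConsumed callback.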
     */
    public void testFlush() throws InterruptedException {
        final int timeOutMs = 5000;
        boolean completed = runFlush(INPUT_RESOURCE_ID, timeOutMs);
        if (!completed) {
            throw new RuntimeException("timed out waiting for flush");
        }
    }

    private boolean runFlush(int inputResourceId, int timeOutMs) {
        final int INDEX_BEFORE_FLUSH = 1;
        final int INDEX_AFTER_FLUSH = 2;
        final int BUFFER_SIZE = 1024;
        final int[] returnedIndex = new int[1];
        final Object condition = new Object();

        returnedIndex[0] = -1;

        mHasAudio = true;
        if (!mDecoderAudio.setup(inputResourceId, null, Long.MAX_VALUE, NO_TIMESTAMP)) {
            return true;
        }

        // get audio track.
        mMediaSync.setAudioTrack(mDecoderAudio.getAudioTrack());

        mMediaSync.setCallback(new MediaSync.Callback() {
            @Override
            public void onAudioBufferConsumed(
                    MediaSync sync, ByteBuffer byteBuffer, int bufferIndex) {
                synchronized (condition) {
                    if (returnedIndex[0] == -1) {
                        returnedIndex[0] = bufferIndex;
                        condition.notify();
                    }
                }
            }
        }, null);

        mMediaSync.setOnErrorListener(new MediaSync.OnErrorListener() {
            @Override
            public void onError(MediaSync sync, int what, int extra) {
                fail("got error from media sync (" + what + ", " + extra + ")");
            }
        }, null);

        mMediaSync.setPlaybackParams(PAUSED_RATE);

        ByteBuffer buffer1 = ByteBuffer.allocate(BUFFER_SIZE);
        ByteBuffer buffer2 = ByteBuffer.allocate(BUFFER_SIZE);
        mMediaSync.queueAudio(buffer1, INDEX_BEFORE_FLUSH, 0 /* presentationTimeUs */);
        mMediaSync.flush();
        mMediaSync.queueAudio(buffer2, INDEX_AFTER_FLUSH, 0 /* presentationTimeUs */);

        synchronized (condition) {
            mMediaSync.setPlaybackParams(NORMAL_RATE);

            try {
                condition.wait(timeOutMs);
            } catch (InterruptedException e) {
            }
            return (returnedIndex[0] == INDEX_AFTER_FLUSH);
        }
    }

    /**
     * Tests playing back video successfully.
     */
    public void testPlayVideo() throws Exception {
        playAV(INPUT_RESOURCE_ID, 5000 /* lastBufferTimestampMs */,
                false /* audio */, true /* video */, 10000 /* timeOutMs */);
    }

    /**
     * Tests playing back audio successfully.
     */
    public void testPlayAudio() throws Exception {
        if (!hasAudioOutput()) {
            Log.w(LOG_TAG, "AUDIO_OUTPUT feature not found. This system might not have a valid "
                    + "audio output HAL");
            return;
        }

        playAV(INPUT_RESOURCE_ID, 5000 /* lastBufferTimestampMs */,
                true /* audio */, false /* video */, 10000 /* timeOutMs */);
    }

    /**
     * Tests playing back audio and video successfully.
     */
    public void testPlayAudioAndVideo() throws Exception {
        playAV(INPUT_RESOURCE_ID, 5000 /* lastBufferTimestampMs */,
                true /* audio */, true /* video */, 10000 /* timeOutMs */);
    }

    /**
     * Tests playing at specified playback rate successfully.
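     * Covers quarter speed, half speed, and double speed (TEST_MAX_SPEED).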
     */
    public void testPlaybackRateQuarter() throws Exception {
        playAV(INPUT_RESOURCE_ID, 2000 /* lastBufferTimestampMs */,
                true /* audio */, true /* video */, 10000 /* timeOutMs */,
                0.25f /* playbackRate */);
    }

    public void testPlaybackRateHalf() throws Exception {
        playAV(INPUT_RESOURCE_ID, 4000 /* lastBufferTimestampMs */,
                true /* audio */, true /* video */, 10000 /* timeOutMs */,
                0.5f /* playbackRate */);
    }

    public void testPlaybackRateDouble() throws Exception {
        playAV(INPUT_RESOURCE_ID, 8000 /* lastBufferTimestampMs */,
                true /* audio */, true /* video */, 10000 /* timeOutMs */,
                (float) TEST_MAX_SPEED /* playbackRate */);
    }

    private void playAV(
            final int inputResourceId,
            final long lastBufferTimestampMs,
            final boolean audio,
            final boolean video,
            int timeOutMs) throws Exception {
        playAV(inputResourceId, lastBufferTimestampMs, audio, video, timeOutMs, 1.0f);
    }

    private class PlayAVState {
        boolean mTimeValid;
        long mMediaDurationUs;
        long mClockDurationUs;
        float mSyncTolerance;
    }

    private void playAV(
            final int inputResourceId,
            final long lastBufferTimestampMs,
            final boolean audio,
            final boolean video,
            int timeOutMs,
            final float playbackRate) throws Exception {
        final int limit = 5;
        String info = "";
        for (int tries = 0; ; ++tries) {
            // Run test
            final AtomicBoolean completed = new AtomicBoolean();
            final PlayAVState state = new PlayAVState();
            Thread decodingThread = new Thread(new Runnable() {
                @Override
                public void run() {
                    completed.set(runPlayAV(inputResourceId, lastBufferTimestampMs * 1000,
                            audio, video, playbackRate, state));
                }
            });
            decodingThread.start();
            decodingThread.join(timeOutMs);
            assertTrue("timed out decoding to end-of-stream", completed.get());

            // Examine results
            if (!state.mTimeValid) return;

            // sync.getTolerance() is MediaSync's tolerance of the playback rate, whereas
            // FLOAT_PLAYBACK_RATE_TOLERANCE is our test's tolerance.
            // We need to add both to get an upper bound for the allowable error.
            final double tolerance = state.mMediaDurationUs
                    * (state.mSyncTolerance + FLOAT_PLAYBACK_RATE_TOLERANCE)
                    + TIME_MEASUREMENT_TOLERANCE_US;
            final double diff = state.mMediaDurationUs - state.mClockDurationUs * playbackRate;
            info += "[" + tries
                    + "] playbackRate " + playbackRate
                    + ", clockDurationUs " + state.mClockDurationUs
                    + ", mediaDurationUs " + state.mMediaDurationUs
                    + ", diff " + diff
                    + ", tolerance " + tolerance + "\n";

            // Good enough?
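            // Illustration with hypothetical numbers: at playbackRate 1.0, if the measured
            // mediaDurationUs is 4,000,000 and the platform reports a sync tolerance of 0.05,
            // the allowed drift is 4,000,000 * (0.05 + 0.02) + 20,000 = 300,000 us between
            // media time and rate-scaled wall-clock time before the run is retried.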
            if (Math.abs(diff) <= tolerance) {
                Log.d(LOG_TAG, info);
                return;
            }
            assertTrue("bad playback\n" + info, tries < limit);

            Log.d(LOG_TAG, "Trying again\n" + info);

            // Try again (may throw Exception)
            tearDown();
            setUp();

            Thread.sleep(1000 /* millis */);
        }
    }

    private boolean runPlayAV(
            int inputResourceId,
            long lastBufferTimestampUs,
            boolean audio,
            boolean video,
            float playbackRate,
            PlayAVState state) {
        // allow 750ms for playback to get to stable state.
        final int PLAYBACK_RAMP_UP_TIME_US = 750000;

        final Object conditionFirstAudioBuffer = new Object();

        if (video) {
            mMediaSync.setSurface(mActivity.getSurfaceHolder().getSurface());
            mSurface = mMediaSync.createInputSurface();

            if (!mDecoderVideo.setup(
                    inputResourceId, mSurface, lastBufferTimestampUs, NO_TIMESTAMP)) {
                return true;
            }
            mHasVideo = true;
        }

        if (audio) {
            if (!mDecoderAudio.setup(
                    inputResourceId, null, lastBufferTimestampUs,
                    PLAYBACK_RAMP_UP_TIME_US)) {
                return true;
            }

            // get audio track.
            mMediaSync.setAudioTrack(mDecoderAudio.getAudioTrack());

            mMediaSync.setCallback(new MediaSync.Callback() {
                @Override
                public void onAudioBufferConsumed(
                        MediaSync sync, ByteBuffer byteBuffer, int bufferIndex) {
                    Decoder decoderAudio = mDecoderAudio;
                    if (decoderAudio != null) {
                        decoderAudio.releaseOutputBuffer(bufferIndex, NO_TIMESTAMP);
                    }
                    synchronized (conditionFirstAudioBuffer) {
                        synchronized (mConditionTaggedAudioBufferIndex) {
                            if (mTaggedAudioBufferIndex >= 0
                                    && mTaggedAudioBufferIndex == bufferIndex) {
                                conditionFirstAudioBuffer.notify();
                            }
                        }
                    }
                }
            }, null);

            mHasAudio = true;
        }

        SyncParams sync = new SyncParams().allowDefaults();
        mMediaSync.setSyncParams(sync);
        sync = mMediaSync.getSyncParams();

        mMediaSync.setPlaybackParams(new PlaybackParams().setSpeed(playbackRate));

        synchronized (conditionFirstAudioBuffer) {
            if (video) {
                mDecoderVideo.start();
            }
            if (audio) {
                mDecoderAudio.start();

                // wait for the first audio output buffer returned by media sync.
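                // The tagged buffer is the first audio buffer at or after
                // PLAYBACK_RAMP_UP_TIME_US, so the timing measurement below only starts
                // once playback has had time to reach a steady state.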
                try {
                    conditionFirstAudioBuffer.wait();
                } catch (InterruptedException e) {
                    Log.i(LOG_TAG, "worker thread is interrupted.");
                    return true;
                }
            }
        }

        if (audio) {
            MediaTimestamp mediaTimestamp = mMediaSync.getTimestamp();
            assertTrue("No timestamp available for starting", mediaTimestamp != null);
            long checkStartTimeRealUs = System.nanoTime() / 1000;
            long checkStartTimeMediaUs = mediaTimestamp.mediaTimeUs;

            synchronized (mConditionEosAudio) {
                if (!mEosAudio) {
                    try {
                        mConditionEosAudio.wait();
                    } catch (InterruptedException e) {
                        Log.i(LOG_TAG, "worker thread is interrupted when waiting for audio EOS.");
                        return true;
                    }
                }
            }
            mediaTimestamp = mMediaSync.getTimestamp();
            assertTrue("No timestamp available for ending", mediaTimestamp != null);
            state.mTimeValid = true;
            state.mClockDurationUs = System.nanoTime() / 1000 - checkStartTimeRealUs;
            state.mMediaDurationUs = mediaTimestamp.mediaTimeUs - checkStartTimeMediaUs;
            state.mSyncTolerance = sync.getTolerance();
        }

        boolean completed = false;
        synchronized (mConditionEos) {
            if (!reachedEos_l()) {
                try {
                    mConditionEos.wait();
                } catch (InterruptedException e) {
                }
            }
            completed = reachedEos_l();
        }
        return completed;
    }

    private class Decoder extends MediaCodec.Callback {
        private final int NO_SAMPLE_RATE = -1;
        private final int NO_BUFFER_INDEX = -1;

        private MediaSyncTest mMediaSyncTest = null;
        private MediaSync mMediaSync = null;
        private boolean mIsAudio = false;
        private long mLastBufferTimestampUs = 0;
        private long mStartingAudioTimestampUs = NO_TIMESTAMP;

        private Surface mSurface = null;

        private AudioTrack mAudioTrack = null;

        private final Object mConditionCallback = new Object();
        private MediaExtractor mExtractor = null;
        private MediaCodec mDecoder = null;

        private final Object mAudioBufferLock = new Object();
        private List<AudioBuffer> mAudioBuffers = new LinkedList<AudioBuffer>();

        // accessed only on callback thread.
        private boolean mEos = false;
        private boolean mSignaledEos = false;

        private class AudioBuffer {
            public ByteBuffer mByteBuffer;
            public int mBufferIndex;

            public AudioBuffer(ByteBuffer byteBuffer, int bufferIndex) {
                mByteBuffer = byteBuffer;
                mBufferIndex = bufferIndex;
            }
        }

        private HandlerThread mHandlerThread;
        private Handler mHandler;

        Decoder(MediaSyncTest test, MediaSync sync, boolean isAudio) {
            mMediaSyncTest = test;
            mMediaSync = sync;
            mIsAudio = isAudio;
        }

        public boolean setup(
                int inputResourceId, Surface surface, long lastBufferTimestampUs,
                long startingAudioTimestampUs) {
            if (!mIsAudio) {
                mSurface = surface;
                // handle video callback in a separate thread as releaseOutputBuffer is blocking
                mHandlerThread = new HandlerThread("SyncViewVidDec");
                mHandlerThread.start();
                mHandler = new Handler(mHandlerThread.getLooper());
            }
            mLastBufferTimestampUs = lastBufferTimestampUs;
            mStartingAudioTimestampUs = startingAudioTimestampUs;
            try {
                // get extractor.
                String type = mIsAudio ? "audio/" : "video/";
                mExtractor = MediaUtils.createMediaExtractorForMimeType(
                        mContext, inputResourceId, type);

                // get decoder.
                MediaFormat mediaFormat =
                        mExtractor.getTrackFormat(mExtractor.getSampleTrackIndex());
                String mimeType = mediaFormat.getString(MediaFormat.KEY_MIME);
                if (!MediaUtils.hasDecoder(mimeType)) {
                    Log.i(LOG_TAG, "No decoder found for mimeType = " + mimeType);
                    return false;
                }
                mDecoder = MediaCodec.createDecoderByType(mimeType);
                mDecoder.configure(mediaFormat, mSurface, null, 0);
                mDecoder.setCallback(this, mHandler);

                return true;
            } catch (IOException e) {
                throw new RuntimeException("error reading input resource", e);
            }
        }

        public void start() {
            if (mDecoder != null) {
                mDecoder.start();
            }
        }

        public void release() {
            synchronized (mConditionCallback) {
                if (mDecoder != null) {
                    try {
                        mDecoder.stop();
                    } catch (IllegalStateException e) {
                    }
                    mDecoder.release();
                    mDecoder = null;
                }
                if (mExtractor != null) {
                    mExtractor.release();
                    mExtractor = null;
                }
            }

            if (mAudioTrack != null) {
                mAudioTrack.release();
                mAudioTrack = null;
            }
        }

        public AudioTrack getAudioTrack() {
            if (!mIsAudio) {
                throw new RuntimeException("cannot create an audio track for video");
            }

            if (mExtractor == null) {
                throw new RuntimeException("extractor is null");
            }

            if (mAudioTrack == null) {
                MediaFormat mediaFormat =
                        mExtractor.getTrackFormat(mExtractor.getSampleTrackIndex());
                int sampleRateInHz = mediaFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE);
                int channelConfig = (mediaFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT) == 1
                        ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO);
                int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
                int minBufferSizeInBytes = AudioTrack.getMinBufferSize(
                        sampleRateInHz,
                        channelConfig,
                        audioFormat);
                final int frameCount = APPLICATION_AUDIO_PERIOD_MS * sampleRateInHz / 1000;
                final int frameSizeInBytes = Integer.bitCount(channelConfig)
                        * AudioFormat.getBytesPerSample(audioFormat);
                // ensure we consider application requirements for writing audio data
                minBufferSizeInBytes = TEST_MAX_SPEED /* speed influences buffer size */
                        * Math.max(minBufferSizeInBytes, frameCount * frameSizeInBytes);
                mAudioTrack = new AudioTrack(
                        AudioManager.STREAM_MUSIC,
                        sampleRateInHz,
                        channelConfig,
                        audioFormat,
                        minBufferSizeInBytes,
                        AudioTrack.MODE_STREAM);
            }

            return mAudioTrack;
        }

        public void releaseOutputBuffer(int bufferIndex, long renderTimestampNs) {
            synchronized (mConditionCallback) {
                if (mDecoder != null) {
                    if (renderTimestampNs == NO_TIMESTAMP) {
                        mDecoder.releaseOutputBuffer(bufferIndex, false /* render */);
                    } else {
                        mDecoder.releaseOutputBuffer(bufferIndex, renderTimestampNs);
                    }
                }
            }
        }

        @Override
        public void onError(MediaCodec codec, MediaCodec.CodecException e) {
        }

        @Override
        public void onInputBufferAvailable(MediaCodec codec, int index) {
            synchronized (mConditionCallback) {
                if (mExtractor == null || mExtractor.getSampleTrackIndex() == -1
                        || mSignaledEos || mDecoder != codec) {
                    return;
                }

                ByteBuffer buffer = codec.getInputBuffer(index);
                int size = mExtractor.readSampleData(buffer, 0);
                long timestampUs = mExtractor.getSampleTime();
                mExtractor.advance();
                mSignaledEos = mExtractor.getSampleTrackIndex() == -1
                        || timestampUs >= mLastBufferTimestampUs;
                codec.queueInputBuffer(
                        index,
                        0,
                        size,
                        timestampUs,
                        mSignaledEos ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
            }
        }

        @Override
        public void onOutputBufferAvailable(
                MediaCodec codec, int index, MediaCodec.BufferInfo info) {
            synchronized (mConditionCallback) {
                if (mEos || mDecoder != codec) {
                    return;
                }

                mEos = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;

                if (info.size > 0) {
                    if (mIsAudio) {
                        ByteBuffer outputByteBuffer = codec.getOutputBuffer(index);
                        synchronized (mAudioBufferLock) {
                            mAudioBuffers.add(new AudioBuffer(outputByteBuffer, index));
                        }
                        mMediaSync.queueAudio(
                                outputByteBuffer,
                                index,
                                info.presentationTimeUs);
                        if (mStartingAudioTimestampUs >= 0
                                && info.presentationTimeUs >= mStartingAudioTimestampUs) {
                            mMediaSyncTest.onTaggedAudioBufferIndex(this, index);
                            mStartingAudioTimestampUs = NO_TIMESTAMP;
                        }
                    } else {
                        codec.releaseOutputBuffer(index, info.presentationTimeUs * 1000);
                    }
                } else {
                    codec.releaseOutputBuffer(index, false);
                }
            }

            if (mEos) {
                mMediaSyncTest.onEos(this);
            }
        }

        @Override
        public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
        }

        public void checkReturnedAudioBuffer(ByteBuffer byteBuffer, int bufferIndex) {
            synchronized (mAudioBufferLock) {
                AudioBuffer audioBuffer = mAudioBuffers.get(0);
                if (audioBuffer.mByteBuffer != byteBuffer
                        || audioBuffer.mBufferIndex != bufferIndex) {
                    fail("returned buffer doesn't match what's sent");
                }
                mAudioBuffers.remove(0);
            }
        }
    }
}