// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.media;

import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaCrypto;
import android.media.MediaFormat;
import android.os.Build;
import android.os.Bundle;
import android.util.Log;
import android.view.Surface;

import org.chromium.base.CalledByNative;
import org.chromium.base.JNINamespace;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;

/**
 * A wrapper of the MediaCodec class to facilitate exception capturing and
 * audio rendering.
 */
@JNINamespace("media")
class MediaCodecBridge {
    private static final String TAG = "MediaCodecBridge";

    // Error codes for MediaCodecBridge. Keep these values in sync with
    // MediaCodecStatus in media_codec_bridge.h.
    private static final int MEDIA_CODEC_OK = 0;
    private static final int MEDIA_CODEC_DEQUEUE_INPUT_AGAIN_LATER = 1;
    private static final int MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER = 2;
    private static final int MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED = 3;
    private static final int MEDIA_CODEC_OUTPUT_FORMAT_CHANGED = 4;
    private static final int MEDIA_CODEC_INPUT_END_OF_STREAM = 5;
    private static final int MEDIA_CODEC_OUTPUT_END_OF_STREAM = 6;
    private static final int MEDIA_CODEC_NO_KEY = 7;
    private static final int MEDIA_CODEC_STOPPED = 8;
    private static final int MEDIA_CODEC_ERROR = 9;

    // Codec direction. Keep this in sync with media_codec_bridge.h.
    private static final int MEDIA_CODEC_DECODER = 0;
    private static final int MEDIA_CODEC_ENCODER = 1;

    // Max adaptive playback size to be supplied to the decoder.
    private static final int MAX_ADAPTIVE_PLAYBACK_WIDTH = 1920;
    private static final int MAX_ADAPTIVE_PLAYBACK_HEIGHT = 1080;

    // After a flush(), dequeueOutputBuffer() can often produce empty presentation timestamps
    // for several frames. As a result, the player may find that the time does not increase
    // after decoding a frame. To detect this, we check whether the presentation timestamp from
    // dequeueOutputBuffer() is larger than input_timestamp - MAX_PRESENTATION_TIMESTAMP_SHIFT_US
    // after a flush, and we force the presentation timestamp from dequeueOutputBuffer() to be
    // non-decreasing for the remaining frames.
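    // Illustrative example (not part of the original source): with the 100000 us shift below,
    // if the first input queued after a flush has presentationTimeUs == 500000,
    // resetLastPresentationTimeIfNeeded() resets mLastPresentationTimeUs to 400000. A later
    // output whose timestamp is reported as 0 is then clamped up to 400000 in
    // dequeueOutputBuffer() instead of being passed through and confusing the player clock.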
    private static final long MAX_PRESENTATION_TIMESTAMP_SHIFT_US = 100000;

    private ByteBuffer[] mInputBuffers;
    private ByteBuffer[] mOutputBuffers;

    private MediaCodec mMediaCodec;
    private AudioTrack mAudioTrack;
    private boolean mFlushed;
    private long mLastPresentationTimeUs;
    private String mMime;
    private boolean mAdaptivePlaybackSupported;

    private static class DequeueInputResult {
        private final int mStatus;
        private final int mIndex;

        private DequeueInputResult(int status, int index) {
            mStatus = status;
            mIndex = index;
        }

        @CalledByNative("DequeueInputResult")
        private int status() { return mStatus; }

        @CalledByNative("DequeueInputResult")
        private int index() { return mIndex; }
    }

    /**
     * This class represents supported Android codec information.
     */
    private static class CodecInfo {
        private final String mCodecType;  // e.g. "video/x-vnd.on2.vp8".
        private final String mCodecName;  // e.g. "OMX.google.vp8.decoder".
        private final int mDirection;

        private CodecInfo(String codecType, String codecName,
                int direction) {
            mCodecType = codecType;
            mCodecName = codecName;
            mDirection = direction;
        }

        @CalledByNative("CodecInfo")
        private String codecType() { return mCodecType; }

        @CalledByNative("CodecInfo")
        private String codecName() { return mCodecName; }

        @CalledByNative("CodecInfo")
        private int direction() { return mDirection; }
    }

    private static class DequeueOutputResult {
        private final int mStatus;
        private final int mIndex;
        private final int mFlags;
        private final int mOffset;
        private final long mPresentationTimeMicroseconds;
        private final int mNumBytes;

        private DequeueOutputResult(int status, int index, int flags, int offset,
                long presentationTimeMicroseconds, int numBytes) {
            mStatus = status;
            mIndex = index;
            mFlags = flags;
            mOffset = offset;
            mPresentationTimeMicroseconds = presentationTimeMicroseconds;
            mNumBytes = numBytes;
        }

        @CalledByNative("DequeueOutputResult")
        private int status() { return mStatus; }

        @CalledByNative("DequeueOutputResult")
        private int index() { return mIndex; }

        @CalledByNative("DequeueOutputResult")
        private int flags() { return mFlags; }

        @CalledByNative("DequeueOutputResult")
        private int offset() { return mOffset; }

        @CalledByNative("DequeueOutputResult")
        private long presentationTimeMicroseconds() { return mPresentationTimeMicroseconds; }

        @CalledByNative("DequeueOutputResult")
        private int numBytes() { return mNumBytes; }
    }

    /**
     * Get a list of supported Android codec MIME types.
     */
    @CalledByNative
    private static CodecInfo[] getCodecsInfo() {
        // Return the first (highest-priority) codec for each MIME type.
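        // Note: MediaCodecList.getCodecInfoAt() is assumed to enumerate codecs in priority
        // order, so only the first decoder and the first encoder seen for each MIME type are
        // recorded; later entries for the same type are skipped by the containsKey() check.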
        Map<String, CodecInfo> encoderInfoMap = new HashMap<String, CodecInfo>();
        Map<String, CodecInfo> decoderInfoMap = new HashMap<String, CodecInfo>();
        int count = MediaCodecList.getCodecCount();
        for (int i = 0; i < count; ++i) {
            MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
            int direction =
                    info.isEncoder() ? MEDIA_CODEC_ENCODER : MEDIA_CODEC_DECODER;
            String codecString = info.getName();
            String[] supportedTypes = info.getSupportedTypes();
            for (int j = 0; j < supportedTypes.length; ++j) {
                Map<String, CodecInfo> map = info.isEncoder() ? encoderInfoMap : decoderInfoMap;
                if (!map.containsKey(supportedTypes[j])) {
                    map.put(supportedTypes[j], new CodecInfo(
                            supportedTypes[j], codecString, direction));
                }
            }
        }
        ArrayList<CodecInfo> codecInfos = new ArrayList<CodecInfo>(
                decoderInfoMap.size() + encoderInfoMap.size());
        codecInfos.addAll(encoderInfoMap.values());
        codecInfos.addAll(decoderInfoMap.values());
        return codecInfos.toArray(new CodecInfo[codecInfos.size()]);
    }

    private static String getDecoderNameForMime(String mime) {
        int count = MediaCodecList.getCodecCount();
        for (int i = 0; i < count; ++i) {
            MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
            if (info.isEncoder()) {
                continue;
            }

            String[] supportedTypes = info.getSupportedTypes();
            for (int j = 0; j < supportedTypes.length; ++j) {
                if (supportedTypes[j].equalsIgnoreCase(mime)) {
                    return info.getName();
                }
            }
        }

        return null;
    }

    private MediaCodecBridge(
            MediaCodec mediaCodec, String mime, boolean adaptivePlaybackSupported) {
        assert mediaCodec != null;
        mMediaCodec = mediaCodec;
        mMime = mime;
        mLastPresentationTimeUs = 0;
        mFlushed = true;
        mAdaptivePlaybackSupported = adaptivePlaybackSupported;
    }

    @CalledByNative
    private static MediaCodecBridge create(String mime, boolean isSecure, int direction) {
        // Creation of ".secure" codecs sometimes crashes instead of throwing exceptions
        // on pre-JBMR2 devices.
        if (isSecure && Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR2) {
            return null;
        }
        MediaCodec mediaCodec = null;
        boolean adaptivePlaybackSupported = false;
        try {
            // |isSecure| only applies to video decoders.
            if (mime.startsWith("video") && isSecure && direction == MEDIA_CODEC_DECODER) {
                String decoderName = getDecoderNameForMime(mime);
                if (decoderName == null) {
                    return null;
                }
                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
                    // To work around an issue where we cannot get the codec info from the
                    // secure decoder, create an insecure decoder first so that we can query
                    // its codec info. http://b/15587335.
                    MediaCodec insecureCodec = MediaCodec.createByCodecName(decoderName);
                    adaptivePlaybackSupported = codecSupportsAdaptivePlayback(insecureCodec, mime);
                    insecureCodec.release();
                }
                mediaCodec = MediaCodec.createByCodecName(decoderName + ".secure");
            } else {
                if (direction == MEDIA_CODEC_ENCODER) {
                    mediaCodec = MediaCodec.createEncoderByType(mime);
                } else {
                    mediaCodec = MediaCodec.createDecoderByType(mime);
                    adaptivePlaybackSupported = codecSupportsAdaptivePlayback(mediaCodec, mime);
                }
            }
        } catch (Exception e) {
            Log.e(TAG, "Failed to create MediaCodec: " + mime + ", isSecure: "
                    + isSecure + ", direction: " + direction, e);
        }

        if (mediaCodec == null) {
            return null;
        }
        return new MediaCodecBridge(mediaCodec, mime, adaptivePlaybackSupported);
    }

    @CalledByNative
    private void release() {
        try {
            mMediaCodec.release();
        } catch (IllegalStateException e) {
            // The MediaCodec is stuck in a wrong state, possibly due to losing
            // the surface.
            Log.e(TAG, "Cannot release media codec", e);
        }
        mMediaCodec = null;
        if (mAudioTrack != null) {
            mAudioTrack.release();
        }
    }

    @CalledByNative
    private boolean start() {
        try {
            mMediaCodec.start();
            mInputBuffers = mMediaCodec.getInputBuffers();
        } catch (IllegalStateException e) {
            Log.e(TAG, "Cannot start the media codec", e);
            return false;
        }
        return true;
    }

    @CalledByNative
    private DequeueInputResult dequeueInputBuffer(long timeoutUs) {
        int status = MEDIA_CODEC_ERROR;
        int index = -1;
        try {
            int indexOrStatus = mMediaCodec.dequeueInputBuffer(timeoutUs);
            if (indexOrStatus >= 0) { // index!
                status = MEDIA_CODEC_OK;
                index = indexOrStatus;
            } else if (indexOrStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                Log.e(TAG, "dequeueInputBuffer: MediaCodec.INFO_TRY_AGAIN_LATER");
                status = MEDIA_CODEC_DEQUEUE_INPUT_AGAIN_LATER;
            } else {
                Log.e(TAG, "Unexpected index_or_status: " + indexOrStatus);
                assert false;
            }
        } catch (Exception e) {
            Log.e(TAG, "Failed to dequeue input buffer", e);
        }
        return new DequeueInputResult(status, index);
    }

    @CalledByNative
    private int flush() {
        try {
            mFlushed = true;
            if (mAudioTrack != null) {
                // Need to call pause() here; otherwise flush() is a no-op.
                mAudioTrack.pause();
                mAudioTrack.flush();
            }
            mMediaCodec.flush();
        } catch (IllegalStateException e) {
            Log.e(TAG, "Failed to flush MediaCodec", e);
            return MEDIA_CODEC_ERROR;
        }
        return MEDIA_CODEC_OK;
    }

    @CalledByNative
    private void stop() {
        mMediaCodec.stop();
        if (mAudioTrack != null) {
            mAudioTrack.pause();
        }
    }

    @CalledByNative
    private int getOutputHeight() {
        return mMediaCodec.getOutputFormat().getInteger(MediaFormat.KEY_HEIGHT);
    }

    @CalledByNative
    private int getOutputWidth() {
        return mMediaCodec.getOutputFormat().getInteger(MediaFormat.KEY_WIDTH);
    }

    @CalledByNative
    private ByteBuffer getInputBuffer(int index) {
        return mInputBuffers[index];
    }

    @CalledByNative
    private ByteBuffer getOutputBuffer(int index) {
        return mOutputBuffers[index];
    }

    @CalledByNative
    private int getInputBuffersCount() {
        return mInputBuffers.length;
    }

    @CalledByNative
    private int getOutputBuffersCount() {
        return mOutputBuffers != null ? mOutputBuffers.length : -1;
    }

    @CalledByNative
    private int getOutputBuffersCapacity() {
        return mOutputBuffers != null ? mOutputBuffers[0].capacity() : -1;
    }

    @CalledByNative
    private boolean getOutputBuffers() {
        try {
            mOutputBuffers = mMediaCodec.getOutputBuffers();
        } catch (IllegalStateException e) {
            Log.e(TAG, "Cannot get output buffers", e);
            return false;
        }
        return true;
    }

    @CalledByNative
    private int queueInputBuffer(
            int index, int offset, int size, long presentationTimeUs, int flags) {
        resetLastPresentationTimeIfNeeded(presentationTimeUs);
        try {
            mMediaCodec.queueInputBuffer(index, offset, size, presentationTimeUs, flags);
        } catch (Exception e) {
            Log.e(TAG, "Failed to queue input buffer", e);
            return MEDIA_CODEC_ERROR;
        }
        return MEDIA_CODEC_OK;
    }

    @CalledByNative
    private void setVideoBitrate(int bps) {
        Bundle b = new Bundle();
        b.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, bps);
        mMediaCodec.setParameters(b);
    }

    @CalledByNative
    private void requestKeyFrameSoon() {
        Bundle b = new Bundle();
        b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
        mMediaCodec.setParameters(b);
    }

    @CalledByNative
    private int queueSecureInputBuffer(
            int index, int offset, byte[] iv, byte[] keyId, int[] numBytesOfClearData,
            int[] numBytesOfEncryptedData, int numSubSamples, long presentationTimeUs) {
        resetLastPresentationTimeIfNeeded(presentationTimeUs);
        try {
            MediaCodec.CryptoInfo cryptoInfo = new MediaCodec.CryptoInfo();
            cryptoInfo.set(numSubSamples, numBytesOfClearData, numBytesOfEncryptedData,
                    keyId, iv, MediaCodec.CRYPTO_MODE_AES_CTR);
            mMediaCodec.queueSecureInputBuffer(index, offset, cryptoInfo, presentationTimeUs, 0);
        } catch (MediaCodec.CryptoException e) {
            Log.e(TAG, "Failed to queue secure input buffer", e);
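            // Note: ERROR_NO_KEY is mapped to its own status so the caller can tell a missing
            // decryption key (the operation can be retried once the key is added) apart from
            // an unrecoverable crypto error.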
            if (e.getErrorCode() == MediaCodec.CryptoException.ERROR_NO_KEY) {
                Log.e(TAG, "MediaCodec.CryptoException.ERROR_NO_KEY");
                return MEDIA_CODEC_NO_KEY;
            }
            Log.e(TAG, "MediaCodec.CryptoException with error code " + e.getErrorCode());
            return MEDIA_CODEC_ERROR;
        } catch (IllegalStateException e) {
            Log.e(TAG, "Failed to queue secure input buffer", e);
            return MEDIA_CODEC_ERROR;
        }
        return MEDIA_CODEC_OK;
    }

    @CalledByNative
    private void releaseOutputBuffer(int index, boolean render) {
        try {
            mMediaCodec.releaseOutputBuffer(index, render);
        } catch (IllegalStateException e) {
            // TODO(qinmin): May need to report the error to the caller. crbug.com/356498.
            Log.e(TAG, "Failed to release output buffer", e);
        }
    }

    @CalledByNative
    private DequeueOutputResult dequeueOutputBuffer(long timeoutUs) {
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int status = MEDIA_CODEC_ERROR;
        int index = -1;
        try {
            int indexOrStatus = mMediaCodec.dequeueOutputBuffer(info, timeoutUs);
            if (info.presentationTimeUs < mLastPresentationTimeUs) {
                // TODO(qinmin): return a special code through DequeueOutputResult
                // to notify the native code that the frame has a wrong presentation
                // timestamp and should be skipped.
                info.presentationTimeUs = mLastPresentationTimeUs;
            }
            mLastPresentationTimeUs = info.presentationTimeUs;

            if (indexOrStatus >= 0) { // index!
                status = MEDIA_CODEC_OK;
                index = indexOrStatus;
            } else if (indexOrStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                status = MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED;
            } else if (indexOrStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                status = MEDIA_CODEC_OUTPUT_FORMAT_CHANGED;
            } else if (indexOrStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                status = MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER;
            } else {
                Log.e(TAG, "Unexpected index_or_status: " + indexOrStatus);
                assert false;
            }
        } catch (IllegalStateException e) {
            Log.e(TAG, "Failed to dequeue output buffer", e);
        }

        return new DequeueOutputResult(
                status, index, info.flags, info.offset, info.presentationTimeUs, info.size);
    }

    @CalledByNative
    private boolean configureVideo(MediaFormat format, Surface surface, MediaCrypto crypto,
            int flags) {
        try {
            if (mAdaptivePlaybackSupported) {
                format.setInteger(MediaFormat.KEY_MAX_WIDTH, MAX_ADAPTIVE_PLAYBACK_WIDTH);
                format.setInteger(MediaFormat.KEY_MAX_HEIGHT, MAX_ADAPTIVE_PLAYBACK_HEIGHT);
            }
            mMediaCodec.configure(format, surface, crypto, flags);
            return true;
        } catch (IllegalStateException e) {
            Log.e(TAG, "Cannot configure the video codec", e);
        }
        return false;
    }

    @CalledByNative
    private static MediaFormat createAudioFormat(String mime, int sampleRate, int channelCount) {
        return MediaFormat.createAudioFormat(mime, sampleRate, channelCount);
    }

    @CalledByNative
    private static MediaFormat createVideoDecoderFormat(String mime, int width, int height) {
        return MediaFormat.createVideoFormat(mime, width, height);
    }

    @CalledByNative
    private static MediaFormat createVideoEncoderFormat(String mime, int width, int height,
            int bitRate, int frameRate, int iFrameInterval, int colorFormat) {
        MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
        format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, iFrameInterval);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
        return format;
    }

    @CalledByNative
    private boolean isAdaptivePlaybackSupported(int width, int height) {
        if (!mAdaptivePlaybackSupported)
            return false;
        return width <= MAX_ADAPTIVE_PLAYBACK_WIDTH && height <= MAX_ADAPTIVE_PLAYBACK_HEIGHT;
    }

    private static boolean codecSupportsAdaptivePlayback(MediaCodec mediaCodec, String mime) {
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT || mediaCodec == null) {
            return false;
        }
        try {
            MediaCodecInfo info = mediaCodec.getCodecInfo();
            if (info.isEncoder()) {
                return false;
            }
            MediaCodecInfo.CodecCapabilities capabilities = info.getCapabilitiesForType(mime);
            return (capabilities != null) && capabilities.isFeatureSupported(
                    MediaCodecInfo.CodecCapabilities.FEATURE_AdaptivePlayback);
        } catch (IllegalArgumentException e) {
            Log.e(TAG, "Cannot retrieve codec information", e);
        }
        return false;
    }

    @CalledByNative
    private static void setCodecSpecificData(MediaFormat format, int index, byte[] bytes) {
        String name = null;
        if (index == 0) {
            name = "csd-0";
        } else if (index == 1) {
            name = "csd-1";
        }
        if (name != null) {
            format.setByteBuffer(name, ByteBuffer.wrap(bytes));
        }
    }

    @CalledByNative
    private static void setFrameHasADTSHeader(MediaFormat format) {
        format.setInteger(MediaFormat.KEY_IS_ADTS, 1);
    }

    @CalledByNative
    private boolean configureAudio(MediaFormat format, MediaCrypto crypto, int flags,
            boolean playAudio) {
        try {
            mMediaCodec.configure(format, null, crypto, flags);
            if (playAudio) {
                int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
                int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
                int channelConfig = getAudioFormat(channelCount);
                // Using 16bit PCM for output. Keep this value in sync with
                // kBytesPerAudioOutputSample in media_codec_bridge.cc.
                int minBufferSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig,
                        AudioFormat.ENCODING_PCM_16BIT);
                mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig,
                        AudioFormat.ENCODING_PCM_16BIT, minBufferSize, AudioTrack.MODE_STREAM);
                if (mAudioTrack.getState() == AudioTrack.STATE_UNINITIALIZED) {
                    mAudioTrack = null;
                    return false;
                }
            }
            return true;
        } catch (IllegalStateException e) {
            Log.e(TAG, "Cannot configure the audio codec", e);
        }
        return false;
    }

    /**
     * Play the audio buffer that is passed in.
     *
     * @param buf Audio buffer to be rendered.
     * @return The number of frames that have already been consumed by the
     *         hardware. This number resets to 0 after each flush call.
     */
    @CalledByNative
    private long playOutputBuffer(byte[] buf) {
        if (mAudioTrack == null) {
            return 0;
        }

        if (AudioTrack.PLAYSTATE_PLAYING != mAudioTrack.getPlayState()) {
            mAudioTrack.play();
        }
        int size = mAudioTrack.write(buf, 0, buf.length);
        if (buf.length != size) {
            Log.i(TAG, "Failed to send all data to audio output, expected size: "
                    + buf.length + ", actual size: " + size);
        }
        // TODO(qinmin): Returning the head position allows us to estimate
        // the current presentation time in native code. It would be better to
        // use AudioTrack.getTimestamp() to get the last known time when a
        // frame was played, but that would require converting the Java nano
        // time to a C++ timestamp.
        // If the stream runs too long, getPlaybackHeadPosition() could
        // overflow. AudioTimestampHelper in MediaSourcePlayer has the same
        // issue. See http://crbug.com/358801.
        return mAudioTrack.getPlaybackHeadPosition();
    }

    @CalledByNative
    private void setVolume(double volume) {
        if (mAudioTrack != null) {
            mAudioTrack.setStereoVolume((float) volume, (float) volume);
        }
    }

    private void resetLastPresentationTimeIfNeeded(long presentationTimeUs) {
        if (mFlushed) {
            mLastPresentationTimeUs =
                    Math.max(presentationTimeUs - MAX_PRESENTATION_TIMESTAMP_SHIFT_US, 0);
            mFlushed = false;
        }
    }

    private int getAudioFormat(int channelCount) {
        switch (channelCount) {
            case 1:
                return AudioFormat.CHANNEL_OUT_MONO;
            case 2:
                return AudioFormat.CHANNEL_OUT_STEREO;
            case 4:
                return AudioFormat.CHANNEL_OUT_QUAD;
            case 6:
                return AudioFormat.CHANNEL_OUT_5POINT1;
            case 8:
                return AudioFormat.CHANNEL_OUT_7POINT1;
            default:
                return AudioFormat.CHANNEL_OUT_DEFAULT;
        }
    }
}