/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package android.media.cts;

import android.media.AudioTimestamp;
import android.media.AudioTrack;
import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;
import android.view.Surface;

import androidx.test.filters.SdkSuppress;

import com.android.compatibility.common.util.ApiLevelUtil;
import com.android.compatibility.common.util.MediaUtils;

import com.google.common.collect.ImmutableList;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.LinkedList;

/**
 * Class for directly managing both audio and video playback by
 * using {@link MediaCodec} and {@link AudioTrack}.
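 *
 * <p>A minimal lifecycle sketch (illustrative only; the {@code MediaTimeProvider}, the
 * extractor setup and the polling cadence are assumptions about how callers drive this
 * class):
 * <pre>{@code
 * CodecState state = new CodecState(timeProvider, extractor, trackIndex, format, codec,
 *         false, false, audioSessionId); // limitQueueDepth=false, tunneled=false
 * state.startCodec();
 * state.play();
 * while (!state.isEnded()) {
 *     state.doSomeWork();        // feed input, drain output
 *     state.processAudioTrack(); // push decoded audio into the AudioTrack
 * }
 * state.release();
 * }</pre>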
 */
public class CodecState {
    private static final String TAG = CodecState.class.getSimpleName();

    public static final int UNINITIALIZED_TIMESTAMP = Integer.MIN_VALUE;

    private boolean mSawInputEOS;
    private volatile boolean mSawOutputEOS;
    private boolean mLimitQueueDepth;
    private boolean mIsTunneled;
    private boolean mIsAudio;
    private int mAudioSessionId;
    private ByteBuffer[] mCodecInputBuffers;
    private ByteBuffer[] mCodecOutputBuffers;
    private int mTrackIndex;
    private int mAvailableInputBufferIndex;
    private LinkedList<Integer> mAvailableOutputBufferIndices;
    private LinkedList<MediaCodec.BufferInfo> mAvailableOutputBufferInfos;

    /**
     * The media timestamp of the latest frame decoded by this codec.
     *
     * Note: in tunnel mode, this coincides with the latest rendered frame.
     */
    private volatile long mDecodedFramePresentationTimeUs;
    private volatile long mRenderedVideoFramePresentationTimeUs;
    private volatile long mRenderedVideoFrameSystemTimeNano;
    private long mFirstSampleTimeUs;
    private long mPlaybackStartTimeUs;
    private long mLastPresentTimeUs;
    private MediaCodec mCodec;
    private MediaTimeProvider mMediaTimeProvider;
    private MediaExtractor mExtractor;
    private MediaFormat mFormat;
    private MediaFormat mOutputFormat;
    private NonBlockingAudioTrack mAudioTrack;
    private volatile OnFrameRenderedListener mOnFrameRenderedListener;
    /** A list of reported rendered video frames' timestamps. */
    private ArrayList<Long> mRenderedVideoFrameTimestampList;
    private ArrayList<Long> mRenderedVideoFrameSystemTimeList;
    private boolean mIsFirstTunnelFrameReady;
    private volatile OnFirstTunnelFrameReadyListener mOnFirstTunnelFrameReadyListener;
    /** If true, starves the underlying {@link MediaCodec} to simulate an underrun. */
    private boolean mShouldStopDrainingOutputBuffers;

    private static boolean mIsAtLeastS = ApiLevelUtil.isAtLeast(Build.VERSION_CODES.S);

    /** If true, the video/audio will start over from the beginning when it reaches the end. */
    private boolean mLoopEnabled = false;

    /**
     * Creates a {@code CodecState} that manages playback of a single extractor track using
     * {@link MediaCodec} and, for audio, {@link AudioTrack}.
     */
    public CodecState(
            MediaTimeProvider mediaTimeProvider,
            MediaExtractor extractor,
            int trackIndex,
            MediaFormat format,
            MediaCodec codec,
            boolean limitQueueDepth,
            boolean tunneled,
            int audioSessionId) {
        mMediaTimeProvider = mediaTimeProvider;
        mExtractor = extractor;
        mTrackIndex = trackIndex;
        mFormat = format;
        mSawInputEOS = mSawOutputEOS = false;
        mLimitQueueDepth = limitQueueDepth;
        mIsTunneled = tunneled;
        mAudioSessionId = audioSessionId;
        mFirstSampleTimeUs = -1;
        mPlaybackStartTimeUs = 0;
        mLastPresentTimeUs = 0;

        mCodec = codec;

        mAvailableInputBufferIndex = -1;
        mAvailableOutputBufferIndices = new LinkedList<Integer>();
        mAvailableOutputBufferInfos = new LinkedList<MediaCodec.BufferInfo>();
        mRenderedVideoFrameTimestampList = new ArrayList<Long>();
        mRenderedVideoFrameSystemTimeList = new ArrayList<Long>();

        mDecodedFramePresentationTimeUs = UNINITIALIZED_TIMESTAMP;
        mRenderedVideoFramePresentationTimeUs = UNINITIALIZED_TIMESTAMP;
        mRenderedVideoFrameSystemTimeNano = UNINITIALIZED_TIMESTAMP;

        mIsFirstTunnelFrameReady = false;
        mShouldStopDrainingOutputBuffers = false;

        String mime = mFormat.getString(MediaFormat.KEY_MIME);
        Log.d(TAG, "CodecState::CodecState " + mime);
        mIsAudio = mime.startsWith("audio/");

        setFrameListeners(mCodec);
    }

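    /**
     * Stops and releases the underlying {@link MediaCodec}, as well as the attached
     * {@link AudioTrack}, if any.
     */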
    public void release() {
        mCodec.stop();
        mCodecInputBuffers = null;
        mCodecOutputBuffers = null;
        mOutputFormat = null;

        mAvailableOutputBufferIndices.clear();
        mAvailableOutputBufferInfos.clear();

        mAvailableInputBufferIndex = -1;
        mAvailableOutputBufferIndices = null;
        mAvailableOutputBufferInfos = null;

        releaseFrameListeners();

        mCodec.release();
        mCodec = null;

        if (mAudioTrack != null) {
            mAudioTrack.release();
            mAudioTrack = null;
        }
    }

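    /** Starts the underlying {@link MediaCodec} and caches its input and output buffers. */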
    public void startCodec() {
        mCodec.start();
        mCodecInputBuffers = mCodec.getInputBuffers();
        if (!mIsTunneled || mIsAudio) {
            mCodecOutputBuffers = mCodec.getOutputBuffers();
        }
    }

    public void play() {
        if (mAudioTrack != null) {
            mAudioTrack.play();
        }
    }

    public void pause() {
        if (mAudioTrack != null) {
            mAudioTrack.pause();
        }
    }

    /**
     * Returns the media timestamp of the latest decoded sample/frame.
     *
     * TODO(b/202710709): Disambiguate getCurrentPosition's meaning
     */
    public long getCurrentPositionUs() {
        // Use decoded frame time when available, otherwise default to render time (typically, in
        // tunnel mode).
        if (mDecodedFramePresentationTimeUs != UNINITIALIZED_TIMESTAMP) {
            return mDecodedFramePresentationTimeUs;
        } else {
            return mRenderedVideoFramePresentationTimeUs;
        }
    }

    /** Returns the system time of the latest rendered video frame. */
    public long getRenderedVideoSystemTimeNano() {
        return mRenderedVideoFrameSystemTimeNano;
    }

    public void flush() {
        if (!mIsTunneled || mIsAudio) {
            mAvailableOutputBufferIndices.clear();
            mAvailableOutputBufferInfos.clear();
        }

        mAvailableInputBufferIndex = -1;
        mSawInputEOS = false;
        mSawOutputEOS = false;

        if (mAudioTrack != null
                && mAudioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
            mAudioTrack.flush();
        }

        mCodec.flush();
        mDecodedFramePresentationTimeUs = UNINITIALIZED_TIMESTAMP;
        mRenderedVideoFramePresentationTimeUs = UNINITIALIZED_TIMESTAMP;
        mRenderedVideoFrameSystemTimeNano = UNINITIALIZED_TIMESTAMP;
        mRenderedVideoFrameTimestampList = new ArrayList<Long>();
        mRenderedVideoFrameSystemTimeList = new ArrayList<Long>();
        mIsFirstTunnelFrameReady = false;
    }

    public boolean isEnded() {
        return mSawInputEOS && mSawOutputEOS;
    }

    /** @see #doSomeWork(boolean) */
    public Long doSomeWork() {
        return doSomeWork(false /* mustWait */);
    }

    /**
     * {@code doSomeWork} is the worker function that does all buffer handling and decoding work.
     * It first reads data from {@link MediaExtractor} and pushes it into {@link MediaCodec}; it
     * then dequeues output buffers from {@link MediaCodec} into its own buffer queue and drains
     * them before the next round of reading from {@link MediaExtractor}.
     *
     * @param mustWait Whether to block on input buffer retrieval
     *
     * @return timestamp of the queued frame, if any.
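     *
     * <p>For example (illustrative only; {@code state} is an assumed instance of this class):
     * <pre>{@code
     * Long queuedPtsUs = state.doSomeWork(true); // block on input buffer retrieval
     * }</pre>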
     */
    public Long doSomeWork(boolean mustWait) {
        // Extract input data, if relevant
        Long sampleTime = null;
        if (mAvailableInputBufferIndex == -1) {
            int indexInput = mCodec.dequeueInputBuffer(mustWait ? -1 : 0 /* timeoutUs */);
            if (indexInput != MediaCodec.INFO_TRY_AGAIN_LATER) {
                mAvailableInputBufferIndex = indexInput;
            }
        }
        if (mAvailableInputBufferIndex != -1) {
            sampleTime = feedInputBuffer(mAvailableInputBufferIndex);
            if (sampleTime != null) {
                mAvailableInputBufferIndex = -1;
            }
        }

        // Queue output data, if relevant
        if (mIsAudio || !mIsTunneled) {
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            int indexOutput = mCodec.dequeueOutputBuffer(info, 0 /* timeoutUs */);

            if (indexOutput == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                mOutputFormat = mCodec.getOutputFormat();
                onOutputFormatChanged();
            } else if (indexOutput == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                mCodecOutputBuffers = mCodec.getOutputBuffers();
            } else if (indexOutput != MediaCodec.INFO_TRY_AGAIN_LATER) {
                mAvailableOutputBufferIndices.add(indexOutput);
                mAvailableOutputBufferInfos.add(info);
            }

            while (drainOutputBuffer()) {
            }
        }

        return sampleTime;
    }

    public void setLoopEnabled(boolean enabled) {
        mLoopEnabled = enabled;
    }

    @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
    private void setFrameListeners(MediaCodec codec) {
        if (!mIsAudio) {
            // Set up the frame-rendered callback for video codecs
            mOnFrameRenderedListener = new OnFrameRenderedListener();
            codec.setOnFrameRenderedListener(mOnFrameRenderedListener,
                    new Handler(Looper.getMainLooper()));

            if (mIsTunneled) {
                mOnFirstTunnelFrameReadyListener = new OnFirstTunnelFrameReadyListener();
                codec.setOnFirstTunnelFrameReadyListener(new Handler(Looper.getMainLooper()),
                        mOnFirstTunnelFrameReadyListener);
            }
        }
    }

    @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
    private void releaseFrameListeners() {
        if (mOnFrameRenderedListener != null) {
            mCodec.setOnFrameRenderedListener(null, null);
            mOnFrameRenderedListener = null;
        }
        if (mOnFirstTunnelFrameReadyListener != null) {
            mCodec.setOnFirstTunnelFrameReadyListener(null, null);
            mOnFirstTunnelFrameReadyListener = null;
        }
    }

    /**
     * Extracts some data from the configured MediaExtractor and feeds it to the configured
     * MediaCodec.
     *
     * Returns the timestamp of the queued buffer, if any.
     * Returns null once all data has been extracted and queued.
     */
    private Long feedInputBuffer(int inputBufferIndex)
            throws MediaCodec.CryptoException, IllegalStateException {
        if (mSawInputEOS || inputBufferIndex == -1) {
            return null;
        }

        // Stall the read if the audio queue holds more than 2MB, so we do not occupy too much
        // heap.
        if (mLimitQueueDepth && mAudioTrack != null &&
                mAudioTrack.getNumBytesQueued() > 2 * 1024 * 1024) {
            return null;
        }

        ByteBuffer codecData = mCodecInputBuffers[inputBufferIndex];

        int trackIndex = mExtractor.getSampleTrackIndex();

        if (trackIndex == mTrackIndex) {
            int sampleSize =
                mExtractor.readSampleData(codecData, 0 /* offset */);

            long sampleTime = mExtractor.getSampleTime();

            int sampleFlags = mExtractor.getSampleFlags();

            if (sampleSize <= 0) {
                Log.d(TAG, "sampleSize: " + sampleSize + " trackIndex:" + trackIndex +
                        " sampleTime:" + sampleTime + " sampleFlags:" + sampleFlags);
                mSawInputEOS = true;
                return null;
            }

            if (mIsTunneled) {
                if (mFirstSampleTimeUs == -1) {
                    mFirstSampleTimeUs = sampleTime;
                }
                sampleTime -= mFirstSampleTimeUs;
            }

            mLastPresentTimeUs = mPlaybackStartTimeUs + sampleTime;

            if ((sampleFlags & MediaExtractor.SAMPLE_FLAG_ENCRYPTED) != 0) {
                MediaCodec.CryptoInfo info = new MediaCodec.CryptoInfo();
                mExtractor.getSampleCryptoInfo(info);

                mCodec.queueSecureInputBuffer(
                        inputBufferIndex, 0 /* offset */, info, mLastPresentTimeUs, 0 /* flags */);
            } else {
                mCodec.queueInputBuffer(
                        inputBufferIndex, 0 /* offset */, sampleSize, mLastPresentTimeUs, 0 /* flags */);
            }

            mExtractor.advance();
            return mLastPresentTimeUs;
        } else if (trackIndex < 0) {
            Log.d(TAG, "saw input EOS on track " + mTrackIndex);

            if (mLoopEnabled) {
                Log.d(TAG, "looping from the beginning");
                mExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
                mPlaybackStartTimeUs = mLastPresentTimeUs;
                return null;
            }

            mSawInputEOS = true;
            mCodec.queueInputBuffer(
                    inputBufferIndex, 0 /* offset */, 0 /* sampleSize */,
                    0 /* sampleTime */, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
        }

        return null;
    }

    private void onOutputFormatChanged() {
        String mime = mOutputFormat.getString(MediaFormat.KEY_MIME);
        // b/9250789
        Log.d(TAG, "CodecState::onOutputFormatChanged " + mime);

        mIsAudio = false;
        if (mime.startsWith("audio/")) {
            mIsAudio = true;
            int sampleRate =
                mOutputFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE);

            int channelCount =
                mOutputFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);

            Log.d(TAG, "CodecState::onOutputFormatChanged Audio" +
                    " sampleRate:" + sampleRate + " channels:" + channelCount);
            // We check the format here, after receiving it from MediaExtractor and before
            // passing it down to AudioTrack. If MediaExtractor worked properly, this check
            // would not be necessary; however, in our tests we found a few cases where a
            // channel count of 0 and a sample rate of 0 were returned.
            if (channelCount < 1 || channelCount > 8 ||
                    sampleRate < 8000 || sampleRate > 128000) {
                return;
            }
            mAudioTrack = new NonBlockingAudioTrack(sampleRate, channelCount,
                                    mIsTunneled, mAudioSessionId);
            mAudioTrack.play();
        }

        if (mime.startsWith("video/")) {
            int width = mOutputFormat.getInteger(MediaFormat.KEY_WIDTH);
            int height = mOutputFormat.getInteger(MediaFormat.KEY_HEIGHT);
            Log.d(TAG, "CodecState::onOutputFormatChanged Video" +
                    " width:" + width + " height:" + height);
        }
    }

    /** Returns true if more output data could be drained. */
    private boolean drainOutputBuffer() {
        if (mSawOutputEOS || mAvailableOutputBufferIndices.isEmpty()
                || mShouldStopDrainingOutputBuffers) {
            return false;
        }

        int index = mAvailableOutputBufferIndices.peekFirst().intValue();
        MediaCodec.BufferInfo info = mAvailableOutputBufferInfos.peekFirst();

        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            Log.d(TAG, "saw output EOS on track " + mTrackIndex);

            mSawOutputEOS = true;

            // Do not stop the audio track here. Video presentation may not have finished
            // yet; stopping the audio track now would result in getAudioTimeUs returning 0
            // and would prevent video samples from being presented. We stop the audio track
            // before the playback thread exits.
            return false;
        }

        if (mAudioTrack != null) {
            ByteBuffer buffer = mCodecOutputBuffers[index];
            byte[] audioArray = new byte[info.size];
            buffer.get(audioArray);
            buffer.clear();

            mAudioTrack.write(ByteBuffer.wrap(audioArray), info.size,
                    info.presentationTimeUs * 1000);

            mCodec.releaseOutputBuffer(index, false /* render */);

            mDecodedFramePresentationTimeUs = info.presentationTimeUs;

            mAvailableOutputBufferIndices.removeFirst();
            mAvailableOutputBufferInfos.removeFirst();
            return true;
        } else {
            // video
            boolean render;
            long realTimeUs =
                    mMediaTimeProvider.getRealTimeUsForMediaTime(info.presentationTimeUs);

            long nowUs = mMediaTimeProvider.getNowUs();

            long lateUs = nowUs - realTimeUs;

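            // Decide whether to render based on how late the frame is relative to its
            // real-time presentation deadline: more than 45ms early, leave it queued and
            // retry on a later pass; more than 30ms late, drop it; otherwise render it.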
            if (lateUs < -45000) {
                // too early
                return false;
            } else if (lateUs > 30000) {
                Log.d(TAG, "video late by " + lateUs + " us.");
                render = false;
            } else {
                render = true;
                mDecodedFramePresentationTimeUs = info.presentationTimeUs;
            }

            mCodec.releaseOutputBuffer(index, render);

            mAvailableOutputBufferIndices.removeFirst();
            mAvailableOutputBufferInfos.removeFirst();
            return true;
        }
    }

    /**
     * Callback called by {@link MediaCodec} when it is notified that a decoded video frame has
     * been rendered on the attached {@link Surface}.
     */
    private class OnFrameRenderedListener implements MediaCodec.OnFrameRenderedListener {
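        /** Sentinel timestamp reported for the tunnel mode end-of-stream notification. */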
        private static final long TUNNELING_EOS_PRESENTATION_TIME_US = Long.MAX_VALUE;

        @Override
        public void onFrameRendered(MediaCodec codec, long presentationTimeUs, long nanoTime) {
            if (this != mOnFrameRenderedListener) {
                return; // stale event
            }
            if (presentationTimeUs == TUNNELING_EOS_PRESENTATION_TIME_US) {
                mSawOutputEOS = true;
            } else {
                mRenderedVideoFramePresentationTimeUs = presentationTimeUs;
            }
            mRenderedVideoFrameSystemTimeNano = nanoTime;
            mRenderedVideoFrameTimestampList.add(presentationTimeUs);
            mRenderedVideoFrameSystemTimeList.add(mRenderedVideoFrameSystemTimeNano);
        }
    }

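    /** Returns the audio time of the attached {@link AudioTrack} in microseconds, or 0 if none. */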
    public long getAudioTimeUs() {
        if (mAudioTrack == null) {
            return 0;
        }

        return mAudioTrack.getAudioTimeUs();
    }

    /** Returns the presentation timestamp of the last rendered video frame. */
    public long getVideoTimeUs() {
        return mRenderedVideoFramePresentationTimeUs;
    }

    /** Callback invoked in tunnel mode when the first video frame is ready for video peek. */
    @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
    private class OnFirstTunnelFrameReadyListener
        implements MediaCodec.OnFirstTunnelFrameReadyListener {

        @Override
        public void onFirstTunnelFrameReady(MediaCodec codec) {
            if (this != mOnFirstTunnelFrameReadyListener) {
                return; // stale event
            }
            mIsFirstTunnelFrameReady = true;
        }
    }

    /**
     * If a video codec, returns the list of rendered frames' timestamps. Otherwise, returns an
     * empty list.
     */
    public ImmutableList<Long> getRenderedVideoFrameTimestampList() {
        return ImmutableList.<Long>copyOf(mRenderedVideoFrameTimestampList);
    }

    /**
     * If a video codec, returns the list of system times at which frames were rendered.
     * Otherwise, returns an empty list.
     */
    public ImmutableList<Long> getRenderedVideoFrameSystemTimeList() {
        return ImmutableList.<Long>copyOf(mRenderedVideoFrameSystemTimeList);
    }

    /** Process the attached {@link AudioTrack}, if any. */
    public void processAudioTrack() {
        if (mAudioTrack != null) {
            mAudioTrack.process();
        }
    }

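    /**
     * Returns the number of audio frames written to the attached {@link AudioTrack}, or 0 if
     * there is none.
     */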
    public int getFramesWritten() {
        if (mAudioTrack != null) {
            return mAudioTrack.getFramesWritten();
        }
        return 0;
    }

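    /** Returns the {@link AudioTimestamp} of the attached {@link AudioTrack}, or null if none. */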
    public AudioTimestamp getTimestamp() {
        if (mAudioTrack == null) {
            return null;
        }

        return mAudioTrack.getTimestamp();
    }

    /** Stop the attached {@link AudioTrack}, if any. */
    public void stopAudioTrack() {
        if (mAudioTrack != null) {
            mAudioTrack.stop();
        }
    }

    /** Start the attached {@link AudioTrack}, if any. */
    public void playAudioTrack() {
        if (mAudioTrack != null) {
            mAudioTrack.play();
        }
    }

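    /** Sets the {@link Surface} that the underlying video codec renders to. */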
    public void setOutputSurface(Surface surface) {
        if (mAudioTrack != null) {
            throw new UnsupportedOperationException("Cannot set surface on audio codec");
        }
        mCodec.setOutputSurface(surface);
    }

    /** Configure video peek. */
    public void setVideoPeek(boolean enable) {
        if (MediaUtils.check(mIsAtLeastS, "setVideoPeek requires Android S")) {
            Bundle parameters = new Bundle();
            parameters.putInt(MediaCodec.PARAMETER_KEY_TUNNEL_PEEK, enable ? 1 : 0);
            mCodec.setParameters(parameters);
        }
    }

    /** In tunnel mode, queries whether the first video frame is ready for video peek. */
    public boolean isFirstTunnelFrameReady() {
        return mIsFirstTunnelFrameReady;
    }

    /**
     * Starts or stops draining output buffers; stopping the drain can simulate an underrun
     * condition.
     */
    public void stopDrainingOutputBuffers(boolean stop) {
        mShouldStopDrainingOutputBuffers = stop;
        if (mAudioTrack != null) {
            mAudioTrack.setStopWriting(stop);
        }
    }

    /**
     * Option to introduce an offset (positive or negative, in nanoseconds) to content queued
     * to the {@link AudioTrack}.
     */
    public void setAudioOffsetNs(long audioOffsetNs) {
        if (mAudioTrack != null) {
            mAudioTrack.setAudioOffsetNs(audioOffsetNs);
        }
    }

    /** Returns the underlying {@code AudioTrack}, if any. */
    public AudioTrack getAudioTrack() {
        if (mAudioTrack != null) {
            return mAudioTrack.getAudioTrack();
        }
        return null;
    }

    /**
     * Seek media extractor to the beginning of the configured track.
     *
     * @param presentationTimeOffsetUs The offset for the presentation time to start at.
     */
    public void seekToBeginning(long presentationTimeOffsetUs) {
        mExtractor.seekTo(mFirstSampleTimeUs, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
        mPlaybackStartTimeUs = presentationTimeOffsetUs;
    }
}