1 /*
2  * Copyright (C) 2012 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.media.decoder.cts;
18 
19 import static android.media.MediaCodecInfo.CodecCapabilities.FEATURE_TunneledPlayback;
20 import static android.media.MediaCodecInfo.CodecProfileLevel.AVCLevel31;
21 import static android.media.MediaCodecInfo.CodecProfileLevel.AVCLevel32;
22 import static android.media.MediaCodecInfo.CodecProfileLevel.AVCLevel4;
23 import static android.media.MediaCodecInfo.CodecProfileLevel.AVCLevel42;
24 import static android.media.MediaCodecInfo.CodecProfileLevel.AVCProfileHigh;
25 import static android.media.MediaCodecInfo.CodecProfileLevel.HEVCMainTierLevel31;
26 import static android.media.MediaCodecInfo.CodecProfileLevel.HEVCMainTierLevel41;
27 import static android.media.MediaCodecInfo.CodecProfileLevel.HEVCProfileMain;
28 
29 import static org.junit.Assert.assertEquals;
30 import static org.junit.Assert.assertFalse;
31 import static org.junit.Assert.assertNotEquals;
32 import static org.junit.Assert.assertNotNull;
33 import static org.junit.Assert.assertTrue;
34 import static org.junit.Assert.fail;
35 
36 import android.app.ActivityManager;
37 import android.content.Context;
38 import android.content.pm.PackageManager;
39 import android.content.res.AssetFileDescriptor;
40 import android.graphics.ImageFormat;
41 import android.media.AudioFormat;
42 import android.media.AudioManager;
43 import android.media.AudioTimestamp;
44 import android.media.Image;
45 import android.media.MediaCodec;
46 import android.media.MediaCodecInfo;
47 import android.media.MediaCodecInfo.CodecCapabilities;
48 import android.media.MediaCodecList;
49 import android.media.MediaExtractor;
50 import android.media.MediaFormat;
51 import android.media.cts.CodecState;
52 import android.media.cts.MediaCodecTunneledPlayer;
53 import android.media.cts.MediaCodecWrapper;
54 import android.media.cts.MediaHeavyPresubmitTest;
55 import android.media.cts.MediaTestBase;
56 import android.media.cts.NdkMediaCodec;
57 import android.media.cts.SdkMediaCodec;
58 import android.media.cts.TestUtils;
59 import android.net.Uri;
60 import android.os.Build;
61 import android.os.ParcelFileDescriptor;
62 import android.platform.test.annotations.AppModeFull;
63 import android.util.Log;
64 import android.view.Surface;
65 
66 import androidx.test.ext.junit.runners.AndroidJUnit4;
67 import androidx.test.filters.SdkSuppress;
68 
69 import com.android.compatibility.common.util.ApiLevelUtil;
70 import com.android.compatibility.common.util.ApiTest;
71 import com.android.compatibility.common.util.CddTest;
72 import com.android.compatibility.common.util.DeviceReportLog;
73 import com.android.compatibility.common.util.DynamicConfigDeviceSide;
74 import com.android.compatibility.common.util.MediaUtils;
75 import com.android.compatibility.common.util.NonMainlineTest;
76 import com.android.compatibility.common.util.Preconditions;
77 import com.android.compatibility.common.util.ResultType;
78 import com.android.compatibility.common.util.ResultUnit;
79 
80 import com.google.common.collect.ImmutableList;
81 
82 import org.junit.After;
83 import org.junit.Assume;
84 import org.junit.Before;
85 import org.junit.Test;
86 import org.junit.runner.RunWith;
87 
88 import java.io.BufferedInputStream;
89 import java.io.File;
90 import java.io.FileNotFoundException;
91 import java.io.IOException;
92 import java.io.InputStream;
93 import java.nio.ByteBuffer;
94 import java.time.Duration;
95 import java.util.ArrayList;
96 import java.util.Arrays;
97 import java.util.HashMap;
98 import java.util.List;
99 import java.util.Map;
100 import java.util.concurrent.TimeUnit;
101 import java.util.function.Supplier;
102 import java.util.zip.CRC32;
103 
104 @MediaHeavyPresubmitTest
105 @AppModeFull(reason = "There should be no instant apps specific behavior related to decoders")
106 @RunWith(AndroidJUnit4.class)
107 public class DecoderTest extends MediaTestBase {
108     private static final String TAG = "DecoderTest";
109     private static final String REPORT_LOG_NAME = "CtsMediaDecoderTestCases";
110     private static boolean mIsAtLeastR = ApiLevelUtil.isAtLeast(Build.VERSION_CODES.R);
111     private static boolean sIsBeforeS = ApiLevelUtil.isBefore(Build.VERSION_CODES.S);
112     private static boolean sIsAfterT = ApiLevelUtil.isAfter(Build.VERSION_CODES.TIRAMISU)
113             || ApiLevelUtil.codenameEquals("UpsideDownCake");
114 
115     private static final int RESET_MODE_NONE = 0;
116     private static final int RESET_MODE_RECONFIGURE = 1;
117     private static final int RESET_MODE_FLUSH = 2;
118     private static final int RESET_MODE_EOS_FLUSH = 3;
119 
120     private static final String[] CSD_KEYS = new String[] { "csd-0", "csd-1" };
121 
122     private static final int CONFIG_MODE_NONE = 0;
123     private static final int CONFIG_MODE_QUEUE = 1;
124 
125     public static final int CODEC_ALL = 0; // All codecs must support
126     public static final int CODEC_ANY = 1; // At least one codec must support
127     public static final int CODEC_DEFAULT = 2; // Default codec must support
128     public static final int CODEC_OPTIONAL = 3; // Codec support is optional
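    // (These requirement levels are presumably consumed by codecsFor(resource, requirement) used
    // throughout this file: CODEC_ALL exercises every matching decoder, while CODEC_DEFAULT only
    // exercises the decoder MediaCodec.createDecoderByType() would pick.)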
129 
130     short[] mMasterBuffer;
131     static final String mInpPrefix = WorkDir.getMediaDirString();
132 
133     private MediaCodecTunneledPlayer mMediaCodecPlayer;
134     private static final int SLEEP_TIME_MS = 1000;
135     private static final long PLAY_TIME_MS = TimeUnit.MILLISECONDS.convert(1, TimeUnit.MINUTES);
136 
137     private static final String MODULE_NAME = "CtsMediaDecoderTestCases";
138     private DynamicConfigDeviceSide dynamicConfig;
139 
140     static final Map<String, String> sDefaultDecoders = new HashMap<>();
141 
142     protected static AssetFileDescriptor getAssetFileDescriptorFor(final String res)
143             throws FileNotFoundException {
144         File inpFile = new File(mInpPrefix + res);
145         Preconditions.assertTestFileExists(mInpPrefix + res);
146         ParcelFileDescriptor parcelFD =
147                 ParcelFileDescriptor.open(inpFile, ParcelFileDescriptor.MODE_READ_ONLY);
148         return new AssetFileDescriptor(parcelFD, 0, parcelFD.getStatSize());
149     }
150 
151     @Before
152     @Override
153     public void setUp() throws Throwable {
154         super.setUp();
155 
156         // read primary file into memory
157         AssetFileDescriptor masterFd = getAssetFileDescriptorFor("sinesweepraw.raw");
158         long masterLength = masterFd.getLength();
159         mMasterBuffer = new short[(int) (masterLength / 2)];
160         InputStream is = masterFd.createInputStream();
161         BufferedInputStream bis = new BufferedInputStream(is);
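        // sinesweepraw.raw holds 16-bit little-endian PCM; the loop below assembles each byte
        // pair into a signed short (the "hi -= 256" step sign-extends the high byte).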
162         for (int i = 0; i < mMasterBuffer.length; i++) {
163             int lo = bis.read();
164             int hi = bis.read();
165             if (hi >= 128) {
166                 hi -= 256;
167             }
168             int sample = hi * 256 + lo;
169             mMasterBuffer[i] = (short) sample;
170         }
171         bis.close();
172         masterFd.close();
173 
174         dynamicConfig = new DynamicConfigDeviceSide(MODULE_NAME);
175     }
176 
177     @After
178     @Override
179     public void tearDown() {
180         // ensure MediaCodecPlayer resources are released even if an exception is thrown.
181         if (mMediaCodecPlayer != null) {
182             mMediaCodecPlayer.reset();
183             mMediaCodecPlayer = null;
184         }
185         super.tearDown();
186     }
187 
188     static boolean isDefaultCodec(String codecName, String mime) throws IOException {
189         if (sDefaultDecoders.containsKey(mime)) {
190             return sDefaultDecoders.get(mime).equalsIgnoreCase(codecName);
191         }
192         MediaCodec codec = MediaCodec.createDecoderByType(mime);
193         boolean isDefault = codec.getName().equalsIgnoreCase(codecName);
194         sDefaultDecoders.put(mime, codec.getName());
195         codec.release();
196 
197         return isDefault;
198     }
199 
200     // TODO: add similar tests for other audio and video formats
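    // Regression check: configure an ADTS AAC decoder, queue nothing but an end-of-stream
    // buffer, and make sure dequeueOutputBuffer() returns without throwing or hanging.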
201     @Test
202     public void testBug11696552() throws Exception {
203         MediaCodec mMediaCodec = MediaCodec.createDecoderByType(MediaFormat.MIMETYPE_AUDIO_AAC);
204         MediaFormat mFormat = MediaFormat.createAudioFormat(
205                 MediaFormat.MIMETYPE_AUDIO_AAC, 48000 /* frequency */, 2 /* channels */);
206         mFormat.setByteBuffer("csd-0", ByteBuffer.wrap( new byte [] {0x13, 0x10} ));
207         mFormat.setInteger(MediaFormat.KEY_IS_ADTS, 1);
208         mMediaCodec.configure(mFormat, null, null, 0);
209         mMediaCodec.start();
210         int index = mMediaCodec.dequeueInputBuffer(250000);
211         mMediaCodec.queueInputBuffer(index, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
212         MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
213         mMediaCodec.dequeueOutputBuffer(info, 250000);
214     }
215 
216     // The allowed errors in the following tests are the actual maximum measured
217     // errors with the standard decoders, plus 10%.
218     // This should allow for some variation in decoders, while still detecting
219     // phase and delay errors, channel swap, etc.
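    // (decode() is expected to compare the decoded PCM against mMasterBuffer and assert that the
    // deviation stays within the per-file tolerance passed as the second argument.)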
220     @Test
221     public void testDecodeMp3Lame() throws Exception {
222         decode("sinesweepmp3lame.mp3", 804.f);
223         testTimeStampOrdering("sinesweepmp3lame.mp3");
224     }
225     @Test
226     public void testDecodeMp3Smpb() throws Exception {
227         decode("sinesweepmp3smpb.mp3", 413.f);
228         testTimeStampOrdering("sinesweepmp3smpb.mp3");
229     }
230     @Test
231     public void testDecodeM4a() throws Exception {
232         decode("sinesweepm4a.m4a", 124.f);
233         testTimeStampOrdering("sinesweepm4a.m4a");
234     }
235     @Test
236     public void testDecodeOgg() throws Exception {
237         decode("sinesweepogg.ogg", 168.f);
238         testTimeStampOrdering("sinesweepogg.ogg");
239     }
240     @Test
241     public void testDecodeOggMkv() throws Exception {
242         decode("sinesweepoggmkv.mkv", 168.f);
243         testTimeStampOrdering("sinesweepoggmkv.mkv");
244     }
245     @Test
246     public void testDecodeOggMp4() throws Exception {
247         decode("sinesweepoggmp4.mp4", 168.f);
248         testTimeStampOrdering("sinesweepoggmp4.mp4");
249     }
250     @Test
251     public void testDecodeWav() throws Exception {
252         decode("sinesweepwav.wav", 0.0f);
253         testTimeStampOrdering("sinesweepwav.wav");
254     }
255     @Test
256     public void testDecodeWav24() throws Exception {
257         decode("sinesweepwav24.wav", 0.0f);
258         testTimeStampOrdering("sinesweepwav24.wav");
259     }
260     @Test
261     public void testDecodeFlacMkv() throws Exception {
262         decode("sinesweepflacmkv.mkv", 0.0f);
263         testTimeStampOrdering("sinesweepflacmkv.mkv");
264     }
265     @Test
266     public void testDecodeFlac() throws Exception {
267         decode("sinesweepflac.flac", 0.0f);
268         testTimeStampOrdering("sinesweepflac.flac");
269     }
270     @Test
271     public void testDecodeFlac24() throws Exception {
272         decode("sinesweepflac24.flac", 0.0f);
273         testTimeStampOrdering("sinesweepflac24.flac");
274     }
275     @Test
276     public void testDecodeFlacMp4() throws Exception {
277         decode("sinesweepflacmp4.mp4", 0.0f);
278         testTimeStampOrdering("sinesweepflacmp4.mp4");
279     }
280 
281     @Test
282     public void testDecodeMonoMp3() throws Exception {
283         monoTest("monotestmp3.mp3", 44100);
284         testTimeStampOrdering("monotestmp3.mp3");
285     }
286 
287     @Test
288     public void testDecodeMonoM4a() throws Exception {
289         monoTest("monotestm4a.m4a", 44100);
290         testTimeStampOrdering("monotestm4a.m4a");
291     }
292 
293     @Test
294     public void testDecodeMonoOgg() throws Exception {
295         monoTest("monotestogg.ogg", 44100);
296         testTimeStampOrdering("monotestogg.ogg");
297     }
298     @Test
299     public void testDecodeMonoOggMkv() throws Exception {
300         monoTest("monotestoggmkv.mkv", 44100);
301         testTimeStampOrdering("monotestoggmkv.mkv");
302     }
303     @Test
304     public void testDecodeMonoOggMp4() throws Exception {
305         monoTest("monotestoggmp4.mp4", 44100);
306         testTimeStampOrdering("monotestoggmp4.mp4");
307     }
308 
309     @Test
310     public void testDecodeMonoGsm() throws Exception {
311         String fileName = "monotestgsm.wav";
312         Preconditions.assertTestFileExists(mInpPrefix + fileName);
313         if (MediaUtils.hasCodecsForResource(mInpPrefix + fileName)) {
314             monoTest(fileName, 8000);
315             testTimeStampOrdering(fileName);
316         } else {
317             MediaUtils.skipTest("not mandatory");
318         }
319     }
320 
321     @Test
322     public void testDecodeAacTs() throws Exception {
323         testTimeStampOrdering("sinesweeptsaac.m4a");
324     }
325 
326     @Test
327     public void testDecodeVorbis() throws Exception {
328         testTimeStampOrdering("sinesweepvorbis.mkv");
329     }
330     @Test
331     public void testDecodeVorbisMp4() throws Exception {
332         testTimeStampOrdering("sinesweepvorbismp4.mp4");
333     }
334 
335     @Test
336     public void testDecodeOpus() throws Exception {
337         testTimeStampOrdering("sinesweepopus.mkv");
338     }
339     @Test
340     public void testDecodeOpusMp4() throws Exception {
341         testTimeStampOrdering("sinesweepopusmp4.mp4");
342     }
343 
344     @CddTest(requirement="5.1.3")
345     @Test
346     public void testDecodeG711ChannelsAndRates() throws Exception {
347         String[] mimetypes = { MediaFormat.MIMETYPE_AUDIO_G711_ALAW,
348                                MediaFormat.MIMETYPE_AUDIO_G711_MLAW };
349         int[] sampleRates = { 8000 };
350         int[] channelMasks = { AudioFormat.CHANNEL_OUT_MONO,
351                                AudioFormat.CHANNEL_OUT_STEREO,
352                                AudioFormat.CHANNEL_OUT_5POINT1 };
353 
354         verifyChannelsAndRates(mimetypes, sampleRates, channelMasks);
355     }
356 
357     @CddTest(requirement="5.1.3")
358     @Test
359     public void testDecodeOpusChannelsAndRates() throws Exception {
360         String[] mimetypes = { MediaFormat.MIMETYPE_AUDIO_OPUS };
361         int[] sampleRates = { 8000, 12000, 16000, 24000, 48000 };
362         int[] channelMasks = { AudioFormat.CHANNEL_OUT_MONO,
363                                AudioFormat.CHANNEL_OUT_STEREO,
364                                AudioFormat.CHANNEL_OUT_5POINT1 };
365 
366         verifyChannelsAndRates(mimetypes, sampleRates, channelMasks);
367     }
368 
369     private void verifyChannelsAndRates(String[] mimetypes, int[] sampleRates,
370                                        int[] channelMasks) throws Exception {
371 
372         if (!MediaUtils.check(mIsAtLeastR, "test invalid before Android 11")) return;
373 
374         for (String mimetype : mimetypes) {
375             // ensure we find a codec for all listed mime/channel/rate combinations
376             MediaCodecList mcl = new MediaCodecList(MediaCodecList.ALL_CODECS);
377             for (int sampleRate : sampleRates) {
378                 for (int channelMask : channelMasks) {
379                     int channelCount = AudioFormat.channelCountFromOutChannelMask(channelMask);
380                     MediaFormat desiredFormat = MediaFormat.createAudioFormat(
381                                 mimetype,
382                                 sampleRate,
383                                 channelCount);
384                     String codecname = mcl.findDecoderForFormat(desiredFormat);
385 
386                     assertNotNull("findDecoderForFormat() failed for mime=" + mimetype
387                                     + " sampleRate=" + sampleRate + " channelCount=" + channelCount,
388                             codecname);
389                 }
390             }
391 
392             // check all mime-matching codecs successfully configure the desired rate/channels
393             ArrayList<MediaCodecInfo> codecInfoList = getDecoderMediaCodecInfoList(mimetype);
394             if (codecInfoList == null) {
395                 continue;
396             }
397             for (MediaCodecInfo codecInfo : codecInfoList) {
398                 MediaCodec codec = MediaCodec.createByCodecName(codecInfo.getName());
399                 for (int sampleRate : sampleRates) {
400                     for (int channelMask : channelMasks) {
401                         int channelCount = AudioFormat.channelCountFromOutChannelMask(channelMask);
402 
403                         codec.reset();
404                         MediaFormat desiredFormat = MediaFormat.createAudioFormat(
405                                 mimetype,
406                                 sampleRate,
407                                 channelCount);
408                         codec.configure(desiredFormat, null, null, 0);
409                         codec.start();
410 
411                         Log.d(TAG, "codec: " + codecInfo.getName() +
412                                 " sample rate: " + sampleRate +
413                                 " channelcount:" + channelCount);
414 
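                        // The input format reported after configure()/start() reflects what the
                        // codec actually accepted; it must match the requested rate and channels.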
415                         MediaFormat actual = codec.getInputFormat();
416                         int actualChannels = actual.getInteger(MediaFormat.KEY_CHANNEL_COUNT, -1);
417                         int actualSampleRate = actual.getInteger(MediaFormat.KEY_SAMPLE_RATE, -1);
418                         assertTrue("channels: configured " + actualChannels +
419                                    " != desired " + channelCount, actualChannels == channelCount);
420                         assertTrue("sample rate: configured " + actualSampleRate +
421                                    " != desired " + sampleRate, actualSampleRate == sampleRate);
422                     }
423                 }
424                 codec.release();
425             }
426         }
427     }
428 
429     private ArrayList<MediaCodecInfo> getDecoderMediaCodecInfoList(String mimeType) {
430         MediaCodecList mediaCodecList = new MediaCodecList(MediaCodecList.ALL_CODECS);
431         ArrayList<MediaCodecInfo> decoderInfos = new ArrayList<MediaCodecInfo>();
432         for (MediaCodecInfo codecInfo : mediaCodecList.getCodecInfos()) {
433             if (!codecInfo.isEncoder() && isMimeTypeSupported(codecInfo, mimeType)) {
434                 decoderInfos.add(codecInfo);
435             }
436         }
437         return decoderInfos;
438     }
439 
440     private boolean isMimeTypeSupported(MediaCodecInfo codecInfo, String mimeType) {
441         for (String type : codecInfo.getSupportedTypes()) {
442             if (type.equalsIgnoreCase(mimeType)) {
443                 return true;
444             }
445         }
446         return false;
447     }
448 
449     @Test
450     public void testDecode51M4a() throws Exception {
451         for (String codecName : codecsFor("sinesweep51m4a.m4a")) {
452             decodeToMemory(codecName, "sinesweep51m4a.m4a", RESET_MODE_NONE, CONFIG_MODE_NONE, -1,
453                     null);
454         }
455     }
456 
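    // Decode the resource on every eligible codec and verify that output presentation
    // timestamps never move backwards.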
457     private void testTimeStampOrdering(final String res) throws Exception {
458         for (String codecName : codecsFor(res)) {
459             List<Long> timestamps = new ArrayList<Long>();
460             decodeToMemory(codecName, res, RESET_MODE_NONE, CONFIG_MODE_NONE, -1, timestamps);
461             Long lastTime = Long.MIN_VALUE;
462             for (int i = 0; i < timestamps.size(); i++) {
463                 Long thisTime = timestamps.get(i);
464                 assertTrue(codecName + ": timetravel occurred: " + lastTime + " > " + thisTime,
465                        thisTime >= lastTime);
466                 lastTime = thisTime;
467             }
468         }
469     }
470 
471     @Test
472     public void testTrackSelection() throws Exception {
473         testTrackSelection("video_480x360_mp4_h264_1350kbps_30fps_aac_stereo_128kbps_44100hz.mp4");
474         testTrackSelection(
475                 "video_480x360_mp4_h264_1350kbps_30fps_aac_stereo_128kbps_44100hz_fragmented.mp4");
476         testTrackSelection(
477                 "video_480x360_mp4_h264_1350kbps_30fps_aac_stereo_128kbps_44100hz_dash.mp4");
478     }
479 
480     @Test
481     public void testTrackSelectionMkv() throws Exception {
482         Log.d(TAG, "testTrackSelectionMkv!!!!!! ");
483         testTrackSelection("mkv_avc_adpcm_ima.mkv");
484         Log.d(TAG, "mkv_avc_adpcm_ima finished!!!!!! ");
485         testTrackSelection("mkv_avc_adpcm_ms.mkv");
486         Log.d(TAG, "mkv_avc_adpcm_ms finished!!!!!! ");
487         testTrackSelection("mkv_avc_wma.mkv");
488         Log.d(TAG, "mkv_avc_wma finished!!!!!! ");
489         testTrackSelection("mkv_avc_mp2.mkv");
490         Log.d(TAG, "mkv_avc_mp2 finished!!!!!! ");
491     }
492 
493     @Test
494     public void testBFrames() throws Exception {
495         int testsRun =
496             testBFrames("video_h264_main_b_frames.mp4") +
497             testBFrames("video_h264_main_b_frames_frag.mp4");
498         if (testsRun == 0) {
499             MediaUtils.skipTest("no codec found");
500         }
501     }
502 
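    // Feeds an H.264 stream containing B-frames: the extractor must deliver samples in decode
    // order (timestamps go backwards at least once), while the decoder must emit frames in
    // strictly increasing presentation order.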
503     public int testBFrames(final String res) throws Exception {
504         MediaExtractor ex = new MediaExtractor();
505         Preconditions.assertTestFileExists(mInpPrefix + res);
506         ex.setDataSource(mInpPrefix + res);
507         MediaFormat format = ex.getTrackFormat(0);
508         String mime = format.getString(MediaFormat.KEY_MIME);
509         assertTrue("not a video track. Wrong test file?", mime.startsWith("video/"));
510         if (!MediaUtils.canDecode(format)) {
511             ex.release();
512             return 0; // skip
513         }
514         MediaCodec dec = MediaCodec.createDecoderByType(mime);
515         Surface s = getActivity().getSurfaceHolder().getSurface();
516         dec.configure(format, s, null, 0);
517         dec.start();
518         ByteBuffer[] buf = dec.getInputBuffers();
519         ex.selectTrack(0);
520         MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
521         long lastPresentationTimeUsFromExtractor = -1;
522         long lastPresentationTimeUsFromDecoder = -1;
523         boolean inputoutoforder = false;
524         while(true) {
525             int flags = ex.getSampleFlags();
526             long time = ex.getSampleTime();
527             if (time >= 0 && time < lastPresentationTimeUsFromExtractor) {
528                 inputoutoforder = true;
529             }
530             lastPresentationTimeUsFromExtractor = time;
531             int bufidx = dec.dequeueInputBuffer(5000);
532             if (bufidx >= 0) {
533                 int n = ex.readSampleData(buf[bufidx], 0);
534                 if (n < 0) {
535                     flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
536                     time = 0;
537                     n = 0;
538                 }
539                 dec.queueInputBuffer(bufidx, 0, n, time, flags);
540                 ex.advance();
541             }
542             int status = dec.dequeueOutputBuffer(info, 5000);
543             if (status >= 0) {
544                 if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
545                     break;
546                 }
547                 assertTrue("out of order timestamp from decoder",
548                         info.presentationTimeUs > lastPresentationTimeUsFromDecoder);
549                 dec.releaseOutputBuffer(status, true);
550                 lastPresentationTimeUsFromDecoder = info.presentationTimeUs;
551             }
552         }
553         assertTrue("extractor timestamps were ordered, wrong test file?", inputoutoforder);
554         dec.release();
555         ex.release();
556         return 1;
557     }
558 
559     /**
560      * Test ColorAspects of all the AVC decoders. Decoders should handle
561      * the color aspects presented in both the mp4 atom 'colr' and VUI
562      * in the bitstream correctly. The following table lists the color
563      * aspects contained in the color box and VUI for the test stream.
564      * P = primaries, T = transfer, M = coeffs, R = range. '-' means
565      * empty value.
566      *                                      |     colr     |    VUI
567      * -------------------------------------------------------------------
568      *         File Name                    |  P  T  M  R  |  P  T  M  R
569      * -------------------------------------------------------------------
570      *  color_176x144_bt709_lr_sdr_h264     |  1  1  1  0  |  -  -  -  -
571      *  color_176x144_bt601_625_fr_sdr_h264 |  1  6  6  0  |  5  2  2  1
572      *  color_176x144_bt601_525_lr_sdr_h264 |  6  5  4  0  |  2  6  6  0
573      *  color_176x144_srgb_lr_sdr_h264      |  2  0  2  1  |  1  13 1  0
574      */
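    // (The index values above are the standard colour description code points, e.g. 1 = BT.709
    //  and 5/6 = the two BT.601 variants, which map onto the COLOR_STANDARD/COLOR_TRANSFER
    //  expectations below.)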
575     @Test
576     public void testH264ColorAspects() throws Exception {
577         testColorAspects(
578                 "color_176x144_bt709_lr_sdr_h264.mp4", 1 /* testId */,
579                 MediaFormat.COLOR_RANGE_LIMITED, MediaFormat.COLOR_STANDARD_BT709,
580                 MediaFormat.COLOR_TRANSFER_SDR_VIDEO);
581         testColorAspects(
582                 "color_176x144_bt601_625_fr_sdr_h264.mp4", 2 /* testId */,
583                 MediaFormat.COLOR_RANGE_FULL, MediaFormat.COLOR_STANDARD_BT601_PAL,
584                 MediaFormat.COLOR_TRANSFER_SDR_VIDEO);
585         testColorAspects(
586                 "color_176x144_bt601_525_lr_sdr_h264.mp4", 3 /* testId */,
587                 MediaFormat.COLOR_RANGE_LIMITED, MediaFormat.COLOR_STANDARD_BT601_NTSC,
588                 MediaFormat.COLOR_TRANSFER_SDR_VIDEO);
589         testColorAspects(
590                 "color_176x144_srgb_lr_sdr_h264.mp4", 4 /* testId */,
591                 MediaFormat.COLOR_RANGE_LIMITED, MediaFormat.COLOR_STANDARD_BT709,
592                 2 /* MediaFormat.COLOR_TRANSFER_SRGB */);
593     }
594 
595     /**
596      * Test ColorAspects of all the HEVC decoders. Decoders should handle
597      * the color aspects presented in both the mp4 atom 'colr' and VUI
598      * in the bitstream correctly. The following table lists the color
599      * aspects contained in the color box and VUI for the test stream.
600      * P = primaries, T = transfer, M = coeffs, R = range. '-' means
601      * empty value.
602      *                                      |     colr     |    VUI
603      * -------------------------------------------------------------------
604      *         File Name                    |  P  T  M  R  |  P  T  M  R
605      * -------------------------------------------------------------------
606      *  color_176x144_bt709_lr_sdr_h265     |  1  1  1  0  |  -  -  -  -
607      *  color_176x144_bt601_625_fr_sdr_h265 |  1  6  6  0  |  5  2  2  1
608      *  color_176x144_bt601_525_lr_sdr_h265 |  6  5  4  0  |  2  6  6  0
609      *  color_176x144_srgb_lr_sdr_h265      |  2  0  2  1  |  1  13 1  0
610      */
611     @Test
612     public void testH265ColorAspects() throws Exception {
613         testColorAspects(
614                 "color_176x144_bt709_lr_sdr_h265.mp4", 1 /* testId */,
615                 MediaFormat.COLOR_RANGE_LIMITED, MediaFormat.COLOR_STANDARD_BT709,
616                 MediaFormat.COLOR_TRANSFER_SDR_VIDEO);
617         testColorAspects(
618                 "color_176x144_bt601_625_fr_sdr_h265.mp4", 2 /* testId */,
619                 MediaFormat.COLOR_RANGE_FULL, MediaFormat.COLOR_STANDARD_BT601_PAL,
620                 MediaFormat.COLOR_TRANSFER_SDR_VIDEO);
621         testColorAspects(
622                 "color_176x144_bt601_525_lr_sdr_h265.mp4", 3 /* testId */,
623                 MediaFormat.COLOR_RANGE_LIMITED, MediaFormat.COLOR_STANDARD_BT601_NTSC,
624                 MediaFormat.COLOR_TRANSFER_SDR_VIDEO);
625         testColorAspects(
626                 "color_176x144_srgb_lr_sdr_h265.mp4", 4 /* testId */,
627                 MediaFormat.COLOR_RANGE_LIMITED, MediaFormat.COLOR_STANDARD_BT709,
628                 2 /* MediaFormat.COLOR_TRANSFER_SRGB */);
629         // Test the main10 streams with surface as the decoder might
630         // support opaque buffers only.
631         testColorAspects(
632                 "color_176x144_bt2020_lr_smpte2084_h265.mp4", 5 /* testId */,
633                 MediaFormat.COLOR_RANGE_LIMITED, MediaFormat.COLOR_STANDARD_BT2020,
634                 MediaFormat.COLOR_TRANSFER_ST2084,
635                 getActivity().getSurfaceHolder().getSurface());
636         testColorAspects(
637                 "color_176x144_bt2020_lr_hlg_h265.mp4", 6 /* testId */,
638                 MediaFormat.COLOR_RANGE_LIMITED, MediaFormat.COLOR_STANDARD_BT2020,
639                 MediaFormat.COLOR_TRANSFER_HLG,
640                 getActivity().getSurfaceHolder().getSurface());
641     }
642 
643     /**
644      * Test ColorAspects of all the MPEG2 decoders if available. Decoders should
645      * handle the color aspects presented in both the mp4 atom 'colr' and Sequence
646      * in the bitstream correctly. The following table lists the color aspects
647      * contained in the color box and SeqInfo for the test stream.
648      * P = primaries, T = transfer, M = coeffs, R = range. '-' means
649      * empty value.
650      *                                       |     colr     |    SeqInfo
651      * -------------------------------------------------------------------
652      *         File Name                     |  P  T  M  R  |  P  T  M  R
653      * -------------------------------------------------------------------
654      *  color_176x144_bt709_lr_sdr_mpeg2     |  1  1  1  0  |  -  -  -  -
655      *  color_176x144_bt601_625_lr_sdr_mpeg2 |  1  6  6  0  |  5  2  2  0
656      *  color_176x144_bt601_525_lr_sdr_mpeg2 |  6  5  4  0  |  2  6  6  0
657      *  color_176x144_srgb_lr_sdr_mpeg2      |  2  0  2  0  |  1  13 1  0
658      */
659     @Test
660     public void testMPEG2ColorAspectsTV() throws Exception {
661         testColorAspects(
662                 "color_176x144_bt709_lr_sdr_mpeg2.mp4", 1 /* testId */,
663                 MediaFormat.COLOR_RANGE_LIMITED, MediaFormat.COLOR_STANDARD_BT709,
664                 MediaFormat.COLOR_TRANSFER_SDR_VIDEO);
665         testColorAspects(
666                 "color_176x144_bt601_625_lr_sdr_mpeg2.mp4", 2 /* testId */,
667                 MediaFormat.COLOR_RANGE_LIMITED, MediaFormat.COLOR_STANDARD_BT601_PAL,
668                 MediaFormat.COLOR_TRANSFER_SDR_VIDEO);
669         testColorAspects(
670                 "color_176x144_bt601_525_lr_sdr_mpeg2.mp4", 3 /* testId */,
671                 MediaFormat.COLOR_RANGE_LIMITED, MediaFormat.COLOR_STANDARD_BT601_NTSC,
672                 MediaFormat.COLOR_TRANSFER_SDR_VIDEO);
673         testColorAspects(
674                 "color_176x144_srgb_lr_sdr_mpeg2.mp4", 4 /* testId */,
675                 MediaFormat.COLOR_RANGE_LIMITED, MediaFormat.COLOR_STANDARD_BT709,
676                 2 /* MediaFormat.COLOR_TRANSFER_SRGB */);
677     }
678 
679     private void testColorAspects(
680             final String res, int testId, int expectRange, int expectStandard, int expectTransfer)
681             throws Exception {
682         testColorAspects(
683                 res, testId, expectRange, expectStandard, expectTransfer, null /*surface*/);
684     }
685 
686     private void testColorAspects(
687             final String res, int testId, int expectRange, int expectStandard, int expectTransfer,
688             Surface surface) throws Exception {
689         Preconditions.assertTestFileExists(mInpPrefix + res);
690         MediaFormat format = MediaUtils.getTrackFormatForResource(mInpPrefix + res, "video");
691         MediaFormat mimeFormat = new MediaFormat();
692         mimeFormat.setString(MediaFormat.KEY_MIME, format.getString(MediaFormat.KEY_MIME));
693 
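        // Enumerate every decoder that advertises this mime type and verify each one that
        // claims support for the full track format.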
694         for (String decoderName: MediaUtils.getDecoderNames(mimeFormat)) {
695             if (!MediaUtils.supports(decoderName, format)) {
696                 MediaUtils.skipTest(decoderName + " cannot play resource " + mInpPrefix + res);
697             } else {
698                 testColorAspects(decoderName, res, testId,
699                         expectRange, expectStandard, expectTransfer, surface);
700             }
701         }
702     }
703 
704     private void testColorAspects(
705             String decoderName, final String res, int testId, int expectRange,
706             int expectStandard, int expectTransfer, Surface surface) throws Exception {
707         Preconditions.assertTestFileExists(mInpPrefix + res);
708         MediaExtractor ex = new MediaExtractor();
709         ex.setDataSource(mInpPrefix + res);
710         MediaFormat format = ex.getTrackFormat(0);
711         MediaCodec dec = MediaCodec.createByCodecName(decoderName);
712         dec.configure(format, surface, null, 0);
713         dec.start();
714         ByteBuffer[] buf = dec.getInputBuffers();
715         ex.selectTrack(0);
716         MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
717         boolean sawInputEOS = false;
718         boolean getOutputFormat = false;
719         boolean rangeMatch = false;
720         boolean colorMatch = false;
721         boolean transferMatch = false;
722         int colorRange = 0;
723         int colorStandard = 0;
724         int colorTransfer = 0;
725 
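        // Feed input until the first INFO_OUTPUT_FORMAT_CHANGED and read the color aspects the
        // decoder reports in its output format; receiving a frame first is a test failure.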
726         while (true) {
727             if (!sawInputEOS) {
728                 int flags = ex.getSampleFlags();
729                 long time = ex.getSampleTime();
730                 int bufidx = dec.dequeueInputBuffer(200 * 1000);
731                 if (bufidx >= 0) {
732                     int n = ex.readSampleData(buf[bufidx], 0);
733                     if (n < 0) {
734                         flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
735                         sawInputEOS = true;
736                         n = 0;
737                     }
738                     dec.queueInputBuffer(bufidx, 0, n, time, flags);
739                     ex.advance();
740                 } else {
741                     assertEquals(
742                             "codec.dequeueInputBuffer() unrecognized return value: " + bufidx,
743                             MediaCodec.INFO_TRY_AGAIN_LATER, bufidx);
744                 }
745             }
746 
747             int status = dec.dequeueOutputBuffer(info, sawInputEOS ? 3000 * 1000 : 100 * 1000);
748             if (status == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
749                 MediaFormat fmt = dec.getOutputFormat();
750                 colorRange = fmt.containsKey("color-range") ? fmt.getInteger("color-range") : 0;
751                 colorStandard = fmt.containsKey("color-standard") ? fmt.getInteger("color-standard") : 0;
752                 colorTransfer = fmt.containsKey("color-transfer") ? fmt.getInteger("color-transfer") : 0;
753                 rangeMatch = colorRange == expectRange;
754                 colorMatch = colorStandard == expectStandard;
755                 transferMatch = colorTransfer == expectTransfer;
756                 getOutputFormat = true;
757                 // Test only needs to check the color format in the first format changed event.
758                 break;
759             } else if (status >= 0) {
760                 // Test should get at least one format changed event before getting first frame.
761                 assertTrue(getOutputFormat);
762                 break;
763             } else {
764                 assertFalse(
765                         "codec.dequeueOutputBuffer() timeout after seeing input EOS",
766                         status == MediaCodec.INFO_TRY_AGAIN_LATER && sawInputEOS);
767             }
768         }
769 
770         String reportName = decoderName + "_colorAspectsTest Test " + testId +
771                 " (Get R: " + colorRange + " S: " + colorStandard + " T: " + colorTransfer + ")" +
772                 " (Expect R: " + expectRange + " S: " + expectStandard + " T: " + expectTransfer + ")";
773         Log.d(TAG, reportName);
774 
775         DeviceReportLog log = new DeviceReportLog("CtsMediaDecoderTestCases", "color_aspects_test");
776         log.addValue("decoder_name", decoderName, ResultType.NEUTRAL, ResultUnit.NONE);
777         log.addValue("test_id", testId, ResultType.NEUTRAL, ResultUnit.NONE);
778         log.addValues(
779                 "rst_actual", new int[] { colorRange, colorStandard, colorTransfer },
780                 ResultType.NEUTRAL, ResultUnit.NONE);
781         log.addValues(
782                 "rst_expected", new int[] { expectRange, expectStandard, expectTransfer },
783                 ResultType.NEUTRAL, ResultUnit.NONE);
784 
785         if (rangeMatch && colorMatch && transferMatch) {
786             log.setSummary("result", 1, ResultType.HIGHER_BETTER, ResultUnit.COUNT);
787         } else {
788             log.setSummary("result", 0, ResultType.HIGHER_BETTER, ResultUnit.COUNT);
789         }
790         log.submit(getInstrumentation());
791 
792         assertTrue(rangeMatch && colorMatch && transferMatch);
793 
794         dec.release();
795         ex.release();
796     }
797 
798     private void testTrackSelection(final String res) throws Exception {
799         MediaExtractor ex1 = new MediaExtractor();
800         Preconditions.assertTestFileExists(mInpPrefix + res);
801         try {
802             ex1.setDataSource(mInpPrefix + res);
803 
804             ByteBuffer buf1 = ByteBuffer.allocate(1024*1024);
805             ArrayList<Integer> vid = new ArrayList<Integer>();
806             ArrayList<Integer> aud = new ArrayList<Integer>();
807 
808             // scan the file once and build lists of audio and video samples
809             ex1.selectTrack(0);
810             ex1.selectTrack(1);
811             while(true) {
812                 int n1 = ex1.readSampleData(buf1, 0);
813                 if (n1 < 0) {
814                     break;
815                 }
816                 int idx = ex1.getSampleTrackIndex();
817                 if (idx == 0) {
818                     vid.add(n1);
819                 } else if (idx == 1) {
820                     aud.add(n1);
821                 } else {
822                     fail("unexpected track index: " + idx);
823                 }
824                 ex1.advance();
825             }
826 
827             // read the video track once, then rewind and do it again, and
828             // verify we get the right samples
829             ex1.release();
830             ex1 = new MediaExtractor();
831             ex1.setDataSource(mInpPrefix + res);
832             ex1.selectTrack(0);
833             for (int i = 0; i < 2; i++) {
834                 ex1.seekTo(0, MediaExtractor.SEEK_TO_NEXT_SYNC);
835                 int idx = 0;
836                 while(true) {
837                     int n1 = ex1.readSampleData(buf1, 0);
838                     if (n1 < 0) {
839                         assertEquals(vid.size(), idx);
840                         break;
841                     }
842                     assertEquals(vid.get(idx++).intValue(), n1);
843                     ex1.advance();
844                 }
845             }
846 
847             // read the audio track once, then rewind and do it again, and
848             // verify we get the right samples
849             ex1.release();
850             ex1 = new MediaExtractor();
851             ex1.setDataSource(mInpPrefix + res);
852             ex1.selectTrack(1);
853             for (int i = 0; i < 2; i++) {
854                 ex1.seekTo(0, MediaExtractor.SEEK_TO_NEXT_SYNC);
855                 int idx = 0;
856                 while(true) {
857                     int n1 = ex1.readSampleData(buf1, 0);
858                     if (n1 < 0) {
859                         assertEquals(aud.size(), idx);
860                         break;
861                     }
862                     assertEquals(aud.get(idx++).intValue(), n1);
863                     ex1.advance();
864                 }
865             }
866 
867             // read the video track first, then rewind and get the audio track instead, and
868             // verify we get the right samples
869             ex1.release();
870             ex1 = new MediaExtractor();
871             ex1.setDataSource(mInpPrefix + res);
872             for (int i = 0; i < 2; i++) {
873                 ex1.selectTrack(i);
874                 ex1.seekTo(0, MediaExtractor.SEEK_TO_NEXT_SYNC);
875                 int idx = 0;
876                 while(true) {
877                     int n1 = ex1.readSampleData(buf1, 0);
878                     if (i == 0) {
879                         if (n1 < 0) {
880                             assertEquals(vid.size(), idx);
881                             break;
882                         }
883                         assertEquals(vid.get(idx++).intValue(), n1);
884                     } else if (i == 1) {
885                         if (n1 < 0) {
886                             assertEquals(aud.size(), idx);
887                             break;
888                         }
889                         assertEquals(aud.get(idx++).intValue(), n1);
890                     } else {
891                         fail("unexpected track index: " + idx);
892                     }
893                     ex1.advance();
894                 }
895                 ex1.unselectTrack(i);
896             }
897 
898             // read the video track first, then rewind, enable the audio track in addition
899             // to the video track, and verify we get the right samples
900             ex1.release();
901             ex1 = new MediaExtractor();
902             ex1.setDataSource(mInpPrefix + res);
903             for (int i = 0; i < 2; i++) {
904                 ex1.selectTrack(i);
905                 ex1.seekTo(0, MediaExtractor.SEEK_TO_NEXT_SYNC);
906                 int vididx = 0;
907                 int audidx = 0;
908                 while(true) {
909                     int n1 = ex1.readSampleData(buf1, 0);
910                     if (n1 < 0) {
911                         // we should have read all audio and all video samples at this point
912                         assertEquals(vid.size(), vididx);
913                         if (i == 1) {
914                             assertEquals(aud.size(), audidx);
915                         }
916                         break;
917                     }
918                     int trackidx = ex1.getSampleTrackIndex();
919                     if (trackidx == 0) {
920                         assertEquals(vid.get(vididx++).intValue(), n1);
921                     } else if (trackidx == 1) {
922                         assertEquals(aud.get(audidx++).intValue(), n1);
923                     } else {
924                         fail("unexpected track index: " + trackidx);
925                     }
926                     ex1.advance();
927                 }
928             }
929 
930             // read both tracks from the start, then rewind and verify we get the right
931             // samples both times
932             ex1.release();
933             ex1 = new MediaExtractor();
934             ex1.setDataSource(mInpPrefix + res);
935             for (int i = 0; i < 2; i++) {
936                 ex1.selectTrack(0);
937                 ex1.selectTrack(1);
938                 ex1.seekTo(0, MediaExtractor.SEEK_TO_NEXT_SYNC);
939                 int vididx = 0;
940                 int audidx = 0;
941                 while(true) {
942                     int n1 = ex1.readSampleData(buf1, 0);
943                     if (n1 < 0) {
944                         // we should have read all audio and all video samples at this point
945                         assertEquals(vid.size(), vididx);
946                         assertEquals(aud.size(), audidx);
947                         break;
948                     }
949                     int trackidx = ex1.getSampleTrackIndex();
950                     if (trackidx == 0) {
951                         assertEquals(vid.get(vididx++).intValue(), n1);
952                     } else if (trackidx == 1) {
953                         assertEquals(aud.get(audidx++).intValue(), n1);
954                     } else {
955                         fail("unexpected track index: " + trackidx);
956                     }
957                     ex1.advance();
958                 }
959             }
960 
961         } finally {
962             if (ex1 != null) {
963                 ex1.release();
964             }
965         }
966     }
967 
968     @Test
969     public void testDecodeFragmented() throws Exception {
970         testDecodeFragmented("video_480x360_mp4_h264_1350kbps_30fps_aac_stereo_128kbps_44100hz.mp4",
971                 "video_480x360_mp4_h264_1350kbps_30fps_aac_stereo_128kbps_44100hz_fragmented.mp4");
972         testDecodeFragmented("video_480x360_mp4_h264_1350kbps_30fps_aac_stereo_128kbps_44100hz.mp4",
973                 "video_480x360_mp4_h264_1350kbps_30fps_aac_stereo_128kbps_44100hz_dash.mp4");
974     }
975 
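    // Walk the reference and the fragmented/DASH variant in parallel and require every sample
    // payload to be byte-identical on every track.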
976     private void testDecodeFragmented(final String reference, final String teststream)
977             throws Exception {
978         Preconditions.assertTestFileExists(mInpPrefix + reference);
979         Preconditions.assertTestFileExists(mInpPrefix + teststream);
980         try {
981             MediaExtractor ex1 = new MediaExtractor();
982             ex1.setDataSource(mInpPrefix + reference);
983             MediaExtractor ex2 = new MediaExtractor();
984             ex2.setDataSource(mInpPrefix + teststream);
985 
986             assertEquals("different track count", ex1.getTrackCount(), ex2.getTrackCount());
987 
988             ByteBuffer buf1 = ByteBuffer.allocate(1024*1024);
989             ByteBuffer buf2 = ByteBuffer.allocate(1024*1024);
990 
991             for (int i = 0; i < ex1.getTrackCount(); i++) {
992                 // note: this assumes the tracks are reported in the order in which they appear
993                 // in the file.
994                 ex1.seekTo(0, MediaExtractor.SEEK_TO_NEXT_SYNC);
995                 ex1.selectTrack(i);
996                 ex2.seekTo(0, MediaExtractor.SEEK_TO_NEXT_SYNC);
997                 ex2.selectTrack(i);
998 
999                 while(true) {
1000                     int n1 = ex1.readSampleData(buf1, 0);
1001                     int n2 = ex2.readSampleData(buf2, 0);
1002                     assertEquals("different buffer size on track " + i, n1, n2);
1003 
1004                     if (n1 < 0) {
1005                         break;
1006                     }
1007                     // see bug 13008204
1008                     buf1.limit(n1);
1009                     buf2.limit(n2);
1010                     buf1.rewind();
1011                     buf2.rewind();
1012 
1013                     assertEquals("limit does not match return value on track " + i,
1014                             n1, buf1.limit());
1015                     assertEquals("limit does not match return value on track " + i,
1016                             n2, buf2.limit());
1017 
1018                     assertEquals("buffer data did not match on track " + i, buf1, buf2);
1019 
1020                     ex1.advance();
1021                     ex2.advance();
1022                 }
1023                 ex1.unselectTrack(i);
1024                 ex2.unselectTrack(i);
1025             }
1026         } catch (IOException e) {
1027             e.printStackTrace();
1028         }
1029     }
1030 
1031     /**
1032      * Verify correct decoding of MPEG-4 AAC-LC mono and stereo streams
1033      */
1034     @Test
1035     public void testDecodeAacLcM4a() throws Exception {
1036         // mono
1037         decodeNtest("sinesweep1_1ch_8khz_aot2_mp4.m4a", 40.f);
1038         decodeNtest("sinesweep1_1ch_11khz_aot2_mp4.m4a", 40.f);
1039         decodeNtest("sinesweep1_1ch_12khz_aot2_mp4.m4a", 40.f);
1040         decodeNtest("sinesweep1_1ch_16khz_aot2_mp4.m4a", 40.f);
1041         decodeNtest("sinesweep1_1ch_22khz_aot2_mp4.m4a", 40.f);
1042         decodeNtest("sinesweep1_1ch_24khz_aot2_mp4.m4a", 40.f);
1043         decodeNtest("sinesweep1_1ch_32khz_aot2_mp4.m4a", 40.f);
1044         decodeNtest("sinesweep1_1ch_44khz_aot2_mp4.m4a", 40.f);
1045         decodeNtest("sinesweep1_1ch_48khz_aot2_mp4.m4a", 40.f);
1046         // stereo
1047         decodeNtest("sinesweep_2ch_8khz_aot2_mp4.m4a", 40.f);
1048         decodeNtest("sinesweep_2ch_11khz_aot2_mp4.m4a", 40.f);
1049         decodeNtest("sinesweep_2ch_12khz_aot2_mp4.m4a", 40.f);
1050         decodeNtest("sinesweep_2ch_16khz_aot2_mp4.m4a", 40.f);
1051         decodeNtest("sinesweep_2ch_22khz_aot2_mp4.m4a", 40.f);
1052         decodeNtest("sinesweep_2ch_24khz_aot2_mp4.m4a", 40.f);
1053         decodeNtest("sinesweep_2ch_32khz_aot2_mp4.m4a", 40.f);
1054         decodeNtest("sinesweep_2ch_44khz_aot2_mp4.m4a", 40.f);
1055         decodeNtest("sinesweep_2ch_48khz_aot2_mp4.m4a", 40.f);
1056     }
1057 
1058     /**
1059      * Verify correct decoding of MPEG-4 AAC-LC 5.0 and 5.1 channel streams
1060      */
1061     @Test
1062     public void testDecodeAacLcMcM4a() throws Exception {
1063         for (String codecName : codecsFor("noise_6ch_48khz_aot2_mp4.m4a")) {
1064             AudioParameter decParams = new AudioParameter();
1065             short[] decSamples = decodeToMemory(codecName, decParams,
1066                     "noise_6ch_48khz_aot2_mp4.m4a", RESET_MODE_NONE,
1067                     CONFIG_MODE_NONE, -1, null);
1068             checkEnergy(decSamples, decParams, 6);
1069             decParams.reset();
1070 
1071             decSamples = decodeToMemory(codecName, decParams, "noise_5ch_44khz_aot2_mp4.m4a",
1072                     RESET_MODE_NONE, CONFIG_MODE_NONE, -1, null);
1073             checkEnergy(decSamples, decParams, 5);
1074             decParams.reset();
1075         }
1076     }
1077 
1078     /**
1079      * Verify correct decoding of MPEG-4 HE-AAC mono and stereo streams
1080      */
1081     @Test
1082     public void testDecodeHeAacM4a() throws Exception {
1083         Object [][] samples = {
1084                 //  {resource, numChannels},
1085                 {"noise_1ch_24khz_aot5_dr_sbr_sig1_mp4.m4a", 1},
1086                 {"noise_1ch_24khz_aot5_ds_sbr_sig1_mp4.m4a", 1},
1087                 {"noise_1ch_32khz_aot5_dr_sbr_sig2_mp4.m4a", 1},
1088                 {"noise_1ch_44khz_aot5_dr_sbr_sig0_mp4.m4a", 1},
1089                 {"noise_1ch_44khz_aot5_ds_sbr_sig2_mp4.m4a", 1},
1090                 {"noise_2ch_24khz_aot5_dr_sbr_sig2_mp4.m4a", 2},
1091                 {"noise_2ch_32khz_aot5_ds_sbr_sig2_mp4.m4a", 2},
1092                 {"noise_2ch_48khz_aot5_dr_sbr_sig1_mp4.m4a", 2},
1093                 {"noise_2ch_48khz_aot5_ds_sbr_sig1_mp4.m4a", 2},
1094         };
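        // (The file names appear to encode the SBR variant ("dr"/"ds") and the SBR signaling
        //  mode ("sig0".."sig2"); each stream must still produce full-band energy on decode.)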
1095 
1096         for (Object [] sample: samples) {
1097             for (String codecName : codecsFor((String)sample[0], CODEC_DEFAULT)) {
1098                 AudioParameter decParams = new AudioParameter();
1099                 short[] decSamples = decodeToMemory(codecName, decParams,
1100                         (String)sample[0] /* resource */, RESET_MODE_NONE, CONFIG_MODE_NONE,
1101                         -1, null);
1102                 checkEnergy(decSamples, decParams, (Integer)sample[1] /* number of channels */);
1103                 decParams.reset();
1104             }
1105         }
1106     }
1107 
1108     /**
1109      * Verify correct decoding of MPEG-4 HE-AAC 5.0 and 5.1 channel streams
1110      */
1111     @Test
1112     public void testDecodeHeAacMcM4a() throws Exception {
1113         Object [][] samples = {
1114                 //  {resource, numChannels},
1115                 {"noise_5ch_48khz_aot5_dr_sbr_sig1_mp4.m4a", 5},
1116                 {"noise_6ch_44khz_aot5_dr_sbr_sig2_mp4.m4a", 6},
1117         };
1118         for (Object [] sample: samples) {
1119             for (String codecName : codecsFor((String)sample[0] /* resource */, CODEC_DEFAULT)) {
1120                 AudioParameter decParams = new AudioParameter();
1121                 short[] decSamples = decodeToMemory(codecName, decParams,
1122                         (String)sample[0] /* resource */, RESET_MODE_NONE, CONFIG_MODE_NONE,
1123                         -1, null);
1124                 checkEnergy(decSamples, decParams, (Integer)sample[1] /* number of channels */);
1125                 decParams.reset();
1126             }
1127         }
1128     }
1129 
1130     /**
1131      * Verify correct decoding of MPEG-4 HE-AAC v2 stereo streams
1132      */
1133     @Test
1134     public void testDecodeHeAacV2M4a() throws Exception {
1135         String [] samples = {
1136                 "noise_2ch_24khz_aot29_dr_sbr_sig0_mp4.m4a",
1137                 "noise_2ch_44khz_aot29_dr_sbr_sig1_mp4.m4a",
1138                 "noise_2ch_48khz_aot29_dr_sbr_sig2_mp4.m4a"
1139         };
1140         for (String sample: samples) {
1141             for (String codecName : codecsFor(sample, CODEC_DEFAULT)) {
1142                 AudioParameter decParams = new AudioParameter();
1143                 short[] decSamples = decodeToMemory(codecName, decParams, sample,
1144                         RESET_MODE_NONE, CONFIG_MODE_NONE, -1, null);
1145                 checkEnergy(decSamples, decParams, 2);
1146             }
1147         }
1148     }
1149 
1150     /**
1151      * Verify correct decoding of MPEG-4 AAC-ELD mono and stereo streams
1152      */
1153     @Test
1154     public void testDecodeAacEldM4a() throws Exception {
1155         // mono
1156         decodeNtest("sinesweep1_1ch_16khz_aot39_fl480_mp4.m4a", 40.f, CODEC_DEFAULT);
1157         decodeNtest("sinesweep1_1ch_22khz_aot39_fl512_mp4.m4a", 40.f, CODEC_DEFAULT);
1158         decodeNtest("sinesweep1_1ch_24khz_aot39_fl480_mp4.m4a", 40.f, CODEC_DEFAULT);
1159         decodeNtest("sinesweep1_1ch_32khz_aot39_fl512_mp4.m4a", 40.f, CODEC_DEFAULT);
1160         decodeNtest("sinesweep1_1ch_44khz_aot39_fl480_mp4.m4a", 40.f, CODEC_DEFAULT);
1161         decodeNtest("sinesweep1_1ch_48khz_aot39_fl512_mp4.m4a", 40.f, CODEC_DEFAULT);
1162 
1163         // stereo
1164         decodeNtest("sinesweep_2ch_16khz_aot39_fl512_mp4.m4a", 40.f, CODEC_DEFAULT);
1165         decodeNtest("sinesweep_2ch_22khz_aot39_fl480_mp4.m4a", 40.f, CODEC_DEFAULT);
1166         decodeNtest("sinesweep_2ch_24khz_aot39_fl512_mp4.m4a", 40.f, CODEC_DEFAULT);
1167         decodeNtest("sinesweep_2ch_32khz_aot39_fl480_mp4.m4a", 40.f, CODEC_DEFAULT);
1168         decodeNtest("sinesweep_2ch_44khz_aot39_fl512_mp4.m4a", 40.f, CODEC_DEFAULT);
1169         decodeNtest("sinesweep_2ch_48khz_aot39_fl480_mp4.m4a", 40.f, CODEC_DEFAULT);
1170 
1171         AudioParameter decParams = new AudioParameter();
1172 
1173         Object [][] samples = {
1174                 //  {resource, numChannels},
1175                 {"noise_1ch_16khz_aot39_ds_sbr_fl512_mp4.m4a", 1},
1176                 {"noise_1ch_24khz_aot39_ds_sbr_fl512_mp4.m4a", 1},
1177                 {"noise_1ch_32khz_aot39_dr_sbr_fl480_mp4.m4a", 1},
1178                 {"noise_1ch_44khz_aot39_ds_sbr_fl512_mp4.m4a", 1},
1179                 {"noise_1ch_44khz_aot39_ds_sbr_fl512_mp4.m4a", 1},
1180                 {"noise_1ch_48khz_aot39_dr_sbr_fl480_mp4.m4a", 1},
1181                 {"noise_2ch_22khz_aot39_ds_sbr_fl512_mp4.m4a", 2},
1182                 {"noise_2ch_32khz_aot39_ds_sbr_fl512_mp4.m4a", 2},
1183                 {"noise_2ch_44khz_aot39_dr_sbr_fl480_mp4.m4a", 2},
1184                 {"noise_2ch_48khz_aot39_ds_sbr_fl512_mp4.m4a", 2},
1185         };
1186         for (Object [] sample: samples) {
1187             for (String codecName : codecsFor((String)sample[0], CODEC_DEFAULT)) {
1188                 short[] decSamples = decodeToMemory(codecName, decParams,
1189                         (String)sample[0] /* resource */, RESET_MODE_NONE, CONFIG_MODE_NONE,
1190                         -1, null);
1191                 checkEnergy(decSamples, decParams, (Integer)sample[1] /* number of channels */);
1192                 decParams.reset();
1193             }
1194         }
1195     }
1196 
1197     /**
1198      * Perform a segmented energy analysis on given audio signal samples and run several tests on
1199      * the energy values.
1200      *
     * The main purpose is to verify whether an AAC decoder implementation applies Spectral Band
     * Replication (SBR) and Parametric Stereo (PS) correctly. Both tools are inherent parts of the
     * MPEG-4 HE-AAC and HE-AAC v2 audio codecs.
     *
     * In addition, this test can verify the correct decoding of multi-channel (e.g. 5.1 channel)
     * streams or the creation of a mixdown signal.
     *
     * Note: This test procedure is not an MPEG Conformance Test and cannot serve as a replacement.
     * (An illustrative sketch of the per-segment energy measure follows this method.)
     *
1210      * @param decSamples the decoded audio samples to be tested
1211      * @param decParams the audio parameters of the given audio samples (decSamples)
1212      * @param encNch the encoded number of audio channels (number of channels of the original
1213      *               input)
     * @param nrgRatioThresh threshold used to classify the energy ratios; must lie in the open
     *                       interval (0.0, 1.0)
1215      * @throws RuntimeException
1216      */
    protected void checkEnergy(short[] decSamples, AudioParameter decParams, int encNch,
1218                              float nrgRatioThresh) throws RuntimeException
1219     {
1220         final int nSegPerBlk = 4;                          // the number of segments per block
1221         final int nCh = decParams.getNumChannels();        // the number of input channels
1222         final int nBlkSmp = decParams.getSamplingRate();   // length of one (LB/HB) block [samples]
1223         final int nSegSmp = nBlkSmp / nSegPerBlk;          // length of one segment [samples]
1224         final int smplPerChan = decSamples.length / nCh;   // actual # samples per channel (total)
1225 
1226         final int nSegSmpTot = nSegSmp * nCh;              // actual # samples per segment (all ch)
1227         final int nSegChOffst = 2 * nSegPerBlk;            // signal offset between chans [segments]
1228         final int procNch = Math.min(nCh, encNch);         // the number of channels to be analyzed
1229         if (encNch > 4) {
1230             assertTrue(String.format("multichannel content (%dch) was downmixed (%dch)",
1231                     encNch, nCh), procNch > 4);
1232         }
        assertTrue(String.format("got fewer channels (%d) than encoded (%d)", nCh, encNch),
                nCh >= encNch);

        final int encEffNch = (encNch > 5) ? encNch-1 : encNch;  // all original configs with more
                                                           // ... than five channels have an LFE
1238         final int expSmplPerChan = Math.max(encEffNch, 2) * nSegChOffst * nSegSmp;
1239         final boolean isDmx = nCh < encNch;                // flag telling that input is dmx signal
1240         int effProcNch = procNch;                          // the num analyzed channels with signal
1241 
        assertTrue("got fewer input samples than expected", smplPerChan >= expSmplPerChan);
1243 
1244         // get the signal offset by counting zero samples at the very beginning (over all channels)
1245         final int zeroSigThresh = 1;                     // sample value threshold for signal search
1246         int signalStart = smplPerChan;                   // receives the number of samples that
1247                                                          // ... are in front of the actual signal
1248         int noiseStart = signalStart;                    // receives the number of null samples
1249                                                          // ... (per chan) at the very beginning
1250         for (int smpl = 0; smpl < decSamples.length; smpl++) {
1251             int value = Math.abs(decSamples[smpl]);
1252             if (value > 0 && noiseStart == signalStart) {
1253                 noiseStart = smpl / nCh;                   // store start of prepended noise
1254             }                                              // ... (can be same as signalStart)
1255             if (value > zeroSigThresh) {
1256                 signalStart = smpl / nCh;                  // store signal start offset [samples]
1257                 break;
1258             }
1259         }
1260         signalStart = (signalStart > noiseStart+1) ? signalStart : noiseStart;
1261         assertTrue ("no signal found in any channel!", signalStart < smplPerChan);
1262         final int totSeg = (smplPerChan-signalStart) / nSegSmp; // max num seg that fit into signal
1263         final int totSmp = nSegSmp * totSeg;               // max num relevant samples (per channel)
1264         assertTrue("no segments left to test after signal search", totSeg > 0);
1265 
1266         // get the energies and the channel offsets by searching for the first segment above the
1267         //  energy threshold
1268         final double zeroMaxNrgRatio = 0.001f;             // ratio of zeroNrgThresh to the max nrg
1269         double zeroNrgThresh = nSegSmp * nSegSmp;          // threshold to classify segment energies
1270         double totMaxNrg = 0.0f;                           // will store the max seg nrg over all ch
1271         double[][] nrg = new double[procNch][totSeg];      // array receiving the segment energies
1272         int[] offset = new int[procNch];                   // array for channel offsets
1273         boolean[] sigSeg = new boolean[totSeg];            // array receiving the segment ...
1274                                                            // ... energy status over all channels
1275         for (int ch = 0; ch < procNch; ch++) {
1276             offset[ch] = -1;
1277             for (int seg = 0; seg < totSeg; seg++) {
1278                 final int smpStart = (signalStart * nCh) + (seg * nSegSmpTot) + ch;
1279                 final int smpStop = smpStart + nSegSmpTot;
1280                 for (int smpl = smpStart; smpl < smpStop; smpl += nCh) {
1281                     nrg[ch][seg] += decSamples[smpl] * decSamples[smpl];  // accumulate segment nrg
1282                 }
1283                 if (nrg[ch][seg] > zeroNrgThresh && offset[ch] < 0) { // store 1st segment (index)
1284                     offset[ch] = seg / nSegChOffst;        // ... per ch which has energy above the
1285                 }                                          // ... threshold to get the ch offsets
1286                 if (nrg[ch][seg] > totMaxNrg) {
1287                     totMaxNrg = nrg[ch][seg];              // store the max segment nrg over all ch
1288                 }
1289                 sigSeg[seg] |= nrg[ch][seg] > zeroNrgThresh;  // store whether the channel has
1290                                                            // ... energy in this segment
1291             }
1292             if (offset[ch] < 0) {                          // if one channel has no signal it is
1293                 effProcNch -= 1;                           // ... most probably the LFE
1294                 offset[ch] = effProcNch;                   // the LFE is no effective channel
1295             }
            if (ch == 0) {                                 // recalculate the zero signal threshold
                zeroNrgThresh = zeroMaxNrgRatio * totMaxNrg; // ... based on the 1st channel's max
            }                                              // ... energy for all subsequent checks
        }
        // check the channel mapping
        assertTrue("more than one LFE detected", effProcNch >= procNch - 1);
        assertTrue(String.format("fewer samples decoded than expected: %d < %d",
                decSamples.length-(signalStart * nCh), totSmp * effProcNch),
                decSamples.length-(signalStart * nCh) >= totSmp * effProcNch);
1305         if (procNch >= 5) {                                // for multi-channel signals the only
1306             final int[] frontChMap1 = {2, 0, 1};           // valid front channel orders are L, R, C
1307             final int[] frontChMap2 = {0, 1, 2};           // or C, L, R (L=left, R=right, C=center)
1308             if ( !(Arrays.equals(Arrays.copyOfRange(offset, 0, 3), frontChMap1)
1309                     || Arrays.equals(Arrays.copyOfRange(offset, 0, 3), frontChMap2)) ) {
1310                 fail("wrong front channel mapping");
1311             }
1312         }
1313         // check whether every channel occurs exactly once
1314         int[] chMap = new int[nCh];                        // mapping array to sort channels
1315         for (int ch = 0; ch < effProcNch; ch++) {
1316             int occurred = 0;
1317             for (int idx = 0; idx < procNch; idx++) {
1318                 if (offset[idx] == ch) {
1319                     occurred += 1;
1320                     chMap[ch] = idx;                       // create mapping table to address chans
1321                 }                                          // ... from front to back
1322             }                                              // the LFE must be last
1323             assertTrue(String.format("channel %d occurs %d times in the mapping", ch, occurred),
1324                     occurred == 1);
1325         }
1326 
1327         // go over all segment energies in all channels and check them
1328         double refMinNrg = zeroNrgThresh;                  // reference min energy for the 1st ch;
1329                                                            // others will be compared against 1st
1330         for (int ch = 0; ch < procNch; ch++) {
1331             int idx = chMap[ch];                           // resolve channel mapping
1332             final int ofst = offset[idx] * nSegChOffst;    // signal offset [segments]
1333             if (ch < effProcNch && ofst < totSeg) {
1334                 int nrgSegEnd;                             // the last segment that has energy
1335                 int nrgSeg;                                // the number of segments with energy
1336                 if ((encNch <= 2) && (ch == 0)) {          // the first channel of a mono or ...
1337                     nrgSeg = totSeg;                       // stereo signal has full signal ...
1338                 } else {                                   // all others have one LB + one HB block
1339                     nrgSeg = Math.min(totSeg, (2 * nSegPerBlk) + ofst) - ofst;
1340                 }
1341                 nrgSegEnd = ofst + nrgSeg;
1342                 // find min and max energy of all segments that should have signal
                double minNrg = nrg[idx][ofst];            // channel's minimum segment energy
                double maxNrg = nrg[idx][ofst];            // channel's maximum segment energy
                for (int seg = ofst+1; seg < nrgSegEnd; seg++) {          // values of 1st segment
                    if (nrg[idx][seg] < minNrg) minNrg = nrg[idx][seg];   // ... already assigned
                    if (nrg[idx][seg] > maxNrg) maxNrg = nrg[idx][seg];
                }
                assertTrue(String.format("max energy of channel %d is zero", ch),
                        maxNrg > 0.0f);
                assertTrue(String.format("channel %d does not have enough energy", ch),
                        minNrg >= refMinNrg);              // check the channel's minimum energy
                if (ch == 0) {                             // use 85% of 1st channel's min energy as
                    refMinNrg = minNrg * 0.85f;            // ... reference the other channels must meet
                } else if (isDmx && (ch == 1)) {           // in case of a mixdown signal the energy
                    refMinNrg *= 0.50f;                    // ... can be lower depending on the
                }                                          // ... downmix equation
1358                 // calculate and check the energy ratio
1359                 final double nrgRatio = minNrg / maxNrg;
1360                 assertTrue(String.format("energy ratio of channel %d below threshold", ch),
1361                         nrgRatio >= nrgRatioThresh);
1362                 if (!isDmx) {
1363                     if (nrgSegEnd < totSeg) {
1364                         // consider that some noise can extend into the subsequent segment
                        // allow this to be at most 20% of the channel's minimum energy
1366                         assertTrue(String.format("min energy after noise above threshold (%.2f)",
1367                                 nrg[idx][nrgSegEnd]),
1368                                 nrg[idx][nrgSegEnd] < minNrg * 0.20f);
1369                         nrgSegEnd += 1;
1370                     }
1371                 } else {                                   // ignore all subsequent segments
1372                     nrgSegEnd = totSeg;                    // ... in case of a mixdown signal
1373                 }
1374                 // zero-out the verified energies to simplify the subsequent check
1375                 for (int seg = ofst; seg < nrgSegEnd; seg++) nrg[idx][seg] = 0.0f;
1376             }
1377             // check zero signal parts
1378             for (int seg = 0; seg < totSeg; seg++) {
                assertTrue(String.format("segment %d in channel %d has signal where there should " +
                        "be none (%.2f)", seg, ch, nrg[idx][seg]), nrg[idx][seg] < zeroNrgThresh);
1381             }
1382         }
1383         // test whether each segment has energy in at least one channel
1384         for (int seg = 0; seg < totSeg; seg++) {
1385             assertTrue(String.format("no channel has energy in segment %d", seg), sigSeg[seg]);
1386         }
1387     }
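
    // Illustrative sketch (not part of the original CTS test): a minimal, self-contained version
    // of the per-segment energy measure used by checkEnergy() above. It assumes interleaved
    // 16-bit PCM and sums squared samples of one channel over fixed-length segments; the real
    // check additionally derives channel offsets, thresholds and channel mappings from these
    // energies. The method name and parameters are placeholders chosen for this sketch.
    private static double[] segmentEnergySketch(short[] interleaved, int numChannels,
            int channel, int segmentSamples) {
        final int samplesPerChannel = interleaved.length / numChannels;
        final int numSegments = samplesPerChannel / segmentSamples;
        double[] energies = new double[numSegments];
        for (int seg = 0; seg < numSegments; seg++) {
            for (int s = 0; s < segmentSamples; s++) {
                // pick sample s of this segment for the requested channel from the interleaved data
                final int idx = ((seg * segmentSamples) + s) * numChannels + channel;
                energies[seg] += (double) interleaved[idx] * interleaved[idx];
            }
        }
        return energies;
    }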
1388 
1389     private void checkEnergy(short[] decSamples, AudioParameter decParams, int encNch)
1390             throws RuntimeException {
1391         checkEnergy(decSamples, decParams, encNch, 0.50f);  // default energy ratio threshold: 0.50
1392     }
1393 
1394     /**
1395      * Calculate the RMS of the difference signal between a given signal and the reference samples
1396      * located in mMasterBuffer.
1397      * @param signal the decoded samples to test
1398      * @return RMS of error signal
1399      * @throws RuntimeException
1400      */
1401     private double getRmsError(short[] signal) throws RuntimeException {
1402         long totalErrorSquared = 0;
1403         int stride = mMasterBuffer.length / signal.length;
1404         assertEquals("wrong data size", mMasterBuffer.length, signal.length * stride);
1405 
1406         for (int i = 0; i < signal.length; i++) {
1407             short sample = signal[i];
1408             short mastersample = mMasterBuffer[i * stride];
            int d = sample - mastersample;
            // accumulate in long arithmetic; a worst-case difference of 65535 squared would
            // overflow a 32-bit int
            totalErrorSquared += (long) d * d;
1411         }
1412         long avgErrorSquared = (totalErrorSquared / signal.length);
1413         return Math.sqrt(avgErrorSquared);
1414     }
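
    // Illustrative note (not part of the original test): getRmsError() above computes
    //     rms = sqrt( (1/N) * sum_i (signal[i] - mMasterBuffer[i * stride])^2 )
    // where stride = mMasterBuffer.length / signal.length subsamples the 44.1 kHz reference
    // whenever the decoded signal contains fewer samples than the reference.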
1415 
1416     /**
1417      * Decode a given input stream and compare the output against the reference signal. The RMS of
1418      * the error signal must be below the given threshold (maxerror).
     * Important note about the test signals: this method expects test signals to have been
     *   "stretched" relative to the reference signal. The reference, sinesweepraw, is 3s long at
     *   44100Hz. For instance, to compare this reference to a test signal at 8000Hz, the test
     *   signal needs to be 44100/8000 = 5.5125 times longer, containing frequencies 5.5125
     *   times lower than the reference. (A numeric sketch of this relationship follows the
     *   two decodeNtest() overloads below.)
1424      * @param testinput the file to decode
1425      * @param maxerror  the maximum allowed root mean squared error
1426      * @throws Exception
1427      */
1428     private void decodeNtest(final String testinput, float maxerror) throws Exception {
1429         decodeNtest(testinput, maxerror, CODEC_ALL);
1430     }
1431 
1432     private void decodeNtest(final String testinput, float maxerror, int codecSupportMode)
1433             throws Exception {
1434         String localTag = TAG + "#decodeNtest";
1435 
1436         for (String codecName: codecsFor(testinput, codecSupportMode)) {
1437             AudioParameter decParams = new AudioParameter();
1438             short[] decoded = decodeToMemory(codecName, decParams, testinput,
1439                     RESET_MODE_NONE, CONFIG_MODE_NONE, -1, null);
1440             double rmse = getRmsError(decoded);
1441 
1442             assertTrue(codecName + ": decoding error too big: " + rmse, rmse <= maxerror);
1443             Log.v(localTag, String.format("rms = %f (max = %f)", rmse, maxerror));
1444         }
1445     }
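
    // Illustrative sketch (not part of the original test): the "stretching" rule described in the
    // decodeNtest() comment above. A test signal sampled at testRateHz must last
    // referenceSeconds * referenceRateHz / testRateHz seconds (e.g. 3 s * 44100 / 8000 = 16.5375 s)
    // so that it decodes to the same number of samples as the 44.1 kHz reference. The helper name
    // and parameters are placeholders for this sketch.
    private static double stretchedDurationSecondsSketch(double referenceSeconds,
            double referenceRateHz, double testRateHz) {
        return referenceSeconds * (referenceRateHz / testRateHz);
    }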
1446 
1447     private void monoTest(final String res, int expectedLength) throws Exception {
1448         for (String codecName: codecsFor(res)) {
1449             short [] mono = decodeToMemory(codecName, res,
1450                     RESET_MODE_NONE, CONFIG_MODE_NONE, -1, null);
1451             if (mono.length == expectedLength) {
1452                 // expected
1453             } else if (mono.length == expectedLength * 2) {
                // the decoder output 2 channels instead of 1; check that the left and right
                // channels are identical
1456                 for (int i = 0; i < mono.length; i += 2) {
1457                     assertEquals(codecName + ": mismatched samples at " + i, mono[i], mono[i+1]);
1458                 }
1459             } else {
1460                 fail(codecName + ": wrong number of samples: " + mono.length);
1461             }
1462 
1463             short [] mono2 = decodeToMemory(codecName, res,
1464                     RESET_MODE_RECONFIGURE, CONFIG_MODE_NONE, -1, null);
1465 
1466             assertEquals(codecName + ": count different after reconfigure: ",
1467                     mono.length, mono2.length);
1468             for (int i = 0; i < mono.length; i++) {
1469                 assertEquals(codecName + ": samples at " + i + " don't match", mono[i], mono2[i]);
1470             }
1471 
1472             short [] mono3 = decodeToMemory(codecName, res,
1473                     RESET_MODE_FLUSH, CONFIG_MODE_NONE, -1, null);
1474 
1475             assertEquals(codecName + ": count different after flush: ", mono.length, mono3.length);
1476             for (int i = 0; i < mono.length; i++) {
1477                 assertEquals(codecName + ": samples at " + i + " don't match", mono[i], mono3[i]);
1478             }
1479         }
1480     }
1481 
1482     protected static List<String> codecsFor(String resource) throws IOException {
1483         return codecsFor(resource, CODEC_ALL);
1484     }
1485 
1486     protected static List<String> codecsFor(String resource, int codecSupportMode)
1487             throws IOException {
1488 
1489         // CODEC_DEFAULT behaviors started with S
1490         if (sIsBeforeS) {
1491             codecSupportMode = CODEC_ALL;
1492         }
1493         MediaExtractor ex = new MediaExtractor();
1494         AssetFileDescriptor fd = getAssetFileDescriptorFor(resource);
1495         try {
1496             ex.setDataSource(fd.getFileDescriptor(), fd.getStartOffset(), fd.getLength());
1497         } finally {
1498             fd.close();
1499         }
1500         MediaCodecInfo[] codecInfos = new MediaCodecList(
1501                 MediaCodecList.REGULAR_CODECS).getCodecInfos();
1502         ArrayList<String> matchingCodecs = new ArrayList<String>();
1503         MediaFormat format = ex.getTrackFormat(0);
1504         String mime = format.getString(MediaFormat.KEY_MIME);
1505         for (MediaCodecInfo info: codecInfos) {
1506             if (info.isEncoder()) {
1507                 continue;
1508             }
1509             try {
1510                 MediaCodecInfo.CodecCapabilities caps = info.getCapabilitiesForType(mime);
1511                 if (caps != null) {
1512                     // do we test this codec in current mode?
1513                     if (!TestUtils.isTestableCodecInCurrentMode(info.getName())) {
1514                         Log.i(TAG, "skip codec " + info.getName() + " in current mode");
1515                         continue;
1516                     }
1517                     if (codecSupportMode == CODEC_ALL) {
1518                         if (sIsAfterT) {
1519                             // This is an extractor failure as often as it is a codec failure
1520                             assertTrue(info.getName() + " does not declare support for "
1521                                     + format.toString(),
1522                                     caps.isFormatSupported(format));
1523                         }
1524                         matchingCodecs.add(info.getName());
1525                     } else if (codecSupportMode == CODEC_DEFAULT) {
1526                         if (caps.isFormatSupported(format)) {
1527                             matchingCodecs.add(info.getName());
1528                         } else if (isDefaultCodec(info.getName(), mime)) {
1529                             // This is an extractor failure as often as it is a codec failure
                            fail(info.getName() + ", which is a default decoder for mime " + mime
                                   + ", does not declare support for " + format.toString());
1532                         }
1533                     } else {
1534                         fail("Unhandled codec support mode " + codecSupportMode);
1535                     }
1536                 }
1537             } catch (IllegalArgumentException e) {
1538                 // type is not supported
1539             }
1540         }
1541         if (TestUtils.isMtsMode()) {
1542             // not fatal in MTS mode
1543             Assume.assumeTrue("no MTS-mode codecs found for format " + format.toString(),
1544                             matchingCodecs.size() != 0);
1545         } else {
1546             // but fatal in CTS mode
1547             assertTrue("no codecs found for format " + format.toString(),
1548                             matchingCodecs.size() != 0);
1549         }
1550         return matchingCodecs;
1551     }
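
    // Illustrative sketch (not part of the original test): codecsFor() above enumerates every
    // decoder that supports a format. When a caller only needs some suitable decoder,
    // MediaCodecList offers a one-shot lookup that returns a codec name, or null if no decoder
    // supports the format. The helper name is a placeholder for this sketch.
    private static String anyDecoderForFormatSketch(MediaFormat format) {
        MediaCodecList allCodecs = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        return allCodecs.findDecoderForFormat(format);
    }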
1552 
1553     /**
1554      * @param testinput the file to decode
1555      * @param maxerror the maximum allowed root mean squared error
1556      * @throws IOException
1557      */
1558     private void decode(final String testinput, float maxerror) throws IOException {
1559 
1560         for (String codecName: codecsFor(testinput)) {
1561             short[] decoded = decodeToMemory(codecName, testinput,
1562                     RESET_MODE_NONE, CONFIG_MODE_NONE, -1, null);
1563 
1564             assertEquals(codecName + ": wrong data size", mMasterBuffer.length, decoded.length);
1565 
1566             double rmse = getRmsError(decoded);
1567 
1568             assertTrue(codecName + ": decoding error too big: " + rmse, rmse <= maxerror);
1569 
1570             int[] resetModes = new int[] { RESET_MODE_NONE, RESET_MODE_RECONFIGURE,
1571                     RESET_MODE_FLUSH, RESET_MODE_EOS_FLUSH };
1572             int[] configModes = new int[] { CONFIG_MODE_NONE, CONFIG_MODE_QUEUE };
1573 
1574             for (int conf : configModes) {
1575                 for (int reset : resetModes) {
1576                     if (conf == CONFIG_MODE_NONE && reset == RESET_MODE_NONE) {
1577                         // default case done outside of loop
1578                         continue;
1579                     }
1580                     if (conf == CONFIG_MODE_QUEUE && !hasAudioCsd(testinput)) {
1581                         continue;
1582                     }
1583 
1584                     String params = String.format("(using reset: %d, config: %s)", reset, conf);
1585                     short[] decoded2 = decodeToMemory(codecName, testinput, reset, conf, -1, null);
1586                     assertEquals(codecName + ": count different with reconfigure" + params,
1587                             decoded.length, decoded2.length);
1588                     for (int i = 0; i < decoded.length; i++) {
1589                         assertEquals(codecName + ": samples don't match" + params,
1590                                 decoded[i], decoded2[i]);
1591                     }
1592                 }
1593             }
1594         }
1595     }
1596 
    private boolean hasAudioCsd(final String testinput) throws IOException {
        MediaExtractor extractor = new MediaExtractor();
        try {
            extractor.setDataSource(mInpPrefix + testinput);
            MediaFormat format = extractor.getTrackFormat(0);

            return format.containsKey(CSD_KEYS[0]);

        } finally {
            // the extractor, not an (unused) AssetFileDescriptor, is the resource to clean up here
            extractor.release();
        }
    }
1612 
1613     protected static int getOutputFormatInteger(MediaCodec codec, String key) {
1614         if (codec == null) {
1615             fail("Null MediaCodec before attempting to retrieve output format key " + key);
1616         }
1617         MediaFormat format = null;
1618         try {
1619             format = codec.getOutputFormat();
1620         } catch (Exception e) {
1621             fail("Exception " + e + " when attempting to obtain output format");
1622         }
1623         if (format == null) {
1624             fail("Null output format returned from MediaCodec");
1625         }
1626         try {
1627             return format.getInteger(key);
1628         } catch (NullPointerException e) {
1629             fail("Key " + key + " not present in output format");
1630         } catch (ClassCastException e) {
1631             fail("Key " + key + " not stored as integer in output format");
1632         } catch (Exception e) {
1633             fail("Exception " + e + " when attempting to retrieve output format key " + key);
1634         }
1635         // never used
1636         return Integer.MIN_VALUE;
1637     }
1638 
1639     // Class handling all audio parameters relevant for testing
1640     protected static class AudioParameter {
1641 
1642         public AudioParameter() {
1643             reset();
1644         }
1645 
1646         public void reset() {
1647             mNumChannels = 0;
1648             mSamplingRate = 0;
1649             mChannelMask = 0;
1650         }
1651 
1652         public int getNumChannels() {
1653             return mNumChannels;
1654         }
1655 
1656         public int getSamplingRate() {
1657             return mSamplingRate;
1658         }
1659 
1660         public int getChannelMask() {
1661             return mChannelMask;
1662         }
1663 
1664         public void setNumChannels(int numChannels) {
1665             mNumChannels = numChannels;
1666         }
1667 
1668         public void setSamplingRate(int samplingRate) {
1669             mSamplingRate = samplingRate;
1670         }
1671 
1672         public void setChannelMask(int mask) {
1673             mChannelMask = mask;
1674         }
1675 
1676         private int mNumChannels;
1677         private int mSamplingRate;
1678         private int mChannelMask;
1679     }
1680 
1681     private short[] decodeToMemory(String codecName, final String testinput, int resetMode,
1682             int configMode, int eossample, List<Long> timestamps) throws IOException {
1683 
1684         AudioParameter audioParams = new AudioParameter();
1685         return decodeToMemory(codecName, audioParams, testinput,
1686                 resetMode, configMode, eossample, timestamps);
1687     }
1688 
1689     private short[] decodeToMemory(String codecName, AudioParameter audioParams,
1690             final String testinput, int resetMode, int configMode, int eossample,
1691             List<Long> timestamps) throws IOException {
1692         String localTag = TAG + "#decodeToMemory";
1693         Log.v(localTag, String.format("reset = %d; config: %s", resetMode, configMode));
1694         short [] decoded = new short[0];
1695         int decodedIdx = 0;
1696 
1697         MediaExtractor extractor;
1698         MediaCodec codec;
1699         ByteBuffer[] codecInputBuffers;
1700         ByteBuffer[] codecOutputBuffers;
1701 
1702         extractor = new MediaExtractor();
1703         extractor.setDataSource(mInpPrefix + testinput);
1704 
1705         assertEquals("wrong number of tracks", 1, extractor.getTrackCount());
1706         MediaFormat format = extractor.getTrackFormat(0);
1707         String mime = format.getString(MediaFormat.KEY_MIME);
1708         assertTrue("not an audio file", mime.startsWith("audio/"));
1709 
1710         MediaFormat configFormat = format;
1711         codec = MediaCodec.createByCodecName(codecName);
1712         if (configMode == CONFIG_MODE_QUEUE && format.containsKey(CSD_KEYS[0])) {
1713             configFormat = MediaFormat.createAudioFormat(mime,
1714                     format.getInteger(MediaFormat.KEY_SAMPLE_RATE),
1715                     format.getInteger(MediaFormat.KEY_CHANNEL_COUNT));
1716 
1717             configFormat.setLong(MediaFormat.KEY_DURATION,
1718                     format.getLong(MediaFormat.KEY_DURATION));
1719             String[] keys = new String[] { "max-input-size", "encoder-delay", "encoder-padding" };
1720             for (String k : keys) {
1721                 if (format.containsKey(k)) {
1722                     configFormat.setInteger(k, format.getInteger(k));
1723                 }
1724             }
1725         }
1726         Log.v(localTag, "configuring with " + configFormat);
1727         codec.configure(configFormat, null /* surface */, null /* crypto */, 0 /* flags */);
1728 
1729         codec.start();
1730         codecInputBuffers = codec.getInputBuffers();
1731         codecOutputBuffers = codec.getOutputBuffers();
1732 
1733         if (resetMode == RESET_MODE_RECONFIGURE) {
1734             codec.stop();
1735             codec.configure(configFormat, null /* surface */, null /* crypto */, 0 /* flags */);
1736             codec.start();
1737             codecInputBuffers = codec.getInputBuffers();
1738             codecOutputBuffers = codec.getOutputBuffers();
1739         } else if (resetMode == RESET_MODE_FLUSH) {
1740             codec.flush();
1741         }
1742 
1743         extractor.selectTrack(0);
1744 
1745         if (configMode == CONFIG_MODE_QUEUE) {
1746             queueConfig(codec, format);
1747         }
1748 
1749         // start decoding
1750         final long kTimeOutUs = 5000;
1751         MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
1752         boolean sawInputEOS = false;
1753         boolean sawOutputEOS = false;
1754         int noOutputCounter = 0;
1755         int samplecounter = 0;
1756         while (!sawOutputEOS && noOutputCounter < 50) {
1757             noOutputCounter++;
1758             if (!sawInputEOS) {
1759                 int inputBufIndex = codec.dequeueInputBuffer(kTimeOutUs);
1760 
1761                 if (inputBufIndex >= 0) {
1762                     ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
1763 
1764                     int sampleSize =
1765                         extractor.readSampleData(dstBuf, 0 /* offset */);
1766 
1767                     long presentationTimeUs = 0;
1768 
1769                     if (sampleSize < 0 && eossample > 0) {
1770                         fail("test is broken: never reached eos sample");
1771                     }
1772                     if (sampleSize < 0) {
1773                         Log.d(TAG, "saw input EOS.");
1774                         sawInputEOS = true;
1775                         sampleSize = 0;
1776                     } else {
1777                         if (samplecounter == eossample) {
1778                             sawInputEOS = true;
1779                         }
1780                         samplecounter++;
1781                         presentationTimeUs = extractor.getSampleTime();
1782                     }
1783                     codec.queueInputBuffer(
1784                             inputBufIndex,
1785                             0 /* offset */,
1786                             sampleSize,
1787                             presentationTimeUs,
1788                             sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
1789 
1790                     if (!sawInputEOS) {
1791                         extractor.advance();
1792                     }
1793                 }
1794             }
1795 
1796             int res = codec.dequeueOutputBuffer(info, kTimeOutUs);
1797 
1798             if (res >= 0) {
1799                 //Log.d(TAG, "got frame, size " + info.size + "/" + info.presentationTimeUs);
1800 
1801                 if (info.size > 0) {
1802                     noOutputCounter = 0;
1803                     if (timestamps != null) {
1804                         timestamps.add(info.presentationTimeUs);
1805                     }
1806                 }
1807                 if (info.size > 0 &&
1808                         resetMode != RESET_MODE_NONE && resetMode != RESET_MODE_EOS_FLUSH) {
1809                     // once we've gotten some data out of the decoder, reset and start again
1810                     if (resetMode == RESET_MODE_RECONFIGURE) {
1811                         codec.stop();
1812                         codec.configure(configFormat, null /* surface */, null /* crypto */,
1813                                 0 /* flags */);
1814                         codec.start();
1815                         codecInputBuffers = codec.getInputBuffers();
1816                         codecOutputBuffers = codec.getOutputBuffers();
1817                         if (configMode == CONFIG_MODE_QUEUE) {
1818                             queueConfig(codec, format);
1819                         }
1820                     } else /* resetMode == RESET_MODE_FLUSH */ {
1821                         codec.flush();
1822                     }
1823                     resetMode = RESET_MODE_NONE;
1824                     extractor.seekTo(0, MediaExtractor.SEEK_TO_NEXT_SYNC);
1825                     sawInputEOS = false;
1826                     samplecounter = 0;
1827                     if (timestamps != null) {
1828                         timestamps.clear();
1829                     }
1830                     continue;
1831                 }
1832 
1833                 int outputBufIndex = res;
1834                 ByteBuffer buf = codecOutputBuffers[outputBufIndex];
1835 
1836                 if (decodedIdx + (info.size / 2) >= decoded.length) {
1837                     decoded = Arrays.copyOf(decoded, decodedIdx + (info.size / 2));
1838                 }
1839 
1840                 buf.position(info.offset);
1841                 for (int i = 0; i < info.size; i += 2) {
1842                     decoded[decodedIdx++] = buf.getShort();
1843                 }
1844 
1845                 codec.releaseOutputBuffer(outputBufIndex, false /* render */);
1846 
1847                 if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
1848                     Log.d(TAG, "saw output EOS.");
1849                     if (resetMode == RESET_MODE_EOS_FLUSH) {
1850                         resetMode = RESET_MODE_NONE;
1851                         codec.flush();
1852                         extractor.seekTo(0, MediaExtractor.SEEK_TO_NEXT_SYNC);
1853                         sawInputEOS = false;
1854                         samplecounter = 0;
1855                         decoded = new short[0];
1856                         decodedIdx = 0;
1857                         if (timestamps != null) {
1858                             timestamps.clear();
1859                         }
1860                     } else {
1861                         sawOutputEOS = true;
1862                     }
1863                 }
1864             } else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
1865                 codecOutputBuffers = codec.getOutputBuffers();
1866 
1867                 Log.d(TAG, "output buffers have changed.");
1868             } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
1869                 MediaFormat oformat = codec.getOutputFormat();
1870                 audioParams.setNumChannels(oformat.getInteger(MediaFormat.KEY_CHANNEL_COUNT));
1871                 audioParams.setSamplingRate(oformat.getInteger(MediaFormat.KEY_SAMPLE_RATE));
1872                 Log.d(TAG, "output format has changed to " + oformat);
1873             } else {
1874                 Log.d(TAG, "dequeueOutputBuffer returned " + res);
1875             }
1876         }
1877         if (noOutputCounter >= 50) {
            fail("decoder stopped outputting data");
1879         }
1880 
1881         codec.stop();
1882         codec.release();
1883         return decoded;
1884     }
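
    // Illustrative sketch (not part of the original test): decodeToMemory() above uses the legacy
    // getInputBuffers()/getOutputBuffers() arrays, which have been deprecated in favor of the
    // per-index accessors. A roughly equivalent way to feed one extractor sample to the decoder
    // with the synchronous API is sketched below; the helper name is a placeholder.
    private static void queueOneSampleSketch(MediaCodec codec, MediaExtractor extractor,
            long timeoutUs) {
        int inputIndex = codec.dequeueInputBuffer(timeoutUs);
        if (inputIndex < 0) {
            return;                                        // no input buffer available yet
        }
        ByteBuffer dstBuf = codec.getInputBuffer(inputIndex);   // per-index accessor
        int sampleSize = extractor.readSampleData(dstBuf, 0 /* offset */);
        if (sampleSize < 0) {
            // no more samples: queue an empty buffer flagged as end of stream
            codec.queueInputBuffer(inputIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
        } else {
            codec.queueInputBuffer(inputIndex, 0, sampleSize, extractor.getSampleTime(), 0);
            extractor.advance();
        }
    }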
1885 
1886     private static void queueConfig(MediaCodec codec, MediaFormat format) {
1887         for (String csdKey : CSD_KEYS) {
1888             if (!format.containsKey(csdKey)) {
1889                 continue;
1890             }
1891             ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
1892             int inputBufIndex = codec.dequeueInputBuffer(-1);
1893             if (inputBufIndex < 0) {
1894                 fail("failed to queue configuration buffer " + csdKey);
1895             } else {
1896                 ByteBuffer csd = (ByteBuffer) format.getByteBuffer(csdKey).rewind();
1897                 Log.v(TAG + "#queueConfig", String.format("queueing %s:%s", csdKey, csd));
1898                 codecInputBuffers[inputBufIndex].put(csd);
1899                 codec.queueInputBuffer(
1900                         inputBufIndex,
1901                         0 /* offset */,
1902                         csd.limit(),
1903                         0 /* presentation time (us) */,
1904                         MediaCodec.BUFFER_FLAG_CODEC_CONFIG);
1905             }
1906         }
1907     }
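
    // Illustrative sketch (not part of the original test): instead of queueing codec-specific data
    // with BUFFER_FLAG_CODEC_CONFIG as queueConfig() above does, the same bytes can be attached to
    // the MediaFormat (keys "csd-0", "csd-1", ...) before configure(), in which case MediaCodec
    // delivers them to the codec itself. The helper name is a placeholder for this sketch.
    private static void attachCsdToFormatSketch(MediaFormat configFormat, ByteBuffer csd0) {
        configFormat.setByteBuffer("csd-0", csd0);
    }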
1908 
1909     @Test
1910     public void testDecodeM4aWithEOSOnLastBuffer() throws Exception {
1911         testDecodeWithEOSOnLastBuffer("sinesweepm4a.m4a");
1912     }
1913 
1914     @Test
1915     public void testDecodeMp3WithEOSOnLastBuffer() throws Exception {
1916         testDecodeWithEOSOnLastBuffer("sinesweepmp3lame.mp3");
1917         testDecodeWithEOSOnLastBuffer("sinesweepmp3smpb.mp3");
1918     }
1919 
1920     @Test
1921     public void testDecodeOpusWithEOSOnLastBuffer() throws Exception {
1922         testDecodeWithEOSOnLastBuffer("sinesweepopus.mkv");
1923         testDecodeWithEOSOnLastBuffer("sinesweepopusmp4.mp4");
1924     }
1925 
1926     @Test
1927     public void testDecodeWavWithEOSOnLastBuffer() throws Exception {
1928         testDecodeWithEOSOnLastBuffer("sinesweepwav.wav");
1929     }
1930 
1931     @Test
1932     public void testDecodeFlacWithEOSOnLastBuffer() throws Exception {
1933         testDecodeWithEOSOnLastBuffer("sinesweepflacmkv.mkv");
1934         testDecodeWithEOSOnLastBuffer("sinesweepflac.flac");
1935         testDecodeWithEOSOnLastBuffer("sinesweepflacmp4.mp4");
1936     }
1937 
1938     @Test
1939     public void testDecodeOggWithEOSOnLastBuffer() throws Exception {
1940         testDecodeWithEOSOnLastBuffer("sinesweepogg.ogg");
1941         testDecodeWithEOSOnLastBuffer("sinesweepoggmkv.mkv");
1942         testDecodeWithEOSOnLastBuffer("sinesweepoggmp4.mp4");
1943     }
1944 
1945     /* setting EOS on the last full input buffer should be equivalent to setting EOS on an empty
1946      * input buffer after all the full ones. */
1947     private void testDecodeWithEOSOnLastBuffer(final String res) throws Exception {
1948         int numsamples = countSamples(res);
1949         assertTrue(numsamples != 0);
1950 
1951         for (String codecName: codecsFor(res)) {
1952             List<Long> timestamps1 = new ArrayList<Long>();
1953             short[] decode1 = decodeToMemory(codecName, res,
1954                     RESET_MODE_NONE, CONFIG_MODE_NONE, -1, timestamps1);
1955 
1956             List<Long> timestamps2 = new ArrayList<Long>();
1957             short[] decode2 = decodeToMemory(codecName, res,
1958                     RESET_MODE_NONE, CONFIG_MODE_NONE, numsamples - 1,
1959                     timestamps2);
1960 
1961             // check that data and timestamps are the same for EOS-on-last and EOS-after-last
1962             assertEquals(decode1.length, decode2.length);
1963             assertTrue(Arrays.equals(decode1, decode2));
1964             assertEquals(timestamps1.size(), timestamps2.size());
1965             assertTrue(timestamps1.equals(timestamps2));
1966 
1967             // ... and that this is also true when reconfiguring the codec
1968             timestamps2.clear();
1969             decode2 = decodeToMemory(codecName, res,
1970                     RESET_MODE_RECONFIGURE, CONFIG_MODE_NONE, -1, timestamps2);
1971             assertTrue(Arrays.equals(decode1, decode2));
1972             assertTrue(timestamps1.equals(timestamps2));
1973             timestamps2.clear();
1974             decode2 = decodeToMemory(codecName, res,
1975                     RESET_MODE_RECONFIGURE, CONFIG_MODE_NONE, numsamples - 1, timestamps2);
1976             assertEquals(decode1.length, decode2.length);
1977             assertTrue(Arrays.equals(decode1, decode2));
1978             assertTrue(timestamps1.equals(timestamps2));
1979 
1980             // ... and that this is also true when flushing the codec
1981             timestamps2.clear();
1982             decode2 = decodeToMemory(codecName, res,
1983                     RESET_MODE_FLUSH, CONFIG_MODE_NONE, -1, timestamps2);
1984             assertTrue(Arrays.equals(decode1, decode2));
1985             assertTrue(timestamps1.equals(timestamps2));
1986             timestamps2.clear();
1987             decode2 = decodeToMemory(codecName, res,
1988                     RESET_MODE_FLUSH, CONFIG_MODE_NONE, numsamples - 1,
1989                     timestamps2);
1990             assertEquals(decode1.length, decode2.length);
1991             assertTrue(Arrays.equals(decode1, decode2));
1992             assertTrue(timestamps1.equals(timestamps2));
1993         }
1994     }
1995 
1996     private int countSamples(final String res) throws IOException {
1997         MediaExtractor extractor = new MediaExtractor();
1998         extractor.setDataSource(mInpPrefix + res);
1999         extractor.selectTrack(0);
2000         int numsamples = extractor.getSampleTime() < 0 ? 0 : 1;
2001         while (extractor.advance()) {
2002             numsamples++;
2003         }
2004         return numsamples;
2005     }
2006 
2007     private void testDecode(final String testVideo, int frameNum) throws Exception {
2008         if (!MediaUtils.checkCodecForResource(mInpPrefix + testVideo, 0 /* track */)) {
2009             return; // skip
2010         }
2011 
2012         // Decode to Surface.
2013         Surface s = getActivity().getSurfaceHolder().getSurface();
2014         int frames1 = countFrames(testVideo, RESET_MODE_NONE, -1 /* eosframe */, s);
2015         assertEquals("wrong number of frames decoded", frameNum, frames1);
2016 
2017         // Decode to buffer.
2018         int frames2 = countFrames(testVideo, RESET_MODE_NONE, -1 /* eosframe */, null);
2019         assertEquals("different number of frames when using Surface", frames1, frames2);
2020     }
2021 
2022     @Test
2023     public void testCodecBasicH264() throws Exception {
2024         testDecode("video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4", 240);
2025     }
2026 
2027     @Test
2028     public void testCodecBasicHEVC() throws Exception {
2029         testDecode(
2030                 "bbb_s1_720x480_mp4_hevc_mp3_1600kbps_30fps_aac_he_6ch_240kbps_48000hz.mp4", 300);
2031     }
2032 
2033     @Test
2034     public void testCodecBasicH263() throws Exception {
2035         testDecode("video_176x144_3gp_h263_300kbps_12fps_aac_stereo_128kbps_22050hz.3gp", 122);
2036     }
2037 
2038     @Test
2039     public void testCodecBasicMpeg2() throws Exception {
2040         testDecode("video_480x360_mp4_mpeg2_1500kbps_30fps_aac_stereo_128kbps_48000hz.mp4", 300);
2041     }
2042 
2043     @Test
2044     public void testCodecBasicMpeg4() throws Exception {
2045         testDecode("video_480x360_mp4_mpeg4_860kbps_25fps_aac_stereo_128kbps_44100hz.mp4", 249);
2046     }
2047 
2048     @Test
2049     public void testCodecBasicVP8() throws Exception {
2050         testDecode("video_480x360_webm_vp8_333kbps_25fps_vorbis_stereo_128kbps_48000hz.webm", 240);
2051     }
2052 
2053     @Test
2054     public void testCodecBasicVP9() throws Exception {
2055         testDecode("video_480x360_webm_vp9_333kbps_25fps_vorbis_stereo_128kbps_48000hz.webm", 240);
2056     }
2057 
2058     @Test
2059     public void testCodecBasicAV1() throws Exception {
2060         testDecode("video_480x360_webm_av1_400kbps_30fps_vorbis_stereo_128kbps_48000hz.webm", 300);
2061     }
2062 
2063     @Test
2064     public void testH264Decode320x240() throws Exception {
2065         testDecode("bbb_s1_320x240_mp4_h264_mp2_800kbps_30fps_aac_lc_5ch_240kbps_44100hz.mp4", 300);
2066     }
2067 
2068     @Test
2069     public void testH264Decode720x480() throws Exception {
2070         testDecode("bbb_s1_720x480_mp4_h264_mp3_2mbps_30fps_aac_lc_5ch_320kbps_48000hz.mp4", 300);
2071     }
2072 
2073     @Test
2074     public void testH264Decode30fps1280x720Tv() throws Exception {
2075         if (checkTv()) {
2076             assertTrue(MediaUtils.canDecodeVideo(
2077                     MediaFormat.MIMETYPE_VIDEO_AVC, 1280, 720, 30,
2078                     AVCProfileHigh, AVCLevel31, 8000000));
2079         }
2080     }
2081 
2082     @Test
2083     public void testH264SecureDecode30fps1280x720Tv() throws Exception {
2084         if (checkTv()) {
2085             verifySecureVideoDecodeSupport(
2086                     MediaFormat.MIMETYPE_VIDEO_AVC, 1280, 720, 30,
2087                     AVCProfileHigh, AVCLevel31, 8000000);
2088         }
2089     }
2090 
2091     @Test
2092     public void testH264Decode30fps1280x720() throws Exception {
2093         testDecode("bbb_s4_1280x720_mp4_h264_mp31_8mbps_30fps_aac_he_mono_40kbps_44100hz.mp4", 300);
2094     }
2095 
2096     @Test
2097     public void testH264Decode60fps1280x720Tv() throws Exception {
2098         if (checkTv()) {
2099             assertTrue(MediaUtils.canDecodeVideo(
2100                     MediaFormat.MIMETYPE_VIDEO_AVC, 1280, 720, 60,
2101                     AVCProfileHigh, AVCLevel32, 8000000));
2102             testDecode(
2103                     "bbb_s3_1280x720_mp4_h264_hp32_8mbps_60fps_aac_he_v2_stereo_48kbps_48000hz.mp4",
2104                     600);
2105         }
2106     }
2107 
2108     @Test
2109     public void testH264SecureDecode60fps1280x720Tv() throws Exception {
2110         if (checkTv()) {
2111             verifySecureVideoDecodeSupport(
2112                     MediaFormat.MIMETYPE_VIDEO_AVC, 1280, 720, 60,
2113                     AVCProfileHigh, AVCLevel32, 8000000);
2114         }
2115     }
2116 
2117     @Test
2118     public void testH264Decode60fps1280x720() throws Exception {
2119         testDecode("bbb_s3_1280x720_mp4_h264_mp32_8mbps_60fps_aac_he_v2_6ch_144kbps_44100hz.mp4",
2120                 600);
2121     }
2122 
2123     @Test
2124     public void testH264Decode30fps1920x1080Tv() throws Exception {
2125         if (checkTv()) {
2126             assertTrue(MediaUtils.canDecodeVideo(
2127                     MediaFormat.MIMETYPE_VIDEO_AVC, 1920, 1080, 30,
2128                     AVCProfileHigh, AVCLevel4, 20000000));
2129             testDecode(
2130                     "bbb_s4_1920x1080_wide_mp4_h264_hp4_20mbps_30fps_aac_lc_6ch_384kbps_44100hz.mp4",
2131                     150);
2132         }
2133     }
2134 
2135     @Test
2136     public void testH264SecureDecode30fps1920x1080Tv() throws Exception {
2137         if (checkTv()) {
2138             verifySecureVideoDecodeSupport(
2139                     MediaFormat.MIMETYPE_VIDEO_AVC, 1920, 1080, 30,
2140                     AVCProfileHigh, AVCLevel4, 20000000);
2141         }
2142     }
2143 
2144     @Test
2145     public void testH264Decode30fps1920x1080() throws Exception {
2146         testDecode("bbb_s4_1920x1080_wide_mp4_h264_mp4_20mbps_30fps_aac_he_5ch_200kbps_44100hz.mp4",
2147                 150);
2148     }
2149 
2150     @Test
2151     public void testH264Decode60fps1920x1080Tv() throws Exception {
2152         if (checkTv()) {
2153             assertTrue(MediaUtils.canDecodeVideo(
2154                     MediaFormat.MIMETYPE_VIDEO_AVC, 1920, 1080, 60,
2155                     AVCProfileHigh, AVCLevel42, 20000000));
2156             testDecode("bbb_s2_1920x1080_mp4_h264_hp42_20mbps_60fps_aac_lc_6ch_384kbps_48000hz.mp4",
2157                     300);
2158         }
2159     }
2160 
2161     @Test
2162     public void testH264SecureDecode60fps1920x1080Tv() throws Exception {
2163         if (checkTv()) {
2164             verifySecureVideoDecodeSupport(
2165                     MediaFormat.MIMETYPE_VIDEO_AVC, 1920, 1080, 60,
2166                     AVCProfileHigh, AVCLevel42, 20000000);
2167         }
2168     }
2169 
2170     @Test
2171     public void testH264Decode60fps1920x1080() throws Exception {
2172         testDecode("bbb_s2_1920x1080_mp4_h264_mp42_20mbps_60fps_aac_he_v2_5ch_160kbps_48000hz.mp4",
2173                 300);
2174         testDecode("bbb_s2_1920x1080_mkv_h264_mp42_20mbps_60fps_aac_he_v2_5ch_160kbps_48000hz.mkv",
2175                 300);
2176     }
2177 
2178     @Test
2179     public void testH265Decode25fps1280x720() throws Exception {
2180         testDecode("video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv", 240);
2181     }
2182 
2183     @Test
2184     public void testVP8Decode320x180() throws Exception {
2185         testDecode("bbb_s1_320x180_webm_vp8_800kbps_30fps_opus_5ch_320kbps_48000hz.webm", 300);
2186     }
2187 
2188     @Test
2189     public void testVP8Decode640x360() throws Exception {
2190         testDecode("bbb_s1_640x360_webm_vp8_2mbps_30fps_vorbis_5ch_320kbps_48000hz.webm", 300);
2191     }
2192 
2193     @Test
2194     public void testVP8Decode30fps1280x720Tv() throws Exception {
2195         if (checkTv()) {
2196             assertTrue(MediaUtils.canDecodeVideo(MediaFormat.MIMETYPE_VIDEO_VP8, 1280, 720, 30));
2197         }
2198     }
2199 
2200     @Test
2201     public void testVP8Decode30fps1280x720() throws Exception {
2202         testDecode("bbb_s4_1280x720_webm_vp8_8mbps_30fps_opus_mono_64kbps_48000hz.webm", 300);
2203     }
2204 
2205     @Test
2206     public void testVP8Decode60fps1280x720Tv() throws Exception {
2207         if (checkTv()) {
2208             assertTrue(MediaUtils.canDecodeVideo(MediaFormat.MIMETYPE_VIDEO_VP8, 1280, 720, 60));
2209         }
2210     }
2211 
2212     @Test
2213     public void testVP8Decode60fps1280x720() throws Exception {
2214         testDecode("bbb_s3_1280x720_webm_vp8_8mbps_60fps_opus_6ch_384kbps_48000hz.webm", 600);
2215     }
2216 
2217     @Test
2218     public void testVP8Decode30fps1920x1080Tv() throws Exception {
2219         if (checkTv()) {
2220             assertTrue(MediaUtils.canDecodeVideo(MediaFormat.MIMETYPE_VIDEO_VP8, 1920, 1080, 30));
2221         }
2222     }
2223 
2224     @Test
2225     public void testVP8Decode30fps1920x1080() throws Exception {
2226         testDecode("bbb_s4_1920x1080_wide_webm_vp8_20mbps_30fps_vorbis_6ch_384kbps_44100hz.webm",
2227                 150);
2228     }
2229 
2230     @Test
2231     public void testVP8Decode60fps1920x1080Tv() throws Exception {
2232         if (checkTv()) {
2233             assertTrue(MediaUtils.canDecodeVideo(MediaFormat.MIMETYPE_VIDEO_VP8, 1920, 1080, 60));
2234         }
2235     }
2236 
2237     @Test
2238     public void testVP8Decode60fps1920x1080() throws Exception {
2239         testDecode("bbb_s2_1920x1080_webm_vp8_20mbps_60fps_vorbis_6ch_384kbps_48000hz.webm", 300);
2240     }
2241 
2242     @Test
2243     public void testVP9Decode320x180() throws Exception {
2244         testDecode("bbb_s1_320x180_webm_vp9_0p11_600kbps_30fps_vorbis_mono_64kbps_48000hz.webm",
2245                 300);
2246     }
2247 
2248     @Test
2249     public void testVP9Decode640x360() throws Exception {
2250         testDecode("bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm",
2251                 300);
2252     }
2253 
2254     @Test
2255     public void testVP9Decode30fps1280x720Tv() throws Exception {
2256         if (checkTv()) {
2257             assertTrue(MediaUtils.canDecodeVideo(MediaFormat.MIMETYPE_VIDEO_VP9, 1280, 720, 30));
2258         }
2259     }
2260 
2261     @Test
2262     public void testVP9Decode30fps1280x720() throws Exception {
2263         testDecode("bbb_s4_1280x720_webm_vp9_0p31_4mbps_30fps_opus_stereo_128kbps_48000hz.webm",
2264                 300);
2265     }
2266 
2267     @Test
2268     public void testVP9Decode60fps1920x1080() throws Exception {
2269         testDecode("bbb_s2_1920x1080_webm_vp9_0p41_10mbps_60fps_vorbis_6ch_384kbps_22050hz.webm",
2270                 300);
2271     }
2272 
2273     @Test
2274     public void testVP9Decode30fps3840x2160() throws Exception {
2275         testDecode("bbb_s4_3840x2160_webm_vp9_0p5_20mbps_30fps_vorbis_6ch_384kbps_24000hz.webm",
2276                 150);
2277     }
2278 
2279     @Test
2280     public void testVP9Decode60fps3840x2160() throws Exception {
2281         testDecode("bbb_s2_3840x2160_webm_vp9_0p51_20mbps_60fps_vorbis_6ch_384kbps_32000hz.webm",
2282                 300);
2283     }
2284 
2285     @Test
2286     public void testAV1Decode320x180() throws Exception {
2287         testDecode("video_320x180_webm_av1_200kbps_30fps_vorbis_stereo_128kbps_48000hz.webm", 300);
2288     }
2289 
2290     @Test
2291     public void testAV1Decode640x360() throws Exception {
2292         testDecode("video_640x360_webm_av1_470kbps_30fps_vorbis_stereo_128kbps_48000hz.webm", 300);
2293     }
2294 
2295     @Test
2296     public void testAV1Decode30fps1280x720() throws Exception {
2297         testDecode("video_1280x720_webm_av1_2000kbps_30fps_vorbis_stereo_128kbps_48000hz.webm",
2298                 300);
2299     }
2300 
2301     @Test
2302     public void testAV1Decode60fps1920x1080() throws Exception {
2303         testDecode("video_1920x1080_webm_av1_7000kbps_60fps_vorbis_stereo_128kbps_48000hz.webm",
2304                 300);
2305     }
2306 
2307     @Test
2308     public void testAV1Decode30fps3840x2160() throws Exception {
2309         testDecode("video_3840x2160_webm_av1_11000kbps_30fps_vorbis_stereo_128kbps_48000hz.webm",
2310                 150);
2311     }
2312 
2313     @Test
2314     public void testAV1Decode60fps3840x2160() throws Exception {
2315         testDecode("video_3840x2160_webm_av1_18000kbps_60fps_vorbis_stereo_128kbps_48000hz.webm",
2316                 300);
2317     }
2318 
2319     @Test
2320     public void testHEVCDecode352x288() throws Exception {
2321         testDecode("bbb_s1_352x288_mp4_hevc_mp2_600kbps_30fps_aac_he_stereo_96kbps_48000hz.mp4",
2322                 300);
2323     }
2324 
2325     @Test
2326     public void testHEVCDecode720x480() throws Exception {
2327         testDecode("bbb_s1_720x480_mp4_hevc_mp3_1600kbps_30fps_aac_he_6ch_240kbps_48000hz.mp4",
2328                 300);
2329     }
2330 
2331     @Test
2332     public void testHEVCDecode30fps1280x720Tv() throws Exception {
2333         if (checkTv()) {
2334             assertTrue(MediaUtils.canDecodeVideo(
2335                     MediaFormat.MIMETYPE_VIDEO_HEVC, 1280, 720, 30,
2336                     HEVCProfileMain, HEVCMainTierLevel31, 4000000));
2337         }
2338     }
2339 
2340     @Test
2341     public void testHEVCDecode30fps1280x720() throws Exception {
2342         testDecode("bbb_s4_1280x720_mp4_hevc_mp31_4mbps_30fps_aac_he_stereo_80kbps_32000hz.mp4",
2343                 300);
2344     }
2345 
2346     @Test
2347     public void testHEVCDecode30fps1920x1080Tv() throws Exception {
2348         if (checkTv()) {
2349             assertTrue(MediaUtils.canDecodeVideo(
2350                     MediaFormat.MIMETYPE_VIDEO_HEVC, 1920, 1080, 30,
2351                     HEVCProfileMain, HEVCMainTierLevel41, 5000000));
2352         }
2353     }
2354 
2355     @Test
2356     public void testHEVCDecode60fps1920x1080() throws Exception {
2357         testDecode("bbb_s2_1920x1080_mp4_hevc_mp41_10mbps_60fps_aac_lc_6ch_384kbps_22050hz.mp4",
2358                 300);
2359     }
2360 
2361     @Test
2362     public void testHEVCDecode30fps3840x2160() throws Exception {
2363         testDecode("bbb_s4_3840x2160_mp4_hevc_mp5_20mbps_30fps_aac_lc_6ch_384kbps_24000hz.mp4",
2364                 150);
2365     }
2366 
2367     @Test
2368     public void testHEVCDecode60fps3840x2160() throws Exception {
2369         testDecode("bbb_s2_3840x2160_mp4_hevc_mp51_20mbps_60fps_aac_lc_6ch_384kbps_32000hz.mp4",
2370                 300);
2371     }
2372 
2373     @Test
2374     public void testMpeg2Decode352x288() throws Exception {
2375         testDecode("video_352x288_mp4_mpeg2_1000kbps_30fps_aac_stereo_128kbps_48000hz.mp4", 300);
2376     }
2377 
2378     @Test
2379     public void testMpeg2Decode720x480() throws Exception {
2380         testDecode("video_720x480_mp4_mpeg2_2000kbps_30fps_aac_stereo_128kbps_48000hz.mp4", 300);
2381     }
2382 
2383     @Test
2384     public void testMpeg2Decode30fps1280x720Tv() throws Exception {
2385         if (checkTv()) {
2386             assertTrue(MediaUtils.canDecodeVideo(MediaFormat.MIMETYPE_VIDEO_MPEG2, 1280, 720, 30));
2387         }
2388     }
2389 
2390     @Test
2391     public void testMpeg2Decode30fps1280x720() throws Exception {
2392         testDecode("video_1280x720_mp4_mpeg2_6000kbps_30fps_aac_stereo_128kbps_48000hz.mp4", 150);
2393     }
2394 
2395     @Test
2396     public void testMpeg2Decode30fps1920x1080Tv() throws Exception {
2397         if (checkTv()) {
2398             assertTrue(MediaUtils.canDecodeVideo(MediaFormat.MIMETYPE_VIDEO_MPEG2, 1920, 1080, 30));
2399         }
2400     }
2401 
2402     @Test
2403     public void testMpeg2Decode30fps1920x1080() throws Exception {
2404         testDecode("video_1920x1080_mp4_mpeg2_12000kbps_30fps_aac_stereo_128kbps_48000hz.mp4", 150);
2405     }
2406 
2407     @Test
2408     public void testMpeg2Decode30fps3840x2160() throws Exception {
2409         testDecode("video_3840x2160_mp4_mpeg2_20000kbps_30fps_aac_stereo_128kbps_48000hz.mp4", 150);
2410     }
2411 
2412     private void testCodecEarlyEOS(final String res, int eosFrame) throws Exception {
2413         if (!MediaUtils.checkCodecForResource(mInpPrefix + res, 0 /* track */)) {
2414             return; // skip
2415         }
2416         Surface s = getActivity().getSurfaceHolder().getSurface();
2417         int frames1 = countFrames(res, RESET_MODE_NONE, eosFrame, s);
2418         assertEquals("wrong number of frames decoded", eosFrame, frames1);
2419     }
2420 
2421     @Test
2422     public void testCodecEarlyEOSH263() throws Exception {
2423         testCodecEarlyEOS("video_176x144_3gp_h263_300kbps_12fps_aac_stereo_128kbps_22050hz.3gp",
2424                 64 /* eosframe */);
2425     }
2426 
2427     @Test
2428     public void testCodecEarlyEOSH264() throws Exception {
2429         testCodecEarlyEOS("video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4",
2430                 120 /* eosframe */);
2431     }
2432 
2433     @Test
2434     public void testCodecEarlyEOSHEVC() throws Exception {
2435         testCodecEarlyEOS("video_480x360_mp4_hevc_650kbps_30fps_aac_stereo_128kbps_48000hz.mp4",
2436                 120 /* eosframe */);
2437     }
2438 
2439     @Test
2440     public void testCodecEarlyEOSMpeg2() throws Exception {
2441         testCodecEarlyEOS("video_480x360_mp4_mpeg2_1500kbps_30fps_aac_stereo_128kbps_48000hz.mp4",
2442                 120 /* eosframe */);
2443     }
2444 
2445     @Test
2446     public void testCodecEarlyEOSMpeg4() throws Exception {
2447         testCodecEarlyEOS("video_480x360_mp4_mpeg4_860kbps_25fps_aac_stereo_128kbps_44100hz.mp4",
2448                 120 /* eosframe */);
2449     }
2450 
2451     @Test
2452     public void testCodecEarlyEOSVP8() throws Exception {
2453         testCodecEarlyEOS("video_480x360_webm_vp8_333kbps_25fps_vorbis_stereo_128kbps_48000hz.webm",
2454                 120 /* eosframe */);
2455     }
2456 
2457     @Test
2458     public void testCodecEarlyEOSVP9() throws Exception {
2459         testCodecEarlyEOS(
2460                 "video_480x360_webm_vp9_333kbps_25fps_vorbis_stereo_128kbps_48000hz.webm",
2461                 120 /* eosframe */);
2462     }
2463 
2464     @Test
2465     public void testCodecEarlyEOSAV1() throws Exception {
2466         testCodecEarlyEOS("video_480x360_webm_av1_400kbps_30fps_vorbis_stereo_128kbps_48000hz.webm",
2467                 120 /* eosframe */);
2468     }
2469 
2470     @Test
2471     public void testCodecResetsH264WithoutSurface() throws Exception {
2472         testCodecResets("video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4",
2473                 null);
2474     }
2475 
2476     @Test
2477     public void testCodecResetsH264WithSurface() throws Exception {
2478         Surface s = getActivity().getSurfaceHolder().getSurface();
2479         testCodecResets("video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4", s);
2480     }
2481 
2482     @Test
2483     public void testCodecResetsHEVCWithoutSurface() throws Exception {
2484         testCodecResets("bbb_s1_720x480_mp4_hevc_mp3_1600kbps_30fps_aac_he_6ch_240kbps_48000hz.mp4",
2485                 null);
2486     }
2487 
2488     @Test
2489     public void testCodecResetsHEVCWithSurface() throws Exception {
2490         Surface s = getActivity().getSurfaceHolder().getSurface();
2491         testCodecResets("bbb_s1_720x480_mp4_hevc_mp3_1600kbps_30fps_aac_he_6ch_240kbps_48000hz.mp4",
2492                 s);
2493     }
2494 
2495     @Test
2496     public void testCodecResetsMpeg2WithoutSurface() throws Exception {
2497         testCodecResets("video_1280x720_mp4_mpeg2_6000kbps_30fps_aac_stereo_128kbps_48000hz.mp4",
2498                 null);
2499     }
2500 
2501     @Test
2502     public void testCodecResetsMpeg2WithSurface() throws Exception {
2503         Surface s = getActivity().getSurfaceHolder().getSurface();
2504         testCodecResets("video_176x144_mp4_mpeg2_105kbps_25fps_aac_stereo_128kbps_44100hz.mp4", s);
2505     }
2506 
2507     @Test
2508     public void testCodecResetsH263WithoutSurface() throws Exception {
2509         testCodecResets("video_176x144_3gp_h263_300kbps_12fps_aac_stereo_128kbps_22050hz.3gp", null);
2510     }
2511 
2512     @Test
2513     public void testCodecResetsH263WithSurface() throws Exception {
2514         Surface s = getActivity().getSurfaceHolder().getSurface();
2515         testCodecResets("video_176x144_3gp_h263_300kbps_12fps_aac_stereo_128kbps_22050hz.3gp", s);
2516     }
2517 
2518     @Test
2519     public void testCodecResetsMpeg4WithoutSurface() throws Exception {
2520         testCodecResets("video_480x360_mp4_mpeg4_860kbps_25fps_aac_stereo_128kbps_44100hz.mp4",
2521                 null);
2522     }
2523 
2524     @Test
2525     public void testCodecResetsMpeg4WithSurface() throws Exception {
2526         Surface s = getActivity().getSurfaceHolder().getSurface();
2527         testCodecResets("video_480x360_mp4_mpeg4_860kbps_25fps_aac_stereo_128kbps_44100hz.mp4", s);
2528     }
2529 
2530     @Test
2531     public void testCodecResetsVP8WithoutSurface() throws Exception {
2532         testCodecResets("video_480x360_webm_vp8_333kbps_25fps_vorbis_stereo_128kbps_48000hz.webm",
2533                 null);
2534     }
2535 
2536     @Test
2537     public void testCodecResetsVP8WithSurface() throws Exception {
2538         Surface s = getActivity().getSurfaceHolder().getSurface();
2539         testCodecResets("video_480x360_webm_vp8_333kbps_25fps_vorbis_stereo_128kbps_48000hz.webm",
2540                 s);
2541     }
2542 
2543     @Test
2544     public void testCodecResetsVP9WithoutSurface() throws Exception {
2545         testCodecResets("video_480x360_webm_vp9_333kbps_25fps_vorbis_stereo_128kbps_48000hz.webm",
2546                 null);
2547     }
2548 
2549     @Test
2550     public void testCodecResetsAV1WithoutSurface() throws Exception {
2551         testCodecResets("video_480x360_webm_av1_400kbps_30fps_vorbis_stereo_128kbps_48000hz.webm",
2552                 null);
2553     }
2554 
2555     @Test
2556     public void testCodecResetsVP9WithSurface() throws Exception {
2557         Surface s = getActivity().getSurfaceHolder().getSurface();
2558         testCodecResets("video_480x360_webm_vp9_333kbps_25fps_vorbis_stereo_128kbps_48000hz.webm",
2559                 s);
2560     }
2561 
2562     @Test
2563     public void testCodecResetsAV1WithSurface() throws Exception {
2564         Surface s = getActivity().getSurfaceHolder().getSurface();
2565         testCodecResets("video_480x360_webm_av1_400kbps_30fps_vorbis_stereo_128kbps_48000hz.webm",
2566                 s);
2567     }
2568 
2569 //    public void testCodecResetsOgg() throws Exception {
2570 //        testCodecResets("sinesweepogg.ogg", null);
2571 //    }
2572 
2573     @Test
2574     public void testCodecResetsMp3() throws Exception {
2575         testCodecReconfig("sinesweepmp3lame.mp3");
2576         // NOTE: replacing testCodecReconfig call soon
2577 //        testCodecResets("sinesweepmp3lame.mp3", null);
2578     }
2579 
2580     @Test
2581     public void testCodecResetsM4a() throws Exception {
2582         testCodecReconfig("sinesweepm4a.m4a");
2583         // NOTE: replacing testCodecReconfig call soon
2584 //        testCodecResets("sinesweepm4a.m4a", null);
2585     }
2586 
2587     private void testCodecReconfig(final String audio) throws Exception {
2588         int size1 = countSize(audio, RESET_MODE_NONE, -1 /* eosframe */);
2589         int size2 = countSize(audio, RESET_MODE_RECONFIGURE, -1 /* eosframe */);
2590         assertEquals("different output size when using reconfigured codec", size1, size2);
2591     }
2592 
2593     private void testCodecResets(final String video, Surface s) throws Exception {
2594         if (!MediaUtils.checkCodecForResource(mInpPrefix + video, 0 /* track */)) {
2595             return; // skip
2596         }
2597 
2598         int frames1 = countFrames(video, RESET_MODE_NONE, -1 /* eosframe */, s);
2599         int frames2 = countFrames(video, RESET_MODE_RECONFIGURE, -1 /* eosframe */, s);
2600         int frames3 = countFrames(video, RESET_MODE_FLUSH, -1 /* eosframe */, s);
2601         assertEquals("different number of frames when using reconfigured codec", frames1, frames2);
2602         assertEquals("different number of frames when using flushed codec", frames1, frames3);
2603     }
2604 
2605     private static void verifySecureVideoDecodeSupport(
2606             String mime, int width, int height, float rate, int profile, int level, int bitrate) {
2607         MediaFormat baseFormat = new MediaFormat();
2608         baseFormat.setString(MediaFormat.KEY_MIME, mime);
2609         baseFormat.setFeatureEnabled(CodecCapabilities.FEATURE_SecurePlayback, true);
2610 
2611         MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
2612         format.setFeatureEnabled(CodecCapabilities.FEATURE_SecurePlayback, true);
2613         format.setFloat(MediaFormat.KEY_FRAME_RATE, rate);
2614         format.setInteger(MediaFormat.KEY_PROFILE, profile);
2615         format.setInteger(MediaFormat.KEY_LEVEL, level);
2616         format.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
2617 
2618         MediaCodecList mcl = new MediaCodecList(MediaCodecList.ALL_CODECS);
2619         if (mcl.findDecoderForFormat(baseFormat) == null) {
2620             MediaUtils.skipTest("no secure decoder for " + mime);
2621             return;
2622         }
2623         assertNotNull("no decoder for " + format, mcl.findDecoderForFormat(format));
2624     }
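
    // Illustrative usage of the helper above (not an existing test in this file; the MIME type,
    // resolution, frame rate, profile, level, and bitrate below are assumptions chosen only to
    // show the parameter order):
    //
    //     verifySecureVideoDecodeSupport(MediaFormat.MIMETYPE_VIDEO_AVC, 1920, 1080,
    //             30 /* fps */, AVCProfileHigh, AVCLevel4, 20000000 /* bps */);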
2625 
2626     private static MediaCodec createDecoder(MediaFormat format) {
2627         return MediaUtils.getDecoder(format);
2628     }
2629 
2630     // for video
2631     private int countFrames(final String video, int resetMode, int eosframe, Surface s)
2632             throws Exception {
2633         MediaExtractor extractor = new MediaExtractor();
2634         extractor.setDataSource(mInpPrefix + video);
2635         extractor.selectTrack(0);
2636 
2637         int numframes = decodeWithChecks(null /* decoderName */, extractor,
2638                 CHECKFLAG_RETURN_OUTPUTFRAMES | CHECKFLAG_COMPAREINPUTOUTPUTPTSMATCH,
2639                 resetMode, s, eosframe, null, null);
2640 
2641         extractor.release();
2642         return numframes;
2643     }
2644 
2645     // for audio
2646     private int countSize(final String audio, int resetMode, int eosframe)
2647             throws Exception {
2648         MediaExtractor extractor = new MediaExtractor();
2649         extractor.setDataSource(mInpPrefix + audio);
2650 
2651         extractor.selectTrack(0);
2652 
2653         // audio PTS need not match input 1:1; CHECKFLAG_COMPAREINPUTOUTPUTPTSMATCH would fail
2654         int outputSize = decodeWithChecks(null /* decoderName */, extractor,
2655                 CHECKFLAG_RETURN_OUTPUTSIZE, resetMode, null,
2656                 eosframe, null, null);
2657 
2658         extractor.release();
2659         return outputSize;
2660     }
2661 
2662     /*
2663      * Test all decoders' EOS behavior.
2664      */
2665     private void testEOSBehavior(final String movie, int stopatsample) throws Exception {
2666         testEOSBehavior(movie, new int[] {stopatsample});
2667     }
2668 
2669     /*
2670      * Test all decoders' EOS behavior.
2671      */
2672     private void testEOSBehavior(final String movie, int[] stopAtSample) throws Exception {
2673         Surface s = null;
2674         MediaExtractor extractor = new MediaExtractor();
2675         extractor.setDataSource(mInpPrefix + movie);
2676         extractor.selectTrack(0); // consider variable looping on track
2677         MediaFormat format = extractor.getTrackFormat(0);
2678 
2679         String[] decoderNames = MediaUtils.getDecoderNames(format);
2680         for (String decoderName: decoderNames) {
2681             List<Long> outputChecksums = new ArrayList<Long>();
2682             List<Long> outputTimestamps = new ArrayList<Long>();
2683             Arrays.sort(stopAtSample);
2684             int last = stopAtSample.length - 1;
2685 
2686             // decode reference (longest sequence to stop at + 100) and
2687             // store checksums/pts in outputChecksums and outputTimestamps
2688             // (will fail CHECKFLAG_COMPAREINPUTOUTPUTSAMPLEMATCH)
2689             decodeWithChecks(decoderName, extractor,
2690                     CHECKFLAG_SETCHECKSUM | CHECKFLAG_SETPTS | CHECKFLAG_COMPAREINPUTOUTPUTPTSMATCH,
2691                     RESET_MODE_NONE, s,
2692                     stopAtSample[last] + 100, outputChecksums, outputTimestamps);
2693 
2694             // decode stopAtSample requests in reverse order (longest to
2695             // shortest) and compare to reference checksums/pts in
2696             // outputChecksums and outputTimestamps
2697             for (int i = last; i >= 0; --i) {
2698                 if (true) { // reposition extractor
2699                     extractor.seekTo(0, MediaExtractor.SEEK_TO_NEXT_SYNC);
2700                 } else { // create new extractor
2701                     extractor.release();
2702                     extractor = new MediaExtractor();
2703                     extractor.setDataSource(mInpPrefix + movie);
2704                     extractor.selectTrack(0); // consider variable looping on track
2705                 }
2706                 decodeWithChecks(decoderName, extractor,
2707                         CHECKFLAG_COMPARECHECKSUM | CHECKFLAG_COMPAREPTS
2708                         | CHECKFLAG_COMPAREINPUTOUTPUTSAMPLEMATCH
2709                         | CHECKFLAG_COMPAREINPUTOUTPUTPTSMATCH,
2710                         RESET_MODE_NONE, s,
2711                         stopAtSample[i], outputChecksums, outputTimestamps);
2712             }
2713             extractor.seekTo(0, MediaExtractor.SEEK_TO_NEXT_SYNC);
2714         }
2715 
2716         extractor.release();
2717     }
2718 
2719     private static final int CHECKFLAG_SETCHECKSUM = 1 << 0;
2720     private static final int CHECKFLAG_COMPARECHECKSUM = 1 << 1;
2721     private static final int CHECKFLAG_SETPTS = 1 << 2;
2722     private static final int CHECKFLAG_COMPAREPTS = 1 << 3;
2723     private static final int CHECKFLAG_COMPAREINPUTOUTPUTSAMPLEMATCH = 1 << 4;
2724     private static final int CHECKFLAG_COMPAREINPUTOUTPUTPTSMATCH = 1 << 5;
2725     private static final int CHECKFLAG_RETURN_OUTPUTFRAMES = 1 << 6;
2726     private static final int CHECKFLAG_RETURN_OUTPUTSIZE = 1 << 7;
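
    // How these flags are combined by the helpers in this file: countFrames() passes
    // CHECKFLAG_RETURN_OUTPUTFRAMES | CHECKFLAG_COMPAREINPUTOUTPUTPTSMATCH, countSize() passes
    // CHECKFLAG_RETURN_OUTPUTSIZE, and testEOSBehavior() does a reference pass with
    // CHECKFLAG_SETCHECKSUM | CHECKFLAG_SETPTS followed by comparison passes with
    // CHECKFLAG_COMPARECHECKSUM | CHECKFLAG_COMPAREPTS.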
2727 
2728     /**
2729      * Decodes frames with parameterized checks and return values.
2730      * If decoderName is provided, mediacodec will create that decoder. Otherwise,
2731      * mediacodec will use the default decoder provided by platform.
2732      * The integer return can be selected through the checkFlags variable.
2733      */
2734     private static int decodeWithChecks(
2735             String decoderName, MediaExtractor extractor,
2736             int checkFlags, int resetMode, Surface surface, int stopAtSample,
2737             List<Long> outputChecksums, List<Long> outputTimestamps)
2738             throws Exception {
2739         int trackIndex = extractor.getSampleTrackIndex();
2740         MediaFormat format = extractor.getTrackFormat(trackIndex);
2741         String mime = format.getString(MediaFormat.KEY_MIME);
2742         boolean isAudio = mime.startsWith("audio/");
2743         ByteBuffer[] codecInputBuffers;
2744         ByteBuffer[] codecOutputBuffers;
2745 
2746         MediaCodec codec = decoderName == null
2747                 ? createDecoder(format) : MediaCodec.createByCodecName(decoderName);
2748         Log.i("@@@@", "using codec: " + codec.getName());
2749         codec.configure(format, surface, null /* crypto */, 0 /* flags */);
2750         codec.start();
2751         codecInputBuffers = codec.getInputBuffers();
2752         codecOutputBuffers = codec.getOutputBuffers();
2753 
2754         if (resetMode == RESET_MODE_RECONFIGURE) {
2755             codec.stop();
2756             codec.configure(format, surface, null /* crypto */, 0 /* flags */);
2757             codec.start();
2758             codecInputBuffers = codec.getInputBuffers();
2759             codecOutputBuffers = codec.getOutputBuffers();
2760         } else if (resetMode == RESET_MODE_FLUSH) {
2761             codec.flush();
2762 
2763             // CSD must always be re-queued after a flush, which may happen before the
2764             // output-format-changed event has been received.
2765             queueConfig(codec, format);
2766         }
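
        // A minimal sketch of what queueConfig() (defined elsewhere in this file) is expected to
        // do, assuming codec-specific data is present in the track format as "csd-0" (and
        // possibly "csd-1"); this is illustrative, not the actual implementation:
        //
        //     ByteBuffer csd0 = format.getByteBuffer("csd-0");
        //     int inIdx = codec.dequeueInputBuffer(-1 /* wait */);
        //     codec.getInputBuffer(inIdx).put(csd0.asReadOnlyBuffer());
        //     codec.queueInputBuffer(inIdx, 0 /* offset */, csd0.remaining(),
        //             0 /* presentationTimeUs */, MediaCodec.BUFFER_FLAG_CODEC_CONFIG);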
2767 
2768         // start decode loop
2769         MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
2770 
2771         MediaFormat outFormat = codec.getOutputFormat();
2772         long kTimeOutUs = 5000; // 5ms timeout
2773         String outMime = format.getString(MediaFormat.KEY_MIME);
2774         if ((surface == null) && (outMime != null) && outMime.startsWith("video/")) {
2775             int outWidth = outFormat.getInteger(MediaFormat.KEY_WIDTH);
2776             int outHeight = outFormat.getInteger(MediaFormat.KEY_HEIGHT);
2777             // in the 4K decoding case in byte buffer mode, set kTimeOutUs to 10ms as decode may
2778             // involve a memcpy
2779             if (outWidth * outHeight >= 8000000) {
2780                 kTimeOutUs = 10000;
2781             }
2782         }
2783 
2784         boolean sawInputEOS = false;
2785         boolean sawOutputEOS = false;
2786         int deadDecoderCounter = 0;
2787         int samplenum = 0;
2788         int numframes = 0;
2789         int outputSize = 0;
2790         int width = 0;
2791         int height = 0;
2792         boolean dochecksum = false;
2793         ArrayList<Long> timestamps = new ArrayList<Long>();
2794         if ((checkFlags & CHECKFLAG_SETPTS) != 0) {
2795             outputTimestamps.clear();
2796         }
2797         if ((checkFlags & CHECKFLAG_SETCHECKSUM) != 0) {
2798             outputChecksums.clear();
2799         }
2800         boolean advanceDone = true;
2801         while (!sawOutputEOS && deadDecoderCounter < 100) {
2802             // handle input
2803             if (!sawInputEOS) {
2804                 int inputBufIndex = codec.dequeueInputBuffer(kTimeOutUs);
2805 
2806                 if (inputBufIndex >= 0) {
2807                     ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
2808 
2809                     int sampleSize =
2810                             extractor.readSampleData(dstBuf, 0 /* offset */);
2811                     assertEquals("end of stream should match extractor.advance()", sampleSize >= 0,
2812                             advanceDone);
2813                     long presentationTimeUs = extractor.getSampleTime();
2814                     advanceDone = extractor.advance();
2815                     // int flags = extractor.getSampleFlags();
2816                     // Log.i("@@@@", "read sample " + samplenum + ":" +
2817                     // extractor.getSampleFlags()
2818                     // + " @ " + extractor.getSampleTime() + " size " +
2819                     // sampleSize);
2820 
2821                     if (sampleSize < 0) {
2822                         assertFalse("advance succeeded after failed read", advanceDone);
2823                         Log.d(TAG, "saw input EOS.");
2824                         sawInputEOS = true;
2825                         assertEquals("extractor.readSampleData() must return -1 at end of stream",
2826                                 -1, sampleSize);
2827                         assertEquals("extractor.getSampleTime() must return -1 at end of stream",
2828                                 -1, presentationTimeUs);
2829                         sampleSize = 0; // a negative size is invalid for queueInputBuffer(),
2830                                         // so queue an empty buffer for EOS instead.
2831                     } else {
2832                         timestamps.add(presentationTimeUs);
2833                         samplenum++; // increment before comparing with stopAtSample
2834                         if (samplenum == stopAtSample) {
2835                             Log.d(TAG, "saw input EOS (stop at sample).");
2836                             sawInputEOS = true; // tag this sample as EOS
2837                         }
2838                     }
2839                     codec.queueInputBuffer(
2840                             inputBufIndex,
2841                             0 /* offset */,
2842                             sampleSize,
2843                             presentationTimeUs,
2844                             sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
2845                 } else {
2846                     assertEquals(
2847                             "codec.dequeueInputBuffer() unrecognized return value: " + inputBufIndex,
2848                             MediaCodec.INFO_TRY_AGAIN_LATER, inputBufIndex);
2849                 }
2850             }
2851 
2852             // handle output
2853             int outputBufIndex = codec.dequeueOutputBuffer(info, kTimeOutUs);
2854 
2855             deadDecoderCounter++;
2856             if (outputBufIndex >= 0) {
2857                 if (info.size > 0) { // Disregard 0-sized buffers at the end.
2858                     deadDecoderCounter = 0;
2859                     if (resetMode != RESET_MODE_NONE) {
2860                         // once we've gotten some data out of the decoder, reset
2861                         // and start again
2862                         if (resetMode == RESET_MODE_RECONFIGURE) {
2863                             codec.stop();
2864                             codec.configure(format, surface /* surface */, null /* crypto */,
2865                                     0 /* flags */);
2866                             codec.start();
2867                             codecInputBuffers = codec.getInputBuffers();
2868                             codecOutputBuffers = codec.getOutputBuffers();
2869                         } else if (resetMode == RESET_MODE_FLUSH) {
2870                             codec.flush();
2871                         } else {
2872                             fail("unknown resetMode: " + resetMode);
2873                         }
2874                         // restart at beginning, clear resetMode
2875                         resetMode = RESET_MODE_NONE;
2876                         extractor.seekTo(0, MediaExtractor.SEEK_TO_NEXT_SYNC);
2877                         sawInputEOS = false;
2878                         numframes = 0;
2879                         timestamps.clear();
2880                         if ((checkFlags & CHECKFLAG_SETPTS) != 0) {
2881                             outputTimestamps.clear();
2882                         }
2883                         if ((checkFlags & CHECKFLAG_SETCHECKSUM) != 0) {
2884                             outputChecksums.clear();
2885                         }
2886                         continue;
2887                     }
2888                     if ((checkFlags & CHECKFLAG_COMPAREPTS) != 0) {
2889                         assertTrue("number of frames (" + numframes
2890                                 + ") exceeds number of reference timestamps",
2891                                 numframes < outputTimestamps.size());
2892                         assertEquals("frame ts mismatch at frame " + numframes,
2893                                 (long) outputTimestamps.get(numframes), info.presentationTimeUs);
2894                     } else if ((checkFlags & CHECKFLAG_SETPTS) != 0) {
2895                         outputTimestamps.add(info.presentationTimeUs);
2896                     }
2897                     if ((checkFlags & (CHECKFLAG_SETCHECKSUM | CHECKFLAG_COMPARECHECKSUM)) != 0) {
2898                         long sum = 0;   // note: checksum is 0 if buffer format unrecognized
2899                         if (dochecksum) {
2900                             Image image = codec.getOutputImage(outputBufIndex);
2901                             // use image to do crc if it's available
2902                             // fall back to buffer if image is not available
2903                             if (image != null) {
2904                                 sum = checksum(image);
2905                             } else {
2906                                 // TODO: add stride - right now just use info.size (as before)
2907                                 //sum = checksum(codecOutputBuffers[outputBufIndex], width, height,
2908                                 //        stride);
2909                                 ByteBuffer outputBuffer = codec.getOutputBuffer(outputBufIndex);
2910                                 outputBuffer.position(info.offset);
2911                                 sum = checksum(outputBuffer, info.size);
2912                             }
2913                         }
2914                         if ((checkFlags & CHECKFLAG_COMPARECHECKSUM) != 0) {
2915                             assertTrue("number of frames (" + numframes
2916                                     + ") exceeds number of reference checksums",
2917                                     numframes < outputChecksums.size());
2918                             Log.d(TAG, "orig checksum: " + outputChecksums.get(numframes)
2919                                     + " new checksum: " + sum);
2920                             assertEquals("frame data mismatch at frame " + numframes,
2921                                     (long) outputChecksums.get(numframes), sum);
2922                         } else if ((checkFlags & CHECKFLAG_SETCHECKSUM) != 0) {
2923                             outputChecksums.add(sum);
2924                         }
2925                     }
2926                     if ((checkFlags & CHECKFLAG_COMPAREINPUTOUTPUTPTSMATCH) != 0) {
2927                         assertTrue("output timestamp " + info.presentationTimeUs
2928                                 + " without corresponding input timestamp"
2929                                 , timestamps.remove(info.presentationTimeUs));
2930                     }
2931                     outputSize += info.size;
2932                     numframes++;
2933                 }
2934                 // Log.d(TAG, "got frame, size " + info.size + "/" +
2935                 // info.presentationTimeUs +
2936                 // "/" + numframes + "/" + info.flags);
2937                 codec.releaseOutputBuffer(outputBufIndex, true /* render */);
2938                 if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
2939                     Log.d(TAG, "saw output EOS.");
2940                     sawOutputEOS = true;
2941                 }
2942             } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
2943                 codecOutputBuffers = codec.getOutputBuffers();
2944                 Log.d(TAG, "output buffers have changed.");
2945             } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
2946                 MediaFormat oformat = codec.getOutputFormat();
2947                 if (oformat.containsKey(MediaFormat.KEY_COLOR_FORMAT) &&
2948                         oformat.containsKey(MediaFormat.KEY_WIDTH) &&
2949                         oformat.containsKey(MediaFormat.KEY_HEIGHT)) {
2950                     int colorFormat = oformat.getInteger(MediaFormat.KEY_COLOR_FORMAT);
2951                     width = oformat.getInteger(MediaFormat.KEY_WIDTH);
2952                     height = oformat.getInteger(MediaFormat.KEY_HEIGHT);
2953                     dochecksum = isRecognizedFormat(colorFormat); // only checksum known raw
2954                                                                   // buf formats
2955                     Log.d(TAG, "checksum fmt: " + colorFormat + " dim " + width + "x" + height);
2956                 } else {
2957                     dochecksum = false; // check with audio later
2958                     width = height = 0;
2959                     Log.d(TAG, "output format has changed to (unknown video) " + oformat);
2960                 }
2961             } else {
2962                 assertEquals(
2963                         "codec.dequeueOutputBuffer() unrecognized return index: "
2964                                 + outputBufIndex,
2965                         MediaCodec.INFO_TRY_AGAIN_LATER, outputBufIndex);
2966             }
2967         }
2968         codec.stop();
2969         codec.release();
2970 
2971         assertTrue("last frame didn't have EOS", sawOutputEOS);
2972         if ((checkFlags & CHECKFLAG_COMPAREINPUTOUTPUTSAMPLEMATCH) != 0) {
2973             assertEquals("I!=O", samplenum, numframes);
2974             if (stopAtSample != 0) {
2975                 assertEquals("did not stop with right number of frames", stopAtSample, numframes);
2976             }
2977         }
2978         return (checkFlags & CHECKFLAG_RETURN_OUTPUTSIZE) != 0 ? outputSize :
2979                 (checkFlags & CHECKFLAG_RETURN_OUTPUTFRAMES) != 0 ? numframes :
2980                         0;
2981     }
2982 
2983     @Test
2984     public void testEOSBehaviorH264() throws Exception {
2985         // this video has an I frame at 44
2986         testEOSBehavior("video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4",
2987                 new int[]{1, 44, 45, 55});
2988     }
2989     @Test
2990     public void testEOSBehaviorHEVC() throws Exception {
2991         testEOSBehavior("video_480x360_mp4_hevc_650kbps_30fps_aac_stereo_128kbps_48000hz.mp4",
2992                 new int[]{1, 17, 23, 49});
2993     }
2994 
2995     @Test
2996     public void testEOSBehaviorMpeg2() throws Exception {
2997         testEOSBehavior("video_480x360_mp4_mpeg2_1500kbps_30fps_aac_stereo_128kbps_48000hz.mp4",
2998                 17);
2999         testEOSBehavior("video_480x360_mp4_mpeg2_1500kbps_30fps_aac_stereo_128kbps_48000hz.mp4",
3000                 23);
3001         testEOSBehavior("video_480x360_mp4_mpeg2_1500kbps_30fps_aac_stereo_128kbps_48000hz.mp4",
3002                 49);
3003     }
3004 
3005     @Test
3006     public void testEOSBehaviorH263() throws Exception {
3007         // this video has an I frame every 12 frames.
3008         testEOSBehavior("video_176x144_3gp_h263_300kbps_12fps_aac_stereo_128kbps_22050hz.3gp",
3009                 new int[]{1, 24, 25, 48, 50});
3010     }
3011 
3012     @Test
3013     public void testEOSBehaviorMpeg4() throws Exception {
3014         // this video has an I frame every 12 frames
3015         testEOSBehavior("video_480x360_mp4_mpeg4_860kbps_25fps_aac_stereo_128kbps_44100hz.mp4",
3016                 new int[]{1, 24, 25, 48, 50, 2});
3017     }
3018 
3019     @Test
3020     public void testEOSBehaviorVP8() throws Exception {
3021         // this video has an I frame at 46
3022         testEOSBehavior("video_480x360_webm_vp8_333kbps_25fps_vorbis_stereo_128kbps_48000hz.webm",
3023                 new int[]{1, 46, 47, 57, 45});
3024     }
3025 
3026     @Test
3027     public void testEOSBehaviorVP9() throws Exception {
3028         // this video has an I frame at 44
3029         testEOSBehavior("video_480x360_webm_vp9_333kbps_25fps_vorbis_stereo_128kbps_48000hz.webm",
3030                 new int[]{1, 44, 45, 55, 43});
3031     }
3032 
3033     @Test
3034     public void testEOSBehaviorAV1() throws Exception {
3035         // this video has an I frame at 44
3036         testEOSBehavior("video_480x360_webm_av1_400kbps_30fps_vorbis_stereo_128kbps_48000hz.webm",
3037                 new int[]{1, 44, 45, 55, 43});
3038     }
3039 
3040     /* from EncodeDecodeTest */
3041     private static boolean isRecognizedFormat(int colorFormat) {
3042         // Log.d(TAG, "color format: " + String.format("0x%08x", colorFormat));
3043         switch (colorFormat) {
3044         // these are the formats we know how to handle for this test
3045             case CodecCapabilities.COLOR_FormatYUV420Planar:
3046             case CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
3047             case CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
3048             case CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
3049             case CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
3050             case CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar:
3051                 /*
3052                  * TODO: Check newer formats or ignore.
3053                  * OMX_SEC_COLOR_FormatNV12Tiled = 0x7FC00002
3054                  * OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar64x32Tile2m8ka = 0x7FA30C03: N4/N7_2
3055                  * OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar32m = 0x7FA30C04: N5
3056                  */
3057                 return true;
3058             default:
3059                 return false;
3060         }
3061     }
3062 
3063     private static long checksum(ByteBuffer buf, int size) {
3064         int cap = buf.capacity();
3065         assertTrue("checksum() params are invalid: size = " + size + " cap = " + cap,
3066                 size > 0 && size <= cap);
3067         CRC32 crc = new CRC32();
3068         if (buf.hasArray()) {
3069             crc.update(buf.array(), buf.position() + buf.arrayOffset(), size);
3070         } else {
3071             int pos = buf.position();
3072             final int rdsize = Math.min(4096, size);
3073             byte bb[] = new byte[rdsize];
3074             int chk;
3075             for (int i = 0; i < size; i += chk) {
3076                 chk = Math.min(rdsize, size - i);
3077                 buf.get(bb, 0, chk);
3078                 crc.update(bb, 0, chk);
3079             }
3080             buf.position(pos);
3081         }
3082         return crc.getValue();
3083     }
3084 
3085     private static long checksum(ByteBuffer buf, int width, int height, int stride) {
3086         int cap = buf.capacity();
3087         assertTrue("checksum() params are invalid: w x h , s = "
3088                 + width + " x " + height + " , " + stride + " cap = " + cap,
3089                 width > 0 && width <= stride && height > 0 && height * stride <= cap);
3090         // YUV 4:2:0 data generally occupies a storage height of 1.5x the declared image
3091         // height, with the extra 0.5x holding the UV planes.
3092         //
3093         // We only checksum the Y plane for now; behavior with tiled formats is unclear.
3094         //
3095         //long tm = System.nanoTime();
3096         final int lineinterval = 1; // line sampling frequency
3097         CRC32 crc = new CRC32();
3098         if (buf.hasArray()) {
3099             byte b[] = buf.array();
3100             int offs = buf.arrayOffset();
3101             for (int i = 0; i < height; i += lineinterval) {
3102                 crc.update(b, i * stride + offs, width);
3103             }
3104         } else { // almost always ends up here due to direct buffers
3105             int pos = buf.position();
3106             if (true) { // this branch is roughly 80x faster than the else branch below.
3107                 byte[] bb = new byte[width]; // local line buffer
3108                 for (int i = 0; i < height; i += lineinterval) {
3109                     buf.position(pos + i * stride);
3110                     buf.get(bb, 0, width);
3111                     crc.update(bb, 0, width);
3112                 }
3113             } else {
3114                 for (int i = 0; i < height; i += lineinterval) {
3115                     buf.position(pos + i * stride);
3116                     for (int j = 0; j < width; ++j) {
3117                         crc.update(buf.get());
3118                     }
3119                 }
3120             }
3121             buf.position(pos);
3122         }
3123         //tm = System.nanoTime() - tm;
3124         //Log.d(TAG, "checksum time " + tm);
3125         return crc.getValue();
3126     }
3127 
3128     private static long checksum(Image image) {
3129         int format = image.getFormat();
3130         assertEquals("unsupported image format", ImageFormat.YUV_420_888, format);
3131 
3132         CRC32 crc = new CRC32();
3133 
3134         int imageWidth = image.getWidth();
3135         int imageHeight = image.getHeight();
3136 
3137         Image.Plane[] planes = image.getPlanes();
3138         for (int i = 0; i < planes.length; ++i) {
3139             ByteBuffer buf = planes[i].getBuffer();
3140 
3141             int width, height, rowStride, pixelStride, x, y;
3142             rowStride = planes[i].getRowStride();
3143             pixelStride = planes[i].getPixelStride();
3144             if (i == 0) {
3145                 width = imageWidth;
3146                 height = imageHeight;
3147             } else {
3148                 width = imageWidth / 2;
3149                 height = imageHeight / 2;
3150             }
3151             // local contiguous pixel buffer
3152             byte[] bb = new byte[width * height];
3153             if (buf.hasArray()) {
3154                 byte b[] = buf.array();
3155                 int offs = buf.arrayOffset();
3156                 if (pixelStride == 1) {
3157                     for (y = 0; y < height; ++y) {
3158                         System.arraycopy(b, y * rowStride + offs, bb, y * width, width);
3159                     }
3160                 } else {
3161                     // do it pixel-by-pixel
3162                     for (y = 0; y < height; ++y) {
3163                         int lineOffset = offs + y * rowStride;
3164                         for (x = 0; x < width; ++x) {
3165                             bb[y * width + x] = b[lineOffset + x * pixelStride];
3166                         }
3167                     }
3168                 }
3169             } else { // almost always ends up here due to direct buffers
3170                 int pos = buf.position();
3171                 if (pixelStride == 1) {
3172                     for (y = 0; y < height; ++y) {
3173                         buf.position(pos + y * rowStride);
3174                         buf.get(bb, y * width, width);
3175                     }
3176                 } else {
3177                     // local line buffer
3178                     byte[] lb = new byte[rowStride];
3179                     // do it pixel-by-pixel
3180                     for (y = 0; y < height; ++y) {
3181                         buf.position(pos + y * rowStride);
3182                         // we're only guaranteed to have pixelStride * (width - 1) + 1 bytes
3183                         buf.get(lb, 0, pixelStride * (width - 1) + 1);
3184                         for (x = 0; x < width; ++x) {
3185                             bb[y * width + x] = lb[x * pixelStride];
3186                         }
3187                     }
3188                 }
3189                 buf.position(pos);
3190             }
3191             crc.update(bb, 0, width * height);
3192         }
3193 
3194         return crc.getValue();
3195     }
3196 
3197     @Test
3198     public void testFlush() throws Exception {
3199         testFlush("loudsoftwav.wav");
3200         testFlush("loudsoftogg.ogg");
3201         testFlush("loudsoftoggmkv.mkv");
3202         testFlush("loudsoftoggmp4.mp4");
3203         testFlush("loudsoftmp3.mp3");
3204         testFlush("loudsoftaac.aac");
3205         testFlush("loudsoftfaac.m4a");
3206         testFlush("loudsoftitunes.m4a");
3207     }
3208 
3209     private void testFlush(final String resource) throws Exception {
3210         MediaExtractor extractor;
3211         MediaCodec codec;
3212         ByteBuffer[] codecInputBuffers;
3213         ByteBuffer[] codecOutputBuffers;
3214 
3215         extractor = new MediaExtractor();
3216         extractor.setDataSource(mInpPrefix + resource);
3217 
3218         assertEquals("wrong number of tracks", 1, extractor.getTrackCount());
3219         MediaFormat format = extractor.getTrackFormat(0);
3220         String mime = format.getString(MediaFormat.KEY_MIME);
3221         assertTrue("not an audio file", mime.startsWith("audio/"));
3222 
3223         codec = MediaCodec.createDecoderByType(mime);
3224         assertNotNull("couldn't find codec " + mime, codec);
3225 
3226         codec.configure(format, null /* surface */, null /* crypto */, 0 /* flags */);
3227         codec.start();
3228         codecInputBuffers = codec.getInputBuffers();
3229         codecOutputBuffers = codec.getOutputBuffers();
3230 
3231         extractor.selectTrack(0);
3232 
3233         // decode a bit of the first part of the file, and verify the amplitude
3234         short maxvalue1 = getAmplitude(extractor, codec);
3235 
3236         // flush the codec and seek the extractor a different position, then decode a bit more
3237         // and check the amplitude
3238         extractor.seekTo(8000000, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
3239         codec.flush();
3240         short maxvalue2 = getAmplitude(extractor, codec);
3241 
3242         assertTrue("first section amplitude too low", maxvalue1 > 20000);
3243         assertTrue("second section amplitude too high", maxvalue2 < 5000);
3244         codec.stop();
3245         codec.release();
3246 
3247     }
3248 
3249     private short getAmplitude(MediaExtractor extractor, MediaCodec codec) {
3250         short maxvalue = 0;
3251         int numBytesDecoded = 0;
3252         final long kTimeOutUs = 5000;
3253         ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
3254         ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
3255         MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
3256 
3257         while(numBytesDecoded < 44100 * 2) {
3258             int inputBufIndex = codec.dequeueInputBuffer(kTimeOutUs);
3259 
3260             if (inputBufIndex >= 0) {
3261                 ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
3262 
3263                 int sampleSize = extractor.readSampleData(dstBuf, 0 /* offset */);
3264                 long presentationTimeUs = extractor.getSampleTime();
3265 
3266                 codec.queueInputBuffer(
3267                         inputBufIndex,
3268                         0 /* offset */,
3269                         sampleSize,
3270                         presentationTimeUs,
3271                         0 /* flags */);
3272 
3273                 extractor.advance();
3274             }
3275             int res = codec.dequeueOutputBuffer(info, kTimeOutUs);
3276 
3277             if (res >= 0) {
3278 
3279                 int outputBufIndex = res;
3280                 ByteBuffer buf = codecOutputBuffers[outputBufIndex];
3281 
3282                 buf.position(info.offset);
3283                 for (int i = 0; i < info.size; i += 2) {
3284                     short sample = buf.getShort();
3285                     if (maxvalue < sample) {
3286                         maxvalue = sample;
3287                     }
3289                 }
3290 
3291                 numBytesDecoded += info.size;
3292 
3293                 codec.releaseOutputBuffer(outputBufIndex, false /* render */);
3294             } else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
3295                 codecOutputBuffers = codec.getOutputBuffers();
3296             } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
3297                 MediaFormat oformat = codec.getOutputFormat();
3298             }
3299         }
3300         return maxvalue;
3301     }
3302 
3303     /* return true if a particular video feature is supported for the given mimetype */
3304     private boolean isVideoFeatureSupported(String mimeType, String feature) {
3305         MediaFormat format = MediaFormat.createVideoFormat(mimeType, 1920, 1080);
3306         format.setFeatureEnabled(feature, true);
3307         MediaCodecList mcl = new MediaCodecList(MediaCodecList.ALL_CODECS);
3308         String codecName = mcl.findDecoderForFormat(format);
3309         return codecName != null;
3310     }
3311 
3312     /**
3313      * Test tunneled video playback mode if supported
3314      *
3315      * TODO(b/182915887): Test all the codecs advertised by the DUT for the provided test content
3316      */
3317     private void tunneledVideoPlayback(String mimeType, String videoName) throws Exception {
3318         if (!MediaUtils.check(isVideoFeatureSupported(mimeType, FEATURE_TunneledPlayback),
3319                     "No tunneled video playback codec found for MIME " + mimeType)) {
3320             return;
3321         }
3322 
3323         AudioManager am = (AudioManager)mContext.getSystemService(Context.AUDIO_SERVICE);
3324         mMediaCodecPlayer = new MediaCodecTunneledPlayer(
3325                 mContext, getActivity().getSurfaceHolder(), true, am.generateAudioSessionId());
3326 
3327         Uri mediaUri = Uri.fromFile(new File(mInpPrefix, videoName));
3328         mMediaCodecPlayer.setAudioDataSource(mediaUri, null);
3329         mMediaCodecPlayer.setVideoDataSource(mediaUri, null);
3330         assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
3331         mMediaCodecPlayer.startCodec();
3332 
3333         // When video codecs are started, large chunks of contiguous physical memory need to be
3334         // allocated, which, on low-RAM devices, can trigger high CPU usage for moving memory
3335         // around to create contiguous space for the video decoder. This can cause an increase in
3336         // startup time for playback.
3337         ActivityManager activityManager = mContext.getSystemService(ActivityManager.class);
3338         long firstFrameRenderedTimeoutSeconds = activityManager.isLowRamDevice() ? 3 : 1;
3339 
3340         mMediaCodecPlayer.play();
3341         sleepUntil(() ->
3342                 mMediaCodecPlayer.getCurrentPosition() > CodecState.UNINITIALIZED_TIMESTAMP
3343                 && mMediaCodecPlayer.getTimestamp() != null
3344                 && mMediaCodecPlayer.getTimestamp().framePosition > 0,
3345                 Duration.ofSeconds(firstFrameRenderedTimeoutSeconds));
3346         assertNotEquals("onFrameRendered was not called",
3347                 mMediaCodecPlayer.getVideoTimeUs(), CodecState.UNINITIALIZED_TIMESTAMP);
3348         assertNotEquals("Audio timestamp is null", mMediaCodecPlayer.getTimestamp(), null);
3349         assertNotEquals("Audio timestamp has a zero frame position",
3350                 mMediaCodecPlayer.getTimestamp().framePosition, 0);
3351 
3352         final long durationMs = mMediaCodecPlayer.getDuration();
3353         final long timeOutMs = System.currentTimeMillis() + durationMs + 5 * 1000; // add 5 sec
3354         while (!mMediaCodecPlayer.isEnded()) {
3355             assertTrue("Tunneled video playback timeout exceeded",
3356                     timeOutMs > System.currentTimeMillis());
3357             Thread.sleep(SLEEP_TIME_MS);
3358             if (mMediaCodecPlayer.getCurrentPosition() >= mMediaCodecPlayer.getDuration()) {
3359                 Log.d(TAG, "testTunneledVideoPlayback -- current pos = " +
3360                         mMediaCodecPlayer.getCurrentPosition() +
3361                         ">= duration = " + mMediaCodecPlayer.getDuration());
3362                 break;
3363             }
3364         }
3365         // mMediaCodecPlayer.reset() handled in TearDown();
3366     }
3367 
3368     /**
3369      * Test tunneled video playback mode with HEVC if supported
3370      */
3371     @Test
3372     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
3373     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3374     public void testTunneledVideoPlaybackHevc() throws Exception {
3375         tunneledVideoPlayback(MediaFormat.MIMETYPE_VIDEO_HEVC,
3376                     "video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv");
3377     }
3378 
3379     /**
3380      * Test tunneled video playback mode with AVC if supported
3381      */
3382     @Test
3383     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
3384     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3385     public void testTunneledVideoPlaybackAvc() throws Exception {
3386         tunneledVideoPlayback(MediaFormat.MIMETYPE_VIDEO_AVC,
3387                 "video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4");
3388     }
3389 
3390     /**
3391      * Test tunneled video playback mode with VP9 if supported
3392      */
3393     @Test
3394     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
3395     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3396     public void testTunneledVideoPlaybackVp9() throws Exception {
3397         tunneledVideoPlayback(MediaFormat.MIMETYPE_VIDEO_VP9,
3398                 "bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm");
3399     }
3400 
3401     /**
3402      * Test tunneled video playback flush if supported
3403      *
3404      * TODO(b/182915887): Test all the codecs advertised by the DUT for the provided test content
3405      */
3406     private void testTunneledVideoFlush(String mimeType, String videoName) throws Exception {
3407         if (!MediaUtils.check(isVideoFeatureSupported(mimeType, FEATURE_TunneledPlayback),
3408                     "No tunneled video playback codec found for MIME " + mimeType)) {
3409             return;
3410         }
3411 
3412         AudioManager am = (AudioManager)mContext.getSystemService(Context.AUDIO_SERVICE);
3413         mMediaCodecPlayer = new MediaCodecTunneledPlayer(
3414                 mContext, getActivity().getSurfaceHolder(), true, am.generateAudioSessionId());
3415 
3416         Uri mediaUri = Uri.fromFile(new File(mInpPrefix, videoName));
3417         mMediaCodecPlayer.setAudioDataSource(mediaUri, null);
3418         mMediaCodecPlayer.setVideoDataSource(mediaUri, null);
3419         assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
3420         mMediaCodecPlayer.startCodec();
3421 
3422         mMediaCodecPlayer.play();
3423         sleepUntil(() ->
3424                 mMediaCodecPlayer.getCurrentPosition() > CodecState.UNINITIALIZED_TIMESTAMP
3425                 && mMediaCodecPlayer.getTimestamp() != null
3426                 && mMediaCodecPlayer.getTimestamp().framePosition > 0,
3427                 Duration.ofSeconds(1));
3428         assertNotEquals("onFrameRendered was not called",
3429                 mMediaCodecPlayer.getVideoTimeUs(), CodecState.UNINITIALIZED_TIMESTAMP);
3430         assertNotEquals("Audio timestamp is null", mMediaCodecPlayer.getTimestamp(), null);
3431         assertNotEquals("Audio timestamp has a zero frame position",
3432                 mMediaCodecPlayer.getTimestamp().framePosition, 0);
3433 
3434         mMediaCodecPlayer.pause();
3435         mMediaCodecPlayer.flush();
3436         // mMediaCodecPlayer.reset() handled in TearDown();
3437     }
3438 
3439     /**
3440      * Test tunneled video playback flush with HEVC if supported
3441      */
3442     @Test
3443     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
3444     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3445     public void testTunneledVideoFlushHevc() throws Exception {
3446         testTunneledVideoFlush(MediaFormat.MIMETYPE_VIDEO_HEVC,
3447                 "video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv");
3448     }
3449 
3450     /**
3451      * Test tunneled video playback flush with AVC if supported
3452      */
3453     @Test
3454     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
3455     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3456     public void testTunneledVideoFlushAvc() throws Exception {
3457         testTunneledVideoFlush(MediaFormat.MIMETYPE_VIDEO_AVC,
3458                 "video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4");
3459     }
3460 
3461     /**
3462      * Test tunneled video playback flush with VP9 if supported
3463      */
3464     @Test
3465     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
3466     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3467     public void testTunneledVideoFlushVp9() throws Exception {
3468         testTunneledVideoFlush(MediaFormat.MIMETYPE_VIDEO_VP9,
3469                 "bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm");
3470     }
3471 
3472     /**
3473      * Test that the first frame is rendered when video peek is on in tunneled mode.
3474      *
3475      * TODO(b/182915887): Test all the codecs advertised by the DUT for the provided test content
3476      */
3477     private void testTunneledVideoPeekOn(String mimeType, String videoName, float frameRate)
3478             throws Exception {
3479         if (!MediaUtils.check(isVideoFeatureSupported(mimeType, FEATURE_TunneledPlayback),
3480                     "No tunneled video playback codec found for MIME " + mimeType)) {
3481             return;
3482         }
3483 
3484         // Setup tunnel mode test media player
3485         AudioManager am = mContext.getSystemService(AudioManager.class);
3486         mMediaCodecPlayer = new MediaCodecTunneledPlayer(
3487                 mContext, getActivity().getSurfaceHolder(), true, am.generateAudioSessionId());
3488 
3489         // Frame rate is needed by some devices to initialize the display hardware
3490         mMediaCodecPlayer.setFrameRate(frameRate);
3491 
3492         Uri mediaUri = Uri.fromFile(new File(mInpPrefix, videoName));
3493         mMediaCodecPlayer.setAudioDataSource(mediaUri, null);
3494         mMediaCodecPlayer.setVideoDataSource(mediaUri, null);
3495         assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
3496         mMediaCodecPlayer.startCodec();
3497         mMediaCodecPlayer.setVideoPeek(true); // Enable video peek
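        // Illustrative sketch (an assumption about MediaCodecTunneledPlayer internals, not part
        // of this test): setVideoPeek() is expected to forward the flag to the video codec via
        // the documented tunnel-peek parameter (1 = render the first decoded frame even before
        // playback starts, 0 = hold it until playback starts), roughly as follows:
        //
        //     Bundle params = new Bundle();
        //     params.putInt(MediaCodec.PARAMETER_KEY_TUNNEL_PEEK, enable ? 1 : 0);
        //     videoCodec.setParameters(params);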
3498 
3499         // Queue the first video frame, which should not be rendered imminently
3500         mMediaCodecPlayer.queueOneVideoFrame();
3501 
3502         // Assert that onFirstTunnelFrameReady is called
3503         final int waitForFrameReadyMs = 150;
3504         Thread.sleep(waitForFrameReadyMs);
3505         assertTrue(String.format("onFirstTunnelFrameReady not called within %d milliseconds",
3506                         waitForFrameReadyMs),
3507                 mMediaCodecPlayer.isFirstTunnelFrameReady());
3508 
3509         // This is long due to high-latency display pipelines on TV devices
3510         final int waitForRenderingMs = 1000;
3511         Thread.sleep(waitForRenderingMs);
3512 
3513         // Assert that video peek is enabled and working
3514         assertNotEquals(String.format("First frame not rendered within %d milliseconds",
3515                         waitForRenderingMs), CodecState.UNINITIALIZED_TIMESTAMP,
3516                 mMediaCodecPlayer.getCurrentPosition());
3517 
3518         // mMediaCodecPlayer.reset() is handled in tearDown()
3519     }
3520 
3521     /**
3522      * Test that the first frame is rendered when video peek is on for HEVC in tunneled mode.
3523      */
3524     @Test
3525     @ApiTest(apis={"android.media.MediaCodec#PARAMETER_KEY_TUNNEL_PEEK"})
3526     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3527     public void testTunneledVideoPeekOnHevc() throws Exception {
3528         testTunneledVideoPeekOn(MediaFormat.MIMETYPE_VIDEO_HEVC,
3529                 "video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv", 25);
3530     }
3531 
3532     /**
3533      * Test that the first frame is rendered when video peek is on for AVC in tunneled mode.
3534      */
3535     @Test
3536     @ApiTest(apis={"android.media.MediaCodec#PARAMETER_KEY_TUNNEL_PEEK"})
3537     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3538     public void testTunneledVideoPeekOnAvc() throws Exception {
3539         testTunneledVideoPeekOn(MediaFormat.MIMETYPE_VIDEO_AVC,
3540                 "video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4", 25);
3541     }
3542 
3543     /**
3544      * Test that the first frame is rendered when video peek is on for VP9 in tunneled mode.
3545      */
3546     @Test
3547     @ApiTest(apis={"android.media.MediaCodec#PARAMETER_KEY_TUNNEL_PEEK"})
3548     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3549     public void testTunneledVideoPeekOnVp9() throws Exception {
3550         testTunneledVideoPeekOn(MediaFormat.MIMETYPE_VIDEO_VP9,
3551                 "bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm",
3552                 30);
3553     }
3554 
3555 
3556     /**
3557      * Test that peek off doesn't render the first frame until turned on in tunneled mode.
3558      *
3559      * TODO(b/182915887): Test all the codecs advertised by the DUT for the provided test content
3560      */
3561     private void testTunneledVideoPeekOff(String mimeType, String videoName, float frameRate)
3562             throws Exception {
3563         if (!MediaUtils.check(isVideoFeatureSupported(mimeType, FEATURE_TunneledPlayback),
3564                     "No tunneled video playback codec found for MIME " + mimeType)) {
3565             return;
3566         }
3567 
3568         // Setup tunnel mode test media player
3569         AudioManager am = mContext.getSystemService(AudioManager.class);
3570         mMediaCodecPlayer = new MediaCodecTunneledPlayer(
3571                 mContext, getActivity().getSurfaceHolder(), true, am.generateAudioSessionId());
3572 
3573         // Frame rate is needed by some devices to initialize the display hardware
3574         mMediaCodecPlayer.setFrameRate(frameRate);
3575 
3576         Uri mediaUri = Uri.fromFile(new File(mInpPrefix, videoName));
3577         mMediaCodecPlayer.setAudioDataSource(mediaUri, null);
3578         mMediaCodecPlayer.setVideoDataSource(mediaUri, null);
3579         assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
3580         mMediaCodecPlayer.startCodec();
3581         mMediaCodecPlayer.setVideoPeek(false); // Disable video peek
3582 
3583         // Queue the first video frame, which should not be rendered yet
3584         mMediaCodecPlayer.queueOneVideoFrame();
3585 
3586         // Assert that onFirstTunnelFrameReady is called
3587         final int waitForFrameReadyMs = 150;
3588         Thread.sleep(waitForFrameReadyMs);
3589         assertTrue(String.format("onFirstTunnelFrameReady not called within %d milliseconds",
3590                         waitForFrameReadyMs),
3591                 mMediaCodecPlayer.isFirstTunnelFrameReady());
3592 
3593         // This is long due to high-latency display pipelines on TV devices
3594         final int waitForRenderingMs = 1000;
3595         Thread.sleep(waitForRenderingMs);
3596 
3597         // Assert the video frame has not been peeked yet
3598         assertEquals("First frame rendered while peek disabled", CodecState.UNINITIALIZED_TIMESTAMP,
3599                 mMediaCodecPlayer.getCurrentPosition());
3600 
3601         // Enable video peek
3602         mMediaCodecPlayer.setVideoPeek(true);
3603         Thread.sleep(waitForRenderingMs);
3604 
3605         // Assert that the first frame was rendered
3606         assertNotEquals(String.format(
3607                         "First frame not rendered within %d milliseconds after peek is enabled",
3608                         waitForRenderingMs), CodecState.UNINITIALIZED_TIMESTAMP,
3609                 mMediaCodecPlayer.getCurrentPosition());
3610 
3611         // mMediaCodecPlayer.reset() is handled in tearDown()
3612     }
3613 
3614     /**
3615      * Test that peek off doesn't render the first frame until turned on for HEVC in tunneled mode.
3616      */
3617     @Test
3618     @ApiTest(apis={"android.media.MediaCodec#PARAMETER_KEY_TUNNEL_PEEK"})
3619     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3620     public void testTunneledVideoPeekOffHevc() throws Exception {
3621         testTunneledVideoPeekOff(MediaFormat.MIMETYPE_VIDEO_HEVC,
3622                 "video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv", 25);
3623     }
3624 
3625     /**
3626      * Test that peek off doesn't render the first frame until turned on for AVC in tunneled mode.
3627      */
3628     @Test
3629     @ApiTest(apis={"android.media.MediaCodec#PARAMETER_KEY_TUNNEL_PEEK"})
3630     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3631     public void testTunneledVideoPeekOffAvc() throws Exception {
3632         testTunneledVideoPeekOff(MediaFormat.MIMETYPE_VIDEO_AVC,
3633                 "video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4", 25);
3634     }
3635 
3636     /**
3637      * Test that peek off doesn't render the first frame until turned on for VP9 in tunneled mode.
3638      */
3639     @Test
3640     @ApiTest(apis={"android.media.MediaCodec#PARAMETER_KEY_TUNNEL_PEEK"})
3641     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3642     public void testTunneledVideoPeekOffVp9() throws Exception {
3643         testTunneledVideoPeekOff(MediaFormat.MIMETYPE_VIDEO_VP9,
3644                 "bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm",
3645                 30);
3646     }
3647 
3648     /**
3649      * Test that audio timestamps don't progress during audio PTS gaps in tunneled mode.
3650      *
3651      * See: https://source.android.com/docs/devices/tv/multimedia-tunneling#behavior
3652      */
3653     private void testTunneledAudioProgressWithPtsGaps(String mimeType, String fileName)
3654             throws Exception {
3655         if (!MediaUtils.check(isVideoFeatureSupported(mimeType, FEATURE_TunneledPlayback),
3656                     "No tunneled video playback codec found for MIME " + mimeType)) {
3657             return;
3658         }
3659 
3660         AudioManager am = mContext.getSystemService(AudioManager.class);
3661 
3662         mMediaCodecPlayer = new MediaCodecTunneledPlayer(mContext,
3663                 getActivity().getSurfaceHolder(), true, am.generateAudioSessionId());
3664 
3665         final Uri mediaUri = Uri.fromFile(new File(mInpPrefix, fileName));
3666         mMediaCodecPlayer.setAudioDataSource(mediaUri, null);
3667         mMediaCodecPlayer.setVideoDataSource(mediaUri, null);
3668         assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
3669         mMediaCodecPlayer.startCodec();
3670 
3671         mMediaCodecPlayer.play();
3672         sleepUntil(() ->
3673                 mMediaCodecPlayer.getCurrentPosition() > CodecState.UNINITIALIZED_TIMESTAMP
3674                 && mMediaCodecPlayer.getTimestamp() != null
3675                 && mMediaCodecPlayer.getTimestamp().framePosition > 0,
3676                 Duration.ofSeconds(1));
3677         assertNotEquals("onFrameRendered was not called",
3678                 mMediaCodecPlayer.getVideoTimeUs(), CodecState.UNINITIALIZED_TIMESTAMP);
3679         assertNotEquals("Audio timestamp is null", mMediaCodecPlayer.getTimestamp(), null);
3680         assertNotEquals("Audio timestamp has a zero frame position",
3681                 mMediaCodecPlayer.getTimestamp().framePosition, 0);
3682 
3683         // After 100 ms of playback, simulate a PTS gap of 100 ms
3684         Thread.sleep(100);
3685         mMediaCodecPlayer.setAudioTrackOffsetNs(100L * 1000000);
3686 
3687         // Verify that at some point in time in the future, the framePosition stopped advancing.
3688         // This should happen when the PTS gap is encountered - silence is rendered to fill the
3689         // PTS gap, but this silence should not cause framePosition to advance.
3690         {
3691             final long ptsGapTimeoutMs = 3000;
3692             long startTimeMs = System.currentTimeMillis();
3693             AudioTimestamp previousTimestamp;
3694             do {
3695                 assertTrue(String.format("No audio PTS gap after %d milliseconds", ptsGapTimeoutMs),
3696                         System.currentTimeMillis() - startTimeMs < ptsGapTimeoutMs);
3697                 previousTimestamp = mMediaCodecPlayer.getTimestamp();
3698                 Thread.sleep(50);
3699             } while (mMediaCodecPlayer.getTimestamp().framePosition
3700                     != previousTimestamp.framePosition);
3701         }
3702 
3703         // Allow the playback to advance past the PTS gap and back to normal operation
3704         Thread.sleep(500);
3705 
3706         // Simulate the end of playback by pretending that we have no more audio data
3707         mMediaCodecPlayer.stopDrainingAudioOutputBuffers(true);
3708 
3709         // Sleep till framePosition stabilizes, i.e. playback is complete
3710         {
3711             long endOfPlaybackTimeoutMs = 20000;
3712             long startTimeMs = System.currentTimeMillis();
3713             AudioTimestamp previousTimestamp;
3714             do {
3715                 assertTrue(String.format("No end of playback after %d milliseconds",
3716                                 endOfPlaybackTimeoutMs),
3717                         System.currentTimeMillis() - startTimeMs < endOfPlaybackTimeoutMs);
3718                 previousTimestamp = mMediaCodecPlayer.getTimestamp();
3719                 Thread.sleep(100);
3720             } while (mMediaCodecPlayer.getTimestamp().framePosition
3721                     != previousTimestamp.framePosition);
3722         }
3723 
3724         // Verify if number of frames written and played are same even if PTS gaps were present
3725         // in the playback.
3726         assertEquals("Number of frames written != Number of frames played",
3727                 mMediaCodecPlayer.getAudioFramesWritten(),
3728                 mMediaCodecPlayer.getTimestamp().framePosition);
3729     }
3730 
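    // The audio tests in this section repeat the same polling pattern: sample AudioTimestamp,
    // sleep, and loop until framePosition stops advancing or a timeout expires. Below is a
    // minimal sketch of a helper that captures that pattern; the method name, parameters and
    // poll interval are illustrative only and are not part of the original test suite.
    private void waitUntilAudioFramePositionStabilizes(String timeoutMessage, long timeoutMs)
            throws Exception {
        long startTimeMs = System.currentTimeMillis();
        AudioTimestamp previousTimestamp;
        do {
            // Fail with the caller-supplied message if framePosition is still moving at timeout
            assertTrue(timeoutMessage, System.currentTimeMillis() - startTimeMs < timeoutMs);
            previousTimestamp = mMediaCodecPlayer.getTimestamp();
            Thread.sleep(100);
        } while (mMediaCodecPlayer.getTimestamp().framePosition
                != previousTimestamp.framePosition);
    }
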
3731     /**
3732      * Test that audio timestamps don't progress during audio PTS gaps for HEVC in tunneled mode.
3733      */
3734     @Test
3735     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
3736     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3737     public void testTunneledAudioProgressWithPtsGapsHevc() throws Exception {
3738         testTunneledAudioProgressWithPtsGaps(MediaFormat.MIMETYPE_VIDEO_HEVC,
3739                 "video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv");
3740     }
3741 
3742     /**
3743      * Test that audio timestamps don't progress during audio PTS gaps for AVC in tunneled mode.
3744      */
3745     @Test
3746     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
3747     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3748     public void testTunneledAudioProgressWithPtsGapsAvc() throws Exception {
3749         testTunneledAudioProgressWithPtsGaps(MediaFormat.MIMETYPE_VIDEO_AVC,
3750                 "video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4");
3751     }
3752 
3753     /**
3754      * Test that audio timestamps don't progress during audio PTS gaps for VP9 in tunneled mode.
3755      */
3756     @Test
3757     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
3758     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3759     public void testTunneledAudioProgressWithPtsGapsVp9() throws Exception {
3760         testTunneledAudioProgressWithPtsGaps(MediaFormat.MIMETYPE_VIDEO_VP9,
3761                 "bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm");
3762     }
3763 
3764     /**
3765      * Test that audio timestamps stop progressing during underrun in tunneled mode.
3766      *
3767      * See: https://source.android.com/docs/devices/tv/multimedia-tunneling#behavior
3768      */
3769     private void testTunneledAudioProgressWithUnderrun(String mimeType, String fileName)
3770             throws Exception {
3771         if (!MediaUtils.check(isVideoFeatureSupported(mimeType, FEATURE_TunneledPlayback),
3772                 "No tunneled video playback codec found for MIME " + mimeType)) {
3773             return;
3774         }
3775 
3776         AudioManager am = mContext.getSystemService(AudioManager.class);
3777 
3778         mMediaCodecPlayer = new MediaCodecTunneledPlayer(mContext,
3779                 getActivity().getSurfaceHolder(), true, am.generateAudioSessionId());
3780 
3781         final Uri mediaUri = Uri.fromFile(new File(mInpPrefix, fileName));
3782         mMediaCodecPlayer.setAudioDataSource(mediaUri, null);
3783         mMediaCodecPlayer.setVideoDataSource(mediaUri, null);
3784         assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
3785         mMediaCodecPlayer.startCodec();
3786 
3787         mMediaCodecPlayer.play();
3788         sleepUntil(() ->
3789                 mMediaCodecPlayer.getCurrentPosition() > CodecState.UNINITIALIZED_TIMESTAMP
3790                 && mMediaCodecPlayer.getTimestamp() != null
3791                 && mMediaCodecPlayer.getTimestamp().framePosition > 0,
3792                 Duration.ofSeconds(1));
3793         assertNotEquals("onFrameRendered was not called",
3794                 mMediaCodecPlayer.getVideoTimeUs(), CodecState.UNINITIALIZED_TIMESTAMP);
3795         assertNotEquals("Audio timestamp is null", mMediaCodecPlayer.getTimestamp(), null);
3796         assertNotEquals("Audio timestamp has a zero frame position",
3797                 mMediaCodecPlayer.getTimestamp().framePosition, 0);
3798 
3799         // After 200 ms of playback, stop writing to the AudioTrack to simulate underrun
3800         Thread.sleep(200);
3801         mMediaCodecPlayer.stopDrainingAudioOutputBuffers(true);
3802 
3803         // Sleep till framePosition stabilizes, i.e. AudioTrack is in an underrun condition
3804         {
3805             long underrunTimeoutMs = 3000;
3806             long startTimeMs = System.currentTimeMillis();
3807             AudioTimestamp previousTimestamp;
3808             do {
3809                 assertTrue(String.format("No underrun after %d milliseconds",
3810                                 underrunTimeoutMs),
3811                         System.currentTimeMillis() - startTimeMs < underrunTimeoutMs);
3812                 previousTimestamp = mMediaCodecPlayer.getTimestamp();
3813                 Thread.sleep(100);
3814             } while (mMediaCodecPlayer.getTimestamp().framePosition
3815                     != previousTimestamp.framePosition);
3816         }
3817 
3818         // After 200 ms of starving the AudioTrack, resume writing
3819         Thread.sleep(200);
3820         mMediaCodecPlayer.stopDrainingAudioOutputBuffers(false);
3821 
3822         // After 200 ms, simulate the end of playback by pretending that we have no more audio data
3823         Thread.sleep(200);
3824         mMediaCodecPlayer.stopDrainingAudioOutputBuffers(true);
3825 
3826         // Sleep till framePosition stabilizes, i.e. playback is complete
3827         {
3828             long endOfPlaybackTimeoutMs = 3000;
3829             long startTimeMs = System.currentTimeMillis();
3830             AudioTimestamp previousTimestamp;
3831             do {
3832                 assertTrue(String.format("No end of playback after %d milliseconds",
3833                                 endOfPlaybackTimeoutMs),
3834                         System.currentTimeMillis() - startTimeMs < endOfPlaybackTimeoutMs);
3835                 previousTimestamp = mMediaCodecPlayer.getTimestamp();
3836                 Thread.sleep(100);
3837             } while (mMediaCodecPlayer.getTimestamp().framePosition
3838                     != previousTimestamp.framePosition);
3839         }
3840 
3841         // Verify if number of frames written and played are same even if an underrun condition
3842         // occurs.
3843         assertEquals("Number of frames written != Number of frames played",
3844                 mMediaCodecPlayer.getAudioFramesWritten(),
3845                 mMediaCodecPlayer.getTimestamp().framePosition);
3846     }
3847 
3848     /**
3849      * Test that audio timestamps stop progressing during underrun for HEVC in tunneled mode.
3850      */
3851     @Test
3852     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
3853     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3854     public void testTunneledAudioProgressWithUnderrunHevc() throws Exception {
3855         testTunneledAudioProgressWithUnderrun(MediaFormat.MIMETYPE_VIDEO_HEVC,
3856                 "video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv");
3857     }
3858 
3859     /**
3860      * Test that audio timestamps stop progressing during underrun for AVC in tunneled mode.
3861      */
3862     @Test
3863     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
3864     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3865     public void testTunneledAudioProgressWithUnderrunAvc() throws Exception {
3866         testTunneledAudioProgressWithUnderrun(MediaFormat.MIMETYPE_VIDEO_AVC,
3867                 "video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4");
3868     }
3869 
3870     /**
3871      * Test that audio timestamps stop progressing during underrun for VP9 in tunneled mode.
3872      */
3873     @Test
3874     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
3875     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3876     public void testTunneledAudioProgressWithUnderrunVp9() throws Exception {
3877         testTunneledAudioProgressWithUnderrun(MediaFormat.MIMETYPE_VIDEO_VP9,
3878                 "bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm");
3879     }
3880 
3881     /**
3882      * Test accurate video rendering after a flush in tunneled mode.
3883      *
3884      * On some devices, queuing content while the player is paused, then triggering a flush, then
3885      * queuing more content does not behave as expected: the queued content gets lost and the
3886      * flush is effectively only applied once playback has resumed.
3887      *
3888      * TODO(b/182915887): Test all the codecs advertised by the DUT for the provided test content
3889      */
3890     private void testTunneledAccurateVideoFlush(String mimeType, String videoName)
3891             throws Exception {
3892         if (!MediaUtils.check(isVideoFeatureSupported(mimeType, FEATURE_TunneledPlayback),
3893                     "No tunneled video playback codec found for MIME " + mimeType)) {
3894             return;
3895         }
3896 
3897         // Below are some timings used throughout this test.
3898         //
3899         // Maximum allowed time between start of playback and first frame displayed
3900         final long maxAllowedTimeToFirstFrameMs = 500;
3901         // Maximum allowed time between issuing a pause and the last frame being displayed
3902         final long maxDrainTimeMs = 200;
3903 
3904         // Setup tunnel mode test media player
3905         AudioManager am = mContext.getSystemService(AudioManager.class);
3906         mMediaCodecPlayer = new MediaCodecTunneledPlayer(
3907                 mContext, getActivity().getSurfaceHolder(), true, am.generateAudioSessionId());
3908 
3909         Uri mediaUri = Uri.fromFile(new File(mInpPrefix, videoName));
3910         mMediaCodecPlayer.setAudioDataSource(mediaUri, null);
3911         mMediaCodecPlayer.setVideoDataSource(mediaUri, null);
3912         assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
3913         mMediaCodecPlayer.startCodec();
3914         // Video peek might interfere with the test: we want to ensure that queuing more data
3915         // during a pause does not cause additional video frames to be displayed, which is
3916         // precisely what video peek does.
3917         mMediaCodecPlayer.setVideoPeek(false);
3918 
3919         mMediaCodecPlayer.play();
3920         sleepUntil(() ->
3921                 mMediaCodecPlayer.getCurrentPosition() > CodecState.UNINITIALIZED_TIMESTAMP
3922                 && mMediaCodecPlayer.getTimestamp() != null
3923                 && mMediaCodecPlayer.getTimestamp().framePosition > 0,
3924                 Duration.ofSeconds(1));
3925         assertNotEquals("onFrameRendered was not called",
3926                 mMediaCodecPlayer.getVideoTimeUs(), CodecState.UNINITIALIZED_TIMESTAMP);
3927         assertNotEquals("Audio timestamp is null", mMediaCodecPlayer.getTimestamp(), null);
3928         assertNotEquals("Audio timestamp has a zero frame position",
3929                 mMediaCodecPlayer.getTimestamp().framePosition, 0);
3930 
3931         // Allow some time for playback to commence
3932         Thread.sleep(500);
3933 
3934         // Pause playback
3935         mMediaCodecPlayer.pause();
3936 
3937         // Wait for audio to pause
3938         AudioTimestamp pauseAudioTimestamp;
3939         {
3940             AudioTimestamp currentAudioTimestamp = mMediaCodecPlayer.getTimestamp();
3941             long startTimeMs = System.currentTimeMillis();
3942             do {
3943                 // If it takes longer to pause, the UX won't feel responsive to the user
3944                 int audioPauseTimeoutMs = 250;
3945                 assertTrue(String.format("No audio pause after %d milliseconds",
3946                                 audioPauseTimeoutMs),
3947                         System.currentTimeMillis() - startTimeMs < audioPauseTimeoutMs);
3948                 pauseAudioTimestamp = currentAudioTimestamp;
3949                 Thread.sleep(50);
3950                 currentAudioTimestamp = mMediaCodecPlayer.getTimestamp();
3951             } while (currentAudioTimestamp.framePosition != pauseAudioTimestamp.framePosition);
3952         }
3953         long pauseAudioSystemTimeMs = pauseAudioTimestamp.nanoTime / 1000 / 1000;
3954 
3955         // Wait for video to pause
3956         long pauseVideoSystemTimeNs;
3957         long pauseVideoPositionUs;
3958         {
3959             long currentVideoSystemTimeNs = mMediaCodecPlayer.getCurrentRenderedSystemTimeNano();
3960             long startTimeMs = System.currentTimeMillis();
3961             do {
3962                 int videoUnderrunTimeoutMs = 2000;
3963                 assertTrue(String.format("No video pause after %d milliseconds",
3964                                 videoUnderrunTimeoutMs),
3965                         System.currentTimeMillis() - startTimeMs < videoUnderrunTimeoutMs);
3966                 pauseVideoSystemTimeNs = currentVideoSystemTimeNs;
3967                 Thread.sleep(250); // onFrameRendered can get delayed in the Framework
3968                 currentVideoSystemTimeNs = mMediaCodecPlayer.getCurrentRenderedSystemTimeNano();
3969             } while (currentVideoSystemTimeNs != pauseVideoSystemTimeNs);
3970             pauseVideoPositionUs = mMediaCodecPlayer.getVideoTimeUs();
3971         }
3972         long pauseVideoSystemTimeMs = pauseVideoSystemTimeNs / 1000 / 1000;
3973 
3974         // Video should not continue running for a long period of time after audio pauses
3975         long pauseVideoToleranceMs = 500;
3976         assertTrue(String.format(
3977                         "Video ran %d milliseconds longer than audio (video:%d audio:%d)",
3978                         pauseVideoToleranceMs, pauseVideoSystemTimeMs, pauseAudioSystemTimeMs),
3979                 pauseVideoSystemTimeMs - pauseAudioSystemTimeMs < pauseVideoToleranceMs);
3980 
3981         // Verify that playback stays paused
3982         Thread.sleep(500);
3983         assertEquals(mMediaCodecPlayer.getTimestamp().framePosition,
3984                 pauseAudioTimestamp.framePosition);
3985         assertEquals(mMediaCodecPlayer.getCurrentRenderedSystemTimeNano(), pauseVideoSystemTimeNs);
3986         assertEquals(mMediaCodecPlayer.getVideoTimeUs(), pauseVideoPositionUs);
3987 
3988         // Verify audio and video are roughly in sync when paused
3989         long framePosition = mMediaCodecPlayer.getTimestamp().framePosition;
3990         long playbackRateFps = mMediaCodecPlayer.getAudioTrack().getPlaybackRate();
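        // framePosition counts audio frames (samples per channel), so dividing by the AudioTrack
        // playback rate (frames per second) converts it to time: e.g. at 44100 frames/s, a
        // framePosition of 22050 corresponds to 22050 * 1000 / 44100 = 500 ms.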
3991         long pauseAudioPositionMs = pauseAudioTimestamp.framePosition * 1000 / playbackRateFps;
3992         long pauseVideoPositionMs = pauseVideoPositionUs / 1000;
3993         long deltaMs = pauseVideoPositionMs - pauseAudioPositionMs;
3994         assertTrue(String.format(
3995                         "Video is %d milliseconds out of sync from audio (video:%d audio:%d)",
3996                         deltaMs, pauseVideoPositionMs, pauseAudioPositionMs),
3997                 deltaMs > -80 && deltaMs < pauseVideoToleranceMs);
3998 
3999         // Flush both audio and video pipelines
4000         mMediaCodecPlayer.flush();
4001 
4002         // The flush should not cause any frame to be displayed.
4003         // Wait for the max startup latency to see if one (incorrectly) arrives.
4004         Thread.sleep(maxAllowedTimeToFirstFrameMs);
4005         assertEquals("Video frame rendered after flush", mMediaCodecPlayer.getVideoTimeUs(),
4006                 CodecState.UNINITIALIZED_TIMESTAMP);
4007 
4008         // Ensure video peek is disabled before queuing the next frame, otherwise it will
4009         // automatically be rendered when queued.
4010         mMediaCodecPlayer.setVideoPeek(false);
4011 
4012         // We rewind to the beginning of the stream (to a key frame) and queue one frame, but
4013         // pretend like we're seeking 1 second forward in the stream.
4014         long presentationTimeOffsetUs = pauseVideoPositionUs + 1000 * 1000;
4015         mMediaCodecPlayer.seekToBeginning(presentationTimeOffsetUs);
4016         Long queuedVideoTimestamp = mMediaCodecPlayer.queueOneVideoFrame();
4017         assertNotNull("Failed to queue a video frame", queuedVideoTimestamp);
4018 
4019         // The enqueued frame should not be rendered while we're paused.
4020         // Wait for the max startup latency to see if it (incorrectly) arrives.
4021         Thread.sleep(maxAllowedTimeToFirstFrameMs);
4022         assertEquals("Video frame rendered during pause", mMediaCodecPlayer.getVideoTimeUs(),
4023                 CodecState.UNINITIALIZED_TIMESTAMP);
4024 
4025         // Resume playback
4026         mMediaCodecPlayer.resume();
4027         Thread.sleep(maxAllowedTimeToFirstFrameMs);
4028         // Verify that the first rendered frame was the first queued frame
4029         ImmutableList<Long> renderedVideoTimestamps =
4030                 mMediaCodecPlayer.getRenderedVideoFrameTimestampList();
4031         assertFalse(String.format("No frame rendered after resume within %d ms",
4032                         maxAllowedTimeToFirstFrameMs), renderedVideoTimestamps.isEmpty());
4033         assertEquals("First rendered video frame does not match first queued video frame",
4034                 renderedVideoTimestamps.get(0), queuedVideoTimestamp);
4035         // mMediaCodecPlayer.reset() is handled in tearDown()
4036     }
4037 
4038     /**
4039      * Test accurate video rendering after a video MediaCodec flush with HEVC if supported
4040      */
4041     @Test
4042     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
4043     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
4044     public void testTunneledAccurateVideoFlushHevc() throws Exception {
4045         testTunneledAccurateVideoFlush(MediaFormat.MIMETYPE_VIDEO_HEVC,
4046                 "video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv");
4047     }
4048 
4049     /**
4050      * Test accurate video rendering after a video MediaCodec flush with AVC if supported
4051      */
4052     @Test
4053     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
4054     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
4055     public void testTunneledAccurateVideoFlushAvc() throws Exception {
4056         testTunneledAccurateVideoFlush(MediaFormat.MIMETYPE_VIDEO_AVC,
4057                 "video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4");
4058     }
4059 
4060     /**
4061      * Test accurate video rendering after a video MediaCodec flush with VP9 if supported
4062      */
4063     @Test
4064     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
4065     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
4066     public void testTunneledAccurateVideoFlushVp9() throws Exception {
4067         testTunneledAccurateVideoFlush(MediaFormat.MIMETYPE_VIDEO_VP9,
4068                 "bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm");
4069     }
4070 
4071     /**
4072      * Test that audio timestamps stop progressing during pause in tunneled mode.
4073      */
4074     private void testTunneledAudioProgressWithPause(String mimeType, String videoName)
4075             throws Exception {
4076         if (!MediaUtils.check(isVideoFeatureSupported(mimeType, FEATURE_TunneledPlayback),
4077                     "No tunneled video playback codec found for MIME " + mimeType)) {
4078             return;
4079         }
4080 
4081         AudioManager am = mContext.getSystemService(AudioManager.class);
4082         mMediaCodecPlayer = new MediaCodecTunneledPlayer(
4083                 mContext, getActivity().getSurfaceHolder(), true, am.generateAudioSessionId());
4084 
4085         Uri mediaUri = Uri.fromFile(new File(mInpPrefix, videoName));
4086         mMediaCodecPlayer.setAudioDataSource(mediaUri, null);
4087         mMediaCodecPlayer.setVideoDataSource(mediaUri, null);
4088         assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
4089         mMediaCodecPlayer.startCodec();
4090 
4091         mMediaCodecPlayer.play();
4092         sleepUntil(() ->
4093                 mMediaCodecPlayer.getCurrentPosition() > CodecState.UNINITIALIZED_TIMESTAMP
4094                 && mMediaCodecPlayer.getTimestamp() != null
4095                 && mMediaCodecPlayer.getTimestamp().framePosition > 0,
4096                 Duration.ofSeconds(1));
4097         long firstVideoPosition = mMediaCodecPlayer.getVideoTimeUs();
4098         assertNotEquals("onFrameRendered was not called",
4099                 firstVideoPosition, CodecState.UNINITIALIZED_TIMESTAMP);
4100         AudioTimestamp firstAudioTimestamp = mMediaCodecPlayer.getTimestamp();
4101         assertNotEquals("Audio timestamp is null", firstAudioTimestamp, null);
4102         assertNotEquals("Audio timestamp has a zero frame position",
4103                 firstAudioTimestamp.framePosition, 0);
4104 
4105         // Expected stabilization wait is 60ms. We triple to 180ms to prevent flakiness
4106         // and still test basic functionality.
4107         final int sleepTimeMs = 180;
4108         Thread.sleep(sleepTimeMs);
4109         mMediaCodecPlayer.pause();
4110         // pause might take some time to ramp volume down.
4111         Thread.sleep(sleepTimeMs);
4112         AudioTimestamp audioTimestampAfterPause = mMediaCodecPlayer.getTimestamp();
4113         // Verify the video has advanced beyond the first position.
4114         assertTrue(mMediaCodecPlayer.getVideoTimeUs() > firstVideoPosition);
4115         // Verify that the timestamp has advanced beyond the first timestamp.
4116         assertTrue(audioTimestampAfterPause.nanoTime > firstAudioTimestamp.nanoTime);
4117 
4118         Thread.sleep(sleepTimeMs);
4119         // Verify that the timestamp does not advance after pause.
4120         assertEquals(audioTimestampAfterPause.nanoTime, mMediaCodecPlayer.getTimestamp().nanoTime);
4121     }
4122 
4123 
4124     /**
4125      * Test that audio timestamps stop progressing during pause for HEVC in tunneled mode.
4126      */
4127     @Test
4128     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
4129     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
4130     public void testTunneledAudioProgressWithPauseHevc() throws Exception {
4131         testTunneledAudioProgressWithPause(MediaFormat.MIMETYPE_VIDEO_HEVC,
4132                 "video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv");
4133     }
4134 
4135     /**
4136      * Test that audio timestamps stop progressing during pause for AVC in tunneled mode.
4137      */
4138     @Test
4139     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
4140     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
4141     public void testTunneledAudioProgressWithPauseAvc() throws Exception {
4142         testTunneledAudioProgressWithPause(MediaFormat.MIMETYPE_VIDEO_AVC,
4143                 "video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4");
4144     }
4145 
4146     /**
4147      * Test that audio timestamps stop progressing during pause for VP9 in tunneled mode.
4148      */
4149     @Test
4150     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
4151     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
4152     public void testTunneledAudioProgressWithPauseVp9() throws Exception {
4153         testTunneledAudioProgressWithPause(MediaFormat.MIMETYPE_VIDEO_VP9,
4154                 "bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm");
4155     }
4156 
4157     /**
4158      * Test that audio underrun pauses video and resumes in-sync in tunneled mode.
4159      *
4160      * TODO(b/182915887): Test all the codecs advertised by the DUT for the provided test content
4161      */
4162     private void tunneledAudioUnderrun(String mimeType, String videoName)
4163             throws Exception {
4164         if (!MediaUtils.check(isVideoFeatureSupported(mimeType, FEATURE_TunneledPlayback),
4165                 "No tunneled video playback codec found for MIME " + mimeType)) {
4166             return;
4167         }
4168 
4169         AudioManager am = mContext.getSystemService(AudioManager.class);
4170         mMediaCodecPlayer = new MediaCodecTunneledPlayer(
4171                 mContext, getActivity().getSurfaceHolder(), true, am.generateAudioSessionId());
4172 
4173         Uri mediaUri = Uri.fromFile(new File(mInpPrefix, videoName));
4174         mMediaCodecPlayer.setAudioDataSource(mediaUri, null);
4175         mMediaCodecPlayer.setVideoDataSource(mediaUri, null);
4176         assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
4177         mMediaCodecPlayer.startCodec();
4178 
4179         mMediaCodecPlayer.play();
4180         sleepUntil(() ->
4181                 mMediaCodecPlayer.getCurrentPosition() > CodecState.UNINITIALIZED_TIMESTAMP
4182                 && mMediaCodecPlayer.getTimestamp() != null
4183                 && mMediaCodecPlayer.getTimestamp().framePosition > 0,
4184                 Duration.ofSeconds(1));
4185         assertNotEquals("onFrameRendered was not called",
4186                 mMediaCodecPlayer.getVideoTimeUs(), CodecState.UNINITIALIZED_TIMESTAMP);
4187         assertNotEquals("Audio timestamp is null", mMediaCodecPlayer.getTimestamp(), null);
4188         assertNotEquals("Audio timestamp has a zero frame position",
4189                 mMediaCodecPlayer.getTimestamp().framePosition, 0);
4190 
4191         // Simulate underrun by starving the audio track of data
4192         mMediaCodecPlayer.stopDrainingAudioOutputBuffers(true);
4193 
4194         // Wait for audio underrun
4195         AudioTimestamp underrunAudioTimestamp;
4196         {
4197             AudioTimestamp currentAudioTimestamp = mMediaCodecPlayer.getTimestamp();
4198             long startTimeMs = System.currentTimeMillis();
4199             do {
4200                 int audioUnderrunTimeoutMs = 1000;
4201                 assertTrue(String.format("No audio underrun after %d milliseconds",
4202                                 audioUnderrunTimeoutMs),
4203                         System.currentTimeMillis() - startTimeMs < audioUnderrunTimeoutMs);
4204                 underrunAudioTimestamp = currentAudioTimestamp;
4205                 Thread.sleep(50);
4206                 currentAudioTimestamp = mMediaCodecPlayer.getTimestamp();
4207             } while (currentAudioTimestamp.framePosition != underrunAudioTimestamp.framePosition);
4208         }
4209 
4210         // Wait until video playback pauses due to underrunning audio
4211         long pausedVideoTimeUs = -1;
4212         {
4213             long currentVideoTimeUs = mMediaCodecPlayer.getVideoTimeUs();
4214             long startTimeMs = System.currentTimeMillis();
4215             do {
4216                 int videoPauseTimeoutMs = 2000;
4217                 assertTrue(String.format("No video pause after %d milliseconds",
4218                                 videoPauseTimeoutMs),
4219                         System.currentTimeMillis() - startTimeMs < videoPauseTimeoutMs);
4220                 pausedVideoTimeUs = currentVideoTimeUs;
4221                 Thread.sleep(250); // onFrameRendered messages can get delayed in the Framework
4222                 currentVideoTimeUs = mMediaCodecPlayer.getVideoTimeUs();
4223             } while (currentVideoTimeUs != pausedVideoTimeUs);
4224         }
4225 
4226         // Retrieve index for the video rendered frame at the time of video pausing
4227         int pausedVideoRenderedTimestampIndex =
4228                 mMediaCodecPlayer.getRenderedVideoFrameTimestampList().size() - 1;
4229 
4230         // Resume audio playback with a negative offset, in order to simulate a desynchronisation.
4231         // TODO(b/202710709): Use timestamp relative to last played video frame before pause
4232         mMediaCodecPlayer.setAudioTrackOffsetNs(-100L * 1000000);
4233         mMediaCodecPlayer.stopDrainingAudioOutputBuffers(false);
4234 
4235         // Wait until audio playback resumes
4236         AudioTimestamp postResumeAudioTimestamp;
4237         {
4238             AudioTimestamp previousAudioTimestamp;
4239             long startTimeMs = System.currentTimeMillis();
4240             do {
4241                 int audioResumeTimeoutMs = 1000;
4242                 assertTrue(String.format("Audio has not resumed after %d milliseconds",
4243                                 audioResumeTimeoutMs),
4244                         System.currentTimeMillis() - startTimeMs < audioResumeTimeoutMs);
4245                 previousAudioTimestamp = mMediaCodecPlayer.getTimestamp();
4246                 Thread.sleep(50);
4247                 postResumeAudioTimestamp = mMediaCodecPlayer.getTimestamp();
4248             } while (postResumeAudioTimestamp.framePosition
4249                     == previousAudioTimestamp.framePosition);
4250         }
4251 
4252         // Now that audio playback has resumed, wait until video playback resumes
4253         {
4254             // We actually don't care about trying to capture the exact time video resumed, because
4255             // we can just look at the historical list of rendered video timestamps
4256             long postResumeVideoTimeUs;
4257             long previousVideoTimeUs;
4258             long startTimeMs = System.currentTimeMillis();
4259             do {
4260                 int videoResumeTimeoutMs = 2000;
4261                 assertTrue(String.format("Video has not resumed after %d milliseconds",
4262                                 videoResumeTimeoutMs),
4263                         System.currentTimeMillis() - startTimeMs < videoResumeTimeoutMs);
4264                 previousVideoTimeUs = mMediaCodecPlayer.getVideoTimeUs();
4265                 Thread.sleep(50);
4266                 postResumeVideoTimeUs = mMediaCodecPlayer.getVideoTimeUs();
4267             } while (postResumeVideoTimeUs == previousVideoTimeUs);
4268         }
4269 
4270         // The system time when rendering the first audio frame after the resume
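        // (It is back-computed from the most recent AudioTimestamp: playedFrames at a rate of
        // playbackRateFps frames/s take playedFrames / playbackRateFps seconds, so subtracting
        // that from the timestamp's system time gives, approximately, when the first post-resume
        // frame was rendered; e.g. 4800 frames at 48000 frames/s is 100 ms.)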
4271         long playbackRateFps = mMediaCodecPlayer.getAudioTrack().getPlaybackRate();
4272         long playedFrames = postResumeAudioTimestamp.framePosition
4273                 - underrunAudioTimestamp.framePosition + 1;
4274         double elapsedTimeNs = playedFrames * (1000.0 * 1000.0 * 1000.0 / playbackRateFps);
4275         long resumeAudioSystemTimeNs = postResumeAudioTimestamp.nanoTime - (long) elapsedTimeNs;
4276         long resumeAudioSystemTimeMs = resumeAudioSystemTimeNs / 1000 / 1000;
4277 
4278         // The system time when rendering the first video frame after video playback resumes
4279         long resumeVideoSystemTimeMs = mMediaCodecPlayer.getRenderedVideoFrameSystemTimeList()
4280                 .get(pausedVideoRenderedTimestampIndex + 1) / 1000 / 1000;
4281 
4282         // Verify that video resumes in a reasonable amount of time after audio resumes
4283         // Note: Because a -100ms PTS gap is introduced, the video should resume 100ms later
4284         resumeAudioSystemTimeMs += 100;
4285         long resumeDeltaMs = resumeVideoSystemTimeMs - resumeAudioSystemTimeMs;
4286         assertTrue(String.format("Video started %s milliseconds before audio resumed "
4287                         + "(video:%d audio:%d)", resumeDeltaMs * -1, resumeVideoSystemTimeMs,
4288                         resumeAudioSystemTimeMs),
4289                 resumeDeltaMs > 0); // video is expected to start after audio resumes
4290         assertTrue(String.format(
4291                         "Video started %d milliseconds after audio resumed (video:%d audio:%d)",
4292                         resumeDeltaMs, resumeVideoSystemTimeMs, resumeAudioSystemTimeMs),
4293                 resumeDeltaMs <= 600); // allow video to start up to 600ms after audio resumes
4294 
4295         // Determine the system time of the audio frame that matches the presentation timestamp of
4296         // the resumed video frame
4297         long resumeVideoPresentationTimeUs = mMediaCodecPlayer.getRenderedVideoFrameTimestampList()
4298                 .get(pausedVideoRenderedTimestampIndex + 1);
4299         long matchingAudioFramePosition =
4300                 resumeVideoPresentationTimeUs * playbackRateFps / 1000 / 1000;
4301         playedFrames = matchingAudioFramePosition - postResumeAudioTimestamp.framePosition;
4302         elapsedTimeNs = playedFrames * (1000.0 * 1000.0 * 1000.0 / playbackRateFps);
4303         long matchingAudioSystemTimeNs = postResumeAudioTimestamp.nanoTime + (long) elapsedTimeNs;
4304         long matchingAudioSystemTimeMs = matchingAudioSystemTimeNs / 1000 / 1000;
4305 
4306         // Verify that video and audio are in sync at the time when video resumes
4307         // Note: Because a -100ms PTS gap is introduced, the video should resume 100ms later
4308         matchingAudioSystemTimeMs += 100;
4309         long avSyncOffsetMs =  resumeVideoSystemTimeMs - matchingAudioSystemTimeMs;
4310         assertTrue(String.format("Video is %d milliseconds out of sync of audio after resuming "
4311                         + "(video:%d, audio:%d)", avSyncOffsetMs, resumeVideoSystemTimeMs,
4312                         matchingAudioSystemTimeMs),
4313                 // some leniency in AV sync is required because Android TV STB/OTT OEMs often have
4314                 // to tune for imperfect downstream TVs (that have processing delays on the video)
4315                 // by knowingly producing HDMI output that has audio and video mildly out of sync
4316                 Math.abs(avSyncOffsetMs) <= 80);
4317     }
4318 
4319     /**
4320      * Test that audio underrun pauses video and resumes in-sync for HEVC in tunneled mode.
4321      */
4322     @Test
4323     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
4324     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
4325     public void testTunneledAudioUnderrunHevc() throws Exception {
4326         tunneledAudioUnderrun(MediaFormat.MIMETYPE_VIDEO_HEVC,
4327                 "video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv");
4328     }
4329 
4330     /**
4331      * Test that audio underrun pauses video and resumes in-sync for AVC in tunneled mode.
4332      */
4333     @Test
4334     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
4335     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
4336     public void testTunneledAudioUnderrunAvc() throws Exception {
4337         tunneledAudioUnderrun(MediaFormat.MIMETYPE_VIDEO_AVC,
4338                 "video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4");
4339     }
4340 
4341     /**
4342      * Test that audio underrun pauses video and resumes in-sync for VP9 in tunneled mode.
4343      */
4344     @Test
4345     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
4346     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
4347     public void testTunneledAudioUnderrunVp9() throws Exception {
4348         tunneledAudioUnderrun(MediaFormat.MIMETYPE_VIDEO_VP9,
4349                 "bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm");
4350     }
4351 
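    /**
     * Polls {@code supplier} every 50 ms until it returns true or {@code maxWait} elapses.
     * Note that this helper does not fail on timeout; callers are expected to assert the
     * condition themselves after it returns.
     */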
4352     private void sleepUntil(Supplier<Boolean> supplier, Duration maxWait) throws Exception {
4353         final long deadLineMs = System.currentTimeMillis() + maxWait.toMillis();
4354         do {
4355             Thread.sleep(50);
4356         } while (!supplier.get() && System.currentTimeMillis() < deadLineMs);
4357     }
4358 
4359     /**
4360      * Returns list of CodecCapabilities advertising support for the given MIME type.
4361      */
4362     private static List<CodecCapabilities> getCodecCapabilitiesForMimeType(String mimeType) {
4363         int numCodecs = MediaCodecList.getCodecCount();
4364         List<CodecCapabilities> caps = new ArrayList<CodecCapabilities>();
4365         for (int i = 0; i < numCodecs; i++) {
4366             MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
4367             if (codecInfo.isAlias()) {
4368                 continue;
4369             }
4370             if (codecInfo.isEncoder()) {
4371                 continue;
4372             }
4373 
4374             String[] types = codecInfo.getSupportedTypes();
4375             for (int j = 0; j < types.length; j++) {
4376                 if (types[j].equalsIgnoreCase(mimeType)) {
4377                     caps.add(codecInfo.getCapabilitiesForType(mimeType));
4378                 }
4379             }
4380         }
4381         return caps;
4382     }
4383 
4384     /**
4385      * Returns true if there exists a codec supporting the given MIME type that meets the
4386      * minimum specification for VR high performance requirements.
4387      *
4388      * The requirements are as follows:
4389      *   - At least 243000 blocks per second (where blocks are defined as 16x16 -- note this
4390      *   is equivalent to 1920x1080@30fps, i.e. (1920 / 16) * (1080 / 16) * 30 = 243000)
4391      *   - Feature adaptive-playback present
4392      */
4393     private static boolean doesMimeTypeHaveMinimumSpecVrReadyCodec(String mimeType) {
4394         List<CodecCapabilities> caps = getCodecCapabilitiesForMimeType(mimeType);
4395         for (CodecCapabilities c : caps) {
4396             if (!c.isFeatureSupported(CodecCapabilities.FEATURE_AdaptivePlayback)) {
4397                 continue;
4398             }
4399 
4400             if (!c.getVideoCapabilities().areSizeAndRateSupported(1920, 1080, 30.0)) {
4401                 continue;
4402             }
4403 
4404             return true;
4405         }
4406 
4407         return false;
4408     }
4409 
4410     /**
4411      * Returns true if there exists a codec supporting the given MIME type that meets VR high
4412      * performance requirements.
4413      *
4414      * The requirements are as follows:
4415      *   - At least 972000 blocks per second (where blocks are defined as 16x16 -- note this
4416      *   is equivalent to 3840x2160@30fps, i.e. (3840 / 16) * (2160 / 16) * 30 = 972000)
4417      *   - At least 4 concurrent instances
4418      *   - Feature adaptive-playback present
4419      */
4420     private static boolean doesMimeTypeHaveVrReadyCodec(String mimeType) {
4421         List<CodecCapabilities> caps = getCodecCapabilitiesForMimeType(mimeType);
4422         for (CodecCapabilities c : caps) {
4423             if (c.getMaxSupportedInstances() < 4) {
4424                 continue;
4425             }
4426 
4427             if (!c.isFeatureSupported(CodecCapabilities.FEATURE_AdaptivePlayback)) {
4428                 continue;
4429             }
4430 
4431             if (!c.getVideoCapabilities().areSizeAndRateSupported(3840, 2160, 30.0)) {
4432                 continue;
4433             }
4434 
4435             return true;
4436         }
4437 
4438         return false;
4439     }
4440 
4441     @Test
4442     public void testVrHighPerformanceH264() throws Exception {
4443         if (!supportsVrHighPerformance()) {
4444             MediaUtils.skipTest(TAG, "FEATURE_VR_MODE_HIGH_PERFORMANCE not present");
4445             return;
4446         }
4447 
4448         boolean h264IsReady = doesMimeTypeHaveVrReadyCodec(MediaFormat.MIMETYPE_VIDEO_AVC);
4449         assertTrue("Did not find a VR ready H.264 decoder", h264IsReady);
4450     }
4451 
4452     @Test
4453     public void testVrHighPerformanceHEVC() throws Exception {
4454         if (!supportsVrHighPerformance()) {
4455             MediaUtils.skipTest(TAG, "FEATURE_VR_MODE_HIGH_PERFORMANCE not present");
4456             return;
4457         }
4458 
4459         // Test minimum mandatory requirements.
4460         assertTrue(doesMimeTypeHaveMinimumSpecVrReadyCodec(MediaFormat.MIMETYPE_VIDEO_HEVC));
4461 
4462         boolean hevcIsReady = doesMimeTypeHaveVrReadyCodec(MediaFormat.MIMETYPE_VIDEO_HEVC);
4463         if (!hevcIsReady) {
4464             Log.d(TAG, "HEVC isn't required to be VR ready");
4465             return;
4466         }
4467     }
4468 
4469     @Test
4470     public void testVrHighPerformanceVP9() throws Exception {
4471         if (!supportsVrHighPerformance()) {
4472             MediaUtils.skipTest(TAG, "FEATURE_VR_MODE_HIGH_PERFORMANCE not present");
4473             return;
4474         }
4475 
4476         // Test minimum mandatory requirements.
4477         assertTrue(doesMimeTypeHaveMinimumSpecVrReadyCodec(MediaFormat.MIMETYPE_VIDEO_VP9));
4478 
4479         boolean vp9IsReady = doesMimeTypeHaveVrReadyCodec(MediaFormat.MIMETYPE_VIDEO_VP9);
4480         if (!vp9IsReady) {
4481             Log.d(TAG, "VP9 isn't required to be VR ready");
4482             return;
4483         }
4484     }
4485 
4486     private boolean supportsVrHighPerformance() {
4487         PackageManager pm = mContext.getPackageManager();
4488         return pm.hasSystemFeature(PackageManager.FEATURE_VR_MODE_HIGH_PERFORMANCE);
4489     }
4490 
4491     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.R)
4492     @Test
4493     public void testLowLatencyVp9At1280x720() throws Exception {
4494         testLowLatencyVideo(
4495                 "video_1280x720_webm_vp9_csd_309kbps_25fps_vorbis_stereo_128kbps_48000hz.webm", 300,
4496                 false /* useNdk */);
4497         testLowLatencyVideo(
4498                 "video_1280x720_webm_vp9_csd_309kbps_25fps_vorbis_stereo_128kbps_48000hz.webm", 300,
4499                 true /* useNdk */);
4500     }
4501 
4502     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.R)
4503     @Test
4504     public void testLowLatencyVp9At1920x1080() throws Exception {
4505         testLowLatencyVideo(
4506                 "bbb_s2_1920x1080_webm_vp9_0p41_10mbps_60fps_vorbis_6ch_384kbps_22050hz.webm", 300,
4507                 false /* useNdk */);
4508         testLowLatencyVideo(
4509                 "bbb_s2_1920x1080_webm_vp9_0p41_10mbps_60fps_vorbis_6ch_384kbps_22050hz.webm", 300,
4510                 true /* useNdk */);
4511     }
4512 
4513     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.R)
4514     @Test
4515     public void testLowLatencyVp9At3840x2160() throws Exception {
4516         testLowLatencyVideo(
4517                 "bbb_s2_3840x2160_webm_vp9_0p51_20mbps_60fps_vorbis_6ch_384kbps_32000hz.webm", 300,
4518                 false /* useNdk */);
4519         testLowLatencyVideo(
4520                 "bbb_s2_3840x2160_webm_vp9_0p51_20mbps_60fps_vorbis_6ch_384kbps_32000hz.webm", 300,
4521                 true /* useNdk */);
4522     }
4523 
4524     @NonMainlineTest
4525     @Test
4526     public void testLowLatencyAVCAt1280x720() throws Exception {
4527         testLowLatencyVideo(
4528                 "video_1280x720_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4", 300,
4529                 false /* useNdk */);
4530         testLowLatencyVideo(
4531                 "video_1280x720_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4", 300,
4532                 true /* useNdk */);
4533     }
4534 
4535     @NonMainlineTest
4536     @Test
4537     public void testLowLatencyHEVCAt480x360() throws Exception {
4538         testLowLatencyVideo(
4539                 "video_480x360_mp4_hevc_650kbps_30fps_aac_stereo_128kbps_48000hz.mp4", 300,
4540                 false /* useNdk */);
4541         testLowLatencyVideo(
4542                 "video_480x360_mp4_hevc_650kbps_30fps_aac_stereo_128kbps_48000hz.mp4", 300,
4543                 true /* useNdk */);
4544     }
4545 
4546     private void testLowLatencyVideo(String testVideo, int frameCount, boolean useNdk)
4547             throws Exception {
4548         AssetFileDescriptor fd = getAssetFileDescriptorFor(testVideo);
4549         MediaExtractor extractor = new MediaExtractor();
4550         extractor.setDataSource(fd.getFileDescriptor(), fd.getStartOffset(), fd.getLength());
4551         fd.close();
4552 
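             // Find the first video track in the container; its format is used to select and
             // configure the decoder.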
4553         MediaFormat format = null;
4554         int trackIndex = -1;
4555         for (int i = 0; i < extractor.getTrackCount(); i++) {
4556             format = extractor.getTrackFormat(i);
4557             if (format.getString(MediaFormat.KEY_MIME).startsWith("video/")) {
4558                 trackIndex = i;
4559                 break;
4560             }
4561         }
4562 
4563         assertTrue("No video track was found", trackIndex >= 0);
4564 
4565         extractor.selectTrack(trackIndex);
4566         format.setFeatureEnabled(MediaCodecInfo.CodecCapabilities.FEATURE_LowLatency,
4567                 true /* enable */);
4568 
4569         MediaCodecList mcl = new MediaCodecList(MediaCodecList.ALL_CODECS);
4570         String decoderName = mcl.findDecoderForFormat(format);
4571         if (decoderName == null) {
4572             MediaUtils.skipTest("no low latency decoder for " + format);
4573             return;
4574         }
4575         String entry = (useNdk ? "NDK" : "SDK");
4576         Log.v(TAG, "found " + entry + " decoder " + decoderName + " for format: " + format);
4577 
4578         Surface surface = getActivity().getSurfaceHolder().getSurface();
4579         MediaCodecWrapper decoder = null;
4580         if (useNdk) {
4581             decoder = new NdkMediaCodec(decoderName);
4582         } else {
4583             decoder = new SdkMediaCodec(MediaCodec.createByCodecName(decoderName));
4584         }
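             // FEATURE_LowLatency was set on the format only to steer decoder selection above;
             // for configuration the request is made via the KEY_LOW_LATENCY integer instead.
             // Illustrative sketch of the plain MediaCodec API (not part of this wrapper-based
             // test):
             //     format.setInteger(MediaFormat.KEY_LOW_LATENCY, 1 /* enable */);
             //     codec.configure(format, surface, null /* crypto */, 0 /* flags */);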
4585         format.removeFeature(MediaCodecInfo.CodecCapabilities.FEATURE_LowLatency);
4586         format.setInteger(MediaFormat.KEY_LOW_LATENCY, 1);
4587         decoder.configure(format, 0 /* flags */, surface);
4588         decoder.start();
4589 
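             // The SDK-backed wrapper appears to rely on the legacy buffer-array API, so populate
             // its cached input buffers up front; the NDK-backed wrapper does not need this.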
4590         if (!useNdk) {
4591             decoder.getInputBuffers();
4592         }
4593         ByteBuffer[] codecOutputBuffers = decoder.getOutputBuffers();
4594         String decoderOutputFormatString = null;
4595 
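             // Latency is measured in lock step: queue exactly one input sample, then block on
             // dequeueOutputBuffer() and record the wall-clock time from queueInputBuffer() to
             // the matching output. Rendering time is not included (see the TODO below).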
4596         // start decoding
4597         final long kTimeOutUs = 1000000;  // 1 second
4598         MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
4599         int bufferCounter = 0;
4600         long[] latencyMs = new long[frameCount];
4601         boolean waitingForOutput = false;
4602         long startTimeMs = System.currentTimeMillis();
4603         while (bufferCounter < frameCount) {
4604             if (!waitingForOutput) {
4605                 int inputBufferId = decoder.dequeueInputBuffer(kTimeOutUs);
4606                 if (inputBufferId < 0) {
4607                     Log.v(TAG, "no input buffer");
4608                     break;
4609                 }
4610 
4611                 ByteBuffer dstBuf = decoder.getInputBuffer(inputBufferId);
4612 
4613                 int sampleSize = extractor.readSampleData(dstBuf, 0 /* offset */);
4614                 long presentationTimeUs = 0;
4615                 if (sampleSize < 0) {
4616                     Log.v(TAG, "had input EOS, early termination at frame " + bufferCounter);
4617                     break;
4618                 } else {
4619                     presentationTimeUs = extractor.getSampleTime();
4620                 }
4621 
4622                 startTimeMs = System.currentTimeMillis();
4623                 decoder.queueInputBuffer(
4624                         inputBufferId,
4625                         0 /* offset */,
4626                         sampleSize,
4627                         presentationTimeUs,
4628                         0 /* flags */);
4629 
4630                 extractor.advance();
4631                 waitingForOutput = true;
4632             }
4633 
4634             int outputBufferId = decoder.dequeueOutputBuffer(info, kTimeOutUs);
4635 
4636             if (outputBufferId >= 0) {
4637                 waitingForOutput = false;
4638                 //Log.d(TAG, "got output, size " + info.size + ", time " + info.presentationTimeUs);
4639                 latencyMs[bufferCounter++] = System.currentTimeMillis() - startTimeMs;
4640                 // TODO: render the frame and find the rendering time to calculate the total delay
4641                 decoder.releaseOutputBuffer(outputBufferId, false /* render */);
4642             } else if (outputBufferId == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
4643                 codecOutputBuffers = decoder.getOutputBuffers();
4644                 Log.d(TAG, "output buffers have changed.");
4645             } else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
4646                 decoderOutputFormatString = decoder.getOutputFormatString();
4647                 Log.d(TAG, "output format has changed to " + decoderOutputFormatString);
4648             } else {
4649                 fail("Expected an output buffer with no frame delay, but got status " + outputBufferId);
4650             }
4651         }
4652 
4653         assertNotNull("No INFO_OUTPUT_FORMAT_CHANGED from decoder", decoderOutputFormatString);
4654 
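             // Summarize the per-frame latencies: the mean over all decoded frames plus the worst
             // single frame and its index.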
4655         long latencyMean = 0;
4656         long latencyMax = 0;
4657         int maxIndex = 0;
4658         for (int i = 0; i < bufferCounter; ++i) {
4659             latencyMean += latencyMs[i];
4660             if (latencyMs[i] > latencyMax) {
4661                 latencyMax = latencyMs[i];
4662                 maxIndex = i;
4663             }
4664         }
4665         if (bufferCounter > 0) {
4666             latencyMean /= bufferCounter;
4667         }
4668         Log.d(TAG, entry + " latency average " + latencyMean + " ms, max " + latencyMax +
4669                 " ms at frame " + maxIndex);
4670 
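             // Report the measured latencies to the CTS report log, together with the codec name,
             // mime type, resolution and output target.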
4671         DeviceReportLog log = new DeviceReportLog(REPORT_LOG_NAME, "video_decoder_latency");
4672         String mime = format.getString(MediaFormat.KEY_MIME);
4673         int width = format.getInteger(MediaFormat.KEY_WIDTH);
4674         int height = format.getInteger(MediaFormat.KEY_HEIGHT);
4675         log.addValue("codec_name", decoderName, ResultType.NEUTRAL, ResultUnit.NONE);
4676         log.addValue("mime_type", mime, ResultType.NEUTRAL, ResultUnit.NONE);
4677         log.addValue("width", width, ResultType.NEUTRAL, ResultUnit.NONE);
4678         log.addValue("height", height, ResultType.NEUTRAL, ResultUnit.NONE);
4679         log.addValue("video_res", testVideo, ResultType.NEUTRAL, ResultUnit.NONE);
4680         log.addValue("decode_to", surface == null ? "buffer" : "surface",
4681                 ResultType.NEUTRAL, ResultUnit.NONE);
4682 
4683         log.addValue("average_latency", latencyMean, ResultType.LOWER_BETTER, ResultUnit.MS);
4684         log.addValue("max_latency", latencyMax, ResultType.LOWER_BETTER, ResultUnit.MS);
4685 
4686         log.submit(getInstrumentation());
4687 
4688         decoder.stop();
4689         decoder.release();
4690         extractor.release();
4691     }
4692 }
4693