/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.codec.cts;

import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_Format32bitABGR2101010;
import static android.media.MediaCodecInfo.CodecProfileLevel.AV1ProfileMain10;
import static android.media.MediaCodecInfo.CodecProfileLevel.AVCProfileHigh10;
import static android.media.MediaCodecInfo.CodecProfileLevel.HEVCProfileMain10;
import static android.media.MediaCodecInfo.CodecProfileLevel.VP9Profile2;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import static org.junit.Assume.assumeTrue;

import android.content.Context;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.media.cts.InputSurface;
import android.media.cts.OutputSurface;
import android.media.cts.TestArgs;
import android.media.cts.TestUtils;
import android.opengl.GLES20;
import android.opengl.GLES30;
import android.os.Build;
import android.platform.test.annotations.PlatinumTest;
import android.util.Log;

import androidx.test.platform.app.InstrumentationRegistry;

import com.android.compatibility.common.util.ApiLevelUtil;
import com.android.compatibility.common.util.ApiTest;
import com.android.compatibility.common.util.MediaUtils;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

/**
 * This test has three steps:
 * <ol>
 *   <li>Generate a video test stream.
 *   <li>Decode the video from the stream, rendering frames into a SurfaceTexture.
 *       Render the texture onto a Surface that feeds a video encoder, modifying
 *       the output with a fragment shader.
 *   <li>Decode the second video and compare it to the expected result.
 * </ol><p>
 * The second step is a typical scenario for video editing.  We could do all this in one
 * step, feeding data through multiple stages of MediaCodec, but at some point we're
 * no longer exercising the code in the way we expect it to be used (and the code
 * gets a bit unwieldy).
 */
@PlatinumTest(focusArea = "media")
@RunWith(Parameterized.class)
public class DecodeEditEncodeTest {
    private static final String TAG = "DecodeEditEncode";
    private static final boolean WORK_AROUND_BUGS = false;  // avoid fatal codec bugs
    private static final boolean VERBOSE = false;           // lots of logging
    private static final boolean DEBUG_SAVE_FILE = false;   // save copy of encoded movie
    private static final boolean IS_AFTER_T = ApiLevelUtil.isAfter(Build.VERSION_CODES.TIRAMISU);

    // parameters for the encoder
    private static final int FRAME_RATE = 15;               // 15fps
    private static final int IFRAME_INTERVAL = 10;          // 10 seconds between I-frames
    private static final String KEY_ALLOW_FRAME_DROP = "allow-frame-drop";

    // movie length, in frames
    private static final int NUM_FRAMES = FRAME_RATE * 3;   // three seconds of video
    // Since encoders are lossy, we apply a looser tolerance to the first N frames than to
    // the remainder of the clip.  The number of such frames is INITIAL_TOLERANCE_FRAME_LIMIT;
    // the tolerance within that window is INITIAL_TOLERANCE, and the tolerance afterwards
    // is TOLERANCE.
    private final int INITIAL_TOLERANCE_FRAME_LIMIT = FRAME_RATE * 2;

    // allowed error between input and output
    private static final int TOLERANCE = 8;

    // allowed error between input and output for initial INITIAL_TOLERANCE_FRAME_LIMIT frames
    private static final int INITIAL_TOLERANCE = 10;

    private static final int TEST_R0 = 0;                   // dull green background
    private static final int TEST_G0 = 136;
    private static final int TEST_B0 = 0;
    private static final int TEST_R1 = 236;                 // pink; BT.601 YUV {120,160,200}
    private static final int TEST_G1 = 50;
    private static final int TEST_B1 = 186;

    // Replaces TextureRender.FRAGMENT_SHADER during edit; swaps green and blue channels.
    private static final String FRAGMENT_SHADER =
            "#extension GL_OES_EGL_image_external : require\n" +
            "precision mediump float;\n" +
            "varying vec2 vTextureCoord;\n" +
            "uniform samplerExternalOES sTexture;\n" +
            "void main() {\n" +
            "  gl_FragColor = texture2D(sTexture, vTextureCoord).rbga;\n" +
            "}\n";

    // component names
    private final String mEncoderName;
    private final String mDecoderName;
    // mime
    private final String mMediaType;
    // size of a frame, in pixels
    private final int mWidth;
    private final int mHeight;
    // bit rate, in bits per second
    private final int mBitRate;
    private final boolean mUseHighBitDepth;

    // largest color component delta seen (i.e. actual vs. expected)
    private int mLargestColorDelta;

    static private List<Object[]> prepareParamList(List<Object[]> exhaustiveArgsList) {
        final List<Object[]> argsList = new ArrayList<>();
        int argLength = exhaustiveArgsList.get(0).length;
        for (Object[] arg : exhaustiveArgsList) {
            String mediaType = (String)arg[0];
            if (TestArgs.shouldSkipMediaType(mediaType)) {
                continue;
            }
            String[] encoderNames = MediaUtils.getEncoderNamesForMime(mediaType);
            String[] decoderNames = MediaUtils.getDecoderNamesForMime(mediaType);
            // First pair of decoder and encoder that supports given mediaType is chosen
            outerLoop:
            for (String decoder : decoderNames) {
                if (TestArgs.shouldSkipCodec(decoder)) {
                    continue;
                }

                for (String encoder : encoderNames) {
                    if (TestArgs.shouldSkipCodec(encoder)) {
                        continue;
                    }
                    Object[] testArgs = new Object[argLength + 2];
                    // Add encoder name and decoder name as first two arguments and then
                    // copy arguments passed
                    testArgs[0] = encoder;
                    testArgs[1] = decoder;
                    System.arraycopy(arg, 0, testArgs, 2, argLength);
                    argsList.add(testArgs);
                    // Only one combination of encoder and decoder is tested
                    break outerLoop;
                }
            }
        }
        return argsList;
    }

    private static boolean hasSupportForColorFormat(String name, String mediaType,
            int colorFormat, boolean isEncoder) {
        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        for (MediaCodecInfo codecInfo : mcl.getCodecInfos()) {
            if (isEncoder != codecInfo.isEncoder()) {
                continue;
            }
            if (!name.equals(codecInfo.getName())) {
                continue;
            }
            MediaCodecInfo.CodecCapabilities cap = codecInfo.getCapabilitiesForType(mediaType);
            for (int c : cap.colorFormats) {
                if (c == colorFormat) {
                    return true;
                }
            }
        }
        return false;
    }

    @Before
    public void shouldSkip() throws IOException {
        MediaFormat format = MediaFormat.createVideoFormat(mMediaType, mWidth, mHeight);
        format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        if (mUseHighBitDepth) {
            assumeTrue(mEncoderName + " doesn't support RGBA1010102",
                    hasSupportForColorFormat(mEncoderName, mMediaType,
                            COLOR_Format32bitABGR2101010, /* isEncoder */ true));

            switch (mMediaType) {
                case MediaFormat.MIMETYPE_VIDEO_AVC:
                    format.setInteger(MediaFormat.KEY_PROFILE, AVCProfileHigh10);
                    break;
                case MediaFormat.MIMETYPE_VIDEO_HEVC:
                    format.setInteger(MediaFormat.KEY_PROFILE, HEVCProfileMain10);
                    break;
                case MediaFormat.MIMETYPE_VIDEO_VP9:
                    format.setInteger(MediaFormat.KEY_PROFILE, VP9Profile2);
                    break;
                case MediaFormat.MIMETYPE_VIDEO_AV1:
                    format.setInteger(MediaFormat.KEY_PROFILE, AV1ProfileMain10);
                    break;
                default:
                    fail("MediaType " + mMediaType + " is not supported for 10-bit testing.");
                    break;
            }
        }
        assumeTrue(MediaUtils.supports(mEncoderName, format));
        assumeTrue(MediaUtils.supports(mDecoderName, format));
        // A few cuttlefish-specific color conversion issues were fixed after Android T.
        if (MediaUtils.onCuttlefish()) {
            assumeTrue("Color conversion related tests are not valid on cuttlefish releases "
                    + "through android T for format: " + format, IS_AFTER_T);
        }
        // Prior to Android U, this test only checked the first codec for a media type (usually
        // a hardware codec), and software codecs exercised a problem in the underlying graphics
        // code.  So we only run this in CTS mode or on versions after Android T (where the
        // graphics code is fixed).
        if (TestUtils.isMtsMode()) {
            assumeTrue("Color conversion related tests are skipped in MTS on releases "
                    + "through android T for format: " + format, IS_AFTER_T);
        }
    }

    @Parameterized.Parameters(name = "{index}_{0}_{1}_{2}_{3}_{4}_{5}")
    public static Collection<Object[]> input() {
        final List<Object[]> baseArgsList = Arrays.asList(new Object[][]{
                // width, height, bitrate
                {176, 144, 1000000},
                {320, 240, 2000000},
                {1280, 720, 6000000}
        });
        final String[] mediaTypes = {MediaFormat.MIMETYPE_VIDEO_AVC,
                MediaFormat.MIMETYPE_VIDEO_HEVC, MediaFormat.MIMETYPE_VIDEO_VP8,
                MediaFormat.MIMETYPE_VIDEO_VP9, MediaFormat.MIMETYPE_VIDEO_AV1};
        final boolean[] useHighBitDepthModes = {false, true};
        final List<Object[]> exhaustiveArgsList = new ArrayList<>();
        for (boolean useHighBitDepth : useHighBitDepthModes) {
            for (String mediaType : mediaTypes) {
                for (Object[] obj : baseArgsList) {
                    if (mediaType.equals(MediaFormat.MIMETYPE_VIDEO_VP8) && useHighBitDepth) {
                        continue;
                    }
                    exhaustiveArgsList.add(
                            new Object[]{mediaType, obj[0], obj[1], obj[2], useHighBitDepth});
                }
            }
        }
        return prepareParamList(exhaustiveArgsList);
    }

    public DecodeEditEncodeTest(String encoder, String decoder, String mimeType, int width,
            int height, int bitRate, boolean useHighBitDepth) {
        if ((width % 16) != 0 || (height % 16) != 0) {
            Log.w(TAG, "WARNING: width or height not multiple of 16");
        }
        mEncoderName = encoder;
        mDecoderName = decoder;
        mMediaType = mimeType;
        mWidth = width;
        mHeight = height;
        mBitRate = bitRate;
        mUseHighBitDepth = useHighBitDepth;
    }

    @ApiTest(apis = {"android.opengl.GLES20#GL_FRAGMENT_SHADER",
            "android.opengl.GLES20#glReadPixels",
            "android.opengl.GLES30#glReadPixels",
            "android.media.format.MediaFormat#KEY_ALLOW_FRAME_DROP",
            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatSurface",
            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_Format32bitABGR2101010",
            "android.media.MediaFormat#KEY_COLOR_RANGE",
            "android.media.MediaFormat#KEY_COLOR_STANDARD",
            "android.media.MediaFormat#KEY_COLOR_TRANSFER"})
    @Test
    public void testVideoEdit() throws Throwable {
        VideoEditWrapper.runTest(this);
    }

    /**
     * Wraps videoEditTest(), running it in a new thread.  Required because of the way
     * SurfaceTexture.OnFrameAvailableListener works when the current thread has a Looper
     * configured.
     */
    private static class VideoEditWrapper implements Runnable {
        private Throwable mThrowable;
        private DecodeEditEncodeTest mTest;

        private VideoEditWrapper(DecodeEditEncodeTest test) {
            mTest = test;
        }

        @Override
        public void run() {
            try {
                mTest.videoEditTest();
            } catch (Throwable th) {
                mThrowable = th;
            }
        }

        /** Entry point. */
        public static void runTest(DecodeEditEncodeTest obj) throws Throwable {
            VideoEditWrapper wrapper = new VideoEditWrapper(obj);
            Thread th = new Thread(wrapper, "codec test");
            th.start();
            th.join();
            if (wrapper.mThrowable != null) {
                throw wrapper.mThrowable;
            }
        }
    }

    /**
     * Tests editing of a video file with GL.
     */
    private void videoEditTest()
            throws IOException {
        VideoChunks sourceChunks = new VideoChunks();

        generateVideoFile(sourceChunks);

        if (DEBUG_SAVE_FILE) {
            // Save a copy to a file.  We call it ".mp4", but it's actually just an elementary
            // stream, so not all video players will know what to do with it.
            Context context = InstrumentationRegistry.getInstrumentation().getTargetContext();
            String dirName = context.getFilesDir().getAbsolutePath();
            String fileName = "vedit1_" + mWidth + "x" + mHeight + ".mp4";
            sourceChunks.saveToFile(new File(dirName, fileName));
        }

        VideoChunks destChunks = editVideoFile(sourceChunks);

        if (DEBUG_SAVE_FILE) {
            Context context = InstrumentationRegistry.getInstrumentation().getTargetContext();
            String dirName = context.getFilesDir().getAbsolutePath();
            String fileName = "vedit2_" + mWidth + "x" + mHeight + ".mp4";
            destChunks.saveToFile(new File(dirName, fileName));
        }

        checkVideoFile(destChunks);
    }

    /**
     * Generates a test video file, saving it as VideoChunks.  We generate frames with GL to
     * avoid having to deal with multiple YUV formats.
     */
    private void generateVideoFile(VideoChunks output)
            throws IOException {
        if (VERBOSE) Log.d(TAG, "generateVideoFile " + mWidth + "x" + mHeight);
        MediaCodec encoder = null;
        InputSurface inputSurface = null;

        try {
            // We avoid the device-specific limitations on width and height by using values that
            // are multiples of 16, which all tested devices seem to be able to handle.
            MediaFormat format = MediaFormat.createVideoFormat(mMediaType, mWidth, mHeight);

            // Set some properties.  Failing to specify some of these can cause the MediaCodec
            // configure() call to throw an unhelpful exception.
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
            if (mUseHighBitDepth) {
                format.setInteger(MediaFormat.KEY_COLOR_RANGE, MediaFormat.COLOR_RANGE_FULL);
                format.setInteger(MediaFormat.KEY_COLOR_STANDARD,
                        MediaFormat.COLOR_STANDARD_BT2020);
                format.setInteger(MediaFormat.KEY_COLOR_TRANSFER,
                        MediaFormat.COLOR_TRANSFER_ST2084);
            } else {
                format.setInteger(MediaFormat.KEY_COLOR_RANGE, MediaFormat.COLOR_RANGE_LIMITED);
                format.setInteger(MediaFormat.KEY_COLOR_STANDARD,
                        MediaFormat.COLOR_STANDARD_BT601_PAL);
                format.setInteger(MediaFormat.KEY_COLOR_TRANSFER,
                        MediaFormat.COLOR_TRANSFER_SDR_VIDEO);
            }
            if (VERBOSE) Log.d(TAG, "format: " + format);
            output.setMediaFormat(format);

            // Create a MediaCodec for the desired codec, then configure it as an encoder with
            // our desired properties.
            encoder = MediaCodec.createByCodecName(mEncoderName);
            encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            inputSurface = new InputSurface(encoder.createInputSurface(), mUseHighBitDepth);
            inputSurface.makeCurrent();
            encoder.start();

            generateVideoData(encoder, inputSurface, output);
        } finally {
            if (encoder != null) {
                if (VERBOSE) Log.d(TAG, "releasing encoder");
                encoder.stop();
                encoder.release();
                if (VERBOSE) Log.d(TAG, "released encoder");
            }
            if (inputSurface != null) {
                inputSurface.release();
            }
        }
    }

    /**
     * Generates video frames, feeds them into the encoder, and writes the output to the
     * VideoChunks instance.
     */
    private void generateVideoData(MediaCodec encoder, InputSurface inputSurface,
            VideoChunks output) {
        final int TIMEOUT_USEC = 10000;
        ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int generateIndex = 0;
        int outputCount = 0;

        // Loop until the output side is done.
        boolean inputDone = false;
        boolean outputDone = false;
        while (!outputDone) {
            if (VERBOSE) Log.d(TAG, "gen loop");

            // If we're not done submitting frames, generate a new one and submit it.  The
            // eglSwapBuffers call will block if the input is full.
            if (!inputDone) {
                if (generateIndex == NUM_FRAMES) {
                    // Send an empty frame with the end-of-stream flag set.
                    if (VERBOSE) Log.d(TAG, "signaling input EOS");
                    if (WORK_AROUND_BUGS) {
                        // Might drop a frame, but at least we won't crash mediaserver.
                        try { Thread.sleep(500); } catch (InterruptedException ie) {}
                        outputDone = true;
                    } else {
                        encoder.signalEndOfInputStream();
                    }
                    inputDone = true;
                } else {
                    generateSurfaceFrame(generateIndex);
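                    // computePresentationTime() returns microseconds; the surface
                    // timestamp is in nanoseconds, hence the * 1000 below.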
                    inputSurface.setPresentationTime(computePresentationTime(generateIndex) * 1000);
                    if (VERBOSE) Log.d(TAG, "inputSurface swapBuffers");
                    inputSurface.swapBuffers();
                }
                generateIndex++;
            }

            // Check for output from the encoder.  If there's no output yet, we either need to
            // provide more input, or we need to wait for the encoder to work its magic.  We
            // can't actually tell which is the case, so if we can't get an output buffer right
            // away we loop around and see if it wants more input.
            //
            // If we do find output, drain it all before supplying more input.
            while (true) {
                int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (VERBOSE) Log.d(TAG, "no output from encoder available");
                    break;      // out of while
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    // not expected for an encoder
                    encoderOutputBuffers = encoder.getOutputBuffers();
                    if (VERBOSE) Log.d(TAG, "encoder output buffers changed");
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // expected on API 18+
                    MediaFormat newFormat = encoder.getOutputFormat();
                    if (VERBOSE) Log.d(TAG, "encoder output format changed: " + newFormat);
                } else if (encoderStatus < 0) {
                    fail("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
                } else { // encoderStatus >= 0
                    ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                    if (encodedData == null) {
                        fail("encoderOutputBuffer " + encoderStatus + " was null");
                    }

                    if (info.size != 0) {
                        // Adjust the ByteBuffer values to match BufferInfo.
                        encodedData.position(info.offset);
                        encodedData.limit(info.offset + info.size);

                        output.addChunk(encodedData, info.flags, info.presentationTimeUs);
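                        // Codec config data (CSD) is stored as a chunk but isn't a
                        // frame, so it doesn't count toward the output frame total.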
                        if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
                            outputCount++;
                        }
                    }

                    encoder.releaseOutputBuffer(encoderStatus, false);
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        outputDone = true;
                        break;      // out of while
                    }
                }
            }
        }

        assertEquals("Frame count", NUM_FRAMES, outputCount);
    }

    /**
     * Generates a frame of data using GL commands.
     * <p>
     * We have an 8-frame animation sequence that wraps around.  It looks like this:
     * <pre>
     *   0 1 2 3
     *   7 6 5 4
     * </pre>
     * We draw one of the eight rectangles and leave the rest set to the zero-fill color.
     */
    private void generateSurfaceFrame(int frameIndex) {
        frameIndex %= 8;

        int startX, startY;
        if (frameIndex < 4) {
            // (0,0) is bottom-left in GL
            startX = frameIndex * (mWidth / 4);
            startY = mHeight / 2;
        } else {
            startX = (7 - frameIndex) * (mWidth / 4);
            startY = 0;
        }

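        // Clear the entire buffer to the background color, then use a scissor rect to
        // clear just the active rectangle to the foreground color.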
        GLES20.glDisable(GLES20.GL_SCISSOR_TEST);
        GLES20.glClearColor(TEST_R0 / 255.0f, TEST_G0 / 255.0f, TEST_B0 / 255.0f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        GLES20.glEnable(GLES20.GL_SCISSOR_TEST);
        GLES20.glScissor(startX, startY, mWidth / 4, mHeight / 2);
        GLES20.glClearColor(TEST_R1 / 255.0f, TEST_G1 / 255.0f, TEST_B1 / 255.0f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    }

    /**
     * Edits a video file, saving the contents to a new file.  This involves decoding and
     * re-encoding, not to mention conversions between YUV and RGB, and so may be lossy.
     * <p>
     * If we recognize the decoded format we can do this in Java code using the ByteBuffer[]
     * output, but it's not practical to support all OEM formats.  By using a SurfaceTexture
     * for output and a Surface for input, we can avoid issues with obscure formats and can
     * use a fragment shader to do transformations.
     */
    private VideoChunks editVideoFile(VideoChunks inputData)
            throws IOException {
        if (VERBOSE) Log.d(TAG, "editVideoFile " + mWidth + "x" + mHeight);
        VideoChunks outputData = new VideoChunks();
        MediaCodec decoder = null;
        MediaCodec encoder = null;
        InputSurface inputSurface = null;
        OutputSurface outputSurface = null;

        try {
            MediaFormat inputFormat = inputData.getMediaFormat();

            // Create an encoder format that matches the input format.  (Might be able to just
            // re-use the format used to generate the video, since we want it to be the same.)
            MediaFormat outputFormat = MediaFormat.createVideoFormat(mMediaType, mWidth, mHeight);
            outputFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            outputFormat.setInteger(MediaFormat.KEY_BIT_RATE,
                    inputFormat.getInteger(MediaFormat.KEY_BIT_RATE));
            outputFormat.setInteger(MediaFormat.KEY_FRAME_RATE,
                    inputFormat.getInteger(MediaFormat.KEY_FRAME_RATE));
            outputFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL,
                    inputFormat.getInteger(MediaFormat.KEY_I_FRAME_INTERVAL));
            if (mUseHighBitDepth) {
                outputFormat.setInteger(MediaFormat.KEY_COLOR_RANGE, MediaFormat.COLOR_RANGE_FULL);
                outputFormat.setInteger(MediaFormat.KEY_COLOR_STANDARD,
                        MediaFormat.COLOR_STANDARD_BT2020);
                outputFormat.setInteger(MediaFormat.KEY_COLOR_TRANSFER,
                        MediaFormat.COLOR_TRANSFER_ST2084);
            } else {
                outputFormat.setInteger(MediaFormat.KEY_COLOR_RANGE,
                        MediaFormat.COLOR_RANGE_LIMITED);
                outputFormat.setInteger(MediaFormat.KEY_COLOR_STANDARD,
                        MediaFormat.COLOR_STANDARD_BT601_PAL);
                outputFormat.setInteger(MediaFormat.KEY_COLOR_TRANSFER,
                        MediaFormat.COLOR_TRANSFER_SDR_VIDEO);
            }
            outputData.setMediaFormat(outputFormat);

            encoder = MediaCodec.createByCodecName(mEncoderName);
            encoder.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            inputSurface = new InputSurface(encoder.createInputSurface(), mUseHighBitDepth);
            inputSurface.makeCurrent();
            encoder.start();

            // OutputSurface uses the EGL context created by InputSurface.
            decoder = MediaCodec.createByCodecName(mDecoderName);
            outputSurface = new OutputSurface();
            outputSurface.changeFragmentShader(FRAGMENT_SHADER);
            // do not allow frame drops
            inputFormat.setInteger(KEY_ALLOW_FRAME_DROP, 0);

            decoder.configure(inputFormat, outputSurface.getSurface(), null, 0);
            decoder.start();

            // verify that we are not dropping frames
            inputFormat = decoder.getInputFormat();
            assertEquals("Could not prevent frame dropping",
                         0, inputFormat.getInteger(KEY_ALLOW_FRAME_DROP));

            editVideoData(inputData, decoder, outputSurface, inputSurface, encoder, outputData);
        } finally {
            if (VERBOSE) Log.d(TAG, "shutting down encoder, decoder");
            if (outputSurface != null) {
                outputSurface.release();
            }
            if (inputSurface != null) {
                inputSurface.release();
            }
            if (encoder != null) {
                encoder.stop();
                encoder.release();
            }
            if (decoder != null) {
                decoder.stop();
                decoder.release();
            }
        }

        return outputData;
    }

    /**
     * Edits a stream of video data.
     */
    private void editVideoData(VideoChunks inputData, MediaCodec decoder,
            OutputSurface outputSurface, InputSurface inputSurface, MediaCodec encoder,
            VideoChunks outputData) {
        final int TIMEOUT_USEC = 10000;
        ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
        ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int inputChunk = 0;
        int outputCount = 0;

        boolean outputDone = false;
        boolean inputDone = false;
        boolean decoderDone = false;
        while (!outputDone) {
            if (VERBOSE) Log.d(TAG, "edit loop");

            // Feed more data to the decoder.
            if (!inputDone) {
                int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
                if (inputBufIndex >= 0) {
                    if (inputChunk == inputData.getNumChunks()) {
                        // End of stream -- send empty frame with EOS flag set.
                        decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L,
                                MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        inputDone = true;
                        if (VERBOSE) Log.d(TAG, "sent input EOS (with zero-length frame)");
                    } else {
                        // Copy a chunk of input to the decoder.  The first chunk should have
                        // the BUFFER_FLAG_CODEC_CONFIG flag set.
                        ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                        inputBuf.clear();
                        inputData.getChunkData(inputChunk, inputBuf);
                        int flags = inputData.getChunkFlags(inputChunk);
                        long time = inputData.getChunkTime(inputChunk);
                        decoder.queueInputBuffer(inputBufIndex, 0, inputBuf.position(),
                                time, flags);
                        if (VERBOSE) {
                            Log.d(TAG, "submitted frame " + inputChunk + " to dec, size=" +
                                    inputBuf.position() + " flags=" + flags);
                        }
                        inputChunk++;
                    }
                } else {
                    if (VERBOSE) Log.d(TAG, "input buffer not available");
                }
            }

            // Assume output is available.  Loop until both assumptions are false.
            boolean decoderOutputAvailable = !decoderDone;
            boolean encoderOutputAvailable = true;
            while (decoderOutputAvailable || encoderOutputAvailable) {
                // Start by draining any pending output from the encoder.  It's important to
                // do this before we try to stuff any more data in.
                int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (VERBOSE) Log.d(TAG, "no output from encoder available");
                    encoderOutputAvailable = false;
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    encoderOutputBuffers = encoder.getOutputBuffers();
                    if (VERBOSE) Log.d(TAG, "encoder output buffers changed");
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    MediaFormat newFormat = encoder.getOutputFormat();
                    if (VERBOSE) Log.d(TAG, "encoder output format changed: " + newFormat);
                } else if (encoderStatus < 0) {
                    fail("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
                } else { // encoderStatus >= 0
                    ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                    if (encodedData == null) {
                        fail("encoderOutputBuffer " + encoderStatus + " was null");
                    }

                    // Write the data to the output "file".
                    if (info.size != 0) {
                        encodedData.position(info.offset);
                        encodedData.limit(info.offset + info.size);

                        outputData.addChunk(encodedData, info.flags, info.presentationTimeUs);
                        outputCount++;

                        if (VERBOSE) Log.d(TAG, "encoder output " + info.size + " bytes");
                    }
                    outputDone = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
                    encoder.releaseOutputBuffer(encoderStatus, false);
                }
                if (encoderStatus != MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // Continue attempts to drain output.
                    continue;
                }

                // Encoder is drained, check to see if we've got a new frame of output from
                // the decoder.  (The output is going to a Surface, rather than a ByteBuffer,
                // but we still get information through BufferInfo.)
                if (!decoderDone) {
                    int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                    if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                        // no output available yet
                        if (VERBOSE) Log.d(TAG, "no output from decoder available");
                        decoderOutputAvailable = false;
                    } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                        //decoderOutputBuffers = decoder.getOutputBuffers();
                        if (VERBOSE) Log.d(TAG, "decoder output buffers changed (we don't care)");
                    } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                        // expected before first buffer of data
                        MediaFormat newFormat = decoder.getOutputFormat();
                        if (VERBOSE) Log.d(TAG, "decoder output format changed: " + newFormat);
                    } else if (decoderStatus < 0) {
                        fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
                    } else { // decoderStatus >= 0
                        if (VERBOSE) Log.d(TAG, "surface decoder given buffer "
                                + decoderStatus + " (size=" + info.size + ")");
                        // The ByteBuffers are null references, but we still get a nonzero
                        // size for the decoded data.
                        boolean doRender = (info.size != 0);

                        // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                        // to SurfaceTexture to convert to a texture.  The API doesn't
                        // guarantee that the texture will be available before the call
                        // returns, so we need to wait for the onFrameAvailable callback to
                        // fire.  If we don't wait, we risk rendering from the previous frame.
                        decoder.releaseOutputBuffer(decoderStatus, doRender);
                        if (doRender) {
                            // This waits for the image and renders it after it arrives.
                            if (VERBOSE) Log.d(TAG, "awaiting frame");
                            outputSurface.awaitNewImage();
                            outputSurface.drawImage();

                            // Send it to the encoder.
                            inputSurface.setPresentationTime(info.presentationTimeUs * 1000);
                            if (VERBOSE) Log.d(TAG, "swapBuffers");
                            inputSurface.swapBuffers();
                        }
                        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            // forward decoder EOS to encoder
                            if (VERBOSE) Log.d(TAG, "signaling input EOS");
                            if (WORK_AROUND_BUGS) {
                                // Bail early, possibly dropping a frame.
                                return;
                            } else {
                                encoder.signalEndOfInputStream();
                            }
                        }
                    }
                }
            }
        }

        if (inputChunk != outputCount) {
            throw new RuntimeException("frame lost: " + inputChunk + " in, " +
                    outputCount + " out");
        }
    }

    /**
     * Checks the video file to see if the contents match our expectations.  We decode the
     * video to a Surface and check the pixels with GL.
     */
    private void checkVideoFile(VideoChunks inputData)
            throws IOException {
        OutputSurface surface = null;
        MediaCodec decoder = null;

        mLargestColorDelta = -1;

        if (VERBOSE) Log.d(TAG, "checkVideoFile");

        try {
            surface = new OutputSurface(mWidth, mHeight, mUseHighBitDepth);

            MediaFormat format = inputData.getMediaFormat();
            decoder = MediaCodec.createByCodecName(mDecoderName);
            format.setInteger(KEY_ALLOW_FRAME_DROP, 0);
            decoder.configure(format, surface.getSurface(), null, 0);
            decoder.start();

            int badFrames = checkVideoData(inputData, decoder, surface);
            if (badFrames != 0) {
                fail("Found " + badFrames + " bad frames");
            }
        } finally {
            if (surface != null) {
                surface.release();
            }
            if (decoder != null) {
                decoder.stop();
                decoder.release();
            }

            Log.i(TAG, "Largest color delta: " + mLargestColorDelta);
        }
    }

    /**
     * Checks the video data.
     *
     * @return the number of bad frames
     */
    private int checkVideoData(VideoChunks inputData, MediaCodec decoder, OutputSurface surface) {
        final int TIMEOUT_USEC = 1000;
        ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
        ByteBuffer[] decoderOutputBuffers = decoder.getOutputBuffers();
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int inputChunk = 0;
        int checkIndex = 0;
        int badFrames = 0;

        boolean outputDone = false;
        boolean inputDone = false;
        while (!outputDone) {
            if (VERBOSE) Log.d(TAG, "check loop");

            // Feed more data to the decoder.
            if (!inputDone) {
                int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
                if (inputBufIndex >= 0) {
                    if (inputChunk == inputData.getNumChunks()) {
                        // End of stream -- send empty frame with EOS flag set.
                        decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L,
                                MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        inputDone = true;
                        if (VERBOSE) Log.d(TAG, "sent input EOS");
                    } else {
                        // Copy a chunk of input to the decoder.  The first chunk should have
                        // the BUFFER_FLAG_CODEC_CONFIG flag set.
                        ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                        inputBuf.clear();
                        inputData.getChunkData(inputChunk, inputBuf);
                        int flags = inputData.getChunkFlags(inputChunk);
                        long time = inputData.getChunkTime(inputChunk);
                        decoder.queueInputBuffer(inputBufIndex, 0, inputBuf.position(),
                                time, flags);
                        if (VERBOSE) {
                            Log.d(TAG, "submitted frame " + inputChunk + " to dec, size=" +
                                    inputBuf.position() + " flags=" + flags);
                        }
                        inputChunk++;
                    }
                } else {
                    if (VERBOSE) Log.d(TAG, "input buffer not available");
                }
            }

            if (!outputDone) {
                int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (VERBOSE) Log.d(TAG, "no output from decoder available");
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    decoderOutputBuffers = decoder.getOutputBuffers();
                    if (VERBOSE) Log.d(TAG, "decoder output buffers changed");
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    MediaFormat newFormat = decoder.getOutputFormat();
                    if (VERBOSE) Log.d(TAG, "decoder output format changed: " + newFormat);
                } else if (decoderStatus < 0) {
                    fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
                } else { // decoderStatus >= 0
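                    // Not used below: when decoding to a Surface the ByteBuffer carries
                    // no accessible pixel data; only info.size is meaningful here.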
                    ByteBuffer decodedData = decoderOutputBuffers[decoderStatus];

                    if (VERBOSE) Log.d(TAG, "surface decoder given buffer " + decoderStatus +
                            " (size=" + info.size + ")");
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        if (VERBOSE) Log.d(TAG, "output EOS");
                        outputDone = true;
                    }

                    boolean doRender = (info.size != 0);

                    // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                    // to SurfaceTexture to convert to a texture.  The API doesn't guarantee
                    // that the texture will be available before the call returns, so we
                    // need to wait for the onFrameAvailable callback to fire.
                    decoder.releaseOutputBuffer(decoderStatus, doRender);
                    if (doRender) {
                        if (VERBOSE) Log.d(TAG, "awaiting frame " + checkIndex);
                        assertEquals("Wrong time stamp", computePresentationTime(checkIndex),
                                info.presentationTimeUs);
                        surface.awaitNewImage();
                        surface.drawImage();
                        if (!checkSurfaceFrame(checkIndex)) {
                            badFrames++;
                        }
                        checkIndex++;
                    }
                }
            }
        }

        return badFrames;
    }

    /**
     * Checks the frame for correctness, using GL to check RGB values.
     *
     * @return true if the frame looks good
     */
    private boolean checkSurfaceFrame(int frameIndex) {
        ByteBuffer pixelBuf = ByteBuffer.allocateDirect(4); // TODO - reuse this
        boolean frameFailed = false;
        // Choose the appropriate initial/regular tolerance
        int maxDelta = frameIndex < INITIAL_TOLERANCE_FRAME_LIMIT ? INITIAL_TOLERANCE : TOLERANCE;
        for (int i = 0; i < 8; i++) {
            // Note the coordinates are inverted on the Y-axis in GL.
            int x, y;
            if (i < 4) {
                x = i * (mWidth / 4) + (mWidth / 8);
                y = (mHeight * 3) / 4;
            } else {
                x = (7 - i) * (mWidth / 4) + (mWidth / 8);
                y = mHeight / 4;
            }

            int r, g, b;
            if (mUseHighBitDepth) {
                GLES30.glReadPixels(x, y, 1, 1, GLES20.GL_RGBA,
                        GLES30.GL_UNSIGNED_INT_2_10_10_10_REV, pixelBuf);
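                // GL_UNSIGNED_INT_2_10_10_10_REV packs R in bits 0-9, G in 10-19, B in
                // 20-29 and A in 30-31 of a 32-bit word; reading the little-endian bytes
                // below reassembles each 10-bit channel from two adjacent bytes.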
                r = (pixelBuf.get(1) & 0x03) << 8 | (pixelBuf.get(0) & 0xFF);
                g = (pixelBuf.get(2) & 0x0F) << 6 | ((pixelBuf.get(1) >> 2) & 0x3F);
                b = (pixelBuf.get(3) & 0x3F) << 4 | ((pixelBuf.get(2) >> 4) & 0x0F);
                // Convert the values to 8 bit (using rounding division by 4) as comparisons
                // later are with 8 bit RGB values
                r = (r + 2) >> 2;
                g = (g + 2) >> 2;
                b = (b + 2) >> 2;
            } else {
                GLES20.glReadPixels(x, y, 1, 1, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelBuf);
                r = pixelBuf.get(0) & 0xFF;
                g = pixelBuf.get(1) & 0xFF;
                b = pixelBuf.get(2) & 0xFF;
            }
            //Log.d(TAG, "GOT(" + frameIndex + "/" + i + "): r=" + r + " g=" + g + " b=" + b);

            int expR, expG, expB;
            if (i == frameIndex % 8) {
                // colored rect (green/blue swapped)
                expR = TEST_R1;
                expG = TEST_B1;
                expB = TEST_G1;
            } else {
                // zero background color (green/blue swapped)
                expR = TEST_R0;
                expG = TEST_B0;
                expB = TEST_G0;
            }
            if (!isColorClose(r, expR, maxDelta) ||
                    !isColorClose(g, expG, maxDelta) ||
                    !isColorClose(b, expB, maxDelta)) {
                Log.w(TAG, "Bad frame " + frameIndex + " (rect=" + i + ": rgb=" + r +
                        "," + g + "," + b + " vs. expected " + expR + "," + expG +
                        "," + expB + ") for allowed error of " + maxDelta);
                frameFailed = true;
            }
        }

        return !frameFailed;
    }

    /**
     * Returns true if the actual color value is close to the expected color value.  Updates
     * mLargestColorDelta.
     */
    boolean isColorClose(int actual, int expected, int maxDelta) {
        int delta = Math.abs(actual - expected);
        if (delta > mLargestColorDelta) {
            mLargestColorDelta = delta;
        }
        return (delta <= maxDelta);
    }

    /**
     * Generates the presentation time for frame N, in microseconds.
     */
    private static long computePresentationTime(int frameIndex) {
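        // Arbitrary nonzero offset (123us) so the first frame's timestamp isn't exactly zero.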
        return 123 + frameIndex * 1000000 / FRAME_RATE;
    }


    /**
     * The elementary stream coming out of the encoder needs to be fed back into
     * the decoder one chunk at a time.  If we just wrote the data to a file, we would lose
     * the information about chunk boundaries.  This class stores the encoded data in memory,
     * retaining the chunk organization.
     */
    private static class VideoChunks {
        private MediaFormat mMediaFormat;
        private ArrayList<byte[]> mChunks = new ArrayList<byte[]>();
        private ArrayList<Integer> mFlags = new ArrayList<Integer>();
        private ArrayList<Long> mTimes = new ArrayList<Long>();

        /**
         * Sets the MediaFormat, for the benefit of a future decoder.
         */
        public void setMediaFormat(MediaFormat format) {
            mMediaFormat = format;
        }

        /**
         * Gets the MediaFormat that was used by the encoder.
         */
        public MediaFormat getMediaFormat() {
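            // Return a copy so callers can modify it (e.g. set allow-frame-drop)
            // without affecting the stored format.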
            return new MediaFormat(mMediaFormat);
        }

        /**
         * Adds a new chunk.  Advances buf.position to buf.limit.
         */
        public void addChunk(ByteBuffer buf, int flags, long time) {
            byte[] data = new byte[buf.remaining()];
            buf.get(data);
            mChunks.add(data);
            mFlags.add(flags);
            mTimes.add(time);
        }

        /**
         * Returns the number of chunks currently held.
         */
        public int getNumChunks() {
            return mChunks.size();
        }

        /**
         * Copies the data from chunk N into "dest".  Advances dest.position.
         */
        public void getChunkData(int chunk, ByteBuffer dest) {
            byte[] data = mChunks.get(chunk);
            dest.put(data);
        }

        /**
         * Returns the flags associated with chunk N.
         */
        public int getChunkFlags(int chunk) {
            return mFlags.get(chunk);
        }

        /**
         * Returns the timestamp associated with chunk N.
         */
        public long getChunkTime(int chunk) {
            return mTimes.get(chunk);
        }

        /**
         * Writes the chunks to a file as a contiguous stream.  Useful for debugging.
         */
        public void saveToFile(File file) {
            Log.d(TAG, "saving chunk data to file " + file);
            FileOutputStream fos = null;
            BufferedOutputStream bos = null;

            try {
                fos = new FileOutputStream(file);
                bos = new BufferedOutputStream(fos);
                fos = null;     // closing bos will also close fos

                int numChunks = getNumChunks();
                for (int i = 0; i < numChunks; i++) {
                    byte[] chunk = mChunks.get(i);
                    bos.write(chunk);
                }
            } catch (IOException ioe) {
                throw new RuntimeException(ioe);
            } finally {
                try {
                    if (bos != null) {
                        bos.close();
                    }
                    if (fos != null) {
                        fos.close();
                    }
                } catch (IOException ioe) {
                    throw new RuntimeException(ioe);
                }
            }
        }
    }
}