/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.codec.cts;

import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_Format32bitABGR2101010;
import static android.media.MediaCodecInfo.CodecProfileLevel.AV1ProfileMain10;
import static android.media.MediaCodecInfo.CodecProfileLevel.AVCProfileHigh10;
import static android.media.MediaCodecInfo.CodecProfileLevel.HEVCProfileMain10;
import static android.media.MediaCodecInfo.CodecProfileLevel.VP9Profile2;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import static org.junit.Assume.assumeTrue;

import android.content.Context;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.media.cts.InputSurface;
import android.media.cts.OutputSurface;
import android.media.cts.TestArgs;
import android.opengl.GLES20;
import android.opengl.GLES30;
import android.os.Build;
import android.platform.test.annotations.PlatinumTest;
import android.util.Log;

import androidx.test.platform.app.InstrumentationRegistry;

import com.android.compatibility.common.util.ApiLevelUtil;
import com.android.compatibility.common.util.ApiTest;
import com.android.compatibility.common.util.MediaUtils;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

/**
 * This test has three steps:
 * <ol>
 *   <li>Generate a video test stream.
 *   <li>Decode the video from the stream, rendering frames into a SurfaceTexture.
 *       Render the texture onto a Surface that feeds a video encoder, modifying
 *       the output with a fragment shader.
 *   <li>Decode the second video and compare it to the expected result.
 * </ol><p>
 * The second step is a typical scenario for video editing.  We could do all this in one
 * step, feeding data through multiple stages of MediaCodec, but at some point we're
 * no longer exercising the code in the way we expect it to be used (and the code
 * gets a bit unwieldy).
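 * <p>Data flow, roughly:
 * <pre>
 *   generateVideoFile:  GL frames -> encoder -> VideoChunks (in memory)
 *   editVideoFile:      VideoChunks -> decoder -> SurfaceTexture -> fragment shader
 *                       -> encoder input Surface -> encoder -> VideoChunks
 *   checkVideoFile:     VideoChunks -> decoder -> Surface -> glReadPixels -> compare
 * </pre>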
 */
@PlatinumTest(focusArea = "media")
@RunWith(Parameterized.class)
public class DecodeEditEncodeTest {
    private static final String TAG = "DecodeEditEncode";
    private static final boolean WORK_AROUND_BUGS = false;  // avoid fatal codec bugs
    private static final boolean VERBOSE = false;           // lots of logging
    private static final boolean DEBUG_SAVE_FILE = false;   // save copy of encoded movie
    // TODO(b/248315681) Remove codenameEquals() check once devices return correct version for U
    private static final boolean IS_AFTER_T = ApiLevelUtil.isAfter(Build.VERSION_CODES.TIRAMISU)
            || ApiLevelUtil.codenameEquals("UpsideDownCake");

    // parameters for the encoder
    private static final int FRAME_RATE = 15;               // 15fps
    private static final int IFRAME_INTERVAL = 10;          // 10 seconds between I-frames
    private static final String KEY_ALLOW_FRAME_DROP = "allow-frame-drop";
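    // Note: the string above matches the platform constant MediaFormat.KEY_ALLOW_FRAME_DROP;
    // it is redeclared locally here (presumably so the test does not depend on that constant
    // being visible at compile time).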

    // movie length, in frames
    private static final int NUM_FRAMES = FRAME_RATE * 3;   // three seconds of video

    // Since encoders are lossy, we treat the first N frames of the clip more leniently than
    // the remainder.  The number of such frames is INITIAL_TOLERANCE_FRAME_LIMIT; the
    // per-channel tolerance within that window is INITIAL_TOLERANCE, and the tolerance
    // afterwards is TOLERANCE.
    private final int INITIAL_TOLERANCE_FRAME_LIMIT = FRAME_RATE * 2;

    // allowed error between input and output
    private static final int TOLERANCE = 8;

    // allowed error between input and output for the initial INITIAL_TOLERANCE_FRAME_LIMIT frames
    private static final int INITIAL_TOLERANCE = 10;

    private static final int TEST_R0 = 0;                   // dull green background
    private static final int TEST_G0 = 136;
    private static final int TEST_B0 = 0;
    private static final int TEST_R1 = 236;                 // pink; BT.601 YUV {120,160,200}
    private static final int TEST_G1 = 50;
    private static final int TEST_B1 = 186;

    // Replaces TextureRender.FRAGMENT_SHADER during edit; swaps green and blue channels.
    private static final String FRAGMENT_SHADER =
            "#extension GL_OES_EGL_image_external : require\n" +
            "precision mediump float;\n" +
            "varying vec2 vTextureCoord;\n" +
            "uniform samplerExternalOES sTexture;\n" +
            "void main() {\n" +
            "  gl_FragColor = texture2D(sTexture, vTextureCoord).rbga;\n" +
            "}\n";
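    // The ".rbga" swizzle above is what performs the swap: output (r,g,b,a) is taken from
    // input (r,b,g,a).  For example, the colored rect (TEST_R1,TEST_G1,TEST_B1) = (236,50,186)
    // comes back as (236,186,50), which is why checkSurfaceFrame() swaps the expected G and B
    // values before comparing.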

    // component names
    private final String mEncoderName;
    private final String mDecoderName;
    // mime
    private final String mMediaType;
    // size of a frame, in pixels
    private final int mWidth;
    private final int mHeight;
    // bit rate, in bits per second
    private final int mBitRate;
    private final boolean mUseHighBitDepth;

    // largest color component delta seen (i.e. actual vs. expected)
    private int mLargestColorDelta;

    private static List<Object[]> prepareParamList(List<Object[]> exhaustiveArgsList) {
        final List<Object[]> argsList = new ArrayList<>();
        int argLength = exhaustiveArgsList.get(0).length;
        for (Object[] arg : exhaustiveArgsList) {
            String mediaType = (String) arg[0];
            if (TestArgs.shouldSkipMediaType(mediaType)) {
                continue;
            }
            String[] encoderNames = MediaUtils.getEncoderNamesForMime(mediaType);
            String[] decoderNames = MediaUtils.getDecoderNamesForMime(mediaType);
            // The first decoder/encoder pair that supports the given mediaType is chosen.
            outerLoop:
            for (String decoder : decoderNames) {
                if (TestArgs.shouldSkipCodec(decoder)) {
                    continue;
                }

                for (String encoder : encoderNames) {
                    if (TestArgs.shouldSkipCodec(encoder)) {
                        continue;
                    }
                    Object[] testArgs = new Object[argLength + 2];
                    // Add the encoder and decoder names as the first two arguments, then
                    // copy the arguments that were passed in.
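                    // Resulting layout (see input() below):
                    //   {encoder, decoder, mediaType, width, height, bitRate, useHighBitDepth}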
                    testArgs[0] = encoder;
                    testArgs[1] = decoder;
                    System.arraycopy(arg, 0, testArgs, 2, argLength);
                    argsList.add(testArgs);
                    // Only one combination of encoder and decoder is tested.
                    break outerLoop;
                }
            }
        }
        return argsList;
    }

    private static boolean hasSupportForColorFormat(String name, String mediaType,
            int colorFormat, boolean isEncoder) {
        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        for (MediaCodecInfo codecInfo : mcl.getCodecInfos()) {
            if (isEncoder != codecInfo.isEncoder()) {
                continue;
            }
            if (!name.equals(codecInfo.getName())) {
                continue;
            }
            MediaCodecInfo.CodecCapabilities cap = codecInfo.getCapabilitiesForType(mediaType);
            for (int c : cap.colorFormats) {
                if (c == colorFormat) {
                    return true;
                }
            }
        }
        return false;
    }

    @Before
    public void shouldSkip() throws IOException {
        MediaFormat format = MediaFormat.createVideoFormat(mMediaType, mWidth, mHeight);
        format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        if (mUseHighBitDepth) {
            assumeTrue(mEncoderName + " doesn't support RGBA1010102",
                    hasSupportForColorFormat(mEncoderName, mMediaType,
                            COLOR_Format32bitABGR2101010, /* isEncoder */ true));

            switch (mMediaType) {
                case MediaFormat.MIMETYPE_VIDEO_AVC:
                    format.setInteger(MediaFormat.KEY_PROFILE, AVCProfileHigh10);
                    break;
                case MediaFormat.MIMETYPE_VIDEO_HEVC:
                    format.setInteger(MediaFormat.KEY_PROFILE, HEVCProfileMain10);
                    break;
                case MediaFormat.MIMETYPE_VIDEO_VP9:
                    format.setInteger(MediaFormat.KEY_PROFILE, VP9Profile2);
                    break;
                case MediaFormat.MIMETYPE_VIDEO_AV1:
                    format.setInteger(MediaFormat.KEY_PROFILE, AV1ProfileMain10);
                    break;
                default:
                    fail("MediaType " + mMediaType + " is not supported for 10-bit testing.");
                    break;
            }
        }
        assumeTrue(MediaUtils.supports(mEncoderName, format));
        assumeTrue(MediaUtils.supports(mDecoderName, format));
        // A few cuttlefish-specific color conversion issues were fixed after Android T.
        if (MediaUtils.onCuttlefish()) {
            assumeTrue("Color conversion related tests are not valid on cuttlefish releases "
                    + "through Android T for format: " + format, IS_AFTER_T);
        }
    }

    @Parameterized.Parameters(name = "{index}_{0}_{1}_{2}_{3}_{4}_{5}")
    public static Collection<Object[]> input() {
        final List<Object[]> baseArgsList = Arrays.asList(new Object[][]{
                // width, height, bitrate
                {176, 144, 1000000},
                {320, 240, 2000000},
                {1280, 720, 6000000}
        });
        final String[] mediaTypes = {MediaFormat.MIMETYPE_VIDEO_AVC,
                MediaFormat.MIMETYPE_VIDEO_HEVC, MediaFormat.MIMETYPE_VIDEO_VP8,
                MediaFormat.MIMETYPE_VIDEO_VP9, MediaFormat.MIMETYPE_VIDEO_AV1};
        final boolean[] useHighBitDepthModes = {false, true};
        final List<Object[]> exhaustiveArgsList = new ArrayList<>();
        for (boolean useHighBitDepth : useHighBitDepthModes) {
            for (String mediaType : mediaTypes) {
                for (Object[] obj : baseArgsList) {
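                    // VP8 is an 8-bit-only codec, so there is no high-bit-depth profile
                    // to exercise for it.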
                    if (mediaType.equals(MediaFormat.MIMETYPE_VIDEO_VP8) && useHighBitDepth) {
                        continue;
                    }
                    exhaustiveArgsList.add(
                            new Object[]{mediaType, obj[0], obj[1], obj[2], useHighBitDepth});
                }
            }
        }
        return prepareParamList(exhaustiveArgsList);
    }

    public DecodeEditEncodeTest(String encoder, String decoder, String mimeType, int width,
            int height, int bitRate, boolean useHighBitDepth) {
        if ((width % 16) != 0 || (height % 16) != 0) {
            Log.w(TAG, "WARNING: width or height not multiple of 16");
        }
        mEncoderName = encoder;
        mDecoderName = decoder;
        mMediaType = mimeType;
        mWidth = width;
        mHeight = height;
        mBitRate = bitRate;
        mUseHighBitDepth = useHighBitDepth;
    }

    @ApiTest(apis = {"android.opengl.GLES20#GL_FRAGMENT_SHADER",
            "android.opengl.GLES20#glReadPixels",
            "android.opengl.GLES30#glReadPixels",
            "android.media.format.MediaFormat#KEY_ALLOW_FRAME_DROP",
            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatSurface",
            "android.media.MediaCodecInfo.CodecCapabilities#COLOR_Format32bitABGR2101010",
            "android.media.MediaFormat#KEY_COLOR_RANGE",
            "android.media.MediaFormat#KEY_COLOR_STANDARD",
            "android.media.MediaFormat#KEY_COLOR_TRANSFER"})
    @Test
    public void testVideoEdit() throws Throwable {
        VideoEditWrapper.runTest(this);
    }

    /**
     * Wraps videoEditTest(), running it in a new thread.  Required because of the way
     * SurfaceTexture.OnFrameAvailableListener works when the current thread has a Looper
     * configured.
     */
    private static class VideoEditWrapper implements Runnable {
        private Throwable mThrowable;
        private DecodeEditEncodeTest mTest;

        private VideoEditWrapper(DecodeEditEncodeTest test) {
            mTest = test;
        }

        @Override
        public void run() {
            try {
                mTest.videoEditTest();
            } catch (Throwable th) {
                mThrowable = th;
            }
        }

        /** Entry point. */
        public static void runTest(DecodeEditEncodeTest obj) throws Throwable {
            VideoEditWrapper wrapper = new VideoEditWrapper(obj);
            Thread th = new Thread(wrapper, "codec test");
            th.start();
            th.join();
            if (wrapper.mThrowable != null) {
                throw wrapper.mThrowable;
            }
        }
    }

    /**
     * Tests editing of a video file with GL.
     */
    private void videoEditTest()
            throws IOException {
        VideoChunks sourceChunks = new VideoChunks();

        generateVideoFile(sourceChunks);

        if (DEBUG_SAVE_FILE) {
            // Save a copy to a file.  We call it ".mp4", but it's actually just an elementary
            // stream, so not all video players will know what to do with it.
            Context context = InstrumentationRegistry.getInstrumentation().getTargetContext();
            String dirName = context.getFilesDir().getAbsolutePath();
            String fileName = "vedit1_" + mWidth + "x" + mHeight + ".mp4";
            sourceChunks.saveToFile(new File(dirName, fileName));
        }

        VideoChunks destChunks = editVideoFile(sourceChunks);

        if (DEBUG_SAVE_FILE) {
            Context context = InstrumentationRegistry.getInstrumentation().getTargetContext();
            String dirName = context.getFilesDir().getAbsolutePath();
            String fileName = "vedit2_" + mWidth + "x" + mHeight + ".mp4";
            destChunks.saveToFile(new File(dirName, fileName));
        }

        checkVideoFile(destChunks);
    }

    /**
     * Generates a test video file, saving it as VideoChunks.  We generate frames with GL to
     * avoid having to deal with multiple YUV formats.
     */
    private void generateVideoFile(VideoChunks output)
            throws IOException {
        if (VERBOSE) Log.d(TAG, "generateVideoFile " + mWidth + "x" + mHeight);
        MediaCodec encoder = null;
        InputSurface inputSurface = null;

        try {
            // We avoid the device-specific limitations on width and height by using values that
            // are multiples of 16, which all tested devices seem to be able to handle.
            MediaFormat format = MediaFormat.createVideoFormat(mMediaType, mWidth, mHeight);

            // Set some properties.  Failing to specify some of these can cause the MediaCodec
            // configure() call to throw an unhelpful exception.
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
            if (mUseHighBitDepth) {
                format.setInteger(MediaFormat.KEY_COLOR_RANGE, MediaFormat.COLOR_RANGE_FULL);
                format.setInteger(MediaFormat.KEY_COLOR_STANDARD,
                        MediaFormat.COLOR_STANDARD_BT2020);
                format.setInteger(MediaFormat.KEY_COLOR_TRANSFER,
                        MediaFormat.COLOR_TRANSFER_ST2084);
            } else {
                format.setInteger(MediaFormat.KEY_COLOR_RANGE, MediaFormat.COLOR_RANGE_LIMITED);
                format.setInteger(MediaFormat.KEY_COLOR_STANDARD,
                        MediaFormat.COLOR_STANDARD_BT601_PAL);
                format.setInteger(MediaFormat.KEY_COLOR_TRANSFER,
                        MediaFormat.COLOR_TRANSFER_SDR_VIDEO);
            }
            if (VERBOSE) Log.d(TAG, "format: " + format);
            output.setMediaFormat(format);

            // Create a MediaCodec for the desired codec, then configure it as an encoder with
            // our desired properties.
            encoder = MediaCodec.createByCodecName(mEncoderName);
            encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            inputSurface = new InputSurface(encoder.createInputSurface(), mUseHighBitDepth);
            inputSurface.makeCurrent();
            encoder.start();

            generateVideoData(encoder, inputSurface, output);
        } finally {
            if (encoder != null) {
                if (VERBOSE) Log.d(TAG, "releasing encoder");
                encoder.stop();
                encoder.release();
                if (VERBOSE) Log.d(TAG, "released encoder");
            }
            if (inputSurface != null) {
                inputSurface.release();
            }
        }
    }

    /**
     * Generates video frames, feeds them into the encoder, and writes the output to the
     * VideoChunks instance.
     */
    private void generateVideoData(MediaCodec encoder, InputSurface inputSurface,
            VideoChunks output) {
        final int TIMEOUT_USEC = 10000;
        ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int generateIndex = 0;
        int outputCount = 0;

        // Loop until the output side is done.
        boolean inputDone = false;
        boolean outputDone = false;
        while (!outputDone) {
            if (VERBOSE) Log.d(TAG, "gen loop");

            // If we're not done submitting frames, generate a new one and submit it.  The
            // eglSwapBuffers call will block if the input is full.
            if (!inputDone) {
                if (generateIndex == NUM_FRAMES) {
                    // Send an empty frame with the end-of-stream flag set.
                    if (VERBOSE) Log.d(TAG, "signaling input EOS");
                    if (WORK_AROUND_BUGS) {
                        // Might drop a frame, but at least we won't crash mediaserver.
                        try { Thread.sleep(500); } catch (InterruptedException ie) {}
                        outputDone = true;
                    } else {
                        encoder.signalEndOfInputStream();
                    }
                    inputDone = true;
                } else {
                    generateSurfaceFrame(generateIndex);
                    inputSurface.setPresentationTime(computePresentationTime(generateIndex) * 1000);
                    if (VERBOSE) Log.d(TAG, "inputSurface swapBuffers");
                    inputSurface.swapBuffers();
                }
                generateIndex++;
            }

            // Check for output from the encoder.  If there's no output yet, we either need to
            // provide more input, or we need to wait for the encoder to work its magic.  We
            // can't actually tell which is the case, so if we can't get an output buffer right
            // away we loop around and see if it wants more input.
            //
            // If we do find output, drain it all before supplying more input.
            while (true) {
                int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (VERBOSE) Log.d(TAG, "no output from encoder available");
                    break;      // out of while
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    // not expected for an encoder
                    encoderOutputBuffers = encoder.getOutputBuffers();
                    if (VERBOSE) Log.d(TAG, "encoder output buffers changed");
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // expected on API 18+
                    MediaFormat newFormat = encoder.getOutputFormat();
                    if (VERBOSE) Log.d(TAG, "encoder output format changed: " + newFormat);
                } else if (encoderStatus < 0) {
                    fail("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
                } else { // encoderStatus >= 0
                    ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                    if (encodedData == null) {
                        fail("encoderOutputBuffer " + encoderStatus + " was null");
                    }

                    if (info.size != 0) {
                        // Adjust the ByteBuffer values to match BufferInfo.
                        encodedData.position(info.offset);
                        encodedData.limit(info.offset + info.size);

                        output.addChunk(encodedData, info.flags, info.presentationTimeUs);
                        if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
                            outputCount++;
                        }
                    }

                    encoder.releaseOutputBuffer(encoderStatus, false);
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        outputDone = true;
                        break;      // out of while
                    }
                }
            }
        }

        assertEquals("Frame count", NUM_FRAMES, outputCount);
    }

    /**
     * Generates a frame of data using GL commands.
     * <p>
     * We have an 8-frame animation sequence that wraps around.  It looks like this:
     * <pre>
     *   0 1 2 3
     *   7 6 5 4
     * </pre>
     * We draw one of the eight rectangles and leave the rest set to the zero-fill color.
     */
    private void generateSurfaceFrame(int frameIndex) {
        frameIndex %= 8;

        int startX, startY;
        if (frameIndex < 4) {
            // (0,0) is bottom-left in GL
            startX = frameIndex * (mWidth / 4);
            startY = mHeight / 2;
        } else {
            startX = (7 - frameIndex) * (mWidth / 4);
            startY = 0;
        }

        GLES20.glDisable(GLES20.GL_SCISSOR_TEST);
        GLES20.glClearColor(TEST_R0 / 255.0f, TEST_G0 / 255.0f, TEST_B0 / 255.0f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        GLES20.glEnable(GLES20.GL_SCISSOR_TEST);
        GLES20.glScissor(startX, startY, mWidth / 4, mHeight / 2);
        GLES20.glClearColor(TEST_R1 / 255.0f, TEST_G1 / 255.0f, TEST_B1 / 255.0f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    }

    /**
     * Edits a video file, saving the contents to a new file.  This involves decoding and
     * re-encoding, not to mention conversions between YUV and RGB, and so may be lossy.
     * <p>
     * If we recognize the decoded format we can do this in Java code using the ByteBuffer[]
     * output, but it's not practical to support all OEM formats.  By using a SurfaceTexture
     * for output and a Surface for input, we can avoid issues with obscure formats and can
     * use a fragment shader to do transformations.
     */
    private VideoChunks editVideoFile(VideoChunks inputData)
            throws IOException {
        if (VERBOSE) Log.d(TAG, "editVideoFile " + mWidth + "x" + mHeight);
        VideoChunks outputData = new VideoChunks();
        MediaCodec decoder = null;
        MediaCodec encoder = null;
        InputSurface inputSurface = null;
        OutputSurface outputSurface = null;

        try {
            MediaFormat inputFormat = inputData.getMediaFormat();

            // Create an encoder format that matches the input format.  (Might be able to just
            // re-use the format used to generate the video, since we want it to be the same.)
            MediaFormat outputFormat = MediaFormat.createVideoFormat(mMediaType, mWidth, mHeight);
            outputFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            outputFormat.setInteger(MediaFormat.KEY_BIT_RATE,
                    inputFormat.getInteger(MediaFormat.KEY_BIT_RATE));
            outputFormat.setInteger(MediaFormat.KEY_FRAME_RATE,
                    inputFormat.getInteger(MediaFormat.KEY_FRAME_RATE));
            outputFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL,
                    inputFormat.getInteger(MediaFormat.KEY_I_FRAME_INTERVAL));
            if (mUseHighBitDepth) {
                outputFormat.setInteger(MediaFormat.KEY_COLOR_RANGE, MediaFormat.COLOR_RANGE_FULL);
                outputFormat.setInteger(MediaFormat.KEY_COLOR_STANDARD,
                        MediaFormat.COLOR_STANDARD_BT2020);
                outputFormat.setInteger(MediaFormat.KEY_COLOR_TRANSFER,
                        MediaFormat.COLOR_TRANSFER_ST2084);
            } else {
                outputFormat.setInteger(MediaFormat.KEY_COLOR_RANGE,
                        MediaFormat.COLOR_RANGE_LIMITED);
                outputFormat.setInteger(MediaFormat.KEY_COLOR_STANDARD,
                        MediaFormat.COLOR_STANDARD_BT601_PAL);
                outputFormat.setInteger(MediaFormat.KEY_COLOR_TRANSFER,
                        MediaFormat.COLOR_TRANSFER_SDR_VIDEO);
            }
            outputData.setMediaFormat(outputFormat);

            encoder = MediaCodec.createByCodecName(mEncoderName);
            encoder.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            inputSurface = new InputSurface(encoder.createInputSurface(), mUseHighBitDepth);
            inputSurface.makeCurrent();
            encoder.start();

            // OutputSurface uses the EGL context created by InputSurface.
            decoder = MediaCodec.createByCodecName(mDecoderName);
            outputSurface = new OutputSurface();
            outputSurface.changeFragmentShader(FRAGMENT_SHADER);
            // do not allow frame drops
            inputFormat.setInteger(KEY_ALLOW_FRAME_DROP, 0);

            decoder.configure(inputFormat, outputSurface.getSurface(), null, 0);
            decoder.start();

            // verify that we are not dropping frames
            inputFormat = decoder.getInputFormat();
            assertEquals("Could not prevent frame dropping",
                         0, inputFormat.getInteger(KEY_ALLOW_FRAME_DROP));

            editVideoData(inputData, decoder, outputSurface, inputSurface, encoder, outputData);
        } finally {
            if (VERBOSE) Log.d(TAG, "shutting down encoder, decoder");
            if (outputSurface != null) {
                outputSurface.release();
            }
            if (inputSurface != null) {
                inputSurface.release();
            }
            if (encoder != null) {
                encoder.stop();
                encoder.release();
            }
            if (decoder != null) {
                decoder.stop();
                decoder.release();
            }
        }

        return outputData;
    }

    /**
     * Edits a stream of video data.
     */
    private void editVideoData(VideoChunks inputData, MediaCodec decoder,
            OutputSurface outputSurface, InputSurface inputSurface, MediaCodec encoder,
            VideoChunks outputData) {
        final int TIMEOUT_USEC = 10000;
        ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
        ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int inputChunk = 0;
        int outputCount = 0;

        boolean outputDone = false;
        boolean inputDone = false;
        boolean decoderDone = false;
        while (!outputDone) {
            if (VERBOSE) Log.d(TAG, "edit loop");

            // Feed more data to the decoder.
            if (!inputDone) {
                int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
                if (inputBufIndex >= 0) {
                    if (inputChunk == inputData.getNumChunks()) {
                        // End of stream -- send empty frame with EOS flag set.
                        decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L,
                                MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        inputDone = true;
                        if (VERBOSE) Log.d(TAG, "sent input EOS (with zero-length frame)");
                    } else {
                        // Copy a chunk of input to the decoder.  The first chunk should have
                        // the BUFFER_FLAG_CODEC_CONFIG flag set.
                        ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                        inputBuf.clear();
                        inputData.getChunkData(inputChunk, inputBuf);
                        int flags = inputData.getChunkFlags(inputChunk);
                        long time = inputData.getChunkTime(inputChunk);
                        decoder.queueInputBuffer(inputBufIndex, 0, inputBuf.position(),
                                time, flags);
                        if (VERBOSE) {
                            Log.d(TAG, "submitted frame " + inputChunk + " to dec, size=" +
                                    inputBuf.position() + " flags=" + flags);
                        }
                        inputChunk++;
                    }
                } else {
                    if (VERBOSE) Log.d(TAG, "input buffer not available");
                }
            }

            // Assume output is available.  Loop until both assumptions are false.
            boolean decoderOutputAvailable = !decoderDone;
            boolean encoderOutputAvailable = true;
            while (decoderOutputAvailable || encoderOutputAvailable) {
                // Start by draining any pending output from the encoder.  It's important to
                // do this before we try to stuff any more data in.
                int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (VERBOSE) Log.d(TAG, "no output from encoder available");
                    encoderOutputAvailable = false;
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    encoderOutputBuffers = encoder.getOutputBuffers();
                    if (VERBOSE) Log.d(TAG, "encoder output buffers changed");
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    MediaFormat newFormat = encoder.getOutputFormat();
                    if (VERBOSE) Log.d(TAG, "encoder output format changed: " + newFormat);
                } else if (encoderStatus < 0) {
                    fail("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
                } else { // encoderStatus >= 0
                    ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                    if (encodedData == null) {
                        fail("encoderOutputBuffer " + encoderStatus + " was null");
                    }

                    // Write the data to the output "file".
                    if (info.size != 0) {
                        encodedData.position(info.offset);
                        encodedData.limit(info.offset + info.size);

                        outputData.addChunk(encodedData, info.flags, info.presentationTimeUs);
                        outputCount++;

                        if (VERBOSE) Log.d(TAG, "encoder output " + info.size + " bytes");
                    }
                    outputDone = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
                    encoder.releaseOutputBuffer(encoderStatus, false);
                }
                if (encoderStatus != MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // Continue attempts to drain output.
                    continue;
                }

                // Encoder is drained, check to see if we've got a new frame of output from
                // the decoder.  (The output is going to a Surface, rather than a ByteBuffer,
                // but we still get information through BufferInfo.)
                if (!decoderDone) {
                    int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                    if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                        // no output available yet
                        if (VERBOSE) Log.d(TAG, "no output from decoder available");
                        decoderOutputAvailable = false;
                    } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                        //decoderOutputBuffers = decoder.getOutputBuffers();
                        if (VERBOSE) Log.d(TAG, "decoder output buffers changed (we don't care)");
                    } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                        // expected before first buffer of data
                        MediaFormat newFormat = decoder.getOutputFormat();
                        if (VERBOSE) Log.d(TAG, "decoder output format changed: " + newFormat);
                    } else if (decoderStatus < 0) {
                        fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
                    } else { // decoderStatus >= 0
                        if (VERBOSE) Log.d(TAG, "surface decoder given buffer "
                                + decoderStatus + " (size=" + info.size + ")");
                        // The ByteBuffers are null references, but we still get a nonzero
                        // size for the decoded data.
                        boolean doRender = (info.size != 0);

                        // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                        // to SurfaceTexture to convert to a texture.  The API doesn't
                        // guarantee that the texture will be available before the call
                        // returns, so we need to wait for the onFrameAvailable callback to
                        // fire.  If we don't wait, we risk rendering from the previous frame.
                        decoder.releaseOutputBuffer(decoderStatus, doRender);
                        if (doRender) {
                            // This waits for the image and renders it after it arrives.
                            if (VERBOSE) Log.d(TAG, "awaiting frame");
                            outputSurface.awaitNewImage();
                            outputSurface.drawImage();

                            // Send it to the encoder.
                            inputSurface.setPresentationTime(info.presentationTimeUs * 1000);
                            if (VERBOSE) Log.d(TAG, "swapBuffers");
                            inputSurface.swapBuffers();
                        }
                        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            // forward decoder EOS to encoder
                            if (VERBOSE) Log.d(TAG, "signaling input EOS");
                            if (WORK_AROUND_BUGS) {
                                // Bail early, possibly dropping a frame.
                                return;
                            } else {
                                encoder.signalEndOfInputStream();
                            }
                        }
                    }
                }
            }
        }

        if (inputChunk != outputCount) {
            throw new RuntimeException("frame lost: " + inputChunk + " in, " +
                    outputCount + " out");
        }
    }

    /**
     * Checks the video file to see if the contents match our expectations.  We decode the
     * video to a Surface and check the pixels with GL.
     */
    private void checkVideoFile(VideoChunks inputData)
            throws IOException {
        OutputSurface surface = null;
        MediaCodec decoder = null;

        mLargestColorDelta = -1;

        if (VERBOSE) Log.d(TAG, "checkVideoFile");

        try {
            surface = new OutputSurface(mWidth, mHeight, mUseHighBitDepth);

            MediaFormat format = inputData.getMediaFormat();
            decoder = MediaCodec.createByCodecName(mDecoderName);
            format.setInteger(KEY_ALLOW_FRAME_DROP, 0);
            decoder.configure(format, surface.getSurface(), null, 0);
            decoder.start();

            int badFrames = checkVideoData(inputData, decoder, surface);
            if (badFrames != 0) {
                fail("Found " + badFrames + " bad frames");
            }
        } finally {
            if (surface != null) {
                surface.release();
            }
            if (decoder != null) {
                decoder.stop();
                decoder.release();
            }

            Log.i(TAG, "Largest color delta: " + mLargestColorDelta);
        }
    }

    /**
     * Checks the video data.
     *
     * @return the number of bad frames
     */
    private int checkVideoData(VideoChunks inputData, MediaCodec decoder, OutputSurface surface) {
        final int TIMEOUT_USEC = 1000;
        ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
        ByteBuffer[] decoderOutputBuffers = decoder.getOutputBuffers();
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int inputChunk = 0;
        int checkIndex = 0;
        int badFrames = 0;

        boolean outputDone = false;
        boolean inputDone = false;
        while (!outputDone) {
            if (VERBOSE) Log.d(TAG, "check loop");

            // Feed more data to the decoder.
            if (!inputDone) {
                int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
                if (inputBufIndex >= 0) {
                    if (inputChunk == inputData.getNumChunks()) {
                        // End of stream -- send empty frame with EOS flag set.
                        decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L,
                                MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        inputDone = true;
                        if (VERBOSE) Log.d(TAG, "sent input EOS");
                    } else {
                        // Copy a chunk of input to the decoder.  The first chunk should have
                        // the BUFFER_FLAG_CODEC_CONFIG flag set.
                        ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                        inputBuf.clear();
                        inputData.getChunkData(inputChunk, inputBuf);
                        int flags = inputData.getChunkFlags(inputChunk);
                        long time = inputData.getChunkTime(inputChunk);
                        decoder.queueInputBuffer(inputBufIndex, 0, inputBuf.position(),
                                time, flags);
                        if (VERBOSE) {
                            Log.d(TAG, "submitted frame " + inputChunk + " to dec, size=" +
                                    inputBuf.position() + " flags=" + flags);
                        }
                        inputChunk++;
                    }
                } else {
                    if (VERBOSE) Log.d(TAG, "input buffer not available");
                }
            }

            if (!outputDone) {
                int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (VERBOSE) Log.d(TAG, "no output from decoder available");
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    decoderOutputBuffers = decoder.getOutputBuffers();
                    if (VERBOSE) Log.d(TAG, "decoder output buffers changed");
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    MediaFormat newFormat = decoder.getOutputFormat();
                    if (VERBOSE) Log.d(TAG, "decoder output format changed: " + newFormat);
                } else if (decoderStatus < 0) {
                    fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
                } else { // decoderStatus >= 0
                    ByteBuffer decodedData = decoderOutputBuffers[decoderStatus];

                    if (VERBOSE) Log.d(TAG, "surface decoder given buffer " + decoderStatus +
                            " (size=" + info.size + ")");
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        if (VERBOSE) Log.d(TAG, "output EOS");
                        outputDone = true;
                    }

                    boolean doRender = (info.size != 0);

                    // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                    // to SurfaceTexture to convert to a texture.  The API doesn't guarantee
                    // that the texture will be available before the call returns, so we
                    // need to wait for the onFrameAvailable callback to fire.
                    decoder.releaseOutputBuffer(decoderStatus, doRender);
                    if (doRender) {
                        if (VERBOSE) Log.d(TAG, "awaiting frame " + checkIndex);
                        assertEquals("Wrong time stamp", computePresentationTime(checkIndex),
                                info.presentationTimeUs);
                        surface.awaitNewImage();
                        surface.drawImage();
                        if (!checkSurfaceFrame(checkIndex)) {
                            badFrames++;
                        }
                        checkIndex++;
                    }
                }
            }
        }

        return badFrames;
    }

    /**
     * Checks the frame for correctness, using GL to check RGB values.
     *
     * @return true if the frame looks good
     */
    private boolean checkSurfaceFrame(int frameIndex) {
        ByteBuffer pixelBuf = ByteBuffer.allocateDirect(4); // TODO - reuse this
        boolean frameFailed = false;
        // Choose the appropriate initial/regular tolerance.
        int maxDelta = frameIndex < INITIAL_TOLERANCE_FRAME_LIMIT ? INITIAL_TOLERANCE : TOLERANCE;
        for (int i = 0; i < 8; i++) {
            // Note the coordinates are inverted on the Y-axis in GL.
            int x, y;
            if (i < 4) {
                x = i * (mWidth / 4) + (mWidth / 8);
                y = (mHeight * 3) / 4;
            } else {
                x = (7 - i) * (mWidth / 4) + (mWidth / 8);
                y = mHeight / 4;
            }

            int r, g, b;
            if (mUseHighBitDepth) {
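                // GL_UNSIGNED_INT_2_10_10_10_REV packs the pixel into one 32-bit word with
                // R in bits 0-9, G in bits 10-19, B in bits 20-29 and A in bits 30-31.  The
                // byte-wise unpacking below assumes that word is stored little-endian in the
                // buffer, which holds on Android's supported ABIs.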
                GLES30.glReadPixels(x, y, 1, 1, GLES20.GL_RGBA,
                        GLES30.GL_UNSIGNED_INT_2_10_10_10_REV, pixelBuf);
                r = (pixelBuf.get(1) & 0x03) << 8 | (pixelBuf.get(0) & 0xFF);
                g = (pixelBuf.get(2) & 0x0F) << 6 | ((pixelBuf.get(1) >> 2) & 0x3F);
                b = (pixelBuf.get(3) & 0x3F) << 4 | ((pixelBuf.get(2) >> 4) & 0x0F);
                // Convert the values to 8-bit (using rounding division by 4), as later
                // comparisons are against 8-bit RGB values.
                r = (r + 2) >> 2;
                g = (g + 2) >> 2;
                b = (b + 2) >> 2;
            } else {
                GLES20.glReadPixels(x, y, 1, 1, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixelBuf);
                r = pixelBuf.get(0) & 0xFF;
                g = pixelBuf.get(1) & 0xFF;
                b = pixelBuf.get(2) & 0xFF;
            }
            //Log.d(TAG, "GOT(" + frameIndex + "/" + i + "): r=" + r + " g=" + g + " b=" + b);

            int expR, expG, expB;
            if (i == frameIndex % 8) {
                // colored rect (green/blue swapped)
                expR = TEST_R1;
                expG = TEST_B1;
                expB = TEST_G1;
            } else {
                // zero background color (green/blue swapped)
                expR = TEST_R0;
                expG = TEST_B0;
                expB = TEST_G0;
            }
            if (!isColorClose(r, expR, maxDelta) ||
                    !isColorClose(g, expG, maxDelta) ||
                    !isColorClose(b, expB, maxDelta)) {
                Log.w(TAG, "Bad frame " + frameIndex + " (rect=" + i + ": rgb=" + r +
                        "," + g + "," + b + " vs. expected " + expR + "," + expG +
                        "," + expB + ") for allowed error of " + maxDelta);
                frameFailed = true;
            }
        }

        return !frameFailed;
    }

    /**
     * Returns true if the actual color value is close to the expected color value.  Updates
     * mLargestColorDelta.
     */
    boolean isColorClose(int actual, int expected, int maxDelta) {
        int delta = Math.abs(actual - expected);
        if (delta > mLargestColorDelta) {
            mLargestColorDelta = delta;
        }
        return (delta <= maxDelta);
    }

    /**
     * Generates the presentation time for frame N, in microseconds.
     */
    private static long computePresentationTime(int frameIndex) {
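        // With FRAME_RATE = 15 this yields 123, 66789, 133456, ... microseconds; the 123 us
        // offset is presumably there so that a timestamp that gets zeroed out somewhere along
        // the pipeline is immediately noticeable.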
        return 123 + frameIndex * 1000000 / FRAME_RATE;
    }


    /**
     * The elementary stream coming out of the encoder needs to be fed back into
     * the decoder one chunk at a time.  If we just wrote the data to a file, we would lose
     * the information about chunk boundaries.  This class stores the encoded data in memory,
     * retaining the chunk organization.
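     * <p>A typical round trip, as used by this test (sketch):
     * <pre>
     *   VideoChunks chunks = new VideoChunks();
     *   chunks.setMediaFormat(format);                // format the encoder was configured with
     *   chunks.addChunk(encodedData, flags, timeUs);  // once per encoder output buffer
     *   ...
     *   decoder.configure(chunks.getMediaFormat(), surface, null, 0);
     *   chunks.getChunkData(i, inputBuf);             // feed chunk i back to the decoder
     * </pre>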
     */
    private static class VideoChunks {
        private MediaFormat mMediaFormat;
        private ArrayList<byte[]> mChunks = new ArrayList<byte[]>();
        private ArrayList<Integer> mFlags = new ArrayList<Integer>();
        private ArrayList<Long> mTimes = new ArrayList<Long>();

        /**
         * Sets the MediaFormat, for the benefit of a future decoder.
         */
        public void setMediaFormat(MediaFormat format) {
            mMediaFormat = format;
        }

        /**
         * Gets the MediaFormat that was used by the encoder.
         */
        public MediaFormat getMediaFormat() {
            return new MediaFormat(mMediaFormat);
        }

        /**
         * Adds a new chunk.  Advances buf.position to buf.limit.
         */
        public void addChunk(ByteBuffer buf, int flags, long time) {
            byte[] data = new byte[buf.remaining()];
            buf.get(data);
            mChunks.add(data);
            mFlags.add(flags);
            mTimes.add(time);
        }

        /**
         * Returns the number of chunks currently held.
         */
        public int getNumChunks() {
            return mChunks.size();
        }

        /**
         * Copies the data from chunk N into "dest".  Advances dest.position.
         */
        public void getChunkData(int chunk, ByteBuffer dest) {
            byte[] data = mChunks.get(chunk);
            dest.put(data);
        }

        /**
         * Returns the flags associated with chunk N.
         */
        public int getChunkFlags(int chunk) {
            return mFlags.get(chunk);
        }

        /**
         * Returns the timestamp associated with chunk N.
         */
        public long getChunkTime(int chunk) {
            return mTimes.get(chunk);
        }

        /**
         * Writes the chunks to a file as a contiguous stream.  Useful for debugging.
         */
        public void saveToFile(File file) {
            Log.d(TAG, "saving chunk data to file " + file);
            FileOutputStream fos = null;
            BufferedOutputStream bos = null;

            try {
                fos = new FileOutputStream(file);
                bos = new BufferedOutputStream(fos);
                fos = null;     // closing bos will also close fos

                int numChunks = getNumChunks();
                for (int i = 0; i < numChunks; i++) {
                    byte[] chunk = mChunks.get(i);
                    bos.write(chunk);
                }
            } catch (IOException ioe) {
                throw new RuntimeException(ioe);
            } finally {
                try {
                    if (bos != null) {
                        bos.close();
                    }
                    if (fos != null) {
                        fos.close();
                    }
                } catch (IOException ioe) {
                    throw new RuntimeException(ioe);
                }
            }
        }
    }
}