/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.codec.cts;

import android.media.MediaCodec;
import android.media.MediaCodec.CodecException;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.media.cts.MediaCodecWrapper;
import android.media.cts.NdkMediaCodec;
import android.media.cts.Preconditions;
import android.media.cts.SdkMediaCodec;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.Looper;
import android.platform.test.annotations.AppModeFull;
import android.util.Log;

import com.android.compatibility.common.util.MediaUtils;

import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeTrue;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Locale;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
/**
 * Verification test for video encoder and decoder.
 *
 * A raw yv12 stream is encoded at various settings and written to an IVF
 * file. Encoded stream bitrate and key frame interval are checked against target values.
 * The stream is later decoded by the decoder to verify frames are decodable and to
 * calculate PSNR values for various bitrates.
 */
@AppModeFull(reason = "Instant apps cannot access the SD card")
public class VideoCodecTestBase {

    protected static final String TAG = "VideoCodecTestBase";
    protected static final String VP8_MIME = MediaFormat.MIMETYPE_VIDEO_VP8;
    protected static final String VP9_MIME = MediaFormat.MIMETYPE_VIDEO_VP9;
    protected static final String AVC_MIME = MediaFormat.MIMETYPE_VIDEO_AVC;
    protected static final String HEVC_MIME = MediaFormat.MIMETYPE_VIDEO_HEVC;
    protected static final String SDCARD_DIR =
            Environment.getExternalStorageDirectory().getAbsolutePath();
    static final String mInpPrefix = WorkDir.getMediaDirString();

    // Default timeout for MediaCodec buffer dequeue - 200 ms.
    protected static final long DEFAULT_DEQUEUE_TIMEOUT_US = 200000;
    // Default timeout for MediaEncoderAsync - 30 sec.
    protected static final long DEFAULT_ENCODE_TIMEOUT_MS = 30000;
    // Default sync frame interval in frames
    private static final int SYNC_FRAME_INTERVAL = 30;
    // Video bitrate type - should be set to OMX_Video_ControlRateConstant from OMX_Video.h
    protected static final int VIDEO_ControlRateVariable = 1;
    protected static final int VIDEO_ControlRateConstant = 2;
    // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
    // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
    private static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
    // Allowable color formats supported by codec - in order of preference.
    private static final int[] mSupportedColorList = {
            CodecCapabilities.COLOR_FormatYUV420Planar,
            CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
            CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
            COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m
    };
    // Scaled image cache list - contains scale factors, for which up-scaled frames
    // were calculated and were written to yuv file.
    ArrayList<Integer> mScaledImages = new ArrayList<Integer>();
94     /**
95      *  Video codec properties generated by getVideoCodecProperties() function.
96      */
97     private class CodecProperties {
CodecProperties(String codecName, int colorFormat)98         CodecProperties(String codecName, int colorFormat) {
99             this.codecName = codecName;
100             this.colorFormat = colorFormat;
101         }
102         public final String codecName; // OpenMax component name for Video codec.
103         public final int colorFormat;  // Color format supported by codec.
104     }
105 
106     /**
107      * Function to find Video codec.
108      *
109      * Iterates through the list of available codecs and tries to find
110      * Video codec, which can support either YUV420 planar or NV12 color formats.
111      *
112      * @param isEncoder     Flag if encoder is requested.
113      */
getVideoCodecProperties(boolean isEncoder, MediaFormat format)114     private CodecProperties getVideoCodecProperties(boolean isEncoder, MediaFormat format)
115             throws Exception {
116         CodecProperties codecProperties = null;
117         String mime = format.getString(MediaFormat.KEY_MIME);
118 
119         // Loop through the list of codec components in case platform specific codec
120         // is requested.
121         MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
122         for (MediaCodecInfo codecInfo : mcl.getCodecInfos()) {
123             if (isEncoder != codecInfo.isEncoder()) {
124                 continue;
125             }
126             Log.v(TAG, codecInfo.getName());
127 
128             for (String type : codecInfo.getSupportedTypes()) {
129                 if (!type.equalsIgnoreCase(mime)) {
130                     continue;
131                 }
132                 CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(type);
133                 if (!capabilities.isFormatSupported(format)) {
134                     continue;
135                 }
136 
137                 // Get candidate codec properties.
138                 Log.v(TAG, "Found candidate codec " + codecInfo.getName());
139                 for (int colorFormat: capabilities.colorFormats) {
140                     Log.v(TAG, "   Color: 0x" + Integer.toHexString(colorFormat));
141                 }
142 
143                 // Check supported color formats.
144                 for (int supportedColorFormat : mSupportedColorList) {
145                     for (int codecColorFormat : capabilities.colorFormats) {
146                         if (codecColorFormat == supportedColorFormat) {
147                             codecProperties = new CodecProperties(codecInfo.getName(),
148                                     codecColorFormat);
149                             Log.v(TAG, "Found target codec " + codecProperties.codecName +
150                                     ". Color: 0x" + Integer.toHexString(codecColorFormat));
151                             // return first vendor codec (hopefully HW) found
152                             if (codecInfo.isVendor()) {
153                                 return codecProperties;
154                             }
155                         }
156                     }
157                 }
158             }
159         }
160         if (codecProperties == null) {
161             Log.i(TAG, "no suitable " + (isEncoder ? "encoder " : "decoder ") + "found for " +
162                     format);
163         }
164         return codecProperties;
165     }
166 
getEncoderProperties(String codecName, MediaFormat format)167     private CodecProperties getEncoderProperties(String codecName, MediaFormat format)
168             throws Exception {
169         assumeTrue("Media format " + format + " is not supported by " + codecName,
170                 MediaUtils.supports(codecName, format));
171         CodecProperties codecProperties = null;
172         MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
173         for (MediaCodecInfo codecInfo : mcl.getCodecInfos()) {
174             if (!codecInfo.isEncoder() || !codecName.equals(codecInfo.getName())) {
175                 continue;
176             }
177             Log.v(TAG, codecInfo.getName());
178             String mime = format.getString(MediaFormat.KEY_MIME);
179             Log.d(TAG, "Name : " + codecInfo.getName() + " mime: " + mime);
180             CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mime);
181             for (int supportedColorFormat : mSupportedColorList) {
182                 for (int codecColorFormat : capabilities.colorFormats) {
183                     if (codecColorFormat == supportedColorFormat) {
184                         codecProperties = new CodecProperties(codecInfo.getName(),
185                                 codecColorFormat);
186                         Log.v(TAG, "Found target codec " + codecProperties.codecName +
187                                 ". Color: 0x" + Integer.toHexString(codecColorFormat));
188                         return codecProperties;
189                     }
190                 }
191             }
192         }
193         assumeTrue("Codec " + codecName + " doesn't support color YUV 420 color formats",
194                 codecProperties != null);
195         return codecProperties;
196     }
197 
    /**
     * Parameters for encoded video stream.
     */
    protected class EncoderOutputStreamParameters {
        // Name of raw YUV420 input file. When the value of this parameter
        // is set to null input file descriptor from inputResource parameter
        // is used instead.
        public String inputYuvFilename;
        // Name of scaled YUV420 input file.
        public String scaledYuvFilename;
        // File descriptor for the raw input file (YUV420). Used only if
        // inputYuvFilename parameter is null.
        public String inputResource;
        // Name of the IVF file to write the encoded bitstream to.
        public String outputIvfFilename;
        // Mime type of the encoded content.
        public String codecMimeType;
        // Encoder component name.
        public String codecName;
        // Number of frames to encode.
        int frameCount;
        // Frame rate of input file in frames per second.
        int frameRate;
        // Encoded frame width.
        public int frameWidth;
        // Encoded frame height.
        public int frameHeight;
        // Encoding bitrate array in bits/second for every frame. If array length
        // is shorter than the total number of frames, the last value is re-used for
        // all remaining frames. For constant bitrate encoding single element
        // array can be used with first element set to target bitrate value.
        public int[] bitrateSet;
        // Encoding bitrate type - VBR or CBR
        // (VIDEO_ControlRateVariable / VIDEO_ControlRateConstant).
        public int bitrateType;
        // Number of temporal layers
        public int temporalLayers;
        // Desired key frame interval - codec is asked to generate key frames
        // at a period defined by this parameter.
        public int syncFrameInterval;
        // Optional parameter - forced key frame interval. Used to
        // explicitly request the codec to generate key frames using
        // MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME parameter.
        public int syncForceFrameInterval;
        // Buffer timeout in microseconds for MediaCodec buffer dequeue calls.
        long timeoutDequeue;
        // Flag if encoder should run in Looper thread.
        boolean runInLooperThread;
        // Flag if use NdkMediaCodec (native wrapper) instead of the SDK API.
        boolean useNdk;
        // Encoding Statistics Level
        // 0: None, 1: Average block QP and picture type of a frame
        public int encodingStatisticsLevel;
    }
251 
    /**
     * Encoding statistics accumulated over a whole encoded sequence.
     */
    protected class EncodingStatisticsInfo {
        // Average QP of the whole sequence, i.e. the mean of the
        // per-frame average block QP values reported by the encoder.
        public float averageSeqQp = 0;
        // Number of encoded frames for which an average block QP was reported.
        public int encodedFrames = 0;
    }
261 
262     /**
263      * Encoding Statistics for a whole sequence
264      */
265     protected class VideoEncodeOutput{
266         public ArrayList<MediaCodec.BufferInfo> bufferInfo;
267         public EncodingStatisticsInfo encStat;
268 
VideoEncodeOutput( ArrayList<MediaCodec.BufferInfo> bufferInfo, EncodingStatisticsInfo encStat)269         VideoEncodeOutput(
270                 ArrayList<MediaCodec.BufferInfo> bufferInfo,
271                 EncodingStatisticsInfo encStat) {
272             this.bufferInfo = bufferInfo;
273             this.encStat = encStat;
274         }
275     }
276 
getCodecSuffix(String codecMimeType)277     private String getCodecSuffix(String codecMimeType) {
278         switch(codecMimeType) {
279         case VP8_MIME:
280             return "vp8";
281         case VP9_MIME:
282             return "vp9";
283         case AVC_MIME:
284             return "avc";
285         case HEVC_MIME:
286             return "hevc";
287         default:
288             Log.w(TAG, "getCodecSuffix got an unexpected codecMimeType.");
289         }
290         return "video";
291     }
292 
293     /**
294      * Generates an array of default parameters for encoder output stream based on
295      * upscaling value.
296      */
getDefaultEncodingParameterList( String inputYuvName, String outputIvfBaseName, String codecName, String codecMimeType, int encodeSeconds, int[] resolutionScales, int frameWidth, int frameHeight, int frameRate, int bitrateMode, int[] bitrates, boolean syncEncoding)297     protected ArrayList<EncoderOutputStreamParameters> getDefaultEncodingParameterList(
298             String inputYuvName,
299             String outputIvfBaseName,
300             String codecName,
301             String codecMimeType,
302             int encodeSeconds,
303             int[] resolutionScales,
304             int frameWidth,
305             int frameHeight,
306             int frameRate,
307             int bitrateMode,
308             int[] bitrates,
309             boolean syncEncoding) {
310         assertTrue(resolutionScales.length == bitrates.length);
311         int numCodecs = resolutionScales.length;
312         ArrayList<EncoderOutputStreamParameters> outputParameters =
313                 new ArrayList<EncoderOutputStreamParameters>(numCodecs);
314         for (int i = 0; i < numCodecs; i++) {
315             EncoderOutputStreamParameters params = new EncoderOutputStreamParameters();
316             if (inputYuvName != null) {
317                 params.inputYuvFilename = SDCARD_DIR + File.separator + inputYuvName;
318             } else {
319                 params.inputYuvFilename = null;
320             }
321             params.scaledYuvFilename = SDCARD_DIR + File.separator +
322                     outputIvfBaseName + resolutionScales[i]+ ".yuv";
323             params.inputResource = "football_qvga.yuv";
324             params.codecMimeType = codecMimeType;
325             String codecSuffix = getCodecSuffix(codecMimeType);
326             params.outputIvfFilename = SDCARD_DIR + File.separator +
327                     outputIvfBaseName + resolutionScales[i] + "_" + codecSuffix + ".ivf";
328             params.codecName = codecName;
329             params.frameCount = encodeSeconds * frameRate;
330             params.frameRate = frameRate;
331             params.frameWidth = Math.min(frameWidth * resolutionScales[i], 1280);
332             params.frameHeight = Math.min(frameHeight * resolutionScales[i], 720);
333             params.bitrateSet = new int[1];
334             params.bitrateSet[0] = bitrates[i];
335             params.bitrateType = bitrateMode;
336             params.temporalLayers = 0;
337             params.syncFrameInterval = SYNC_FRAME_INTERVAL;
338             params.syncForceFrameInterval = 0;
339             if (syncEncoding) {
340                 params.timeoutDequeue = DEFAULT_DEQUEUE_TIMEOUT_US;
341                 params.runInLooperThread = false;
342             } else {
343                 params.timeoutDequeue = 0;
344                 params.runInLooperThread = true;
345             }
346             outputParameters.add(params);
347             params.encodingStatisticsLevel = MediaFormat.VIDEO_ENCODING_STATISTICS_LEVEL_NONE;
348         }
349         return outputParameters;
350     }
351 
getDefaultEncodingParameters( String inputYuvName, String outputIvfBaseName, String codecName, String codecMimeType, int encodeSeconds, int frameWidth, int frameHeight, int frameRate, int bitrateMode, int bitrate, boolean syncEncoding)352     protected EncoderOutputStreamParameters getDefaultEncodingParameters(
353             String inputYuvName,
354             String outputIvfBaseName,
355             String codecName,
356             String codecMimeType,
357             int encodeSeconds,
358             int frameWidth,
359             int frameHeight,
360             int frameRate,
361             int bitrateMode,
362             int bitrate,
363             boolean syncEncoding) {
364         int[] scaleValues = { 1 };
365         int[] bitrates = { bitrate };
366         return getDefaultEncodingParameterList(
367                 inputYuvName,
368                 outputIvfBaseName,
369                 codecName,
370                 codecMimeType,
371                 encodeSeconds,
372                 scaleValues,
373                 frameWidth,
374                 frameHeight,
375                 frameRate,
376                 bitrateMode,
377                 bitrates,
378                 syncEncoding).get(0);
379     }
380 
381     /**
382      * Converts (interleaves) YUV420 planar to NV12.
383      * Assumes packed, macroblock-aligned frame with no cropping
384      * (visible/coded row length == stride).
385      */
YUV420ToNV(int width, int height, byte[] yuv)386     private static byte[] YUV420ToNV(int width, int height, byte[] yuv) {
387         byte[] nv = new byte[yuv.length];
388         // Y plane we just copy.
389         System.arraycopy(yuv, 0, nv, 0, width * height);
390 
391         // U & V plane we interleave.
392         int u_offset = width * height;
393         int v_offset = u_offset + u_offset / 4;
394         int nv_offset = width * height;
395         for (int i = 0; i < width * height / 4; i++) {
396             nv[nv_offset++] = yuv[u_offset++];
397             nv[nv_offset++] = yuv[v_offset++];
398         }
399         return nv;
400     }
401 
402     /**
403      * Converts (de-interleaves) NV12 to YUV420 planar.
404      * Stride may be greater than width, slice height may be greater than height.
405      */
NV12ToYUV420(int width, int height, int stride, int sliceHeight, byte[] nv12)406     private static byte[] NV12ToYUV420(int width, int height,
407             int stride, int sliceHeight, byte[] nv12) {
408         byte[] yuv = new byte[width * height * 3 / 2];
409 
410         // Y plane we just copy.
411         for (int i = 0; i < height; i++) {
412             System.arraycopy(nv12, i * stride, yuv, i * width, width);
413         }
414 
415         // U & V plane - de-interleave.
416         int u_offset = width * height;
417         int v_offset = u_offset + u_offset / 4;
418         int nv_offset;
419         for (int i = 0; i < height / 2; i++) {
420             nv_offset = stride * (sliceHeight + i);
421             for (int j = 0; j < width / 2; j++) {
422                 yuv[u_offset++] = nv12[nv_offset++];
423                 yuv[v_offset++] = nv12[nv_offset++];
424             }
425         }
426         return yuv;
427     }
428 
429     /**
430      * Packs YUV420 frame by moving it to a smaller size buffer with stride and slice
431      * height equal to the crop window.
432      */
PackYUV420(int left, int top, int width, int height, int stride, int sliceHeight, byte[] src)433     private static byte[] PackYUV420(int left, int top, int width, int height,
434             int stride, int sliceHeight, byte[] src) {
435         byte[] dst = new byte[width * height * 3 / 2];
436         // Y copy.
437         for (int i = 0; i < height; i++) {
438             System.arraycopy(src, (i + top) * stride + left, dst, i * width, width);
439         }
440         // U and V copy.
441         int u_src_offset = stride * sliceHeight;
442         int v_src_offset = u_src_offset + u_src_offset / 4;
443         int u_dst_offset = width * height;
444         int v_dst_offset = u_dst_offset + u_dst_offset / 4;
445         // Downsample and align to floor-2 for crop origin.
446         left /= 2;
447         top /= 2;
448         for (int i = 0; i < height / 2; i++) {
449             System.arraycopy(src, u_src_offset + (i + top) * (stride / 2) + left,
450                     dst, u_dst_offset + i * (width / 2), width / 2);
451             System.arraycopy(src, v_src_offset + (i + top) * (stride / 2) + left,
452                     dst, v_dst_offset + i * (width / 2), width / 2);
453         }
454         return dst;
455     }
456 
457 
    /**
     * Up-scales a single plane by 2x in each dimension using bilinear
     * interpolation.
     *
     * Reads (dstWidth/2) x (dstHeight/2) source pixels starting at
     * srcByteOffset with row stride srcStride, and writes a dstWidth x
     * dstHeight plane at dstByteOffset (destination rows are packed,
     * i.e. destination stride == dstWidth). The last row and column are
     * replicated since there is no right/bottom neighbor to blend with.
     */
    private static void imageUpscale1To2(byte[] src, int srcByteOffset, int srcStride,
            byte[] dst, int dstByteOffset, int dstWidth, int dstHeight) {
        for (int i = 0; i < dstHeight/2 - 1; i++) {
            // Each source row i produces two destination rows (2*i and 2*i+1).
            int dstOffset0 = 2 * i * dstWidth + dstByteOffset;
            int dstOffset1 = dstOffset0 + dstWidth;
            int srcOffset0 = i * srcStride + srcByteOffset;
            int srcOffset1 = srcOffset0 + srcStride;
            // Mask with 0xff to treat bytes as unsigned pixel values.
            int pixel00 = (int)src[srcOffset0++] & 0xff;
            int pixel10 = (int)src[srcOffset1++] & 0xff;
            for (int j = 0; j < dstWidth/2 - 1; j++) {
                int pixel01 = (int)src[srcOffset0++] & 0xff;
                int pixel11 = (int)src[srcOffset1++] & 0xff;
                // Emit a 2x2 output block: original pixel, horizontal average,
                // vertical average and diagonal average (rounded to nearest).
                dst[dstOffset0++] = (byte)pixel00;
                dst[dstOffset0++] = (byte)((pixel00 + pixel01 + 1) / 2);
                dst[dstOffset1++] = (byte)((pixel00 + pixel10 + 1) / 2);
                dst[dstOffset1++] = (byte)((pixel00 + pixel01 + pixel10 + pixel11 + 2) / 4);
                pixel00 = pixel01;
                pixel10 = pixel11;
            }
            // last column - replicated horizontally (no right neighbor)
            dst[dstOffset0++] = (byte)pixel00;
            dst[dstOffset0++] = (byte)pixel00;
            dst[dstOffset1++] = (byte)((pixel00 + pixel10 + 1) / 2);
            dst[dstOffset1++] = (byte)((pixel00 + pixel10 + 1) / 2);
        }

        // last row - replicated vertically (no row below to blend with)
        int dstOffset0 = (dstHeight - 2) * dstWidth + dstByteOffset;
        int dstOffset1 = dstOffset0 + dstWidth;
        int srcOffset0 = (dstHeight/2 - 1) * srcStride + srcByteOffset;
        int pixel00 = (int)src[srcOffset0++] & 0xff;
        for (int j = 0; j < dstWidth/2 - 1; j++) {
            int pixel01 = (int)src[srcOffset0++] & 0xff;
            dst[dstOffset0++] = (byte)pixel00;
            dst[dstOffset0++] = (byte)((pixel00 + pixel01 + 1) / 2);
            dst[dstOffset1++] = (byte)pixel00;
            dst[dstOffset1++] = (byte)((pixel00 + pixel01 + 1) / 2);
            pixel00 = pixel01;
        }
        // the very last pixel - bottom right
        dst[dstOffset0++] = (byte)pixel00;
        dst[dstOffset0++] = (byte)pixel00;
        dst[dstOffset1++] = (byte)pixel00;
        dst[dstOffset1++] = (byte)pixel00;
    }
503 
504     /**
505     * Up-scale image.
506     * Scale factor is defined by source and destination width ratio.
507     * Only 1:2 and 1:4 up-scaling is supported for now.
508     * For 640x480 -> 1280x720 conversion only top 640x360 part of the original
509     * image is scaled.
510     */
imageScale(byte[] src, int srcWidth, int srcHeight, int dstWidth, int dstHeight)511     private static byte[] imageScale(byte[] src, int srcWidth, int srcHeight,
512             int dstWidth, int dstHeight) throws Exception {
513         int srcYSize = srcWidth * srcHeight;
514         int dstYSize = dstWidth * dstHeight;
515         byte[] dst = null;
516         if (dstWidth == 2 * srcWidth && dstHeight <= 2 * srcHeight) {
517             // 1:2 upscale
518             dst = new byte[dstWidth * dstHeight * 3 / 2];
519             imageUpscale1To2(src, 0, srcWidth,
520                     dst, 0, dstWidth, dstHeight);                                 // Y
521             imageUpscale1To2(src, srcYSize, srcWidth / 2,
522                     dst, dstYSize, dstWidth / 2, dstHeight / 2);                  // U
523             imageUpscale1To2(src, srcYSize * 5 / 4, srcWidth / 2,
524                     dst, dstYSize * 5 / 4, dstWidth / 2, dstHeight / 2);          // V
525         } else if (dstWidth == 4 * srcWidth && dstHeight <= 4 * srcHeight) {
526             // 1:4 upscale - in two steps
527             int midWidth = 2 * srcWidth;
528             int midHeight = 2 * srcHeight;
529             byte[] midBuffer = imageScale(src, srcWidth, srcHeight, midWidth, midHeight);
530             dst = imageScale(midBuffer, midWidth, midHeight, dstWidth, dstHeight);
531 
532         } else {
533             throw new RuntimeException("Can not find proper scaling function");
534         }
535 
536         return dst;
537     }
538 
cacheScaledImage( String srcYuvFilename, String srcResource, int srcFrameWidth, int srcFrameHeight, String dstYuvFilename, int dstFrameWidth, int dstFrameHeight)539     private void cacheScaledImage(
540             String srcYuvFilename, String srcResource, int srcFrameWidth, int srcFrameHeight,
541             String dstYuvFilename, int dstFrameWidth, int dstFrameHeight) throws Exception {
542         InputStream srcStream = OpenFileOrResource(srcYuvFilename, srcResource);
543         FileOutputStream dstFile = new FileOutputStream(dstYuvFilename, false);
544         int srcFrameSize = srcFrameWidth * srcFrameHeight * 3 / 2;
545         byte[] srcFrame = new byte[srcFrameSize];
546         byte[] dstFrame = null;
547         Log.d(TAG, "Scale to " + dstFrameWidth + " x " + dstFrameHeight + ". -> " + dstYuvFilename);
548         while (true) {
549             int bytesRead = srcStream.read(srcFrame);
550             if (bytesRead != srcFrame.length) {
551                 break;
552             }
553             if (dstFrameWidth == srcFrameWidth && dstFrameHeight == srcFrameHeight) {
554                 dstFrame = srcFrame;
555             } else {
556                 dstFrame = imageScale(srcFrame, srcFrameWidth, srcFrameHeight,
557                         dstFrameWidth, dstFrameHeight);
558             }
559             dstFile.write(dstFrame);
560         }
561         srcStream.close();
562         dstFile.close();
563     }


    /**
     * A basic check if an encoded stream is decodable.
     *
     * The most basic confirmation we can get about a frame
     * being properly encoded is trying to decode it.
     * (Especially in realtime mode encode output is non-
     * deterministic, therefore a more thorough check like
     * md5 sum comparison wouldn't work.)
     *
     * Indeed, MediaCodec will raise an IllegalStateException
     * whenever video decoder fails to decode a frame, and
     * this test uses that fact to verify the bitstream.
     *
     * @param inputIvfFilename  The name of the IVF file containing the encoded bitstream.
     * @param outputYuvFilename The name of the output YUV file (optional).
     * @param frameRate         Frame rate of input file in frames per second
     * @param codecConfigs      Codec config buffers to be added to the format
     */
decode( String inputIvfFilename, String outputYuvFilename, String codecMimeType, int frameRate, ArrayList<ByteBuffer> codecConfigs)584     protected ArrayList<MediaCodec.BufferInfo> decode(
585             String inputIvfFilename,
586             String outputYuvFilename,
587             String codecMimeType,
588             int frameRate,
589             ArrayList<ByteBuffer> codecConfigs) throws Exception {
590         ArrayList<MediaCodec.BufferInfo> bufferInfos = new ArrayList<MediaCodec.BufferInfo>();
591 
592         // Open input/output.
593         IvfReader ivf = new IvfReader(inputIvfFilename);
594         int frameWidth = ivf.getWidth();
595         int frameHeight = ivf.getHeight();
596         int frameCount = ivf.getFrameCount();
597         int frameStride = frameWidth;
598         int frameSliceHeight = frameHeight;
599         int cropLeft = 0;
600         int cropTop = 0;
601         int cropWidth = frameWidth;
602         int cropHeight = frameHeight;
603         assertTrue(frameWidth > 0);
604         assertTrue(frameHeight > 0);
605         assertTrue(frameCount > 0);
606 
607         // Create decoder.
608         MediaFormat format = MediaFormat.createVideoFormat(
609                 codecMimeType, ivf.getWidth(), ivf.getHeight());
610         CodecProperties properties = getVideoCodecProperties(false /* encoder */, format);
611         if (properties == null) {
612             ivf.close();
613             return null;
614         }
615         int frameColorFormat = properties.colorFormat;
616         format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
617         int csdIndex = 0;
618         for (ByteBuffer config : codecConfigs) {
619             format.setByteBuffer("csd-" + csdIndex, config);
620             ++csdIndex;
621         }
622 
623         FileOutputStream yuv = null;
624         if (outputYuvFilename != null) {
625             yuv = new FileOutputStream(outputYuvFilename, false);
626         }
627 
628         Log.d(TAG, "Creating decoder " + properties.codecName +
629                 ". Color format: 0x" + Integer.toHexString(frameColorFormat) +
630                 ". " + frameWidth + " x " + frameHeight);
631         Log.d(TAG, "  Format: " + format);
632         Log.d(TAG, "  In: " + inputIvfFilename + ". Out:" + outputYuvFilename);
633         MediaCodec decoder = MediaCodec.createByCodecName(properties.codecName);
634         decoder.configure(format,
635                           null,  // surface
636                           null,  // crypto
637                           0);    // flags
638         decoder.start();
639 
640         ByteBuffer[] inputBuffers = decoder.getInputBuffers();
641         ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
642         MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
643 
644         // decode loop
645         int inputFrameIndex = 0;
646         int outputFrameIndex = 0;
647         long inPresentationTimeUs = 0;
648         long outPresentationTimeUs = 0;
649         boolean sawOutputEOS = false;
650         boolean sawInputEOS = false;
651 
652         while (!sawOutputEOS) {
653             if (!sawInputEOS) {
654                 int inputBufIndex = decoder.dequeueInputBuffer(DEFAULT_DEQUEUE_TIMEOUT_US);
655                 if (inputBufIndex >= 0) {
656                     byte[] frame = ivf.readFrame(inputFrameIndex);
657 
658                     if (inputFrameIndex == frameCount - 1) {
659                         Log.d(TAG, "  Input EOS for frame # " + inputFrameIndex);
660                         sawInputEOS = true;
661                     }
662 
663                     inputBuffers[inputBufIndex].clear();
664                     inputBuffers[inputBufIndex].put(frame);
665                     inputBuffers[inputBufIndex].rewind();
666                     inPresentationTimeUs = (inputFrameIndex * 1000000) / frameRate;
667 
668                     decoder.queueInputBuffer(
669                             inputBufIndex,
670                             0,  // offset
671                             frame.length,
672                             inPresentationTimeUs,
673                             sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
674 
675                     inputFrameIndex++;
676                 }
677             }
678 
679             int result = decoder.dequeueOutputBuffer(bufferInfo, DEFAULT_DEQUEUE_TIMEOUT_US);
680             while (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED ||
681                     result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
682                 if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
683                     outputBuffers = decoder.getOutputBuffers();
684                 } else  if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
685                     // Process format change
686                     format = decoder.getOutputFormat();
687                     frameWidth = format.getInteger(MediaFormat.KEY_WIDTH);
688                     frameHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
689                     frameColorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
690                     Log.d(TAG, "Decoder output format change. Color: 0x" +
691                             Integer.toHexString(frameColorFormat));
692                     Log.d(TAG, "Format: " + format.toString());
693 
694                     // Parse frame and slice height from undocumented values
695                     if (format.containsKey("stride")) {
696                         frameStride = format.getInteger("stride");
697                     } else {
698                         frameStride = frameWidth;
699                     }
700                     if (format.containsKey("slice-height")) {
701                         frameSliceHeight = format.getInteger("slice-height");
702                     } else {
703                         frameSliceHeight = frameHeight;
704                     }
705                     Log.d(TAG, "Frame stride and slice height: " + frameStride +
706                             " x " + frameSliceHeight);
707                     frameStride = Math.max(frameWidth, frameStride);
708                     frameSliceHeight = Math.max(frameHeight, frameSliceHeight);
709 
710                     // Parse crop window for the area of recording decoded frame data.
711                     if (format.containsKey("crop-left")) {
712                         cropLeft = format.getInteger("crop-left");
713                     }
714                     if (format.containsKey("crop-top")) {
715                         cropTop = format.getInteger("crop-top");
716                     }
717                     if (format.containsKey("crop-right")) {
718                         cropWidth = format.getInteger("crop-right") - cropLeft + 1;
719                     } else {
720                         cropWidth = frameWidth;
721                     }
722                     if (format.containsKey("crop-bottom")) {
723                         cropHeight = format.getInteger("crop-bottom") - cropTop + 1;
724                     } else {
725                         cropHeight = frameHeight;
726                     }
727                     Log.d(TAG, "Frame crop window origin: " + cropLeft + " x " + cropTop
728                             + ", size: " + cropWidth + " x " + cropHeight);
729                     cropWidth = Math.min(frameWidth - cropLeft, cropWidth);
730                     cropHeight = Math.min(frameHeight - cropTop, cropHeight);
731                 }
732                 result = decoder.dequeueOutputBuffer(bufferInfo, DEFAULT_DEQUEUE_TIMEOUT_US);
733             }
734             if (result >= 0) {
735                 int outputBufIndex = result;
736                 outPresentationTimeUs = bufferInfo.presentationTimeUs;
737                 Log.v(TAG, "Writing buffer # " + outputFrameIndex +
738                         ". Size: " + bufferInfo.size +
739                         ". InTime: " + (inPresentationTimeUs + 500)/1000 +
740                         ". OutTime: " + (outPresentationTimeUs + 500)/1000);
741                 if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
742                     sawOutputEOS = true;
743                     Log.d(TAG, "   Output EOS for frame # " + outputFrameIndex);
744                 }
745 
746                 if (bufferInfo.size > 0) {
747                     // Save decoder output to yuv file.
748                     if (yuv != null) {
749                         byte[] frame = new byte[bufferInfo.size];
750                         outputBuffers[outputBufIndex].position(bufferInfo.offset);
751                         outputBuffers[outputBufIndex].get(frame, 0, bufferInfo.size);
752                         // Convert NV12 to YUV420 if necessary.
753                         if (frameColorFormat != CodecCapabilities.COLOR_FormatYUV420Planar) {
754                             frame = NV12ToYUV420(frameWidth, frameHeight,
755                                     frameStride, frameSliceHeight, frame);
756                         }
757                         int writeLength = Math.min(cropWidth * cropHeight * 3 / 2, frame.length);
758                         // Pack frame if necessary.
759                         if (writeLength < frame.length &&
760                                 (frameStride > cropWidth || frameSliceHeight > cropHeight)) {
761                             frame = PackYUV420(cropLeft, cropTop, cropWidth, cropHeight,
762                                     frameStride, frameSliceHeight, frame);
763                         }
764                         yuv.write(frame, 0, writeLength);
765                     }
766                     outputFrameIndex++;
767 
768                     // Update statistics - store presentation time delay in offset
769                     long presentationTimeUsDelta = inPresentationTimeUs - outPresentationTimeUs;
770                     MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo();
771                     bufferInfoCopy.set((int)presentationTimeUsDelta, bufferInfo.size,
772                             outPresentationTimeUs, bufferInfo.flags);
773                     bufferInfos.add(bufferInfoCopy);
774                 }
775                 decoder.releaseOutputBuffer(outputBufIndex, false);
776             }
777         }
778         decoder.stop();
779         decoder.release();
780         ivf.close();
781         if (yuv != null) {
782             yuv.close();
783         }
784 
785         return bufferInfos;
786     }
787 
788 
789     /**
790      * Helper function to return InputStream from either fully specified filename (if set)
791      * or resource name within test assets (if filename is not set).
792      */
OpenFileOrResource(String filename, final String resource)793     private InputStream OpenFileOrResource(String filename, final String resource)
794             throws Exception {
795         if (filename != null) {
796             Preconditions.assertTestFileExists(filename);
797             return new FileInputStream(filename);
798         }
799         Preconditions.assertTestFileExists(mInpPrefix + resource);
800         return new FileInputStream(mInpPrefix + resource);
801     }
802 
    /**
     * Results of frame encoding.
     *
     * Plain data holder passed between the encoder wrapper and the helper that
     * persists encoded frames; fields are written by the producer and read directly.
     */
    protected class MediaEncoderOutput {
        public long inPresentationTimeUs;   // Timestamp of the input frame that was being encoded.
        public long outPresentationTimeUs;  // Timestamp reported with the encoded output buffer.
        public boolean outputGenerated;     // True when the codec actually produced output this call.
        public int flags;                   // MediaCodec.BufferInfo flags (CONFIG / KEY / EOS).
        public byte[] buffer;               // Encoded frame payload (empty after CSD is split off).
    }
813 
814     protected class MediaEncoderAsyncHelper {
815         private final EncoderOutputStreamParameters mStreamParams;
816         private final CodecProperties mProperties;
817         private final ArrayList<MediaCodec.BufferInfo> mBufferInfos;
818         private final IvfWriter mIvf;
819         private final ArrayList<ByteBuffer> mCodecConfigs;
820         private final byte[] mSrcFrame;
821 
822         private InputStream mYuvStream;
823         private int mInputFrameIndex;
824         private final EncodingStatisticsInfo mEncStatInfo;
825 
MediaEncoderAsyncHelper( EncoderOutputStreamParameters streamParams, CodecProperties properties, ArrayList<MediaCodec.BufferInfo> bufferInfos, IvfWriter ivf, ArrayList<ByteBuffer> codecConfigs, EncodingStatisticsInfo encStatInfo)826         MediaEncoderAsyncHelper(
827                 EncoderOutputStreamParameters streamParams,
828                 CodecProperties properties,
829                 ArrayList<MediaCodec.BufferInfo> bufferInfos,
830                 IvfWriter ivf,
831                 ArrayList<ByteBuffer> codecConfigs,
832                 EncodingStatisticsInfo encStatInfo)
833                 throws Exception {
834             mStreamParams = streamParams;
835             mProperties = properties;
836             mBufferInfos = bufferInfos;
837             mIvf = ivf;
838             mCodecConfigs = codecConfigs;
839             mEncStatInfo = encStatInfo;
840 
841             int srcFrameSize = streamParams.frameWidth * streamParams.frameHeight * 3 / 2;
842             mSrcFrame = new byte[srcFrameSize];
843 
844             mYuvStream = OpenFileOrResource(
845                     streamParams.inputYuvFilename, streamParams.inputResource);
846         }
847 
getInputFrame()848         public byte[] getInputFrame() {
849             // Check EOS
850             if (mStreamParams.frameCount == 0
851                     || (mStreamParams.frameCount > 0
852                             && mInputFrameIndex >= mStreamParams.frameCount)) {
853                 Log.d(TAG, "---Sending EOS empty frame for frame # " + mInputFrameIndex);
854                 return null;
855             }
856 
857             try {
858                 int bytesRead = mYuvStream.read(mSrcFrame);
859 
860                 if (bytesRead == -1) {
861                     // rewind to beginning of file
862                     mYuvStream.close();
863                     mYuvStream = OpenFileOrResource(
864                             mStreamParams.inputYuvFilename, mStreamParams.inputResource);
865                     bytesRead = mYuvStream.read(mSrcFrame);
866                 }
867             } catch (Exception e) {
868                 Log.e(TAG, "Failed to read YUV file.");
869                 return null;
870             }
871             mInputFrameIndex++;
872 
873             // Convert YUV420 to NV12 if necessary
874             if (mProperties.colorFormat != CodecCapabilities.COLOR_FormatYUV420Planar) {
875                 return YUV420ToNV(mStreamParams.frameWidth, mStreamParams.frameHeight,
876                         mSrcFrame);
877             } else {
878                 return mSrcFrame;
879             }
880         }
881 
saveOutputFrame(MediaEncoderOutput out)882         public boolean saveOutputFrame(MediaEncoderOutput out) {
883             if (out.outputGenerated) {
884                 if ((out.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
885                     Log.d(TAG, "Storing codec config separately");
886                     ByteBuffer csdBuffer = ByteBuffer.allocate(out.buffer.length).put(out.buffer);
887                     csdBuffer.rewind();
888                     mCodecConfigs.add(csdBuffer);
889                     out.buffer = new byte[0];
890                 }
891                 if (out.buffer.length > 0) {
892                     // Save frame
893                     try {
894                         mIvf.writeFrame(out.buffer, out.outPresentationTimeUs);
895                     } catch (Exception e) {
896                         Log.d(TAG, "Failed to write frame");
897                         return true;
898                     }
899 
900                     // Update statistics - store presentation time delay in offset
901                     long presentationTimeUsDelta = out.inPresentationTimeUs -
902                             out.outPresentationTimeUs;
903                     MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo();
904                     bufferInfoCopy.set((int)presentationTimeUsDelta, out.buffer.length,
905                             out.outPresentationTimeUs, out.flags);
906                     mBufferInfos.add(bufferInfoCopy);
907                 }
908                 // Detect output EOS
909                 if ((out.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
910                     Log.d(TAG, "----Output EOS ");
911                     return true;
912                 }
913             }
914             return false;
915         }
916 
saveAvgQp(int avg_qp)917         public void saveAvgQp(int avg_qp) {
918             mEncStatInfo.averageSeqQp += (float) avg_qp;
919             ++mEncStatInfo.encodedFrames;  // Note: Duplicated info to  mOutputFrameIndex
920         }
921     }
922 
923     /**
924      * Video encoder wrapper class.
 * Allows running the encoder either in the caller's thread or in a looper thread
926      * using buffer dequeue ready notification callbacks.
927      *
928      * Function feedInput() is used to send raw video frame to the encoder input. When encoder
929      * is configured to run in async mode the function will run in a looper thread.
930      * Encoded frame can be retrieved by calling getOutput() function.
931      */
932     protected class MediaEncoderAsync extends Thread {
        private int mId;                         // Encoder instance id, used in log tags.
        private MediaCodecWrapper mCodec;        // SDK or NDK codec wrapper under test.
        private ByteBuffer[] mInputBuffers;      // Cached input buffers (sync mode only).
        private ByteBuffer[] mOutputBuffers;     // Cached output buffers (sync mode only).
        private int mInputFrameIndex;            // Count of frames queued to the encoder.
        private int mOutputFrameIndex;           // Count of non-empty frames received back.
        private int mInputBufIndex;              // Most recently dequeued input buffer index.
        private int mFrameRate;                  // Frames per second, from KEY_FRAME_RATE.
        private long mTimeout;                   // Dequeue timeout passed to the codec.
        private MediaCodec.BufferInfo mBufferInfo;  // Scratch info for sync-mode dequeues.
        private long mInPresentationTimeUs;      // Timestamp of the last queued input frame.
        private long mOutPresentationTimeUs;     // Timestamp of the last dequeued output frame.
        private boolean mAsync;                  // True when running in callback/looper mode.
        // Flag indicating if input frame was consumed by the encoder in feedInput() call.
        private boolean mConsumedInput;
        // Result of frame encoding returned by getOutput() call.
        private MediaEncoderOutput mOutput;
        // Object used to signal that looper thread has started and Handler instance associated
        // with looper thread has been allocated.
        private final Object mThreadEvent = new Object();
        // Object used to signal that MediaCodec buffer dequeue notification callback
        // was received.
        private final Object mCallbackEvent = new Object();
        private Handler mHandler;                // Handler bound to the looper thread (async mode).
        private boolean mCallbackReceived;       // Cleared after each sync call; presumably set by the notification path — setter not visible in this chunk.
        private MediaEncoderAsyncHelper mHelper; // Frame I/O helper used by the async callbacks.
        private final Object mCompletionEvent = new Object();  // Guards mCompleted.
        private boolean mCompleted;              // Completion flag paired with mCompletionEvent; set via signalCompletion() (outside this view).
        private boolean mInitialSyncFrameReceived;  // True once the first KEY frame was seen.
962 
963         private MediaCodec.Callback mCallback = new MediaCodec.Callback() {
964             @Override
965             public void onInputBufferAvailable(MediaCodec codec, int index) {
966                 if (mHelper == null) {
967                     Log.e(TAG, "async helper not available");
968                     return;
969                 }
970 
971                 byte[] encFrame = mHelper.getInputFrame();
972                 boolean inputEOS = (encFrame == null);
973 
974                 int encFrameLength = 0;
975                 int flags = 0;
976                 if (inputEOS) {
977                     flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
978                 } else {
979                     encFrameLength = encFrame.length;
980 
981                     ByteBuffer byteBuffer = mCodec.getInputBuffer(index);
982                     byteBuffer.put(encFrame);
983                     byteBuffer.rewind();
984 
985                     mInPresentationTimeUs = (mInputFrameIndex * 1000000) / mFrameRate;
986 
987                     Log.v(TAG, "Enc" + mId + ". Frame in # " + mInputFrameIndex +
988                             ". InTime: " + (mInPresentationTimeUs + 500)/1000);
989 
990                     mInputFrameIndex++;
991                 }
992 
993                 mCodec.queueInputBuffer(
994                         index,
995                         0,  // offset
996                         encFrameLength,  // size
997                         mInPresentationTimeUs,
998                         flags);
999             }
1000 
1001             @Override
1002             public void onOutputBufferAvailable(MediaCodec codec,
1003                     int index, MediaCodec.BufferInfo info) {
1004                 if (mHelper == null) {
1005                     Log.e(TAG, "async helper not available");
1006                     return;
1007                 }
1008 
1009                 MediaEncoderOutput out = new MediaEncoderOutput();
1010 
1011                 out.buffer = new byte[info.size];
1012                 ByteBuffer outputBuffer = mCodec.getOutputBuffer(index);
1013                 outputBuffer.get(out.buffer, 0, info.size);
1014                 mOutPresentationTimeUs = info.presentationTimeUs;
1015 
1016                 String logStr = "Enc" + mId + ". Frame # " + mOutputFrameIndex;
1017                 if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
1018                     logStr += " CONFIG. ";
1019                 }
1020                 if ((info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) {
1021                     logStr += " KEY. ";
1022                     if (!mInitialSyncFrameReceived) {
1023                         mInitialSyncFrameReceived = true;
1024                     }
1025                 }
1026                 if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
1027                     logStr += " EOS. ";
1028                 }
1029                 logStr += " Size: " + info.size;
1030                 logStr += ". InTime: " + (mInPresentationTimeUs + 500)/1000 +
1031                         ". OutTime: " + (mOutPresentationTimeUs + 500)/1000;
1032                 Log.v(TAG, logStr);
1033 
1034                 if (!mInitialSyncFrameReceived
1035                         && (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
1036                     throw new RuntimeException("Non codec_config_frame before first sync.");
1037                 }
1038 
1039                 if (info.size > 0) {
1040                     mOutputFrameIndex++;
1041                     out.inPresentationTimeUs = mInPresentationTimeUs;
1042                     out.outPresentationTimeUs = mOutPresentationTimeUs;
1043                 }
1044 
1045                 MediaFormat format = codec.getOutputFormat(index);
1046                 if (format.containsKey(MediaFormat.KEY_VIDEO_QP_AVERAGE)) {
1047                     int avgQp = format.getInteger(MediaFormat.KEY_VIDEO_QP_AVERAGE);
1048                     // Copy per-frame avgQp to sequence level buffer
1049                     mHelper.saveAvgQp(avgQp);
1050                 }
1051 
1052                 mCodec.releaseOutputBuffer(index, false);
1053 
1054                 out.flags = info.flags;
1055                 out.outputGenerated = true;
1056 
1057                 if (mHelper.saveOutputFrame(out)) {
1058                     // output EOS
1059                     signalCompletion();
1060                 }
1061             }
1062 
1063             @Override
1064             public void onError(MediaCodec codec, CodecException e) {
1065                 Log.e(TAG, "onError: " + e
1066                         + ", transient " + e.isTransient()
1067                         + ", recoverable " + e.isRecoverable()
1068                         + ", error " + e.getErrorCode());
1069             }
1070 
1071             @Override
1072             public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
1073                 Log.i(TAG, "onOutputFormatChanged: " + format.toString());
1074             }
1075         };
1076 
        // Starts the looper thread and blocks until run() has created the Looper and
        // published mHandler. The guarded wait loop protects against spurious wakeups.
        private synchronized void requestStart() throws Exception {
            mHandler = null;
            start();
            // Wait for Handler allocation
            synchronized (mThreadEvent) {
                while (mHandler == null) {
                    mThreadEvent.wait();
                }
            }
        }
1087 
        /** Supplies the helper the async callbacks use for frame input/output. */
        public void setAsyncHelper(MediaEncoderAsyncHelper helper) {
            mHelper = helper;
        }
1091 
1092         @Override
run()1093         public void run() {
1094             Looper.prepare();
1095             setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
1096                 @Override
1097                 public void uncaughtException(Thread t, Throwable e) {
1098                     Log.e(TAG, "thread " + t + " exception " + e);
1099                     try {
1100                         deleteCodec();
1101                     } catch (Exception ex) {
1102                         Log.e(TAG, "exception from deleteCodec " + e);
1103                     }
1104                 }
1105             });
1106             synchronized (mThreadEvent) {
1107                 mHandler = new Handler();
1108                 mThreadEvent.notify();
1109             }
1110             Looper.loop();
1111         }
1112 
runCallable(final Callable<?> callable)1113         private void runCallable(final Callable<?> callable) throws Exception {
1114             if (mAsync) {
1115                 final Exception[] exception = new Exception[1];
1116                 final CountDownLatch countDownLatch = new CountDownLatch(1);
1117                 mHandler.post( new Runnable() {
1118                     @Override
1119                     public void run() {
1120                         try {
1121                             callable.call();
1122                         } catch (Exception e) {
1123                             exception[0] = e;
1124                         } finally {
1125                             countDownLatch.countDown();
1126                         }
1127                     }
1128                 } );
1129 
1130                 // Wait for task completion
1131                 countDownLatch.await();
1132                 if (exception[0] != null) {
1133                     throw exception[0];
1134                 }
1135             } else {
1136                 callable.call();
1137             }
1138         }
1139 
        // Posts a quitSafely() onto the looper thread so already-queued tasks still
        // run, then joins the thread and drops the now-dead Handler reference.
        private synchronized void requestStop() throws Exception {
            mHandler.post( new Runnable() {
                @Override
                public void run() {
                    // This will run on the Looper thread
                    Log.v(TAG, "MediaEncoder looper quitting");
                    Looper.myLooper().quitSafely();
                }
            } );
            // Wait for completion
            join();
            mHandler = null;
        }
1153 
createCodecInternal(final String name, final MediaFormat format, final long timeout, boolean useNdk)1154         private void createCodecInternal(final String name,
1155                 final MediaFormat format, final long timeout, boolean useNdk) throws Exception {
1156             mBufferInfo = new MediaCodec.BufferInfo();
1157             mFrameRate = format.getInteger(MediaFormat.KEY_FRAME_RATE);
1158             mTimeout = timeout;
1159             mInputFrameIndex = 0;
1160             mOutputFrameIndex = 0;
1161             mInPresentationTimeUs = 0;
1162             mOutPresentationTimeUs = 0;
1163 
1164             if (useNdk) {
1165                 mCodec = new NdkMediaCodec(name);
1166             } else {
1167                 mCodec = new SdkMediaCodec(MediaCodec.createByCodecName(name), mAsync);
1168             }
1169             if (mAsync) {
1170                 mCodec.setCallback(mCallback);
1171             }
1172             mCodec.configure(format, MediaCodec.CONFIGURE_FLAG_ENCODE);
1173             mCodec.start();
1174 
1175             // get the cached input/output only in sync mode
1176             if (!mAsync) {
1177                 mInputBuffers = mCodec.getInputBuffers();
1178                 mOutputBuffers = mCodec.getOutputBuffers();
1179             }
1180         }
1181 
createCodec(int id, final String name, final MediaFormat format, final long timeout, boolean async, final boolean useNdk)1182         public void createCodec(int id, final String name, final MediaFormat format,
1183                 final long timeout, boolean async, final boolean useNdk)  throws Exception {
1184             mId = id;
1185             mAsync = async;
1186             if (mAsync) {
1187                 requestStart(); // start looper thread
1188             }
1189             runCallable( new Callable<Void>() {
1190                 @Override
1191                 public Void call() throws Exception {
1192                     createCodecInternal(name, format, timeout, useNdk);
1193                     return null;
1194                 }
1195             } );
1196         }
1197 
feedInputInternal(final byte[] encFrame, final boolean inputEOS)1198         private void feedInputInternal(final byte[] encFrame, final boolean inputEOS) {
1199             mConsumedInput = false;
1200             // Feed input
1201             mInputBufIndex = mCodec.dequeueInputBuffer(mTimeout);
1202 
1203             if (mInputBufIndex >= 0) {
1204                 ByteBuffer inputBuffer = mCodec.getInputBuffer(mInputBufIndex);
1205                 inputBuffer.clear();
1206                 inputBuffer.put(encFrame);
1207                 inputBuffer.rewind();
1208                 int encFrameLength = encFrame.length;
1209                 int flags = 0;
1210                 if (inputEOS) {
1211                     encFrameLength = 0;
1212                     flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
1213                 }
1214                 if (!inputEOS) {
1215                     Log.v(TAG, "Enc" + mId + ". Frame in # " + mInputFrameIndex +
1216                             ". InTime: " + (mInPresentationTimeUs + 500)/1000);
1217                     mInPresentationTimeUs = (mInputFrameIndex * 1000000) / mFrameRate;
1218                     mInputFrameIndex++;
1219                 }
1220 
1221                 mCodec.queueInputBuffer(
1222                         mInputBufIndex,
1223                         0,  // offset
1224                         encFrameLength,  // size
1225                         mInPresentationTimeUs,
1226                         flags);
1227 
1228                 mConsumedInput = true;
1229             } else {
1230                 Log.v(TAG, "In " + mId + " - TRY_AGAIN_LATER");
1231             }
1232             mCallbackReceived = false;
1233         }
1234 
feedInput(final byte[] encFrame, final boolean inputEOS)1235         public boolean feedInput(final byte[] encFrame, final boolean inputEOS) throws Exception {
1236             runCallable( new Callable<Void>() {
1237                 @Override
1238                 public Void call() throws Exception {
1239                     feedInputInternal(encFrame, inputEOS);
1240                     return null;
1241                 }
1242             } );
1243             return mConsumedInput;
1244         }
1245 
        // Dequeues one encoded frame from the codec (sync mode) into mOutput.
        // mOutput.outputGenerated stays false when nothing became available within
        // mTimeout, so the caller can poll again.
        private void getOutputInternal() {
            mOutput = new MediaEncoderOutput();
            mOutput.inPresentationTimeUs = mInPresentationTimeUs;
            mOutput.outPresentationTimeUs = mOutPresentationTimeUs;
            mOutput.outputGenerated = false;

            // Get output from the encoder
            int result = mCodec.dequeueOutputBuffer(mBufferInfo, mTimeout);
            // Drain buffer-change / format-change notifications before looking for an
            // actual output buffer index.
            while (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED ||
                    result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    mOutputBuffers = mCodec.getOutputBuffers();
                } else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    Log.d(TAG, "Format changed: " + mCodec.getOutputFormatString());
                }
                result = mCodec.dequeueOutputBuffer(mBufferInfo, mTimeout);
            }
            if (result == MediaCodec.INFO_TRY_AGAIN_LATER) {
                Log.v(TAG, "Out " + mId + " - TRY_AGAIN_LATER");
            }

            if (result >= 0) {
                int outputBufIndex = result;
                // Copy the encoded payload out before releasing the buffer.
                mOutput.buffer = new byte[mBufferInfo.size];
                ByteBuffer outputBuffer = mCodec.getOutputBuffer(outputBufIndex);
                outputBuffer.position(mBufferInfo.offset);
                outputBuffer.get(mOutput.buffer, 0, mBufferInfo.size);
                mOutPresentationTimeUs = mBufferInfo.presentationTimeUs;

                String logStr = "Enc" + mId + ". Frame # " + mOutputFrameIndex;
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    logStr += " CONFIG. ";
                }
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) {
                    logStr += " KEY. ";
                    if (!mInitialSyncFrameReceived) {
                        mInitialSyncFrameReceived = true;
                    }
                }
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    logStr += " EOS. ";
                }
                logStr += " Size: " + mBufferInfo.size;
                logStr += ". InTime: " + (mInPresentationTimeUs + 500)/1000 +
                        ". OutTime: " + (mOutPresentationTimeUs + 500)/1000;
                Log.v(TAG, logStr);

                // Only codec-config buffers may precede the first sync frame.
                if (!mInitialSyncFrameReceived
                        && (mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
                    throw new RuntimeException("Non codec_config_frame before first sync.");
                }

                if (mBufferInfo.size > 0) {
                    mOutputFrameIndex++;
                    mOutput.outPresentationTimeUs = mOutPresentationTimeUs;
                }
                mCodec.releaseOutputBuffer(outputBufIndex, false);

                mOutput.flags = mBufferInfo.flags;
                mOutput.outputGenerated = true;
            }
            // NOTE(review): presumably cleared so waitForBufferEvent() blocks until the
            // next dequeue notification — the code that sets it true is outside this
            // chunk; confirm against the notification path.
            mCallbackReceived = false;
        }
1309 
getOutput()1310         public MediaEncoderOutput getOutput() throws Exception {
1311             runCallable( new Callable<Void>() {
1312                 @Override
1313                 public Void call() throws Exception {
1314                     getOutputInternal();
1315                     return null;
1316                 }
1317             } );
1318             return mOutput;
1319         }
1320 
forceSyncFrame()1321         public void forceSyncFrame() throws Exception {
1322             final Bundle syncFrame = new Bundle();
1323             syncFrame.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
1324             runCallable( new Callable<Void>() {
1325                 @Override
1326                 public Void call() throws Exception {
1327                     mCodec.setParameters(syncFrame);
1328                     return null;
1329                 }
1330             } );
1331         }
1332 
updateBitrate(int bitrate)1333         public void updateBitrate(int bitrate) throws Exception {
1334             final Bundle bitrateUpdate = new Bundle();
1335             bitrateUpdate.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, bitrate);
1336             runCallable( new Callable<Void>() {
1337                 @Override
1338                 public Void call() throws Exception {
1339                     mCodec.setParameters(bitrateUpdate);
1340                     return null;
1341                 }
1342             } );
1343         }
1344 
1345 
waitForBufferEvent()1346         public void waitForBufferEvent() throws Exception {
1347             Log.v(TAG, "----Enc" + mId + " waiting for bufferEvent");
1348             if (mAsync) {
1349                 synchronized (mCallbackEvent) {
1350                     if (!mCallbackReceived) {
1351                         mCallbackEvent.wait(1000); // wait 1 sec for a callback
1352                         // throw an exception if callback was not received
1353                         if (!mCallbackReceived) {
1354                             throw new RuntimeException("MediaCodec callback was not received");
1355                         }
1356                     }
1357                 }
1358             } else {
1359                 Thread.sleep(5);
1360             }
1361             Log.v(TAG, "----Waiting for bufferEvent done");
1362         }
1363 
1364 
waitForCompletion(long timeoutMs)1365         public void waitForCompletion(long timeoutMs) throws Exception {
1366             synchronized (mCompletionEvent) {
1367                 long timeoutExpiredMs = System.currentTimeMillis() + timeoutMs;
1368 
1369                 while (!mCompleted) {
1370                     mCompletionEvent.wait(timeoutExpiredMs - System.currentTimeMillis());
1371                     if (System.currentTimeMillis() >= timeoutExpiredMs) {
1372                         throw new RuntimeException("encoding has timed out!");
1373                     }
1374                 }
1375             }
1376         }
1377 
        /**
         * Marks encoding as complete and wakes the thread blocked in
         * {@link #waitForCompletion(long)}.
         */
        public void signalCompletion() {
            synchronized (mCompletionEvent) {
                mCompleted = true;
                mCompletionEvent.notify();
            }
        }
1384 
deleteCodec()1385         public void deleteCodec() throws Exception {
1386             runCallable( new Callable<Void>() {
1387                 @Override
1388                 public Void call() throws Exception {
1389                     mCodec.stop();
1390                     mCodec.release();
1391                     return null;
1392                 }
1393             } );
1394             if (mAsync) {
1395                 requestStop(); // Stop looper thread
1396             }
1397         }
1398     }
1399 
1400     /**
     * @see #encode(EncoderOutputStreamParameters, ArrayList)
1402      */
encode( EncoderOutputStreamParameters streamParams)1403     protected VideoEncodeOutput encode(
1404             EncoderOutputStreamParameters streamParams) throws Exception {
1405         return encode(streamParams, new ArrayList<ByteBuffer>());
1406     }
1407 
1408     /**
1409      * Video encoding loop supporting encoding single streams with an option
1410      * to run in a looper thread and use buffer ready notification callbacks.
1411      *
1412      * Output stream is described by encodingParams parameters.
1413      *
1414      * MediaCodec will raise an IllegalStateException
1415      * whenever video encoder fails to encode a frame.
1416      *
1417      * Color format of input file should be YUV420, and frameWidth,
1418      * frameHeight should be supplied correctly as raw input file doesn't
1419      * include any header data.
1420      *
1421      * @param streamParams  Structure with encoder parameters
1422      * @param codecConfigs  List to be filled with codec config buffers
1423      * @return              Returns VideoEncodeOutput, which consists of
1424      *                      array of encoded frames information for each frame and Encoding
1425      *                      Statistics Information.
1426      */
    protected VideoEncodeOutput encode(
            EncoderOutputStreamParameters streamParams,
            ArrayList<ByteBuffer> codecConfigs) throws Exception {

        ArrayList<MediaCodec.BufferInfo> bufferInfos = new ArrayList<MediaCodec.BufferInfo>();
        EncodingStatisticsInfo encStatInfo = new EncodingStatisticsInfo();
        Log.d(TAG, "Source resolution: "+streamParams.frameWidth + " x " +
                streamParams.frameHeight);
        int bitrate = streamParams.bitrateSet[0];

        // Create minimal media format signifying desired output.
        MediaFormat format = MediaFormat.createVideoFormat(
                streamParams.codecMimeType, streamParams.frameWidth,
                streamParams.frameHeight);
        format.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
        CodecProperties properties = getEncoderProperties(streamParams.codecName, format);

        // Open input/output
        InputStream yuvStream = OpenFileOrResource(
                streamParams.inputYuvFilename, streamParams.inputResource);
        IvfWriter ivf = new IvfWriter(
                streamParams.outputIvfFilename, streamParams.codecMimeType,
                streamParams.frameWidth, streamParams.frameHeight);

        // Fill in the remaining encoder configuration.
        if (streamParams.bitrateType == VIDEO_ControlRateConstant) {
            format.setInteger("bitrate-mode", VIDEO_ControlRateConstant); // set CBR
        }
        if (streamParams.temporalLayers > 0) {
            format.setInteger("ts-layers", streamParams.temporalLayers); // 1 temporal layer
        }
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, streamParams.frameRate);
        // Convert the sync frame interval from frames to whole seconds,
        // rounding to the nearest second.
        int syncFrameInterval = (streamParams.syncFrameInterval + streamParams.frameRate/2) /
                streamParams.frameRate;
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, syncFrameInterval);
        if (streamParams.encodingStatisticsLevel !=
                MediaFormat.VIDEO_ENCODING_STATISTICS_LEVEL_NONE) {
            format.setInteger(MediaFormat.KEY_VIDEO_ENCODING_STATISTICS_LEVEL,
                    streamParams.encodingStatisticsLevel);
        }

        // Create encoder
        Log.d(TAG, "Creating encoder " + properties.codecName +
                ". Color format: 0x" + Integer.toHexString(properties.colorFormat)+ " : " +
                streamParams.frameWidth + " x " + streamParams.frameHeight +
                ". Bitrate: " + bitrate + " Bitrate type: " + streamParams.bitrateType +
                ". Fps:" + streamParams.frameRate + ". TS Layers: " + streamParams.temporalLayers +
                ". Key frame:" + syncFrameInterval * streamParams.frameRate +
                ". Force keyFrame: " + streamParams.syncForceFrameInterval);
        Log.d(TAG, "  Format: " + format);
        Log.d(TAG, "  Output ivf:" + streamParams.outputIvfFilename);
        MediaEncoderAsync codec = new MediaEncoderAsync();
        codec.createCodec(0, properties.codecName, format,
                streamParams.timeoutDequeue, streamParams.runInLooperThread, streamParams.useNdk);

        // encode loop
        boolean sawInputEOS = false;  // no more data
        boolean consumedInputEOS = false; // EOS flag is consumed by encoder
        boolean sawOutputEOS = false;
        boolean inputConsumed = true;
        int inputFrameIndex = 0;
        int lastBitrate = bitrate;
        // YUV420 frame: luma plane plus two quarter-size chroma planes.
        int srcFrameSize = streamParams.frameWidth * streamParams.frameHeight * 3 / 2;
        byte[] srcFrame = new byte[srcFrameSize];

        while (!sawOutputEOS) {

            // Read and feed input frame
            if (!consumedInputEOS) {

                // Read new input buffers - if previous input was consumed and no EOS
                if (inputConsumed && !sawInputEOS) {
                    int bytesRead = yuvStream.read(srcFrame);

                    // Check EOS
                    if (streamParams.frameCount > 0 && inputFrameIndex >= streamParams.frameCount) {
                        sawInputEOS = true;
                        Log.d(TAG, "---Sending EOS empty frame for frame # " + inputFrameIndex);
                    }

                    if (!sawInputEOS && bytesRead == -1) {
                        if (streamParams.frameCount == 0) {
                            // frameCount == 0 means "encode the file once".
                            sawInputEOS = true;
                            Log.d(TAG, "---Sending EOS empty frame for frame # " + inputFrameIndex);
                        } else {
                            // Loop the source file until frameCount frames were fed.
                            yuvStream.close();
                            yuvStream = OpenFileOrResource(
                                    streamParams.inputYuvFilename, streamParams.inputResource);
                            bytesRead = yuvStream.read(srcFrame);
                        }
                    }

                    // Force sync frame if syncForceFrameinterval is set.
                    if (!sawInputEOS && inputFrameIndex > 0 &&
                            streamParams.syncForceFrameInterval > 0 &&
                            (inputFrameIndex % streamParams.syncForceFrameInterval) == 0) {
                        Log.d(TAG, "---Requesting sync frame # " + inputFrameIndex);
                        codec.forceSyncFrame();
                    }

                    // Dynamic bitrate change.
                    if (!sawInputEOS && streamParams.bitrateSet.length > inputFrameIndex) {
                        int newBitrate = streamParams.bitrateSet[inputFrameIndex];
                        if (newBitrate != lastBitrate) {
                            Log.d(TAG, "--- Requesting new bitrate " + newBitrate +
                                    " for frame " + inputFrameIndex);
                            codec.updateBitrate(newBitrate);
                            lastBitrate = newBitrate;
                        }
                    }

                    // Convert YUV420 to NV12 if necessary
                    if (properties.colorFormat != CodecCapabilities.COLOR_FormatYUV420Planar) {
                        srcFrame = YUV420ToNV(streamParams.frameWidth, streamParams.frameHeight,
                                srcFrame);
                    }
                }

                // feedInput() may reject the frame if no input buffer is free;
                // the same frame is then retried on the next loop iteration.
                inputConsumed = codec.feedInput(srcFrame, sawInputEOS);
                if (inputConsumed) {
                    inputFrameIndex++;
                    consumedInputEOS = sawInputEOS;
                }
            }

            // Get output from the encoder
            MediaEncoderOutput out = codec.getOutput();
            if (out.outputGenerated) {
                // Detect output EOS
                if ((out.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    Log.d(TAG, "----Output EOS ");
                    sawOutputEOS = true;
                }
                if ((out.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    // CSD buffers go to the caller's list, not into the IVF file.
                    Log.d(TAG, "Storing codec config separately");
                    ByteBuffer csdBuffer = ByteBuffer.allocate(out.buffer.length).put(out.buffer);
                    csdBuffer.rewind();
                    codecConfigs.add(csdBuffer);
                    out.buffer = new byte[0];
                }

                if (out.buffer.length > 0) {
                    // Save frame
                    ivf.writeFrame(out.buffer, out.outPresentationTimeUs);

                    // Update statistics - store presentation time delay in offset
                    long presentationTimeUsDelta = out.inPresentationTimeUs -
                            out.outPresentationTimeUs;
                    MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo();
                    bufferInfoCopy.set((int)presentationTimeUsDelta, out.buffer.length,
                            out.outPresentationTimeUs, out.flags);
                    bufferInfos.add(bufferInfoCopy);
                }
            }

            // If codec is not ready to accept input/output - wait for buffer ready callback
            if ((!inputConsumed || consumedInputEOS) && !out.outputGenerated) {
                codec.waitForBufferEvent();
            }
        }

        codec.deleteCodec();
        ivf.close();
        yuvStream.close();

        return new VideoEncodeOutput(bufferInfos, encStatInfo);
    }
1595 
1596     /**
1597      * Video encoding run in a looper thread and use buffer ready callbacks.
1598      *
1599      * Output stream is described by encodingParams parameters.
1600      *
1601      * MediaCodec will raise an IllegalStateException
1602      * whenever video encoder fails to encode a frame.
1603      *
1604      * Color format of input file should be YUV420, and frameWidth,
1605      * frameHeight should be supplied correctly as raw input file doesn't
1606      * include any header data.
1607      *
1608      * @param streamParams  Structure with encoder parameters
1609      * @param codecConfigs  List to be filled with codec config buffers
1610      * @return              Returns VideoEncodeOutput, which consists of
1611      *                      array of encoded frames information for each frame and Encoding
1612      *                      Statistics Information.
1613      */
encodeAsync( EncoderOutputStreamParameters streamParams, ArrayList<ByteBuffer> codecConfigs)1614     protected VideoEncodeOutput encodeAsync(
1615             EncoderOutputStreamParameters streamParams,
1616             ArrayList<ByteBuffer> codecConfigs) throws Exception {
1617         if (!streamParams.runInLooperThread) {
1618             throw new RuntimeException("encodeAsync should run with a looper thread!");
1619         }
1620 
1621         ArrayList<MediaCodec.BufferInfo> bufferInfos = new ArrayList<MediaCodec.BufferInfo>();
1622         EncodingStatisticsInfo encStatInfo = new EncodingStatisticsInfo();
1623         Log.d(TAG, "Source resolution: "+streamParams.frameWidth + " x " +
1624                 streamParams.frameHeight);
1625         int bitrate = streamParams.bitrateSet[0];
1626 
1627         // Create minimal media format signifying desired output.
1628         MediaFormat format = MediaFormat.createVideoFormat(
1629                 streamParams.codecMimeType, streamParams.frameWidth,
1630                 streamParams.frameHeight);
1631         format.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
1632         CodecProperties properties = getEncoderProperties(streamParams.codecName, format);
1633 
1634         // Open input/output
1635         IvfWriter ivf = new IvfWriter(
1636                 streamParams.outputIvfFilename, streamParams.codecMimeType,
1637                 streamParams.frameWidth, streamParams.frameHeight);
1638 
1639         // Create a media format signifying desired output.
1640         if (streamParams.bitrateType == VIDEO_ControlRateConstant) {
1641             format.setInteger("bitrate-mode", VIDEO_ControlRateConstant); // set CBR
1642         }
1643         if (streamParams.temporalLayers > 0) {
1644             format.setInteger("ts-layers", streamParams.temporalLayers); // 1 temporal layer
1645         }
1646         format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
1647         format.setInteger(MediaFormat.KEY_FRAME_RATE, streamParams.frameRate);
1648         int syncFrameInterval = (streamParams.syncFrameInterval + streamParams.frameRate/2) /
1649                 streamParams.frameRate;
1650         format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, syncFrameInterval);
1651         if (streamParams.encodingStatisticsLevel !=
1652                 MediaFormat.VIDEO_ENCODING_STATISTICS_LEVEL_NONE) {
1653             format.setInteger(MediaFormat.KEY_VIDEO_ENCODING_STATISTICS_LEVEL,
1654                     MediaFormat.VIDEO_ENCODING_STATISTICS_LEVEL_1);
1655         }
1656         // Create encoder
1657         Log.d(TAG, "Creating encoder " + properties.codecName +
1658                 ". Color format: 0x" + Integer.toHexString(properties.colorFormat)+ " : " +
1659                 streamParams.frameWidth + " x " + streamParams.frameHeight +
1660                 ". Bitrate: " + bitrate + " Bitrate type: " + streamParams.bitrateType +
1661                 ". Fps:" + streamParams.frameRate + ". TS Layers: " + streamParams.temporalLayers +
1662                 ". Key frame:" + syncFrameInterval * streamParams.frameRate +
1663                 ". Force keyFrame: " + streamParams.syncForceFrameInterval);
1664         Log.d(TAG, "  Format: " + format);
1665         Log.d(TAG, "  Output ivf:" + streamParams.outputIvfFilename);
1666 
1667         MediaEncoderAsync codec = new MediaEncoderAsync();
1668         MediaEncoderAsyncHelper helper = new MediaEncoderAsyncHelper(
1669                 streamParams, properties, bufferInfos, ivf, codecConfigs, encStatInfo);
1670 
1671         codec.setAsyncHelper(helper);
1672         codec.createCodec(0, properties.codecName, format,
1673                 streamParams.timeoutDequeue, streamParams.runInLooperThread, streamParams.useNdk);
1674         codec.waitForCompletion(DEFAULT_ENCODE_TIMEOUT_MS);
1675 
1676         codec.deleteCodec();
1677         ivf.close();
1678 
1679         return new VideoEncodeOutput(bufferInfos, encStatInfo);
1680     }
1681 
1682     /**
1683      * Video encoding loop supporting encoding multiple streams at a time.
1684      * Each output stream is described by encodingParams parameters allowing
1685      * simultaneous encoding of various resolutions, bitrates with an option to
     * control key frame and dynamic bitrate for each output stream independently.
1687      *
1688      * MediaCodec will raise an IllegalStateException
1689      * whenever video encoder fails to encode a frame.
1690      *
1691      * Color format of input file should be YUV420, and frameWidth,
1692      * frameHeight should be supplied correctly as raw input file doesn't
1693      * include any header data.
1694      *
1695      * @param srcFrameWidth     Frame width of input yuv file
1696      * @param srcFrameHeight    Frame height of input yuv file
1697      * @param encodingParams    Encoder parameters
1698      * @param codecConfigs      List to be filled with codec config buffers
1699      * @return                  Returns 2D array of encoded frames information for each stream and
1700      *                          for each frame.
1701      */
    protected ArrayList<ArrayList<MediaCodec.BufferInfo>> encodeSimulcast(
            int srcFrameWidth,
            int srcFrameHeight,
            ArrayList<EncoderOutputStreamParameters> encodingParams,
            ArrayList<ArrayList<ByteBuffer>> codecConfigs) throws Exception {
        int numEncoders = encodingParams.size();

        // Create arrays of input/output, formats, bitrates etc
        ArrayList<ArrayList<MediaCodec.BufferInfo>> bufferInfos =
                new ArrayList<ArrayList<MediaCodec.BufferInfo>>(numEncoders);
        InputStream yuvStream[] = new InputStream[numEncoders];
        IvfWriter[] ivf = new IvfWriter[numEncoders];
        // NOTE(review): yuvScaled is never assigned anywhere below, so the
        // close() calls at the end always see null — looks like dead state
        // from an earlier revision; confirm before removing.
        FileOutputStream[] yuvScaled = new FileOutputStream[numEncoders];
        MediaFormat[] format = new MediaFormat[numEncoders];
        MediaEncoderAsync[] codec = new MediaEncoderAsync[numEncoders];
        int[] inputFrameIndex = new int[numEncoders];
        boolean[] sawInputEOS = new boolean[numEncoders];
        boolean[] consumedInputEOS = new boolean[numEncoders];
        boolean[] inputConsumed = new boolean[numEncoders];
        boolean[] bufferConsumed = new boolean[numEncoders];
        boolean[] sawOutputEOS = new boolean[numEncoders];
        byte[][] srcFrame = new byte[numEncoders][];
        boolean sawOutputEOSTotal = false;
        boolean bufferConsumedTotal = false;
        CodecProperties[] codecProperties = new CodecProperties[numEncoders];

        // Re-count while configuring; only successfully set-up encoders run.
        numEncoders = 0;
        for (EncoderOutputStreamParameters params : encodingParams) {
            int i = numEncoders;
            Log.d(TAG, "Source resolution: " + params.frameWidth + " x " +
                    params.frameHeight);
            int bitrate = params.bitrateSet[0];

            // Create minimal media format signifying desired output.
            format[i] = MediaFormat.createVideoFormat(
                    params.codecMimeType, params.frameWidth,
                    params.frameHeight);
            format[i].setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
            CodecProperties properties = getEncoderProperties(params.codecName, format[i]);

            // Check if scaled image was created
            int scale = params.frameWidth / srcFrameWidth;
            if (!mScaledImages.contains(scale)) {
                // resize image
                cacheScaledImage(params.inputYuvFilename, params.inputResource,
                        srcFrameWidth, srcFrameHeight,
                        params.scaledYuvFilename, params.frameWidth, params.frameHeight);
                mScaledImages.add(scale);
            }

            // Create buffer info storage
            bufferInfos.add(new ArrayList<MediaCodec.BufferInfo>());

            // Create YUV reader
            yuvStream[i] = new FileInputStream(params.scaledYuvFilename);

            // Create IVF writer
            ivf[i] = new IvfWriter(
                    params.outputIvfFilename, params.codecMimeType,
                    params.frameWidth, params.frameHeight);

            // Frame buffer sized for one YUV420 frame (1.5 bytes per pixel).
            int frameSize = params.frameWidth * params.frameHeight * 3 / 2;
            srcFrame[i] = new byte[frameSize];

            // Fill in the remaining encoder configuration.
            if (params.bitrateType == VIDEO_ControlRateConstant) {
                format[i].setInteger("bitrate-mode", VIDEO_ControlRateConstant); // set CBR
            }
            if (params.temporalLayers > 0) {
                format[i].setInteger("ts-layers", params.temporalLayers); // 1 temporal layer
            }
            format[i].setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
            format[i].setInteger(MediaFormat.KEY_FRAME_RATE, params.frameRate);
            int syncFrameInterval = (params.syncFrameInterval + params.frameRate/2) /
                    params.frameRate; // in sec
            format[i].setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, syncFrameInterval);
            // Log the configuration of this encoder instance.
            Log.d(TAG, "Creating encoder #" + i +" : " + properties.codecName +
                    ". Color format: 0x" + Integer.toHexString(properties.colorFormat)+ " : " +
                    params.frameWidth + " x " + params.frameHeight +
                    ". Bitrate: " + bitrate + " Bitrate type: " + params.bitrateType +
                    ". Fps:" + params.frameRate + ". TS Layers: " + params.temporalLayers +
                    ". Key frame:" + syncFrameInterval * params.frameRate +
                    ". Force keyFrame: " + params.syncForceFrameInterval);
            Log.d(TAG, "  Format: " + format[i]);
            Log.d(TAG, "  Output ivf:" + params.outputIvfFilename);

            // Create encoder
            codec[i] = new MediaEncoderAsync();
            codec[i].createCodec(i, properties.codecName, format[i],
                    params.timeoutDequeue, params.runInLooperThread, params.useNdk);
            codecProperties[i] = new CodecProperties(properties.codecName, properties.colorFormat);

            inputConsumed[i] = true;
            ++numEncoders;
        }
        if (numEncoders == 0) {
            Log.i(TAG, "no suitable encoders found for any of the streams");
            return null;
        }

        // Main loop: round-robin feed input to and drain output from every
        // encoder until all of them have reported output EOS.
        while (!sawOutputEOSTotal) {
            // Feed input buffer to all encoders
            for (int i = 0; i < numEncoders; i++) {
                bufferConsumed[i] = false;
                if (consumedInputEOS[i]) {
                    continue;
                }

                EncoderOutputStreamParameters params = encodingParams.get(i);
                // Read new input buffers - if previous input was consumed and no EOS
                if (inputConsumed[i] && !sawInputEOS[i]) {
                    int bytesRead = yuvStream[i].read(srcFrame[i]);

                    // Check EOS
                    if (params.frameCount > 0 && inputFrameIndex[i] >= params.frameCount) {
                        sawInputEOS[i] = true;
                        Log.d(TAG, "---Enc" + i +
                                ". Sending EOS empty frame for frame # " + inputFrameIndex[i]);
                    }

                    if (!sawInputEOS[i] && bytesRead == -1) {
                        if (params.frameCount == 0) {
                            // frameCount == 0 means "encode the file once".
                            sawInputEOS[i] = true;
                            Log.d(TAG, "---Enc" + i +
                                    ". Sending EOS empty frame for frame # " + inputFrameIndex[i]);
                        } else {
                            // Loop the source file until frameCount frames were fed.
                            yuvStream[i].close();
                            yuvStream[i] = new FileInputStream(params.scaledYuvFilename);
                            bytesRead = yuvStream[i].read(srcFrame[i]);
                        }
                    }

                    // Convert YUV420 to NV12 if necessary
                    if (codecProperties[i].colorFormat !=
                            CodecCapabilities.COLOR_FormatYUV420Planar) {
                        srcFrame[i] =
                            YUV420ToNV(params.frameWidth, params.frameHeight, srcFrame[i]);
                    }
                }

                // feedInput() may reject the frame if no input buffer is free;
                // the same frame is then retried on the next loop iteration.
                inputConsumed[i] = codec[i].feedInput(srcFrame[i], sawInputEOS[i]);
                if (inputConsumed[i]) {
                    inputFrameIndex[i]++;
                    consumedInputEOS[i] = sawInputEOS[i];
                    bufferConsumed[i] = true;
                }

            }

            // Get output from all encoders
            for (int i = 0; i < numEncoders; i++) {
                if (sawOutputEOS[i]) {
                    continue;
                }

                MediaEncoderOutput out = codec[i].getOutput();
                if (out.outputGenerated) {
                    bufferConsumed[i] = true;
                    // Detect output EOS
                    if ((out.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        Log.d(TAG, "----Enc" + i + ". Output EOS ");
                        sawOutputEOS[i] = true;
                    }
                    if ((out.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                        // CSD buffers go to the caller's list, not the IVF file.
                        Log.d(TAG, "----Enc" + i + ". Storing codec config separately");
                        ByteBuffer csdBuffer = ByteBuffer.allocate(out.buffer.length).put(out.buffer);
                        csdBuffer.rewind();
                        codecConfigs.get(i).add(csdBuffer);
                        out.buffer = new byte[0];
                    }

                    if (out.buffer.length > 0) {
                        // Save frame
                        ivf[i].writeFrame(out.buffer, out.outPresentationTimeUs);

                        // Update statistics - store presentation time delay in offset
                        long presentationTimeUsDelta = out.inPresentationTimeUs -
                                out.outPresentationTimeUs;
                        MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo();
                        bufferInfoCopy.set((int)presentationTimeUsDelta, out.buffer.length,
                                out.outPresentationTimeUs, out.flags);
                        bufferInfos.get(i).add(bufferInfoCopy);
                    }
                }
            }

            // If codec is not ready to accept input/output - wait for buffer ready callback
            bufferConsumedTotal = false;
            for (boolean bufferConsumedCurrent : bufferConsumed) {
                bufferConsumedTotal |= bufferConsumedCurrent;
            }
            if (!bufferConsumedTotal) {
                // Pick the encoder to wait for
                for (int i = 0; i < numEncoders; i++) {
                    if (!bufferConsumed[i] && !sawOutputEOS[i]) {
                        codec[i].waitForBufferEvent();
                        break;
                    }
                }
            }

            // Check if EOS happened for all encoders
            sawOutputEOSTotal = true;
            for (boolean sawOutputEOSStream : sawOutputEOS) {
                sawOutputEOSTotal &= sawOutputEOSStream;
            }
        }

        // Tear down every encoder and close all streams.
        for (int i = 0; i < numEncoders; i++) {
            codec[i].deleteCodec();
            ivf[i].close();
            yuvStream[i].close();
            if (yuvScaled[i] != null) {
                yuvScaled[i].close();
            }
        }

        return bufferInfos;
    }
1923 
1924     /**
1925      * Some encoding statistics.
1926      */
1927     protected class VideoEncodingStatistics {
VideoEncodingStatistics()1928         VideoEncodingStatistics() {
1929             mBitrates = new ArrayList<Integer>();
1930             mFrames = new ArrayList<Integer>();
1931             mKeyFrames = new ArrayList<Integer>();
1932             mMinimumKeyFrameInterval = Integer.MAX_VALUE;
1933         }
1934 
1935         public ArrayList<Integer> mBitrates;// Bitrate values for each second of the encoded stream.
1936         public ArrayList<Integer> mFrames; // Number of frames in each second of the encoded stream.
1937         public int mAverageBitrate;         // Average stream bitrate.
1938         public ArrayList<Integer> mKeyFrames;// Stores the position of key frames in a stream.
1939         public int mAverageKeyFrameInterval; // Average key frame interval.
1940         public int mMaximumKeyFrameInterval; // Maximum key frame interval.
1941         public int mMinimumKeyFrameInterval; // Minimum key frame interval.
1942     }
1943 
1944     /**
1945      * Calculates average bitrate and key frame interval for the encoded streams.
1946      * Output mBitrates field will contain bitrate values for every second
1947      * of the encoded stream.
1948      * Average stream bitrate will be stored in mAverageBitrate field.
1949      * mKeyFrames array will contain the position of key frames in the encoded stream and
1950      * mKeyFrameInterval - average key frame interval.
1951      */
computeEncodingStatistics(int encoderId, ArrayList<MediaCodec.BufferInfo> bufferInfos )1952     protected VideoEncodingStatistics computeEncodingStatistics(int encoderId,
1953             ArrayList<MediaCodec.BufferInfo> bufferInfos ) {
1954         VideoEncodingStatistics statistics = new VideoEncodingStatistics();
1955 
1956         int totalSize = 0;
1957         int frames = 0;
1958         int framesPerSecond = 0;
1959         int totalFrameSizePerSecond = 0;
1960         int maxFrameSize = 0;
1961         int currentSecond;
1962         int nextSecond = 0;
1963         String keyFrameList = "  IFrame List: ";
1964         String bitrateList = "  Bitrate list: ";
1965         String framesList = "  FPS list: ";
1966 
1967 
1968         for (int j = 0; j < bufferInfos.size(); j++) {
1969             MediaCodec.BufferInfo info = bufferInfos.get(j);
1970             currentSecond = (int)(info.presentationTimeUs / 1000000);
1971             boolean lastFrame = (j == bufferInfos.size() - 1);
1972             if (!lastFrame) {
1973                 nextSecond = (int)(bufferInfos.get(j+1).presentationTimeUs / 1000000);
1974             }
1975 
1976             totalSize += info.size;
1977             totalFrameSizePerSecond += info.size;
1978             maxFrameSize = Math.max(maxFrameSize, info.size);
1979             framesPerSecond++;
1980             frames++;
1981 
1982             // Update the bitrate statistics if the next frame will
1983             // be for the next second
1984             if (lastFrame || nextSecond > currentSecond) {
1985                 int currentBitrate = totalFrameSizePerSecond * 8;
1986                 bitrateList += (currentBitrate + " ");
1987                 framesList += (framesPerSecond + " ");
1988                 statistics.mBitrates.add(currentBitrate);
1989                 statistics.mFrames.add(framesPerSecond);
1990                 totalFrameSizePerSecond = 0;
1991                 framesPerSecond = 0;
1992             }
1993 
1994             // Update key frame statistics.
1995             if ((info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) {
1996                 statistics.mKeyFrames.add(j);
1997                 keyFrameList += (j + "  ");
1998             }
1999         }
2000         int duration = (int)(bufferInfos.get(bufferInfos.size() - 1).presentationTimeUs / 1000);
2001         duration = (duration + 500) / 1000;
2002         statistics.mAverageBitrate = (int)(((long)totalSize * 8) / duration);
2003         Log.d(TAG, "Statistics for encoder # " + encoderId);
2004         // Calculate average key frame interval in frames.
2005         int keyFrames = statistics.mKeyFrames.size();
2006         if (keyFrames > 1) {
2007             statistics.mAverageKeyFrameInterval =
2008                     statistics.mKeyFrames.get(keyFrames - 1) - statistics.mKeyFrames.get(0);
2009             statistics.mAverageKeyFrameInterval =
2010                     Math.round((float)statistics.mAverageKeyFrameInterval / (keyFrames - 1));
2011             for (int j = 1; j < keyFrames; j++) {
2012                 int keyFrameInterval =
2013                         statistics.mKeyFrames.get(j) - statistics.mKeyFrames.get(j - 1);
2014                 statistics.mMaximumKeyFrameInterval =
2015                         Math.max(statistics.mMaximumKeyFrameInterval, keyFrameInterval);
2016                 statistics.mMinimumKeyFrameInterval =
2017                         Math.min(statistics.mMinimumKeyFrameInterval, keyFrameInterval);
2018             }
2019             Log.d(TAG, "  Key frame intervals: Max: " + statistics.mMaximumKeyFrameInterval +
2020                     ". Min: " + statistics.mMinimumKeyFrameInterval +
2021                     ". Avg: " + statistics.mAverageKeyFrameInterval);
2022         }
2023         Log.d(TAG, "  Frames: " + frames + ". Duration: " + duration +
2024                 ". Total size: " + totalSize + ". Key frames: " + keyFrames);
2025         Log.d(TAG, keyFrameList);
2026         Log.d(TAG, bitrateList);
2027         Log.d(TAG, framesList);
2028         Log.d(TAG, "  Bitrate average: " + statistics.mAverageBitrate);
2029         Log.d(TAG, "  Maximum frame size: " + maxFrameSize);
2030 
2031         return statistics;
2032     }
2033 
computeEncodingStatistics( ArrayList<MediaCodec.BufferInfo> bufferInfos )2034     protected VideoEncodingStatistics computeEncodingStatistics(
2035             ArrayList<MediaCodec.BufferInfo> bufferInfos ) {
2036         return computeEncodingStatistics(0, bufferInfos);
2037     }
2038 
computeSimulcastEncodingStatistics( ArrayList<ArrayList<MediaCodec.BufferInfo>> bufferInfos)2039     protected ArrayList<VideoEncodingStatistics> computeSimulcastEncodingStatistics(
2040             ArrayList<ArrayList<MediaCodec.BufferInfo>> bufferInfos) {
2041         int numCodecs = bufferInfos.size();
2042         ArrayList<VideoEncodingStatistics> statistics = new ArrayList<VideoEncodingStatistics>();
2043 
2044         for (int i = 0; i < numCodecs; i++) {
2045             VideoEncodingStatistics currentStatistics =
2046                     computeEncodingStatistics(i, bufferInfos.get(i));
2047             statistics.add(currentStatistics);
2048         }
2049         return statistics;
2050     }
2051 
2052     /**
2053      * Calculates maximum latency for encoder/decoder based on buffer info array
2054      * generated either by encoder or decoder.
2055      */
maxPresentationTimeDifference(ArrayList<MediaCodec.BufferInfo> bufferInfos)2056     protected int maxPresentationTimeDifference(ArrayList<MediaCodec.BufferInfo> bufferInfos) {
2057         int maxValue = 0;
2058         for (MediaCodec.BufferInfo bufferInfo : bufferInfos) {
2059             maxValue = Math.max(maxValue,  bufferInfo.offset);
2060         }
2061         maxValue = (maxValue + 500) / 1000; // mcs -> ms
2062         return maxValue;
2063     }
2064 
2065     /**
2066      * Decoding PSNR statistics.
2067      */
2068     protected class VideoDecodingStatistics {
VideoDecodingStatistics()2069         VideoDecodingStatistics() {
2070             mMinimumPSNR = Integer.MAX_VALUE;
2071         }
2072         public double mAveragePSNR;
2073         public double mMinimumPSNR;
2074     }
2075 
2076     /**
2077      * Calculates PSNR value between two video frames.
2078      */
computePSNR(byte[] data0, byte[] data1)2079     private double computePSNR(byte[] data0, byte[] data1) {
2080         long squareError = 0;
2081         assertTrue(data0.length == data1.length);
2082         int length = data0.length;
2083         for (int i = 0 ; i < length; i++) {
2084             int diff = ((int)data0[i] & 0xff) - ((int)data1[i] & 0xff);
2085             squareError += diff * diff;
2086         }
2087         double meanSquareError = (double)squareError / length;
2088         double psnr = 10 * Math.log10((double)255 * 255 / meanSquareError);
2089         return psnr;
2090     }
2091 
2092     /**
2093      * Calculates average and minimum PSNR values between
2094      * set of reference and decoded video frames.
2095      * Runs PSNR calculation for the full duration of the decoded data.
2096      */
computeDecodingStatistics( String referenceYuvFilename, String referenceYuvRaw, String decodedYuvFilename, int width, int height)2097     protected VideoDecodingStatistics computeDecodingStatistics(
2098             String referenceYuvFilename,
2099             String referenceYuvRaw,
2100             String decodedYuvFilename,
2101             int width,
2102             int height) throws Exception {
2103         VideoDecodingStatistics statistics = new VideoDecodingStatistics();
2104         InputStream referenceStream =
2105                 OpenFileOrResource(referenceYuvFilename, referenceYuvRaw);
2106         InputStream decodedStream = new FileInputStream(decodedYuvFilename);
2107 
2108         int ySize = width * height;
2109         int uvSize = width * height / 4;
2110         byte[] yRef = new byte[ySize];
2111         byte[] yDec = new byte[ySize];
2112         byte[] uvRef = new byte[uvSize];
2113         byte[] uvDec = new byte[uvSize];
2114 
2115         int frames = 0;
2116         double averageYPSNR = 0;
2117         double averageUPSNR = 0;
2118         double averageVPSNR = 0;
2119         double minimumYPSNR = Integer.MAX_VALUE;
2120         double minimumUPSNR = Integer.MAX_VALUE;
2121         double minimumVPSNR = Integer.MAX_VALUE;
2122         int minimumPSNRFrameIndex = 0;
2123 
2124         while (true) {
2125             // Calculate Y PSNR.
2126             int bytesReadRef = referenceStream.read(yRef);
2127             int bytesReadDec = decodedStream.read(yDec);
2128             if (bytesReadDec == -1) {
2129                 break;
2130             }
2131             if (bytesReadRef == -1) {
2132                 // Reference file wrapping up
2133                 referenceStream.close();
2134                 referenceStream =
2135                         OpenFileOrResource(referenceYuvFilename, referenceYuvRaw);
2136                 bytesReadRef = referenceStream.read(yRef);
2137             }
2138             double curYPSNR = computePSNR(yRef, yDec);
2139             averageYPSNR += curYPSNR;
2140             minimumYPSNR = Math.min(minimumYPSNR, curYPSNR);
2141             double curMinimumPSNR = curYPSNR;
2142 
2143             // Calculate U PSNR.
2144             bytesReadRef = referenceStream.read(uvRef);
2145             bytesReadDec = decodedStream.read(uvDec);
2146             double curUPSNR = computePSNR(uvRef, uvDec);
2147             averageUPSNR += curUPSNR;
2148             minimumUPSNR = Math.min(minimumUPSNR, curUPSNR);
2149             curMinimumPSNR = Math.min(curMinimumPSNR, curUPSNR);
2150 
2151             // Calculate V PSNR.
2152             bytesReadRef = referenceStream.read(uvRef);
2153             bytesReadDec = decodedStream.read(uvDec);
2154             double curVPSNR = computePSNR(uvRef, uvDec);
2155             averageVPSNR += curVPSNR;
2156             minimumVPSNR = Math.min(minimumVPSNR, curVPSNR);
2157             curMinimumPSNR = Math.min(curMinimumPSNR, curVPSNR);
2158 
2159             // Frame index for minimum PSNR value - help to detect possible distortions
2160             if (curMinimumPSNR < statistics.mMinimumPSNR) {
2161                 statistics.mMinimumPSNR = curMinimumPSNR;
2162                 minimumPSNRFrameIndex = frames;
2163             }
2164 
2165             String logStr = String.format(Locale.US, "PSNR #%d: Y: %.2f. U: %.2f. V: %.2f",
2166                     frames, curYPSNR, curUPSNR, curVPSNR);
2167             Log.v(TAG, logStr);
2168 
2169             frames++;
2170         }
2171 
2172         averageYPSNR /= frames;
2173         averageUPSNR /= frames;
2174         averageVPSNR /= frames;
2175         statistics.mAveragePSNR = (4 * averageYPSNR + averageUPSNR + averageVPSNR) / 6;
2176 
2177         Log.d(TAG, "PSNR statistics for " + frames + " frames.");
2178         String logStr = String.format(Locale.US,
2179                 "Average PSNR: Y: %.1f. U: %.1f. V: %.1f. Average: %.1f",
2180                 averageYPSNR, averageUPSNR, averageVPSNR, statistics.mAveragePSNR);
2181         Log.d(TAG, logStr);
2182         logStr = String.format(Locale.US,
2183                 "Minimum PSNR: Y: %.1f. U: %.1f. V: %.1f. Overall: %.1f at frame %d",
2184                 minimumYPSNR, minimumUPSNR, minimumVPSNR,
2185                 statistics.mMinimumPSNR, minimumPSNRFrameIndex);
2186         Log.d(TAG, logStr);
2187 
2188         referenceStream.close();
2189         decodedStream.close();
2190         return statistics;
2191     }
2192 }
2193