• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2013 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.media.cts;
18 
19 import android.content.Context;
20 import android.content.res.Resources;
21 import android.media.MediaCodec;
22 import android.media.MediaCodec.CodecException;
23 import android.media.MediaCodecInfo.CodecCapabilities;
24 import android.media.MediaCodecList;
25 import android.media.MediaCodecInfo;
26 import android.media.MediaFormat;
27 import android.os.Bundle;
28 import android.os.Environment;
29 import android.os.Looper;
30 import android.os.Handler;
31 import android.test.AndroidTestCase;
32 import android.util.Log;
33 import com.android.cts.media.R;
34 
35 import java.io.File;
36 import java.io.FileInputStream;
37 import java.io.FileOutputStream;
38 import java.io.InputStream;
39 import java.nio.ByteBuffer;
40 import java.util.Locale;
41 import java.util.ArrayList;
42 import java.util.concurrent.Callable;
43 import java.util.concurrent.CountDownLatch;
44 
/**
 * Verification test for vp8 encoder and decoder.
 *
 * A raw yv12 stream is encoded at various settings and written to an IVF
 * file. Encoded stream bitrate and key frame interval are checked against target values.
 * The stream is later decoded by vp8 decoder to verify frames are decodable and to
 * calculate PSNR values for various bitrates.
 */
public class Vp8CodecTestBase extends AndroidTestCase {

    protected static final String TAG = "VP8CodecTestBase";
    // MIME type used to look up VP8 encoder/decoder components.
    protected static final String VP8_MIME = "video/x-vnd.on2.vp8";
    // OpenMax component names of the Google software VP8 codecs.
    private static final String VPX_SW_DECODER_NAME = "OMX.google.vp8.decoder";
    private static final String VPX_SW_ENCODER_NAME = "OMX.google.vp8.encoder";
    // Name prefix shared by all Google software OMX components.
    private static final String OMX_SW_CODEC_PREFIX = "OMX.google";
    // Root of external storage - input YUV and output IVF files live here.
    protected static final String SDCARD_DIR =
            Environment.getExternalStorageDirectory().getAbsolutePath();

    // Default timeout for MediaCodec buffer dequeue - 200 ms.
    protected static final long DEFAULT_DEQUEUE_TIMEOUT_US = 200000;
    // Default timeout for MediaEncoderAsync - 30 sec.
    protected static final long DEFAULT_ENCODE_TIMEOUT_MS = 30000;
    // Default sync frame interval in frames (zero means allow the encoder to auto-select
    // key frame interval).
    private static final int SYNC_FRAME_INTERVAL = 0;
    // Video bitrate type - should be set to OMX_Video_ControlRateConstant from OMX_Video.h
    protected static final int VIDEO_ControlRateVariable = 1;
    protected static final int VIDEO_ControlRateConstant = 2;
    // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
    // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
    private static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
    // Allowable color formats supported by codec - in order of preference.
    private static final int[] mSupportedColorList = {
            CodecCapabilities.COLOR_FormatYUV420Planar,
            CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
            CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
            COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m
    };
    // Scaled image cache list - contains scale factors, for which up-scaled frames
    // were calculated and were written to yuv file.
    ArrayList<Integer> mScaledImages = new ArrayList<Integer>();

    // Test application resources, initialized in setContext(); used to open
    // raw input streams bundled with the test apk.
    private Resources mResources;
88 
    @Override
    public void setContext(Context context) {
        super.setContext(context);
        // Cache the app resources so raw input assets can be opened later.
        mResources = mContext.getResources();
    }
94 
95     /**
96      * Returns the first codec capable of encoding the specified MIME type, or null if no
97      * match was found.
98      */
selectCodec(String mimeType)99     protected static MediaCodecInfo selectCodec(String mimeType) {
100         int numCodecs = MediaCodecList.getCodecCount();
101         for (int i = 0; i < numCodecs; i++) {
102             MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
103 
104             if (!codecInfo.isEncoder()) {
105                 continue;
106             }
107 
108             String[] types = codecInfo.getSupportedTypes();
109             for (int j = 0; j < types.length; j++) {
110                 if (types[j].equalsIgnoreCase(mimeType)) {
111                     return codecInfo;
112                 }
113             }
114         }
115         return null;
116     }
117 
118     /**
119      *  VP8 codec properties generated by getVp8CodecProperties() function.
120      */
121     private class CodecProperties {
CodecProperties(String codecName, int colorFormat)122         CodecProperties(String codecName, int colorFormat) {
123             this.codecName = codecName;
124             this.colorFormat = colorFormat;
125         }
isGoogleSwCodec()126         public boolean  isGoogleSwCodec() {
127             return codecName.startsWith(OMX_SW_CODEC_PREFIX);
128         }
129 
130         public final String codecName; // OpenMax component name for VP8 codec.
131         public final int colorFormat;  // Color format supported by codec.
132     }
133 
    /**
     * Function to find VP8 codec.
     *
     * Iterates through the list of available codecs and tries to find
     * VP8 codec, which can support either YUV420 planar or NV12 color formats.
     * If forceSwGoogleCodec parameter set to true the function always returns
     * Google sw VP8 codec.
     * If forceSwGoogleCodec parameter set to false the functions looks for platform
     * specific VP8 codec first. If no platform specific codec exist, falls back to
     * Google sw VP8 codec.
     *
     * @param isEncoder     Flag if encoder is requested.
     * @param forceSwGoogleCodec  Forces to use Google sw codec.
     * @throws RuntimeException if a platform VP8 codec is found but supports none of
     *         the color formats listed in mSupportedColorList.
     */
    private CodecProperties getVp8CodecProperties(boolean isEncoder,
            boolean forceSwGoogleCodec) throws Exception {
        CodecProperties codecProperties = null;

        if (!forceSwGoogleCodec) {
            // Loop through the list of omx components in case platform specific codec
            // is requested.
            for (int i = 0; i < MediaCodecList.getCodecCount(); i++) {
                MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
                if (isEncoder != codecInfo.isEncoder()) {
                    continue;
                }
                Log.v(TAG, codecInfo.getName());
                // Check if this is sw Google codec - we should ignore it.
                boolean isGoogleSwCodec = codecInfo.getName().startsWith(OMX_SW_CODEC_PREFIX);
                if (isGoogleSwCodec) {
                    continue;
                }

                for (String type : codecInfo.getSupportedTypes()) {
                    if (!type.equalsIgnoreCase(VP8_MIME)) {
                        continue;
                    }
                    CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(VP8_MIME);

                    // Get candidate codec properties.
                    Log.v(TAG, "Found candidate codec " + codecInfo.getName());
                    for (int colorFormat : capabilities.colorFormats) {
                        Log.v(TAG, "   Color: 0x" + Integer.toHexString(colorFormat));
                    }

                    // Check supported color formats.
                    // mSupportedColorList is ordered by preference - first match wins.
                    for (int supportedColorFormat : mSupportedColorList) {
                        for (int codecColorFormat : capabilities.colorFormats) {
                            if (codecColorFormat == supportedColorFormat) {
                                codecProperties = new CodecProperties(codecInfo.getName(),
                                        codecColorFormat);
                                Log.v(TAG, "Found target codec " + codecProperties.codecName +
                                        ". Color: 0x" + Integer.toHexString(codecColorFormat));
                                return codecProperties;
                            }
                        }
                    }
                    // HW codec we found does not support one of necessary color formats.
                    // Note: this aborts the whole search rather than trying other codecs.
                    throw new RuntimeException("No hw codec with YUV420 or NV12 color formats");
                }
            }
        }
        // If no hw vp8 codec exist or sw codec is requested use default Google sw codec.
        if (codecProperties == null) {
            Log.v(TAG, "Use SW VP8 codec");
            if (isEncoder) {
                codecProperties = new CodecProperties(VPX_SW_ENCODER_NAME,
                        CodecCapabilities.COLOR_FormatYUV420Planar);
            } else {
                codecProperties = new CodecProperties(VPX_SW_DECODER_NAME,
                        CodecCapabilities.COLOR_FormatYUV420Planar);
            }
        }

        return codecProperties;
    }
210 
    /**
     * Parameters for encoded video stream.
     */
    protected class EncoderOutputStreamParameters {
        // Name of raw YUV420 input file. When the value of this parameter
        // is set to null input file descriptor from inputResourceId parameter
        // is used instead.
        public String inputYuvFilename;
        // Name of scaled YUV420 input file.
        public String scaledYuvFilename;
        // File descriptor for the raw input file (YUV420). Used only if
        // inputYuvFilename parameter is null.
        int inputResourceId;
        // Name of the IVF file to write encoded bitstream
        public String outputIvfFilename;
        // Force to use Google SW VP8 encoder.
        boolean forceSwEncoder;
        // Number of frames to encode.
        int frameCount;
        // Frame rate of input file in frames per second.
        int frameRate;
        // Encoded frame width.
        public int frameWidth;
        // Encoded frame height.
        public int frameHeight;
        // Encoding bitrate array in bits/second for every frame. If array length
        // is shorter than the total number of frames, the last value is re-used for
        // all remaining frames. For constant bitrate encoding single element
        // array can be used with first element set to target bitrate value.
        public int[] bitrateSet;
        // Encoding bitrate type - VBR or CBR
        // (VIDEO_ControlRateVariable or VIDEO_ControlRateConstant).
        public int bitrateType;
        // Number of temporal layers
        public int temporalLayers;
        // Desired key frame interval - codec is asked to generate key frames
        // at a period defined by this parameter.
        public int syncFrameInterval;
        // Optional parameter - forced key frame interval. Used to
        // explicitly request the codec to generate key frames using
        // MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME parameter.
        public int syncForceFrameInterval;
        // Buffer dequeue timeout in microseconds
        // (set to 0 for asynchronous encoding - see getDefaultEncodingParameterList).
        long timeoutDequeue;
        // Flag if encoder should run in Looper thread.
        boolean runInLooperThread;
    }
257 
258     /**
259      * Generates an array of default parameters for encoder output stream based on
260      * upscaling value.
261      */
getDefaultEncodingParameterList( String inputYuvName, String outputIvfBaseName, int encodeSeconds, int[] resolutionScales, int frameWidth, int frameHeight, int frameRate, int bitrateMode, int[] bitrates, boolean syncEncoding)262     protected ArrayList<EncoderOutputStreamParameters> getDefaultEncodingParameterList(
263             String inputYuvName,
264             String outputIvfBaseName,
265             int encodeSeconds,
266             int[] resolutionScales,
267             int frameWidth,
268             int frameHeight,
269             int frameRate,
270             int bitrateMode,
271             int[] bitrates,
272             boolean syncEncoding) {
273         assertTrue(resolutionScales.length == bitrates.length);
274         int numCodecs = resolutionScales.length;
275         ArrayList<EncoderOutputStreamParameters> outputParameters =
276                 new ArrayList<EncoderOutputStreamParameters>(numCodecs);
277         for (int i = 0; i < numCodecs; i++) {
278             EncoderOutputStreamParameters params = new EncoderOutputStreamParameters();
279             if (inputYuvName != null) {
280                 params.inputYuvFilename = SDCARD_DIR + File.separator + inputYuvName;
281             } else {
282                 params.inputYuvFilename = null;
283             }
284             params.scaledYuvFilename = SDCARD_DIR + File.separator +
285                     outputIvfBaseName + resolutionScales[i]+ ".yuv";
286             params.inputResourceId = R.raw.football_qvga;
287             params.outputIvfFilename = SDCARD_DIR + File.separator +
288                     outputIvfBaseName + resolutionScales[i] + ".ivf";
289             params.forceSwEncoder = false;
290             params.frameCount = encodeSeconds * frameRate;
291             params.frameRate = frameRate;
292             params.frameWidth = Math.min(frameWidth * resolutionScales[i], 1280);
293             params.frameHeight = Math.min(frameHeight * resolutionScales[i], 720);
294             params.bitrateSet = new int[1];
295             params.bitrateSet[0] = bitrates[i];
296             params.bitrateType = bitrateMode;
297             params.temporalLayers = 0;
298             params.syncFrameInterval = SYNC_FRAME_INTERVAL;
299             params.syncForceFrameInterval = 0;
300             if (syncEncoding) {
301                 params.timeoutDequeue = DEFAULT_DEQUEUE_TIMEOUT_US;
302                 params.runInLooperThread = false;
303             } else {
304                 params.timeoutDequeue = 0;
305                 params.runInLooperThread = true;
306             }
307             outputParameters.add(params);
308         }
309         return outputParameters;
310     }
311 
getDefaultEncodingParameters( String inputYuvName, String outputIvfBaseName, int encodeSeconds, int frameWidth, int frameHeight, int frameRate, int bitrateMode, int bitrate, boolean syncEncoding)312     protected EncoderOutputStreamParameters getDefaultEncodingParameters(
313             String inputYuvName,
314             String outputIvfBaseName,
315             int encodeSeconds,
316             int frameWidth,
317             int frameHeight,
318             int frameRate,
319             int bitrateMode,
320             int bitrate,
321             boolean syncEncoding) {
322         int[] scaleValues = { 1 };
323         int[] bitrates = { bitrate };
324         return getDefaultEncodingParameterList(
325                 inputYuvName,
326                 outputIvfBaseName,
327                 encodeSeconds,
328                 scaleValues,
329                 frameWidth,
330                 frameHeight,
331                 frameRate,
332                 bitrateMode,
333                 bitrates,
334                 syncEncoding).get(0);
335     }
336 
337     /**
338      * Converts (interleaves) YUV420 planar to NV12.
339      * Assumes packed, macroblock-aligned frame with no cropping
340      * (visible/coded row length == stride).
341      */
YUV420ToNV(int width, int height, byte[] yuv)342     private static byte[] YUV420ToNV(int width, int height, byte[] yuv) {
343         byte[] nv = new byte[yuv.length];
344         // Y plane we just copy.
345         System.arraycopy(yuv, 0, nv, 0, width * height);
346 
347         // U & V plane we interleave.
348         int u_offset = width * height;
349         int v_offset = u_offset + u_offset / 4;
350         int nv_offset = width * height;
351         for (int i = 0; i < width * height / 4; i++) {
352             nv[nv_offset++] = yuv[u_offset++];
353             nv[nv_offset++] = yuv[v_offset++];
354         }
355         return nv;
356     }
357 
358     /**
359      * Converts (de-interleaves) NV12 to YUV420 planar.
360      * Stride may be greater than width, slice height may be greater than height.
361      */
NV12ToYUV420(int width, int height, int stride, int sliceHeight, byte[] nv12)362     private static byte[] NV12ToYUV420(int width, int height,
363             int stride, int sliceHeight, byte[] nv12) {
364         byte[] yuv = new byte[width * height * 3 / 2];
365 
366         // Y plane we just copy.
367         for (int i = 0; i < height; i++) {
368             System.arraycopy(nv12, i * stride, yuv, i * width, width);
369         }
370 
371         // U & V plane - de-interleave.
372         int u_offset = width * height;
373         int v_offset = u_offset + u_offset / 4;
374         int nv_offset;
375         for (int i = 0; i < height / 2; i++) {
376             nv_offset = stride * (sliceHeight + i);
377             for (int j = 0; j < width / 2; j++) {
378                 yuv[u_offset++] = nv12[nv_offset++];
379                 yuv[v_offset++] = nv12[nv_offset++];
380             }
381         }
382         return yuv;
383     }
384 
385     /**
386      * Packs YUV420 frame by moving it to a smaller size buffer with stride and slice
387      * height equal to the original frame width and height.
388      */
PackYUV420(int width, int height, int stride, int sliceHeight, byte[] src)389     private static byte[] PackYUV420(int width, int height,
390             int stride, int sliceHeight, byte[] src) {
391         byte[] dst = new byte[width * height * 3 / 2];
392         // Y copy.
393         for (int i = 0; i < height; i++) {
394             System.arraycopy(src, i * stride, dst, i * width, width);
395         }
396         // U and V copy.
397         int u_src_offset = stride * sliceHeight;
398         int v_src_offset = u_src_offset + u_src_offset / 4;
399         int u_dst_offset = width * height;
400         int v_dst_offset = u_dst_offset + u_dst_offset / 4;
401         for (int i = 0; i < height / 2; i++) {
402             System.arraycopy(src, u_src_offset + i * (stride / 2),
403                     dst, u_dst_offset + i * (width / 2), width / 2);
404             System.arraycopy(src, v_src_offset + i * (stride / 2),
405                     dst, v_dst_offset + i * (width / 2), width / 2);
406         }
407         return dst;
408     }
409 
410 
imageUpscale1To2(byte[] src, int srcByteOffset, int srcStride, byte[] dst, int dstByteOffset, int dstWidth, int dstHeight)411     private static void imageUpscale1To2(byte[] src, int srcByteOffset, int srcStride,
412             byte[] dst, int dstByteOffset, int dstWidth, int dstHeight) {
413         for (int i = 0; i < dstHeight/2 - 1; i++) {
414             int dstOffset0 = 2 * i * dstWidth + dstByteOffset;
415             int dstOffset1 = dstOffset0 + dstWidth;
416             int srcOffset0 = i * srcStride + srcByteOffset;
417             int srcOffset1 = srcOffset0 + srcStride;
418             int pixel00 = (int)src[srcOffset0++] & 0xff;
419             int pixel10 = (int)src[srcOffset1++] & 0xff;
420             for (int j = 0; j < dstWidth/2 - 1; j++) {
421                 int pixel01 = (int)src[srcOffset0++] & 0xff;
422                 int pixel11 = (int)src[srcOffset1++] & 0xff;
423                 dst[dstOffset0++] = (byte)pixel00;
424                 dst[dstOffset0++] = (byte)((pixel00 + pixel01 + 1) / 2);
425                 dst[dstOffset1++] = (byte)((pixel00 + pixel10 + 1) / 2);
426                 dst[dstOffset1++] = (byte)((pixel00 + pixel01 + pixel10 + pixel11 + 2) / 4);
427                 pixel00 = pixel01;
428                 pixel10 = pixel11;
429             }
430             // last column
431             dst[dstOffset0++] = (byte)pixel00;
432             dst[dstOffset0++] = (byte)pixel00;
433             dst[dstOffset1++] = (byte)((pixel00 + pixel10 + 1) / 2);
434             dst[dstOffset1++] = (byte)((pixel00 + pixel10 + 1) / 2);
435         }
436 
437         // last row
438         int dstOffset0 = (dstHeight - 2) * dstWidth + dstByteOffset;
439         int dstOffset1 = dstOffset0 + dstWidth;
440         int srcOffset0 = (dstHeight/2 - 1) * srcStride + srcByteOffset;
441         int pixel00 = (int)src[srcOffset0++] & 0xff;
442         for (int j = 0; j < dstWidth/2 - 1; j++) {
443             int pixel01 = (int)src[srcOffset0++] & 0xff;
444             dst[dstOffset0++] = (byte)pixel00;
445             dst[dstOffset0++] = (byte)((pixel00 + pixel01 + 1) / 2);
446             dst[dstOffset1++] = (byte)pixel00;
447             dst[dstOffset1++] = (byte)((pixel00 + pixel01 + 1) / 2);
448             pixel00 = pixel01;
449         }
450         // the very last pixel - bottom right
451         dst[dstOffset0++] = (byte)pixel00;
452         dst[dstOffset0++] = (byte)pixel00;
453         dst[dstOffset1++] = (byte)pixel00;
454         dst[dstOffset1++] = (byte)pixel00;
455     }
456 
457     /**
458     * Up-scale image.
459     * Scale factor is defined by source and destination width ratio.
460     * Only 1:2 and 1:4 up-scaling is supported for now.
461     * For 640x480 -> 1280x720 conversion only top 640x360 part of the original
462     * image is scaled.
463     */
imageScale(byte[] src, int srcWidth, int srcHeight, int dstWidth, int dstHeight)464     private static byte[] imageScale(byte[] src, int srcWidth, int srcHeight,
465             int dstWidth, int dstHeight) throws Exception {
466         int srcYSize = srcWidth * srcHeight;
467         int dstYSize = dstWidth * dstHeight;
468         byte[] dst = null;
469         if (dstWidth == 2 * srcWidth && dstHeight <= 2 * srcHeight) {
470             // 1:2 upscale
471             dst = new byte[dstWidth * dstHeight * 3 / 2];
472             imageUpscale1To2(src, 0, srcWidth,
473                     dst, 0, dstWidth, dstHeight);                                 // Y
474             imageUpscale1To2(src, srcYSize, srcWidth / 2,
475                     dst, dstYSize, dstWidth / 2, dstHeight / 2);                  // U
476             imageUpscale1To2(src, srcYSize * 5 / 4, srcWidth / 2,
477                     dst, dstYSize * 5 / 4, dstWidth / 2, dstHeight / 2);          // V
478         } else if (dstWidth == 4 * srcWidth && dstHeight <= 4 * srcHeight) {
479             // 1:4 upscale - in two steps
480             int midWidth = 2 * srcWidth;
481             int midHeight = 2 * srcHeight;
482             byte[] midBuffer = imageScale(src, srcWidth, srcHeight, midWidth, midHeight);
483             dst = imageScale(midBuffer, midWidth, midHeight, dstWidth, dstHeight);
484 
485         } else {
486             throw new RuntimeException("Can not find proper scaling function");
487         }
488 
489         return dst;
490     }
491 
cacheScaledImage( String srcYuvFilename, int srcResourceId, int srcFrameWidth, int srcFrameHeight, String dstYuvFilename, int dstFrameWidth, int dstFrameHeight)492     private void cacheScaledImage(
493             String srcYuvFilename, int srcResourceId, int srcFrameWidth, int srcFrameHeight,
494             String dstYuvFilename, int dstFrameWidth, int dstFrameHeight) throws Exception {
495         InputStream srcStream = OpenFileOrResourceId(srcYuvFilename, srcResourceId);
496         FileOutputStream dstFile = new FileOutputStream(dstYuvFilename, false);
497         int srcFrameSize = srcFrameWidth * srcFrameHeight * 3 / 2;
498         byte[] srcFrame = new byte[srcFrameSize];
499         byte[] dstFrame = null;
500         Log.d(TAG, "Scale to " + dstFrameWidth + " x " + dstFrameHeight + ". -> " + dstYuvFilename);
501         while (true) {
502             int bytesRead = srcStream.read(srcFrame);
503             if (bytesRead != srcFrame.length) {
504                 break;
505             }
506             if (dstFrameWidth == srcFrameWidth && dstFrameHeight == srcFrameHeight) {
507                 dstFrame = srcFrame;
508             } else {
509                 dstFrame = imageScale(srcFrame, srcFrameWidth, srcFrameHeight,
510                         dstFrameWidth, dstFrameHeight);
511             }
512             dstFile.write(dstFrame);
513         }
514         srcStream.close();
515         dstFile.close();
516     }
517 
518 
519     /**
520      * A basic check if an encoded stream is decodable.
521      *
522      * The most basic confirmation we can get about a frame
523      * being properly encoded is trying to decode it.
524      * (Especially in realtime mode encode output is non-
525      * deterministic, therefore a more thorough check like
526      * md5 sum comparison wouldn't work.)
527      *
528      * Indeed, MediaCodec will raise an IllegalStateException
529      * whenever vp8 decoder fails to decode a frame, and
530      * this test uses that fact to verify the bitstream.
531      *
532      * @param inputIvfFilename  The name of the IVF file containing encoded bitsream.
533      * @param outputYuvFilename The name of the output YUV file (optional).
534      * @param frameRate         Frame rate of input file in frames per second
535      * @param forceSwDecoder    Force to use Googlw sw VP8 decoder.
536      */
decode( String inputIvfFilename, String outputYuvFilename, int frameRate, boolean forceSwDecoder)537     protected ArrayList<MediaCodec.BufferInfo> decode(
538             String inputIvfFilename,
539             String outputYuvFilename,
540             int frameRate,
541             boolean forceSwDecoder) throws Exception {
542         ArrayList<MediaCodec.BufferInfo> bufferInfos = new ArrayList<MediaCodec.BufferInfo>();
543         CodecProperties properties = getVp8CodecProperties(false, forceSwDecoder);
544         // Open input/output.
545         IvfReader ivf = new IvfReader(inputIvfFilename);
546         int frameWidth = ivf.getWidth();
547         int frameHeight = ivf.getHeight();
548         int frameCount = ivf.getFrameCount();
549         int frameStride = frameWidth;
550         int frameSliceHeight = frameHeight;
551         int frameColorFormat = properties.colorFormat;
552         assertTrue(frameWidth > 0);
553         assertTrue(frameHeight > 0);
554         assertTrue(frameCount > 0);
555 
556         FileOutputStream yuv = null;
557         if (outputYuvFilename != null) {
558             yuv = new FileOutputStream(outputYuvFilename, false);
559         }
560 
561         // Create decoder.
562         MediaFormat format = MediaFormat.createVideoFormat(VP8_MIME,
563                                                            ivf.getWidth(),
564                                                            ivf.getHeight());
565         format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
566         Log.d(TAG, "Creating decoder " + properties.codecName +
567                 ". Color format: 0x" + Integer.toHexString(frameColorFormat) +
568                 ". " + frameWidth + " x " + frameHeight);
569         Log.d(TAG, "  Format: " + format);
570         Log.d(TAG, "  In: " + inputIvfFilename + ". Out:" + outputYuvFilename);
571         MediaCodec decoder = MediaCodec.createByCodecName(properties.codecName);
572         decoder.configure(format,
573                           null,  // surface
574                           null,  // crypto
575                           0);    // flags
576         decoder.start();
577 
578         ByteBuffer[] inputBuffers = decoder.getInputBuffers();
579         ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
580         MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
581 
582         // decode loop
583         int inputFrameIndex = 0;
584         int outputFrameIndex = 0;
585         long inPresentationTimeUs = 0;
586         long outPresentationTimeUs = 0;
587         boolean sawOutputEOS = false;
588         boolean sawInputEOS = false;
589 
590         while (!sawOutputEOS) {
591             if (!sawInputEOS) {
592                 int inputBufIndex = decoder.dequeueInputBuffer(DEFAULT_DEQUEUE_TIMEOUT_US);
593                 if (inputBufIndex >= 0) {
594                     byte[] frame = ivf.readFrame(inputFrameIndex);
595 
596                     if (inputFrameIndex == frameCount - 1) {
597                         Log.d(TAG, "  Input EOS for frame # " + inputFrameIndex);
598                         sawInputEOS = true;
599                     }
600 
601                     inputBuffers[inputBufIndex].clear();
602                     inputBuffers[inputBufIndex].put(frame);
603                     inputBuffers[inputBufIndex].rewind();
604                     inPresentationTimeUs = (inputFrameIndex * 1000000) / frameRate;
605 
606                     decoder.queueInputBuffer(
607                             inputBufIndex,
608                             0,  // offset
609                             frame.length,
610                             inPresentationTimeUs,
611                             sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
612 
613                     inputFrameIndex++;
614                 }
615             }
616 
617             int result = decoder.dequeueOutputBuffer(bufferInfo, DEFAULT_DEQUEUE_TIMEOUT_US);
618             while (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED ||
619                     result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
620                 if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
621                     outputBuffers = decoder.getOutputBuffers();
622                 } else  if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
623                     // Process format change
624                     format = decoder.getOutputFormat();
625                     frameWidth = format.getInteger(MediaFormat.KEY_WIDTH);
626                     frameHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
627                     frameColorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
628                     Log.d(TAG, "Decoder output format change. Color: 0x" +
629                             Integer.toHexString(frameColorFormat));
630                     Log.d(TAG, "Format: " + format.toString());
631 
632                     // Parse frame and slice height from undocumented values
633                     if (format.containsKey("stride")) {
634                         frameStride = format.getInteger("stride");
635                     } else {
636                         frameStride = frameWidth;
637                     }
638                     if (format.containsKey("slice-height")) {
639                         frameSliceHeight = format.getInteger("slice-height");
640                     } else {
641                         frameSliceHeight = frameHeight;
642                     }
643                     Log.d(TAG, "Frame stride and slice height: " + frameStride +
644                             " x " + frameSliceHeight);
645                     frameStride = Math.max(frameWidth, frameStride);
646                     frameSliceHeight = Math.max(frameHeight, frameSliceHeight);
647                 }
648                 result = decoder.dequeueOutputBuffer(bufferInfo, DEFAULT_DEQUEUE_TIMEOUT_US);
649             }
650             if (result >= 0) {
651                 int outputBufIndex = result;
652                 outPresentationTimeUs = bufferInfo.presentationTimeUs;
653                 Log.v(TAG, "Writing buffer # " + outputFrameIndex +
654                         ". Size: " + bufferInfo.size +
655                         ". InTime: " + (inPresentationTimeUs + 500)/1000 +
656                         ". OutTime: " + (outPresentationTimeUs + 500)/1000);
657                 if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
658                     sawOutputEOS = true;
659                     Log.d(TAG, "   Output EOS for frame # " + outputFrameIndex);
660                 }
661 
662                 if (bufferInfo.size > 0) {
663                     // Save decoder output to yuv file.
664                     if (yuv != null) {
665                         byte[] frame = new byte[bufferInfo.size];
666                         outputBuffers[outputBufIndex].position(bufferInfo.offset);
667                         outputBuffers[outputBufIndex].get(frame, 0, bufferInfo.size);
668                         // Convert NV12 to YUV420 if necessary.
669                         if (frameColorFormat != CodecCapabilities.COLOR_FormatYUV420Planar) {
670                             frame = NV12ToYUV420(frameWidth, frameHeight,
671                                     frameStride, frameSliceHeight, frame);
672                         }
673                         int writeLength = Math.min(frameWidth * frameHeight * 3 / 2, frame.length);
674                         // Pack frame if necessary.
675                         if (writeLength < frame.length &&
676                                 (frameStride > frameWidth || frameSliceHeight > frameHeight)) {
677                             frame = PackYUV420(frameWidth, frameHeight,
678                                     frameStride, frameSliceHeight, frame);
679                         }
680                         yuv.write(frame, 0, writeLength);
681                     }
682                     outputFrameIndex++;
683 
684                     // Update statistics - store presentation time delay in offset
685                     long presentationTimeUsDelta = inPresentationTimeUs - outPresentationTimeUs;
686                     MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo();
687                     bufferInfoCopy.set((int)presentationTimeUsDelta, bufferInfo.size,
688                             outPresentationTimeUs, bufferInfo.flags);
689                     bufferInfos.add(bufferInfoCopy);
690                 }
691                 decoder.releaseOutputBuffer(outputBufIndex, false);
692             }
693         }
694         decoder.stop();
695         decoder.release();
696         ivf.close();
697         if (yuv != null) {
698             yuv.close();
699         }
700 
701         return bufferInfos;
702     }
703 
704 
705     /**
706      * Helper function to return InputStream from either filename (if set)
707      * or resource id (if filename is not set).
708      */
OpenFileOrResourceId(String filename, int resourceId)709     private InputStream OpenFileOrResourceId(String filename, int resourceId) throws Exception {
710         if (filename != null) {
711             return new FileInputStream(filename);
712         }
713         return mResources.openRawResource(resourceId);
714     }
715 
716     /**
717      * Results of frame encoding.
718      */
    protected class MediaEncoderOutput {
        // Presentation time (us) of the raw frame most recently fed to the encoder.
        public long inPresentationTimeUs;
        // Presentation time (us) the encoder reported for this output buffer.
        public long outPresentationTimeUs;
        // True when the encoder actually produced an output buffer for this call;
        // when false the remaining fields are not meaningful.
        public boolean outputGenerated;
        // MediaCodec.BufferInfo flags of the output buffer (CONFIG/KEY/EOS bits).
        public int flags;
        // Encoded frame payload copied out of the codec output buffer.
        public byte[] buffer;
    }
726 
727     protected class MediaEncoderAsyncHelper {
728         private final EncoderOutputStreamParameters mStreamParams;
729         private final CodecProperties mProperties;
730         private final ArrayList<MediaCodec.BufferInfo> mBufferInfos;
731         private final IvfWriter mIvf;
732         private final byte[] mSrcFrame;
733 
734         private InputStream mYuvStream;
735         private int mInputFrameIndex;
736 
MediaEncoderAsyncHelper( EncoderOutputStreamParameters streamParams, CodecProperties properties, ArrayList<MediaCodec.BufferInfo> bufferInfos, IvfWriter ivf)737         MediaEncoderAsyncHelper(
738                 EncoderOutputStreamParameters streamParams,
739                 CodecProperties properties,
740                 ArrayList<MediaCodec.BufferInfo> bufferInfos,
741                 IvfWriter ivf)
742                 throws Exception {
743             mStreamParams = streamParams;
744             mProperties = properties;
745             mBufferInfos = bufferInfos;
746             mIvf = ivf;
747 
748             int srcFrameSize = streamParams.frameWidth * streamParams.frameHeight * 3 / 2;
749             mSrcFrame = new byte[srcFrameSize];
750 
751             mYuvStream = OpenFileOrResourceId(
752                     streamParams.inputYuvFilename, streamParams.inputResourceId);
753         }
754 
getInputFrame()755         public byte[] getInputFrame() {
756             // Check EOS
757             if (mStreamParams.frameCount == 0
758                     || (mStreamParams.frameCount > 0
759                             && mInputFrameIndex >= mStreamParams.frameCount)) {
760                 Log.d(TAG, "---Sending EOS empty frame for frame # " + mInputFrameIndex);
761                 return null;
762             }
763 
764             try {
765                 int bytesRead = mYuvStream.read(mSrcFrame);
766 
767                 if (bytesRead == -1) {
768                     // rewind to beginning of file
769                     mYuvStream.close();
770                     mYuvStream = OpenFileOrResourceId(
771                             mStreamParams.inputYuvFilename, mStreamParams.inputResourceId);
772                     bytesRead = mYuvStream.read(mSrcFrame);
773                 }
774             } catch (Exception e) {
775                 Log.e(TAG, "Failed to read YUV file.");
776                 return null;
777             }
778             mInputFrameIndex++;
779 
780             // Convert YUV420 to NV12 if necessary
781             if (mProperties.colorFormat != CodecCapabilities.COLOR_FormatYUV420Planar) {
782                 return YUV420ToNV(mStreamParams.frameWidth, mStreamParams.frameHeight,
783                         mSrcFrame);
784             } else {
785                 return mSrcFrame;
786             }
787         }
788 
saveOutputFrame(MediaEncoderOutput out)789         public boolean saveOutputFrame(MediaEncoderOutput out) {
790             if (out.outputGenerated) {
791                 if (out.buffer.length > 0) {
792                     // Save frame
793                     try {
794                         mIvf.writeFrame(out.buffer, out.outPresentationTimeUs);
795                     } catch (Exception e) {
796                         Log.d(TAG, "Failed to write frame");
797                         return true;
798                     }
799 
800                     // Update statistics - store presentation time delay in offset
801                     long presentationTimeUsDelta = out.inPresentationTimeUs -
802                             out.outPresentationTimeUs;
803                     MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo();
804                     bufferInfoCopy.set((int)presentationTimeUsDelta, out.buffer.length,
805                             out.outPresentationTimeUs, out.flags);
806                     mBufferInfos.add(bufferInfoCopy);
807                 }
808                 // Detect output EOS
809                 if ((out.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
810                     Log.d(TAG, "----Output EOS ");
811                     return true;
812                 }
813             }
814             return false;
815         }
816     }
817 
818     /**
819      * Video encoder wrapper class.
     * Allows running the encoder either in the caller's thread or in a looper thread
821      * using buffer dequeue ready notification callbacks.
822      *
823      * Function feedInput() is used to send raw video frame to the encoder input. When encoder
824      * is configured to run in async mode the function will run in a looper thread.
825      * Encoded frame can be retrieved by calling getOutput() function.
826      */
827     protected class MediaEncoderAsync extends Thread {
828         private int mId;
829         private MediaCodec mCodec;
830         private MediaFormat mFormat;
831         private ByteBuffer[] mInputBuffers;
832         private ByteBuffer[] mOutputBuffers;
833         private int mInputFrameIndex;
834         private int mOutputFrameIndex;
835         private int mInputBufIndex;
836         private int mFrameRate;
837         private long mTimeout;
838         private MediaCodec.BufferInfo mBufferInfo;
839         private long mInPresentationTimeUs;
840         private long mOutPresentationTimeUs;
841         private boolean mAsync;
842         // Flag indicating if input frame was consumed by the encoder in feedInput() call.
843         private boolean mConsumedInput;
844         // Result of frame encoding returned by getOutput() call.
845         private MediaEncoderOutput mOutput;
846         // Object used to signal that looper thread has started and Handler instance associated
847         // with looper thread has been allocated.
848         private final Object mThreadEvent = new Object();
849         // Object used to signal that MediaCodec buffer dequeue notification callback
850         // was received.
851         private final Object mCallbackEvent = new Object();
852         private Handler mHandler;
853         private boolean mCallbackReceived;
854         private MediaEncoderAsyncHelper mHelper;
855         private final Object mCompletionEvent = new Object();
856         private boolean mCompleted;
857 
858         private MediaCodec.Callback mCallback = new MediaCodec.Callback() {
859             @Override
860             public void onInputBufferAvailable(MediaCodec codec, int index) {
861                 if (mHelper == null) {
862                     Log.e(TAG, "async helper not available");
863                     return;
864                 }
865 
866                 byte[] encFrame = mHelper.getInputFrame();
867                 boolean inputEOS = (encFrame == null);
868 
869                 int encFrameLength = 0;
870                 int flags = 0;
871                 if (inputEOS) {
872                     flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
873                 } else {
874                     encFrameLength = encFrame.length;
875 
876                     ByteBuffer byteBuffer = mCodec.getInputBuffer(index);
877                     byteBuffer.put(encFrame);
878                     byteBuffer.rewind();
879 
880                     mInPresentationTimeUs = (mInputFrameIndex * 1000000) / mFrameRate;
881 
882                     Log.v(TAG, "Enc" + mId + ". Frame in # " + mInputFrameIndex +
883                             ". InTime: " + (mInPresentationTimeUs + 500)/1000);
884 
885                     mInputFrameIndex++;
886                 }
887 
888                 mCodec.queueInputBuffer(
889                         index,
890                         0,  // offset
891                         encFrameLength,  // size
892                         mInPresentationTimeUs,
893                         flags);
894             }
895 
896             @Override
897             public void onOutputBufferAvailable(MediaCodec codec,
898                     int index, MediaCodec.BufferInfo info) {
899                 if (mHelper == null) {
900                     Log.e(TAG, "async helper not available");
901                     return;
902                 }
903 
904                 MediaEncoderOutput out = new MediaEncoderOutput();
905 
906                 out.buffer = new byte[info.size];
907                 ByteBuffer outputBuffer = mCodec.getOutputBuffer(index);
908                 outputBuffer.get(out.buffer, 0, info.size);
909                 mOutPresentationTimeUs = info.presentationTimeUs;
910 
911                 String logStr = "Enc" + mId + ". Frame # " + mOutputFrameIndex;
912                 if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
913                     logStr += " CONFIG. ";
914                 }
915                 if ((info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) {
916                     logStr += " KEY. ";
917                 }
918                 if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
919                     logStr += " EOS. ";
920                 }
921                 logStr += " Size: " + info.size;
922                 logStr += ". InTime: " + (mInPresentationTimeUs + 500)/1000 +
923                         ". OutTime: " + (mOutPresentationTimeUs + 500)/1000;
924                 Log.v(TAG, logStr);
925 
926                 if (mOutputFrameIndex == 0 &&
927                         ((info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) == 0) ) {
928                     throw new RuntimeException("First frame is not a sync frame.");
929                 }
930 
931                 if (info.size > 0) {
932                     mOutputFrameIndex++;
933                     out.inPresentationTimeUs = mInPresentationTimeUs;
934                     out.outPresentationTimeUs = mOutPresentationTimeUs;
935                 }
936                 mCodec.releaseOutputBuffer(index, false);
937 
938                 out.flags = info.flags;
939                 out.outputGenerated = true;
940 
941                 if (mHelper.saveOutputFrame(out)) {
942                     // output EOS
943                     signalCompletion();
944                 }
945             }
946 
947             @Override
948             public void onError(MediaCodec codec, CodecException e) {
949                 Log.e(TAG, "onError: " + e
950                         + ", transient " + e.isTransient()
951                         + ", recoverable " + e.isRecoverable()
952                         + ", error " + e.getErrorCode());
953             }
954 
955             @Override
956             public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
957                 Log.i(TAG, "onOutputFormatChanged: " + format.toString());
958             }
959         };
960 
requestStart()961         private synchronized void requestStart() throws Exception {
962             mHandler = null;
963             start();
964             // Wait for Hander allocation
965             synchronized (mThreadEvent) {
966                 while (mHandler == null) {
967                     mThreadEvent.wait();
968                 }
969             }
970         }
971 
setAsyncHelper(MediaEncoderAsyncHelper helper)972         public void setAsyncHelper(MediaEncoderAsyncHelper helper) {
973             mHelper = helper;
974         }
975 
976         @Override
run()977         public void run() {
978             Looper.prepare();
979             synchronized (mThreadEvent) {
980                 mHandler = new Handler();
981                 mThreadEvent.notify();
982             }
983             Looper.loop();
984         }
985 
runCallable(final Callable<?> callable)986         private void runCallable(final Callable<?> callable) throws Exception {
987             if (mAsync) {
988                 final Exception[] exception = new Exception[1];
989                 final CountDownLatch countDownLatch = new CountDownLatch(1);
990                 mHandler.post( new Runnable() {
991                     @Override
992                     public void run() {
993                         try {
994                             callable.call();
995                         } catch (Exception e) {
996                             exception[0] = e;
997                         } finally {
998                             countDownLatch.countDown();
999                         }
1000                     }
1001                 } );
1002 
1003                 // Wait for task completion
1004                 countDownLatch.await();
1005                 if (exception[0] != null) {
1006                     throw exception[0];
1007                 }
1008             } else {
1009                 callable.call();
1010             }
1011         }
1012 
requestStop()1013         private synchronized void requestStop() throws Exception {
1014             mHandler.post( new Runnable() {
1015                 @Override
1016                 public void run() {
1017                     // This will run on the Looper thread
1018                     Log.v(TAG, "MediaEncoder looper quitting");
1019                     Looper.myLooper().quitSafely();
1020                 }
1021             } );
1022             // Wait for completion
1023             join();
1024             mHandler = null;
1025         }
1026 
createCodecInternal(final String name, final MediaFormat format, final long timeout)1027         private void createCodecInternal(final String name,
1028                 final MediaFormat format, final long timeout) throws Exception {
1029             mBufferInfo = new MediaCodec.BufferInfo();
1030             mFormat = format;
1031             mFrameRate = format.getInteger(MediaFormat.KEY_FRAME_RATE);
1032             mTimeout = timeout;
1033             mInputFrameIndex = 0;
1034             mOutputFrameIndex = 0;
1035             mInPresentationTimeUs = 0;
1036             mOutPresentationTimeUs = 0;
1037 
1038             mCodec = MediaCodec.createByCodecName(name);
1039             if (mAsync) {
1040                 mCodec.setCallback(mCallback);
1041             }
1042             mCodec.configure(mFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
1043             mCodec.start();
1044 
1045             // get the cached input/output only in sync mode
1046             if (!mAsync) {
1047                 mInputBuffers = mCodec.getInputBuffers();
1048                 mOutputBuffers = mCodec.getOutputBuffers();
1049             }
1050         }
1051 
createCodec(int id, final String name, final MediaFormat format, final long timeout, boolean async)1052         public void createCodec(int id, final String name, final MediaFormat format,
1053                 final long timeout, boolean async)  throws Exception {
1054             mId = id;
1055             mAsync = async;
1056             if (mAsync) {
1057                 requestStart(); // start looper thread
1058             }
1059             runCallable( new Callable<Void>() {
1060                 @Override
1061                 public Void call() throws Exception {
1062                     createCodecInternal(name, format, timeout);
1063                     return null;
1064                 }
1065             } );
1066         }
1067 
feedInputInternal(final byte[] encFrame, final boolean inputEOS)1068         private void feedInputInternal(final byte[] encFrame, final boolean inputEOS) {
1069             mConsumedInput = false;
1070             // Feed input
1071             mInputBufIndex = mCodec.dequeueInputBuffer(mTimeout);
1072 
1073             if (mInputBufIndex >= 0) {
1074                 mInputBuffers[mInputBufIndex].clear();
1075                 mInputBuffers[mInputBufIndex].put(encFrame);
1076                 mInputBuffers[mInputBufIndex].rewind();
1077                 int encFrameLength = encFrame.length;
1078                 int flags = 0;
1079                 if (inputEOS) {
1080                     encFrameLength = 0;
1081                     flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
1082                 }
1083                 if (!inputEOS) {
1084                     Log.v(TAG, "Enc" + mId + ". Frame in # " + mInputFrameIndex +
1085                             ". InTime: " + (mInPresentationTimeUs + 500)/1000);
1086                     mInPresentationTimeUs = (mInputFrameIndex * 1000000) / mFrameRate;
1087                     mInputFrameIndex++;
1088                 }
1089 
1090                 mCodec.queueInputBuffer(
1091                         mInputBufIndex,
1092                         0,  // offset
1093                         encFrameLength,  // size
1094                         mInPresentationTimeUs,
1095                         flags);
1096 
1097                 mConsumedInput = true;
1098             } else {
1099                 Log.v(TAG, "In " + mId + " - TRY_AGAIN_LATER");
1100             }
1101             mCallbackReceived = false;
1102         }
1103 
feedInput(final byte[] encFrame, final boolean inputEOS)1104         public boolean feedInput(final byte[] encFrame, final boolean inputEOS) throws Exception {
1105             runCallable( new Callable<Void>() {
1106                 @Override
1107                 public Void call() throws Exception {
1108                     feedInputInternal(encFrame, inputEOS);
1109                     return null;
1110                 }
1111             } );
1112             return mConsumedInput;
1113         }
1114 
getOutputInternal()1115         private void getOutputInternal() {
1116             mOutput = new MediaEncoderOutput();
1117             mOutput.inPresentationTimeUs = mInPresentationTimeUs;
1118             mOutput.outPresentationTimeUs = mOutPresentationTimeUs;
1119             mOutput.outputGenerated = false;
1120 
1121             // Get output from the encoder
1122             int result = mCodec.dequeueOutputBuffer(mBufferInfo, mTimeout);
1123             while (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED ||
1124                     result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
1125                 if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
1126                     mOutputBuffers = mCodec.getOutputBuffers();
1127                 } else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
1128                     mFormat = mCodec.getOutputFormat();
1129                     Log.d(TAG, "Format changed: " + mFormat.toString());
1130                 }
1131                 result = mCodec.dequeueOutputBuffer(mBufferInfo, mTimeout);
1132             }
1133             if (result == MediaCodec.INFO_TRY_AGAIN_LATER) {
1134                 Log.v(TAG, "Out " + mId + " - TRY_AGAIN_LATER");
1135             }
1136 
1137             if (result >= 0) {
1138                 int outputBufIndex = result;
1139                 mOutput.buffer = new byte[mBufferInfo.size];
1140                 mOutputBuffers[outputBufIndex].position(mBufferInfo.offset);
1141                 mOutputBuffers[outputBufIndex].get(mOutput.buffer, 0, mBufferInfo.size);
1142                 mOutPresentationTimeUs = mBufferInfo.presentationTimeUs;
1143 
1144                 String logStr = "Enc" + mId + ". Frame # " + mOutputFrameIndex;
1145                 if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
1146                     logStr += " CONFIG. ";
1147                 }
1148                 if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) {
1149                     logStr += " KEY. ";
1150                 }
1151                 if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
1152                     logStr += " EOS. ";
1153                 }
1154                 logStr += " Size: " + mBufferInfo.size;
1155                 logStr += ". InTime: " + (mInPresentationTimeUs + 500)/1000 +
1156                         ". OutTime: " + (mOutPresentationTimeUs + 500)/1000;
1157                 Log.v(TAG, logStr);
1158                 if (mOutputFrameIndex == 0 &&
1159                         ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) == 0) ) {
1160                     throw new RuntimeException("First frame is not a sync frame.");
1161                 }
1162 
1163                 if (mBufferInfo.size > 0) {
1164                     mOutputFrameIndex++;
1165                     mOutput.outPresentationTimeUs = mOutPresentationTimeUs;
1166                 }
1167                 mCodec.releaseOutputBuffer(outputBufIndex, false);
1168 
1169                 mOutput.flags = mBufferInfo.flags;
1170                 mOutput.outputGenerated = true;
1171             }
1172             mCallbackReceived = false;
1173         }
1174 
getOutput()1175         public MediaEncoderOutput getOutput() throws Exception {
1176             runCallable( new Callable<Void>() {
1177                 @Override
1178                 public Void call() throws Exception {
1179                     getOutputInternal();
1180                     return null;
1181                 }
1182             } );
1183             return mOutput;
1184         }
1185 
forceSyncFrame()1186         public void forceSyncFrame() throws Exception {
1187             final Bundle syncFrame = new Bundle();
1188             syncFrame.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
1189             runCallable( new Callable<Void>() {
1190                 @Override
1191                 public Void call() throws Exception {
1192                     mCodec.setParameters(syncFrame);
1193                     return null;
1194                 }
1195             } );
1196         }
1197 
updateBitrate(int bitrate)1198         public void updateBitrate(int bitrate) throws Exception {
1199             final Bundle bitrateUpdate = new Bundle();
1200             bitrateUpdate.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, bitrate);
1201             runCallable( new Callable<Void>() {
1202                 @Override
1203                 public Void call() throws Exception {
1204                     mCodec.setParameters(bitrateUpdate);
1205                     return null;
1206                 }
1207             } );
1208         }
1209 
1210 
waitForBufferEvent()1211         public void waitForBufferEvent() throws Exception {
1212             Log.v(TAG, "----Enc" + mId + " waiting for bufferEvent");
1213             if (mAsync) {
1214                 synchronized (mCallbackEvent) {
1215                     if (!mCallbackReceived) {
1216                         mCallbackEvent.wait(1000); // wait 1 sec for a callback
1217                         // throw an exception if callback was not received
1218                         if (!mCallbackReceived) {
1219                             throw new RuntimeException("MediaCodec callback was not received");
1220                         }
1221                     }
1222                 }
1223             } else {
1224                 Thread.sleep(5);
1225             }
1226             Log.v(TAG, "----Waiting for bufferEvent done");
1227         }
1228 
1229 
waitForCompletion(long timeoutMs)1230         public void waitForCompletion(long timeoutMs) throws Exception {
1231             synchronized (mCompletionEvent) {
1232                 long timeoutExpiredMs = System.currentTimeMillis() + timeoutMs;
1233 
1234                 while (!mCompleted) {
1235                     mCompletionEvent.wait(timeoutExpiredMs - System.currentTimeMillis());
1236                     if (System.currentTimeMillis() >= timeoutExpiredMs) {
1237                         throw new RuntimeException("encoding has timed out!");
1238                     }
1239                 }
1240             }
1241         }
1242 
signalCompletion()1243         public void signalCompletion() {
1244             synchronized (mCompletionEvent) {
1245                 mCompleted = true;
1246                 mCompletionEvent.notify();
1247             }
1248         }
1249 
deleteCodec()1250         public void deleteCodec() throws Exception {
1251             runCallable( new Callable<Void>() {
1252                 @Override
1253                 public Void call() throws Exception {
1254                     mCodec.stop();
1255                     mCodec.release();
1256                     return null;
1257                 }
1258             } );
1259             if (mAsync) {
1260                 requestStop(); // Stop looper thread
1261             }
1262         }
1263     }
1264 
1265     /**
1266      * Vp8 encoding loop supporting encoding single streams with an option
1267      * to run in a looper thread and use buffer ready notification callbacks.
1268      *
1269      * Output stream is described by encodingParams parameters.
1270      *
1271      * MediaCodec will raise an IllegalStateException
1272      * whenever vp8 encoder fails to encode a frame.
1273      *
1274      * Color format of input file should be YUV420, and frameWidth,
1275      * frameHeight should be supplied correctly as raw input file doesn't
1276      * include any header data.
1277      *
1278      * @param streamParams  Structure with encoder parameters
1279      * @return              Returns array of encoded frames information for each frame.
1280      */
encode( EncoderOutputStreamParameters streamParams)1281     protected ArrayList<MediaCodec.BufferInfo> encode(
1282             EncoderOutputStreamParameters streamParams) throws Exception {
1283 
1284         ArrayList<MediaCodec.BufferInfo> bufferInfos = new ArrayList<MediaCodec.BufferInfo>();
1285         CodecProperties properties = getVp8CodecProperties(true, streamParams.forceSwEncoder);
1286         Log.d(TAG, "Source reslution: " + streamParams.frameWidth + " x " +
1287                 streamParams.frameHeight);
1288         int bitrate = streamParams.bitrateSet[0];
1289 
1290         // Open input/output
1291         InputStream yuvStream = OpenFileOrResourceId(
1292                 streamParams.inputYuvFilename, streamParams.inputResourceId);
1293         IvfWriter ivf = new IvfWriter(
1294                 streamParams.outputIvfFilename, streamParams.frameWidth, streamParams.frameHeight);
1295 
1296         // Create a media format signifying desired output.
1297         MediaFormat format = MediaFormat.createVideoFormat(
1298                 VP8_MIME, streamParams.frameWidth, streamParams.frameHeight);
1299         format.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
1300         if (streamParams.bitrateType == VIDEO_ControlRateConstant) {
1301             format.setInteger("bitrate-mode", VIDEO_ControlRateConstant); // set CBR
1302         }
1303         if (streamParams.temporalLayers > 0) {
1304             format.setInteger("ts-layers", streamParams.temporalLayers); // 1 temporal layer
1305         }
1306         format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
1307         format.setInteger(MediaFormat.KEY_FRAME_RATE, streamParams.frameRate);
1308         int syncFrameInterval = (streamParams.syncFrameInterval + streamParams.frameRate/2) /
1309                 streamParams.frameRate;
1310         format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, syncFrameInterval);
1311 
1312         // Create encoder
1313         Log.d(TAG, "Creating encoder " + properties.codecName +
1314                 ". Color format: 0x" + Integer.toHexString(properties.colorFormat)+ " : " +
1315                 streamParams.frameWidth + " x " + streamParams.frameHeight +
1316                 ". Bitrate: " + bitrate + " Bitrate type: " + streamParams.bitrateType +
1317                 ". Fps:" + streamParams.frameRate + ". TS Layers: " + streamParams.temporalLayers +
1318                 ". Key frame:" + syncFrameInterval * streamParams.frameRate +
1319                 ". Force keyFrame: " + streamParams.syncForceFrameInterval);
1320         Log.d(TAG, "  Format: " + format);
1321         Log.d(TAG, "  Output ivf:" + streamParams.outputIvfFilename);
1322         MediaEncoderAsync codec = new MediaEncoderAsync();
1323         codec.createCodec(0, properties.codecName, format,
1324                 streamParams.timeoutDequeue, streamParams.runInLooperThread);
1325 
1326         // encode loop
1327         boolean sawInputEOS = false;  // no more data
1328         boolean consumedInputEOS = false; // EOS flag is consumed dy encoder
1329         boolean sawOutputEOS = false;
1330         boolean inputConsumed = true;
1331         int inputFrameIndex = 0;
1332         int lastBitrate = bitrate;
1333         int srcFrameSize = streamParams.frameWidth * streamParams.frameHeight * 3 / 2;
1334         byte[] srcFrame = new byte[srcFrameSize];
1335 
1336         while (!sawOutputEOS) {
1337 
1338             // Read and feed input frame
1339             if (!consumedInputEOS) {
1340 
1341                 // Read new input buffers - if previous input was consumed and no EOS
1342                 if (inputConsumed && !sawInputEOS) {
1343                     int bytesRead = yuvStream.read(srcFrame);
1344 
1345                     // Check EOS
1346                     if (streamParams.frameCount > 0 && inputFrameIndex >= streamParams.frameCount) {
1347                         sawInputEOS = true;
1348                         Log.d(TAG, "---Sending EOS empty frame for frame # " + inputFrameIndex);
1349                     }
1350 
1351                     if (!sawInputEOS && bytesRead == -1) {
1352                         if (streamParams.frameCount == 0) {
1353                             sawInputEOS = true;
1354                             Log.d(TAG, "---Sending EOS empty frame for frame # " + inputFrameIndex);
1355                         } else {
1356                             yuvStream.close();
1357                             yuvStream = OpenFileOrResourceId(
1358                                     streamParams.inputYuvFilename, streamParams.inputResourceId);
1359                             bytesRead = yuvStream.read(srcFrame);
1360                         }
1361                     }
1362 
                    // Force sync frame if syncForceFrameInterval is set.
1364                     if (!sawInputEOS && inputFrameIndex > 0 &&
1365                             streamParams.syncForceFrameInterval > 0 &&
1366                             (inputFrameIndex % streamParams.syncForceFrameInterval) == 0) {
1367                         Log.d(TAG, "---Requesting sync frame # " + inputFrameIndex);
1368                         codec.forceSyncFrame();
1369                     }
1370 
1371                     // Dynamic bitrate change.
1372                     if (!sawInputEOS && streamParams.bitrateSet.length > inputFrameIndex) {
1373                         int newBitrate = streamParams.bitrateSet[inputFrameIndex];
1374                         if (newBitrate != lastBitrate) {
1375                             Log.d(TAG, "--- Requesting new bitrate " + newBitrate +
1376                                     " for frame " + inputFrameIndex);
1377                             codec.updateBitrate(newBitrate);
1378                             lastBitrate = newBitrate;
1379                         }
1380                     }
1381 
1382                     // Convert YUV420 to NV12 if necessary
1383                     if (properties.colorFormat != CodecCapabilities.COLOR_FormatYUV420Planar) {
1384                         srcFrame = YUV420ToNV(streamParams.frameWidth, streamParams.frameHeight,
1385                                 srcFrame);
1386                     }
1387                 }
1388 
1389                 inputConsumed = codec.feedInput(srcFrame, sawInputEOS);
1390                 if (inputConsumed) {
1391                     inputFrameIndex++;
1392                     consumedInputEOS = sawInputEOS;
1393                 }
1394             }
1395 
1396             // Get output from the encoder
1397             MediaEncoderOutput out = codec.getOutput();
1398             if (out.outputGenerated) {
1399                 // Detect output EOS
1400                 if ((out.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
1401                     Log.d(TAG, "----Output EOS ");
1402                     sawOutputEOS = true;
1403                 }
1404 
1405                 if (out.buffer.length > 0) {
1406                     // Save frame
1407                     ivf.writeFrame(out.buffer, out.outPresentationTimeUs);
1408 
1409                     // Update statistics - store presentation time delay in offset
1410                     long presentationTimeUsDelta = out.inPresentationTimeUs -
1411                             out.outPresentationTimeUs;
1412                     MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo();
1413                     bufferInfoCopy.set((int)presentationTimeUsDelta, out.buffer.length,
1414                             out.outPresentationTimeUs, out.flags);
1415                     bufferInfos.add(bufferInfoCopy);
1416                 }
1417             }
1418 
            // If codec is not ready to accept input/output - wait for buffer ready callback
1420             if ((!inputConsumed || consumedInputEOS) && !out.outputGenerated) {
1421                 codec.waitForBufferEvent();
1422             }
1423         }
1424 
1425         codec.deleteCodec();
1426         ivf.close();
1427         yuvStream.close();
1428 
1429         return bufferInfos;
1430     }
1431 
1432     /**
1433      * Vp8 encoding run in a looper thread and use buffer ready callbacks.
1434      *
1435      * Output stream is described by encodingParams parameters.
1436      *
1437      * MediaCodec will raise an IllegalStateException
1438      * whenever vp8 encoder fails to encode a frame.
1439      *
1440      * Color format of input file should be YUV420, and frameWidth,
1441      * frameHeight should be supplied correctly as raw input file doesn't
1442      * include any header data.
1443      *
1444      * @param streamParams  Structure with encoder parameters
1445      * @return              Returns array of encoded frames information for each frame.
1446      */
encodeAsync( EncoderOutputStreamParameters streamParams)1447     protected ArrayList<MediaCodec.BufferInfo> encodeAsync(
1448             EncoderOutputStreamParameters streamParams) throws Exception {
1449         if (!streamParams.runInLooperThread) {
1450             throw new RuntimeException("encodeAsync should run with a looper thread!");
1451         }
1452 
1453         ArrayList<MediaCodec.BufferInfo> bufferInfos = new ArrayList<MediaCodec.BufferInfo>();
1454         CodecProperties properties = getVp8CodecProperties(true, streamParams.forceSwEncoder);
1455         Log.d(TAG, "Source reslution: " + streamParams.frameWidth + " x " +
1456                 streamParams.frameHeight);
1457         int bitrate = streamParams.bitrateSet[0];
1458 
1459         // Open input/output
1460         IvfWriter ivf = new IvfWriter(
1461                 streamParams.outputIvfFilename, streamParams.frameWidth, streamParams.frameHeight);
1462 
1463         // Create a media format signifying desired output.
1464         MediaFormat format = MediaFormat.createVideoFormat(
1465                 VP8_MIME, streamParams.frameWidth, streamParams.frameHeight);
1466         format.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
1467         if (streamParams.bitrateType == VIDEO_ControlRateConstant) {
1468             format.setInteger("bitrate-mode", VIDEO_ControlRateConstant); // set CBR
1469         }
1470         if (streamParams.temporalLayers > 0) {
1471             format.setInteger("ts-layers", streamParams.temporalLayers); // 1 temporal layer
1472         }
1473         format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
1474         format.setInteger(MediaFormat.KEY_FRAME_RATE, streamParams.frameRate);
1475         int syncFrameInterval = (streamParams.syncFrameInterval + streamParams.frameRate/2) /
1476                 streamParams.frameRate;
1477         format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, syncFrameInterval);
1478 
1479         // Create encoder
1480         Log.d(TAG, "Creating encoder " + properties.codecName +
1481                 ". Color format: 0x" + Integer.toHexString(properties.colorFormat)+ " : " +
1482                 streamParams.frameWidth + " x " + streamParams.frameHeight +
1483                 ". Bitrate: " + bitrate + " Bitrate type: " + streamParams.bitrateType +
1484                 ". Fps:" + streamParams.frameRate + ". TS Layers: " + streamParams.temporalLayers +
1485                 ". Key frame:" + syncFrameInterval * streamParams.frameRate +
1486                 ". Force keyFrame: " + streamParams.syncForceFrameInterval);
1487         Log.d(TAG, "  Format: " + format);
1488         Log.d(TAG, "  Output ivf:" + streamParams.outputIvfFilename);
1489 
1490         MediaEncoderAsync codec = new MediaEncoderAsync();
1491         MediaEncoderAsyncHelper helper = new MediaEncoderAsyncHelper(
1492                 streamParams, properties, bufferInfos, ivf);
1493 
1494         codec.setAsyncHelper(helper);
1495         codec.createCodec(0, properties.codecName, format,
1496                 streamParams.timeoutDequeue, streamParams.runInLooperThread);
1497         codec.waitForCompletion(DEFAULT_ENCODE_TIMEOUT_MS);
1498 
1499         codec.deleteCodec();
1500         ivf.close();
1501 
1502         return bufferInfos;
1503     }
1504 
1505     /**
1506      * Vp8 encoding loop supporting encoding multiple streams at a time.
1507      * Each output stream is described by encodingParams parameters allowing
1508      * simultaneous encoding of various resolutions, bitrates with an option to
1509      * control key frame and dynamic bitrate for each output stream indepandently.
1510      *
1511      * MediaCodec will raise an IllegalStateException
1512      * whenever vp8 encoder fails to encode a frame.
1513      *
1514      * Color format of input file should be YUV420, and frameWidth,
1515      * frameHeight should be supplied correctly as raw input file doesn't
1516      * include any header data.
1517      *
1518      * @param srcFrameWidth     Frame width of input yuv file
1519      * @param srcFrameHeight    Frame height of input yuv file
1520      * @param encodingParams    Encoder parameters
1521      * @return                  Returns 2D array of encoded frames information for each stream and
1522      *                          for each frame.
1523      */
encodeSimulcast( int srcFrameWidth, int srcFrameHeight, ArrayList<EncoderOutputStreamParameters> encodingParams)1524     protected ArrayList<ArrayList<MediaCodec.BufferInfo>> encodeSimulcast(
1525             int srcFrameWidth,
1526             int srcFrameHeight,
1527             ArrayList<EncoderOutputStreamParameters> encodingParams)  throws Exception {
1528         int numEncoders = encodingParams.size();
1529 
1530         // Create arrays of input/output, formats, bitrates etc
1531         ArrayList<ArrayList<MediaCodec.BufferInfo>> bufferInfos =
1532                 new ArrayList<ArrayList<MediaCodec.BufferInfo>>(numEncoders);
1533         InputStream yuvStream[] = new InputStream[numEncoders];
1534         IvfWriter[] ivf = new IvfWriter[numEncoders];
1535         FileOutputStream[] yuvScaled = new FileOutputStream[numEncoders];
1536         MediaFormat[] format = new MediaFormat[numEncoders];
1537         MediaEncoderAsync[] codec = new MediaEncoderAsync[numEncoders];
1538         int[] inputFrameIndex = new int[numEncoders];
1539         boolean[] sawInputEOS = new boolean[numEncoders];
1540         boolean[] consumedInputEOS = new boolean[numEncoders];
1541         boolean[] inputConsumed = new boolean[numEncoders];
1542         boolean[] bufferConsumed = new boolean[numEncoders];
1543         boolean[] sawOutputEOS = new boolean[numEncoders];
1544         byte[][] srcFrame = new byte[numEncoders][];
1545         boolean sawOutputEOSTotal = false;
1546         boolean bufferConsumedTotal = false;
1547         CodecProperties[] codecProperties = new CodecProperties[numEncoders];
1548 
1549         for (int i = 0; i < numEncoders; i++) {
1550             EncoderOutputStreamParameters params = encodingParams.get(i);
1551             CodecProperties properties = getVp8CodecProperties(true, params.forceSwEncoder);
1552 
1553             // Check if scaled image was created
1554             int scale = params.frameWidth / srcFrameWidth;
1555             if (!mScaledImages.contains(scale)) {
1556                 // resize image
1557                 cacheScaledImage(params.inputYuvFilename, params.inputResourceId,
1558                         srcFrameWidth, srcFrameHeight,
1559                         params.scaledYuvFilename, params.frameWidth, params.frameHeight);
1560                 mScaledImages.add(scale);
1561             }
1562 
1563             // Create buffer info storage
1564             bufferInfos.add(new ArrayList<MediaCodec.BufferInfo>());
1565 
1566             // Create YUV reader
1567             yuvStream[i] = new FileInputStream(params.scaledYuvFilename);
1568 
1569             // Create IVF writer
1570             ivf[i] = new IvfWriter(params.outputIvfFilename, params.frameWidth, params.frameHeight);
1571 
1572             // Frame buffer
1573             int frameSize = params.frameWidth * params.frameHeight * 3 / 2;
1574             srcFrame[i] = new byte[frameSize];
1575 
1576             // Create a media format signifying desired output.
1577             int bitrate = params.bitrateSet[0];
1578             format[i] = MediaFormat.createVideoFormat(VP8_MIME,
1579                     params.frameWidth, params.frameHeight);
1580             format[i].setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
1581             if (params.bitrateType == VIDEO_ControlRateConstant) {
1582                 format[i].setInteger("bitrate-mode", VIDEO_ControlRateConstant); // set CBR
1583             }
1584             if (params.temporalLayers > 0) {
1585                 format[i].setInteger("ts-layers", params.temporalLayers); // 1 temporal layer
1586             }
1587             format[i].setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
1588             format[i].setInteger(MediaFormat.KEY_FRAME_RATE, params.frameRate);
1589             int syncFrameInterval = (params.syncFrameInterval + params.frameRate/2) /
1590                     params.frameRate; // in sec
1591             format[i].setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, syncFrameInterval);
1592             // Create encoder
1593             Log.d(TAG, "Creating encoder #" + i +" : " + properties.codecName +
1594                     ". Color format: 0x" + Integer.toHexString(properties.colorFormat)+ " : " +
1595                     params.frameWidth + " x " + params.frameHeight +
1596                     ". Bitrate: " + bitrate + " Bitrate type: " + params.bitrateType +
1597                     ". Fps:" + params.frameRate + ". TS Layers: " + params.temporalLayers +
1598                     ". Key frame:" + syncFrameInterval * params.frameRate +
1599                     ". Force keyFrame: " + params.syncForceFrameInterval);
1600             Log.d(TAG, "  Format: " + format[i]);
1601             Log.d(TAG, "  Output ivf:" + params.outputIvfFilename);
1602 
1603             // Create encoder
1604             codec[i] = new MediaEncoderAsync();
1605             codec[i].createCodec(i, properties.codecName, format[i],
1606                     params.timeoutDequeue, params.runInLooperThread);
1607             codecProperties[i] = new CodecProperties(properties.codecName, properties.colorFormat);
1608 
1609             inputConsumed[i] = true;
1610         }
1611 
1612         while (!sawOutputEOSTotal) {
1613             // Feed input buffer to all encoders
1614             for (int i = 0; i < numEncoders; i++) {
1615                 bufferConsumed[i] = false;
1616                 if (consumedInputEOS[i]) {
1617                     continue;
1618                 }
1619 
1620                 EncoderOutputStreamParameters params = encodingParams.get(i);
1621                 // Read new input buffers - if previous input was consumed and no EOS
1622                 if (inputConsumed[i] && !sawInputEOS[i]) {
1623                     int bytesRead = yuvStream[i].read(srcFrame[i]);
1624 
1625                     // Check EOS
1626                     if (params.frameCount > 0 && inputFrameIndex[i] >= params.frameCount) {
1627                         sawInputEOS[i] = true;
1628                         Log.d(TAG, "---Enc" + i +
1629                                 ". Sending EOS empty frame for frame # " + inputFrameIndex[i]);
1630                     }
1631 
1632                     if (!sawInputEOS[i] && bytesRead == -1) {
1633                         if (params.frameCount == 0) {
1634                             sawInputEOS[i] = true;
1635                             Log.d(TAG, "---Enc" + i +
1636                                     ". Sending EOS empty frame for frame # " + inputFrameIndex[i]);
1637                         } else {
1638                             yuvStream[i].close();
1639                             yuvStream[i] = new FileInputStream(params.scaledYuvFilename);
1640                             bytesRead = yuvStream[i].read(srcFrame[i]);
1641                         }
1642                     }
1643 
1644                     // Convert YUV420 to NV12 if necessary
1645                     if (codecProperties[i].colorFormat !=
1646                             CodecCapabilities.COLOR_FormatYUV420Planar) {
1647                         srcFrame[i] = YUV420ToNV(params.frameWidth, params.frameHeight, srcFrame[i]);
1648                     }
1649                 }
1650 
1651                 inputConsumed[i] = codec[i].feedInput(srcFrame[i], sawInputEOS[i]);
1652                 if (inputConsumed[i]) {
1653                     inputFrameIndex[i]++;
1654                     consumedInputEOS[i] = sawInputEOS[i];
1655                     bufferConsumed[i] = true;
1656                 }
1657 
1658             }
1659 
1660             // Get output from all encoders
1661             for (int i = 0; i < numEncoders; i++) {
1662                 if (sawOutputEOS[i]) {
1663                     continue;
1664                 }
1665 
1666                 MediaEncoderOutput out = codec[i].getOutput();
1667                 if (out.outputGenerated) {
1668                     bufferConsumed[i] = true;
1669                     // Detect output EOS
1670                     if ((out.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
1671                         Log.d(TAG, "----Enc" + i + ". Output EOS ");
1672                         sawOutputEOS[i] = true;
1673                     }
1674 
1675                     if (out.buffer.length > 0) {
1676                         // Save frame
1677                         ivf[i].writeFrame(out.buffer, out.outPresentationTimeUs);
1678 
1679                         // Update statistics - store presentation time delay in offset
1680                         long presentationTimeUsDelta = out.inPresentationTimeUs -
1681                                 out.outPresentationTimeUs;
1682                         MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo();
1683                         bufferInfoCopy.set((int)presentationTimeUsDelta, out.buffer.length,
1684                                 out.outPresentationTimeUs, out.flags);
1685                         bufferInfos.get(i).add(bufferInfoCopy);
1686                     }
1687                 }
1688             }
1689 
1690             // If codec is not ready to accept input/output - wait for buffer ready callback
1691             bufferConsumedTotal = false;
1692             for (boolean bufferConsumedCurrent : bufferConsumed) {
1693                 bufferConsumedTotal |= bufferConsumedCurrent;
1694             }
1695             if (!bufferConsumedTotal) {
1696                 // Pick the encoder to wait for
1697                 for (int i = 0; i < numEncoders; i++) {
1698                     if (!bufferConsumed[i] && !sawOutputEOS[i]) {
1699                         codec[i].waitForBufferEvent();
1700                         break;
1701                     }
1702                 }
1703             }
1704 
1705             // Check if EOS happened for all encoders
1706             sawOutputEOSTotal = true;
1707             for (boolean sawOutputEOSStream : sawOutputEOS) {
1708                 sawOutputEOSTotal &= sawOutputEOSStream;
1709             }
1710         }
1711 
1712         for (int i = 0; i < numEncoders; i++) {
1713             codec[i].deleteCodec();
1714             ivf[i].close();
1715             yuvStream[i].close();
1716             if (yuvScaled[i] != null) {
1717                 yuvScaled[i].close();
1718             }
1719         }
1720 
1721         return bufferInfos;
1722     }
1723 
1724     /**
1725      * Some encoding statistics.
1726      */
1727     protected class Vp8EncodingStatistics {
Vp8EncodingStatistics()1728         Vp8EncodingStatistics() {
1729             mBitrates = new ArrayList<Integer>();
1730             mFrames = new ArrayList<Integer>();
1731             mKeyFrames = new ArrayList<Integer>();
1732             mMinimumKeyFrameInterval = Integer.MAX_VALUE;
1733         }
1734 
1735         public ArrayList<Integer> mBitrates;// Bitrate values for each second of the encoded stream.
1736         public ArrayList<Integer> mFrames; // Number of frames in each second of the encoded stream.
1737         public int mAverageBitrate;         // Average stream bitrate.
1738         public ArrayList<Integer> mKeyFrames;// Stores the position of key frames in a stream.
1739         public int mAverageKeyFrameInterval; // Average key frame interval.
1740         public int mMaximumKeyFrameInterval; // Maximum key frame interval.
1741         public int mMinimumKeyFrameInterval; // Minimum key frame interval.
1742     }
1743 
1744     /**
1745      * Calculates average bitrate and key frame interval for the encoded streams.
1746      * Output mBitrates field will contain bitrate values for every second
1747      * of the encoded stream.
1748      * Average stream bitrate will be stored in mAverageBitrate field.
1749      * mKeyFrames array will contain the position of key frames in the encoded stream and
1750      * mKeyFrameInterval - average key frame interval.
1751      */
computeEncodingStatistics(int encoderId, ArrayList<MediaCodec.BufferInfo> bufferInfos )1752     protected Vp8EncodingStatistics computeEncodingStatistics(int encoderId,
1753             ArrayList<MediaCodec.BufferInfo> bufferInfos ) {
1754         Vp8EncodingStatistics statistics = new Vp8EncodingStatistics();
1755 
1756         int totalSize = 0;
1757         int frames = 0;
1758         int framesPerSecond = 0;
1759         int totalFrameSizePerSecond = 0;
1760         int maxFrameSize = 0;
1761         int currentSecond;
1762         int nextSecond = 0;
1763         String keyFrameList = "  IFrame List: ";
1764         String bitrateList = "  Bitrate list: ";
1765         String framesList = "  FPS list: ";
1766 
1767 
1768         for (int j = 0; j < bufferInfos.size(); j++) {
1769             MediaCodec.BufferInfo info = bufferInfos.get(j);
1770             currentSecond = (int)(info.presentationTimeUs / 1000000);
1771             boolean lastFrame = (j == bufferInfos.size() - 1);
1772             if (!lastFrame) {
1773                 nextSecond = (int)(bufferInfos.get(j+1).presentationTimeUs / 1000000);
1774             }
1775 
1776             totalSize += info.size;
1777             totalFrameSizePerSecond += info.size;
1778             maxFrameSize = Math.max(maxFrameSize, info.size);
1779             framesPerSecond++;
1780             frames++;
1781 
1782             // Update the bitrate statistics if the next frame will
1783             // be for the next second
1784             if (lastFrame || nextSecond > currentSecond) {
1785                 int currentBitrate = totalFrameSizePerSecond * 8;
1786                 bitrateList += (currentBitrate + " ");
1787                 framesList += (framesPerSecond + " ");
1788                 statistics.mBitrates.add(currentBitrate);
1789                 statistics.mFrames.add(framesPerSecond);
1790                 totalFrameSizePerSecond = 0;
1791                 framesPerSecond = 0;
1792             }
1793 
1794             // Update key frame statistics.
1795             if ((info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) {
1796                 statistics.mKeyFrames.add(j);
1797                 keyFrameList += (j + "  ");
1798             }
1799         }
1800         int duration = (int)(bufferInfos.get(bufferInfos.size() - 1).presentationTimeUs / 1000);
1801         duration = (duration + 500) / 1000;
1802         statistics.mAverageBitrate = (int)(((long)totalSize * 8) / duration);
1803         Log.d(TAG, "Statistics for encoder # " + encoderId);
1804         // Calculate average key frame interval in frames.
1805         int keyFrames = statistics.mKeyFrames.size();
1806         if (keyFrames > 1) {
1807             statistics.mAverageKeyFrameInterval =
1808                     statistics.mKeyFrames.get(keyFrames - 1) - statistics.mKeyFrames.get(0);
1809             statistics.mAverageKeyFrameInterval =
1810                     Math.round((float)statistics.mAverageKeyFrameInterval / (keyFrames - 1));
1811             for (int j = 1; j < keyFrames; j++) {
1812                 int keyFrameInterval =
1813                         statistics.mKeyFrames.get(j) - statistics.mKeyFrames.get(j - 1);
1814                 statistics.mMaximumKeyFrameInterval =
1815                         Math.max(statistics.mMaximumKeyFrameInterval, keyFrameInterval);
1816                 statistics.mMinimumKeyFrameInterval =
1817                         Math.min(statistics.mMinimumKeyFrameInterval, keyFrameInterval);
1818             }
1819             Log.d(TAG, "  Key frame intervals: Max: " + statistics.mMaximumKeyFrameInterval +
1820                     ". Min: " + statistics.mMinimumKeyFrameInterval +
1821                     ". Avg: " + statistics.mAverageKeyFrameInterval);
1822         }
1823         Log.d(TAG, "  Frames: " + frames + ". Duration: " + duration +
1824                 ". Total size: " + totalSize + ". Key frames: " + keyFrames);
1825         Log.d(TAG, keyFrameList);
1826         Log.d(TAG, bitrateList);
1827         Log.d(TAG, framesList);
1828         Log.d(TAG, "  Bitrate average: " + statistics.mAverageBitrate);
1829         Log.d(TAG, "  Maximum frame size: " + maxFrameSize);
1830 
1831         return statistics;
1832     }
1833 
computeEncodingStatistics( ArrayList<MediaCodec.BufferInfo> bufferInfos )1834     protected Vp8EncodingStatistics computeEncodingStatistics(
1835             ArrayList<MediaCodec.BufferInfo> bufferInfos ) {
1836         return computeEncodingStatistics(0, bufferInfos);
1837     }
1838 
computeSimulcastEncodingStatistics( ArrayList<ArrayList<MediaCodec.BufferInfo>> bufferInfos)1839     protected ArrayList<Vp8EncodingStatistics> computeSimulcastEncodingStatistics(
1840             ArrayList<ArrayList<MediaCodec.BufferInfo>> bufferInfos) {
1841         int numCodecs = bufferInfos.size();
1842         ArrayList<Vp8EncodingStatistics> statistics = new ArrayList<Vp8EncodingStatistics>();
1843 
1844         for (int i = 0; i < numCodecs; i++) {
1845             Vp8EncodingStatistics currentStatistics =
1846                     computeEncodingStatistics(i, bufferInfos.get(i));
1847             statistics.add(currentStatistics);
1848         }
1849         return statistics;
1850     }
1851 
1852     /**
1853      * Calculates maximum latency for encoder/decoder based on buffer info array
1854      * generated either by encoder or decoder.
1855      */
maxPresentationTimeDifference(ArrayList<MediaCodec.BufferInfo> bufferInfos)1856     protected int maxPresentationTimeDifference(ArrayList<MediaCodec.BufferInfo> bufferInfos) {
1857         int maxValue = 0;
1858         for (MediaCodec.BufferInfo bufferInfo : bufferInfos) {
1859             maxValue = Math.max(maxValue,  bufferInfo.offset);
1860         }
1861         maxValue = (maxValue + 500) / 1000; // mcs -> ms
1862         return maxValue;
1863     }
1864 
1865     /**
1866      * Decoding PSNR statistics.
1867      */
1868     protected class Vp8DecodingStatistics {
Vp8DecodingStatistics()1869         Vp8DecodingStatistics() {
1870             mMinimumPSNR = Integer.MAX_VALUE;
1871         }
1872         public double mAveragePSNR;
1873         public double mMinimumPSNR;
1874     }
1875 
1876     /**
1877      * Calculates PSNR value between two video frames.
1878      */
computePSNR(byte[] data0, byte[] data1)1879     private double computePSNR(byte[] data0, byte[] data1) {
1880         long squareError = 0;
1881         assertTrue(data0.length == data1.length);
1882         int length = data0.length;
1883         for (int i = 0 ; i < length; i++) {
1884             int diff = ((int)data0[i] & 0xff) - ((int)data1[i] & 0xff);
1885             squareError += diff * diff;
1886         }
1887         double meanSquareError = (double)squareError / length;
1888         double psnr = 10 * Math.log10((double)255 * 255 / meanSquareError);
1889         return psnr;
1890     }
1891 
1892     /**
1893      * Calculates average and minimum PSNR values between
1894      * set of reference and decoded video frames.
1895      * Runs PSNR calculation for the full duration of the decoded data.
1896      */
computeDecodingStatistics( String referenceYuvFilename, int referenceYuvRawId, String decodedYuvFilename, int width, int height)1897     protected Vp8DecodingStatistics computeDecodingStatistics(
1898             String referenceYuvFilename,
1899             int referenceYuvRawId,
1900             String decodedYuvFilename,
1901             int width,
1902             int height) throws Exception {
1903         Vp8DecodingStatistics statistics = new Vp8DecodingStatistics();
1904         InputStream referenceStream =
1905                 OpenFileOrResourceId(referenceYuvFilename, referenceYuvRawId);
1906         InputStream decodedStream = new FileInputStream(decodedYuvFilename);
1907 
1908         int ySize = width * height;
1909         int uvSize = width * height / 4;
1910         byte[] yRef = new byte[ySize];
1911         byte[] yDec = new byte[ySize];
1912         byte[] uvRef = new byte[uvSize];
1913         byte[] uvDec = new byte[uvSize];
1914 
1915         int frames = 0;
1916         double averageYPSNR = 0;
1917         double averageUPSNR = 0;
1918         double averageVPSNR = 0;
1919         double minimumYPSNR = Integer.MAX_VALUE;
1920         double minimumUPSNR = Integer.MAX_VALUE;
1921         double minimumVPSNR = Integer.MAX_VALUE;
1922         int minimumPSNRFrameIndex = 0;
1923 
1924         while (true) {
1925             // Calculate Y PSNR.
1926             int bytesReadRef = referenceStream.read(yRef);
1927             int bytesReadDec = decodedStream.read(yDec);
1928             if (bytesReadDec == -1) {
1929                 break;
1930             }
1931             if (bytesReadRef == -1) {
1932                 // Reference file wrapping up
1933                 referenceStream.close();
1934                 referenceStream =
1935                         OpenFileOrResourceId(referenceYuvFilename, referenceYuvRawId);
1936                 bytesReadRef = referenceStream.read(yRef);
1937             }
1938             double curYPSNR = computePSNR(yRef, yDec);
1939             averageYPSNR += curYPSNR;
1940             minimumYPSNR = Math.min(minimumYPSNR, curYPSNR);
1941             double curMinimumPSNR = curYPSNR;
1942 
1943             // Calculate U PSNR.
1944             bytesReadRef = referenceStream.read(uvRef);
1945             bytesReadDec = decodedStream.read(uvDec);
1946             double curUPSNR = computePSNR(uvRef, uvDec);
1947             averageUPSNR += curUPSNR;
1948             minimumUPSNR = Math.min(minimumUPSNR, curUPSNR);
1949             curMinimumPSNR = Math.min(curMinimumPSNR, curUPSNR);
1950 
1951             // Calculate V PSNR.
1952             bytesReadRef = referenceStream.read(uvRef);
1953             bytesReadDec = decodedStream.read(uvDec);
1954             double curVPSNR = computePSNR(uvRef, uvDec);
1955             averageVPSNR += curVPSNR;
1956             minimumVPSNR = Math.min(minimumVPSNR, curVPSNR);
1957             curMinimumPSNR = Math.min(curMinimumPSNR, curVPSNR);
1958 
1959             // Frame index for minimum PSNR value - help to detect possible distortions
1960             if (curMinimumPSNR < statistics.mMinimumPSNR) {
1961                 statistics.mMinimumPSNR = curMinimumPSNR;
1962                 minimumPSNRFrameIndex = frames;
1963             }
1964 
1965             String logStr = String.format(Locale.US, "PSNR #%d: Y: %.2f. U: %.2f. V: %.2f",
1966                     frames, curYPSNR, curUPSNR, curVPSNR);
1967             Log.v(TAG, logStr);
1968 
1969             frames++;
1970         }
1971 
1972         averageYPSNR /= frames;
1973         averageUPSNR /= frames;
1974         averageVPSNR /= frames;
1975         statistics.mAveragePSNR = (4 * averageYPSNR + averageUPSNR + averageVPSNR) / 6;
1976 
1977         Log.d(TAG, "PSNR statistics for " + frames + " frames.");
1978         String logStr = String.format(Locale.US,
1979                 "Average PSNR: Y: %.1f. U: %.1f. V: %.1f. Average: %.1f",
1980                 averageYPSNR, averageUPSNR, averageVPSNR, statistics.mAveragePSNR);
1981         Log.d(TAG, logStr);
1982         logStr = String.format(Locale.US,
1983                 "Minimum PSNR: Y: %.1f. U: %.1f. V: %.1f. Overall: %.1f at frame %d",
1984                 minimumYPSNR, minimumUPSNR, minimumVPSNR,
1985                 statistics.mMinimumPSNR, minimumPSNRFrameIndex);
1986         Log.d(TAG, logStr);
1987 
1988         referenceStream.close();
1989         decodedStream.close();
1990         return statistics;
1991     }
1992 }
1993 
1994