• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2016 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 package android.media.decoder.cts;
17 
18 import static org.junit.Assert.assertNotNull;
19 
20 import android.annotation.SuppressLint;
21 import android.annotation.TargetApi;
22 import android.app.Activity;
23 import android.content.Context;
24 import android.content.Intent;
25 import android.content.pm.ActivityInfo;
26 import android.content.res.AssetFileDescriptor;
27 import android.content.res.Configuration;
28 import android.content.res.Resources;
29 import android.graphics.Bitmap;
30 import android.graphics.Bitmap.Config;
31 import android.graphics.BitmapFactory;
32 import android.graphics.Color;
33 import android.graphics.SurfaceTexture;
34 import android.media.MediaCodec;
35 import android.media.MediaCodec.BufferInfo;
36 import android.media.MediaCodecInfo.VideoCapabilities;
37 import android.media.MediaCodecList;
38 import android.media.MediaExtractor;
39 import android.media.MediaFormat;
40 import android.opengl.EGL14;
41 import android.opengl.GLES11Ext;
42 import android.opengl.GLES20;
43 import android.opengl.GLSurfaceView;
44 import android.os.Build;
45 import android.os.Handler;
46 import android.os.HandlerThread;
47 import android.os.Looper;
48 import android.os.ParcelFileDescriptor;
49 import android.os.SystemClock;
50 import android.util.Log;
51 import android.util.Pair;
52 import android.view.PixelCopy;
53 import android.view.PixelCopy.OnPixelCopyFinishedListener;
54 import android.view.Surface;
55 import android.view.SurfaceHolder;
56 import android.view.SurfaceView;
57 import android.view.TextureView;
58 import android.view.View;
59 import android.view.ViewGroup;
60 import android.widget.RelativeLayout;
61 
62 import androidx.test.rule.ActivityTestRule;
63 
64 import com.android.compatibility.common.util.ApiLevelUtil;
65 import com.android.compatibility.common.util.MediaUtils;
66 
67 import org.junit.After;
68 import org.junit.Assume;
69 import org.junit.Before;
70 import org.junit.Rule;
71 
72 import java.io.File;
73 import java.io.FileNotFoundException;
74 import java.io.IOException;
75 import java.nio.ByteBuffer;
76 import java.nio.ByteOrder;
77 import java.nio.FloatBuffer;
78 import java.util.HashMap;
79 import java.util.concurrent.CountDownLatch;
80 import java.util.concurrent.TimeUnit;
81 
82 import javax.microedition.khronos.egl.EGL10;
83 import javax.microedition.khronos.egl.EGLConfig;
84 import javax.microedition.khronos.egl.EGLContext;
85 import javax.microedition.khronos.egl.EGLDisplay;
86 import javax.microedition.khronos.egl.EGLSurface;
87 
88 @TargetApi(16)
89 public class DecodeAccuracyTestBase {
90 
91     protected Context mContext;
92     protected Resources mResources;
93     protected DecodeAccuracyTestActivity mActivity;
94     protected TestHelper testHelper;
95 
96     @Rule
97     public ActivityTestRule<DecodeAccuracyTestActivity> mActivityRule =
98             new ActivityTestRule<>(DecodeAccuracyTestActivity.class);
99 
100     @Before
setUp()101     public void setUp() throws Exception {
102         mActivity = mActivityRule.getActivity();
103         mContext = mActivity.getApplicationContext();
104         mResources = mActivity.getResources();
105         testHelper = new TestHelper(mContext, mActivity);
106     }
107 
108     @After
tearDown()109     public void tearDown() throws Exception {
110         mActivity = null;
111         mResources = null;
112         mContext = null;
113         mActivityRule = null;
114     }
115 
bringActivityToFront()116     protected void bringActivityToFront() {
117         Intent intent = new Intent(mContext, DecodeAccuracyTestActivity.class);
118         intent.addFlags(Intent.FLAG_ACTIVITY_REORDER_TO_FRONT);
119         mActivity.startActivity(intent);
120     }
121 
getHelper()122     protected TestHelper getHelper() {
123         return testHelper;
124     }
125 
checkNotNull(T reference)126     public static <T> T checkNotNull(T reference) {
127         assertNotNull(reference);
128         return reference;
129     }
130 
checkNotNull(String msg, T reference)131     public static <T> T checkNotNull(String msg, T reference) {
132         assertNotNull(msg, reference);
133         return reference;
134     }
135 
136     /* Simple Player that decodes a local video file only. */
137     @TargetApi(16)
138     static class SimplePlayer {
139 
140         public static final long MIN_MS_PER_FRAME = TimeUnit.SECONDS.toMillis(1) / 5; // 5 FPS
141         public static final long STARTUP_ALLOW_MS = TimeUnit.SECONDS.toMillis(1) ;
142         public static final int END_OF_STREAM = -1;
143         public static final int DEQUEUE_SUCCESS = 1;
144         public static final int DEQUEUE_FAIL = 0;
145 
146         private static final String TAG = SimplePlayer.class.getSimpleName();
147         private static final int NO_TRACK_INDEX = -3;
148         private static final long DEQUEUE_TIMEOUT_US = 20;
149 
150         private final Context context;
151         private final MediaExtractor extractor;
152         private final String codecName;
153         private MediaCodec decoder;
154         private byte[] outputBytes;
155         private boolean renderToSurface;
156         private MediaCodecList mediaCodecList;
157         private Surface surface;
158 
SimplePlayer(Context context)159         public SimplePlayer(Context context) {
160             this(context, null);
161         }
162 
SimplePlayer(Context context, String codecName)163         public SimplePlayer(Context context, String codecName) {
164             this.context = checkNotNull(context);
165             this.codecName = codecName;
166             this.extractor = new MediaExtractor();
167             this.renderToSurface = false;
168             this.surface = null;
169         }
170 
171         /**
172          * The function play the corresponding file for certain number of frames.
173          *
174          * @param surface is the surface view of decoder output.
175          * @param videoFormat is the format of the video to extract and decode.
176          * @param numOfTotalFrames is the number of Frame wish to play.
177          * @param msPerFrameCap is the maximum msec per frame. No cap is set if value is less than 1.
178          * @return {@link PlayerResult} that consists the result.
179          */
decodeVideoFrames( Surface surface, VideoFormat videoFormat, int numOfTotalFrames, long msPerFrameCap, boolean releasePlayer)180         public PlayerResult decodeVideoFrames(
181                 Surface surface, VideoFormat videoFormat, int numOfTotalFrames, long msPerFrameCap,
182                 boolean releasePlayer) {
183             this.surface = surface;
184             PlayerResult playerResult;
185             if (prepareVideoDecode(videoFormat)) {
186                 if (startDecoder()) {
187                     final long timeout =
188                             Math.max(MIN_MS_PER_FRAME, msPerFrameCap) * numOfTotalFrames + STARTUP_ALLOW_MS;
189                     playerResult = decodeFramesAndPlay(numOfTotalFrames, timeout, msPerFrameCap);
190                 } else {
191                     playerResult = PlayerResult.failToStart();
192                 }
193             } else {
194                 playerResult = new PlayerResult();
195             }
196             if (releasePlayer) {
197                 release();
198             }
199             return new PlayerResult(playerResult);
200         }
201 
decodeVideoFrames( Surface surface, VideoFormat videoFormat, int numOfTotalFrames)202         public PlayerResult decodeVideoFrames(
203                 Surface surface, VideoFormat videoFormat, int numOfTotalFrames) {
204             return decodeVideoFrames(surface, videoFormat, numOfTotalFrames, 0, false);
205         }
206 
        /**
         * The function sets up the extractor and video decoder with proper format.
         * This must be called before doing starting up the decoder.
         *
         * <p>Skips the test (via {@link Assume}) when the chosen codec does not
         * support the extracted media format.
         */
        private boolean prepareVideoDecode(VideoFormat videoFormat) {
            MediaFormat mediaFormat = prepareExtractor(videoFormat);
            if (mediaFormat == null) {
                return false;
            }
            configureVideoFormat(mediaFormat, videoFormat);
            Assume.assumeTrue("Decoder " + codecName + " doesn't support format " + mediaFormat,
                    MediaUtils.supports(codecName, mediaFormat));
            // Only render when the caller supplied an output surface.
            setRenderToSurface(surface != null);
            return createDecoder(mediaFormat) && configureDecoder(surface, mediaFormat);
        }
222 
223         /**
224          * Sets up the extractor and gets the {@link MediaFormat} of the track.
225          */
prepareExtractor(VideoFormat videoFormat)226         private MediaFormat prepareExtractor(VideoFormat videoFormat) {
227             if (!setExtractorDataSource(videoFormat)) {
228                 return null;
229             }
230             final int trackNum = getFirstTrackIndexByType(videoFormat.getMediaFormat());
231             if (trackNum == NO_TRACK_INDEX) {
232                 return null;
233             }
234             extractor.selectTrack(trackNum);
235             return extractor.getTrackFormat(trackNum);
236         }
237 
        /**
         * The function decode video frames and display in a surface.
         *
         * @param numOfTotalFrames is the number of frames to be decoded.
         * @param timeOutMs is the time limit for decoding the frames.
         * @param msPerFrameCap is the maximum msec per frame. No cap is set if value is less than 1.
         * @return {@link PlayerResult} that consists the result.
         */
        private PlayerResult decodeFramesAndPlay(
                int numOfTotalFrames, long timeOutMs, long msPerFrameCap) {
            int numOfDecodedFrames = 0;
            long firstOutputTimeMs = 0;
            long lastFrameAt = 0;
            final long loopStart = SystemClock.elapsedRealtime();

            while (numOfDecodedFrames < numOfTotalFrames
                    && (SystemClock.elapsedRealtime() - loopStart < timeOutMs)) {
                try {
                    queueDecoderInputBuffer();
                } catch (IllegalStateException exception) {
                    Log.e(TAG, "IllegalStateException in queueDecoderInputBuffer", exception);
                    break;
                }
                try {
                    final int outputResult = dequeueDecoderOutputBuffer();
                    if (outputResult == SimplePlayer.END_OF_STREAM) {
                        break;
                    }
                    if (outputResult == SimplePlayer.DEQUEUE_SUCCESS) {
                        // Record the first output so totalTime excludes startup latency.
                        if (firstOutputTimeMs == 0) {
                            firstOutputTimeMs = SystemClock.elapsedRealtime();
                        }
                        if (msPerFrameCap > 0) {
                            // Slow down if cap is set and not reached.
                            final long delayMs =
                                    msPerFrameCap - (SystemClock.elapsedRealtime() - lastFrameAt);
                            if (lastFrameAt != 0 && delayMs > 0) {
                                final long threadDelayMs = 3; // In case of delay in thread.
                                if (delayMs > threadDelayMs) {
                                    try {
                                        Thread.sleep(delayMs - threadDelayMs);
                                    } catch (InterruptedException ex) { /* */}
                                }
                                // Busy-wait the last few ms so the cap is hit precisely.
                                while (SystemClock.elapsedRealtime() - lastFrameAt
                                        < msPerFrameCap) { /* */ }
                            }
                            lastFrameAt = SystemClock.elapsedRealtime();
                        }
                        numOfDecodedFrames++;
                    }
                } catch (IllegalStateException exception) {
                    Log.e(TAG, "IllegalStateException in dequeueDecoderOutputBuffer", exception);
                }
            }
            // NB: totalTime measures from "first output" instead of
            // "first INPUT", so does not include first frame latency
            // and therefore does not tell us if the timeout expired
            final long totalTime = SystemClock.elapsedRealtime() - firstOutputTimeMs;
            return new PlayerResult(true, true, numOfTotalFrames == numOfDecodedFrames, totalTime);
        }
298 
299         /**
300          * Queues the input buffer with the media file one buffer at a time.
301          *
302          * @return true if success, fail otherwise.
303          */
queueDecoderInputBuffer()304         private boolean queueDecoderInputBuffer() {
305             ByteBuffer inputBuffer;
306             final ByteBuffer[] inputBufferArray = decoder.getInputBuffers();
307             final int inputBufferIndex = decoder.dequeueInputBuffer(DEQUEUE_TIMEOUT_US);
308             if (inputBufferIndex >= 0) {
309                 if (ApiLevelUtil.isBefore(Build.VERSION_CODES.LOLLIPOP)) {
310                     inputBuffer = inputBufferArray[inputBufferIndex];
311                 } else {
312                     inputBuffer = decoder.getInputBuffer(inputBufferIndex);
313                 }
314                 final int sampleSize = extractor.readSampleData(inputBuffer, 0);
315                 if (sampleSize > 0) {
316                     decoder.queueInputBuffer(
317                             inputBufferIndex, 0, sampleSize, extractor.getSampleTime(), 0);
318                     extractor.advance();
319                 }
320                 return true;
321             }
322             return false;
323         }
324 
        /**
         * Dequeues the output buffer.
         * For video decoder, renders to surface if provided.
         * For audio decoder, gets the bytes from the output buffer.
         *
         * @return an integer indicating its status (fail, success, or end of stream).
         */
        private int dequeueDecoderOutputBuffer() {
            final BufferInfo info = new BufferInfo();
            final int decoderStatus = decoder.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT_US);
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                return END_OF_STREAM;
            }
            if (decoderStatus >= 0) {
                // For JELLY_BEAN_MR2- devices, when rendering to a surface,
                // info.size seems to always return 0 even if
                // the decoder successfully decoded the frame.
                if (info.size <= 0 && ApiLevelUtil.isAtLeast(Build.VERSION_CODES.JELLY_BEAN_MR2)) {
                    return DEQUEUE_FAIL;
                }
                if (!renderToSurface) {
                    ByteBuffer outputBuffer;
                    // Pre-LOLLIPOP only has the whole-array accessor.
                    if (ApiLevelUtil.isBefore(Build.VERSION_CODES.LOLLIPOP)) {
                        outputBuffer = decoder.getOutputBuffers()[decoderStatus];
                    } else {
                        outputBuffer = decoder.getOutputBuffer(decoderStatus);
                    }
                    // Snapshot the decoded bytes before the buffer is released back.
                    outputBytes = new byte[info.size];
                    outputBuffer.get(outputBytes);
                    outputBuffer.clear();
                }
                // Releasing with render=true pushes the frame to the surface.
                decoder.releaseOutputBuffer(decoderStatus, renderToSurface);
                return DEQUEUE_SUCCESS;
            }
            return DEQUEUE_FAIL;
        }
361 
release()362         public void release() {
363             decoderRelease();
364             extractorRelease();
365         }
366 
setExtractorDataSource(VideoFormat videoFormat)367         private boolean setExtractorDataSource(VideoFormat videoFormat) {
368             checkNotNull(videoFormat);
369             try {
370                 final AssetFileDescriptor afd = videoFormat.getAssetFileDescriptor();
371                 extractor.setDataSource(
372                         afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
373                 afd.close();
374             } catch (IOException exception) {
375                 Log.e(TAG, "IOException in setDataSource", exception);
376                 return false;
377             }
378             return true;
379         }
380 
        /**
         * Creates a decoder based on conditions.
         *
         * <p>If codec name is provided, {@link MediaCodec#createByCodecName(String)} is used.
         * If codec name is not provided, {@link MediaCodecList#findDecoderForFormat(MediaFormat)}
         * is preferred on LOLLIPOP and up for finding out the codec name that
         * supports the media format.
         * For OS older than LOLLIPOP, {@link MediaCodec#createDecoderByType(String)} is used.
         *
         * @return true if a decoder instance was created, false otherwise.
         */
        private boolean createDecoder(MediaFormat mediaFormat) {
            try {
                if (codecName != null) {
                    decoder = MediaCodec.createByCodecName(codecName);
                } else if (ApiLevelUtil.isAtLeast(Build.VERSION_CODES.LOLLIPOP)) {
                    if (Build.VERSION.SDK_INT == Build.VERSION_CODES.LOLLIPOP) {
                        // On LOLLIPOP, format must not contain a frame rate.
                        mediaFormat.setString(MediaFormat.KEY_FRAME_RATE, null);
                    }
                    if (mediaCodecList == null) {
                        // Lazily created and cached for reuse on later calls.
                        mediaCodecList = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
                    }
                    decoder = MediaCodec.createByCodecName(
                            mediaCodecList.findDecoderForFormat(mediaFormat));
                } else {
                    decoder = MediaCodec.createDecoderByType(
                            mediaFormat.getString(MediaFormat.KEY_MIME));
                }
            } catch (Exception exception) {
                Log.e(TAG, "Exception during decoder creation", exception);
                decoderRelease();
                return false;
            }
            return true;
        }
415 
configureDecoder(Surface surface, MediaFormat mediaFormat)416         private boolean configureDecoder(Surface surface, MediaFormat mediaFormat) {
417             try {
418                 decoder.configure(mediaFormat, surface, null, 0);
419             } catch (Exception exception) {
420                 Log.e(TAG, "Exception during decoder configuration", exception);
421                 try {
422                     decoder.reset();
423                 } catch (Exception resetException) {
424                     Log.e(TAG, "Exception during decoder reset", resetException);
425                 }
426                 decoderRelease();
427                 return false;
428             }
429             return true;
430         }
431 
setRenderToSurface(boolean render)432         private void setRenderToSurface(boolean render) {
433             this.renderToSurface = render;
434         }
435 
startDecoder()436         private boolean startDecoder() {
437             try {
438                 decoder.start();
439             } catch (Exception exception) {
440                 Log.e(TAG, "Exception during decoder start", exception);
441                 decoder.reset();
442                 decoderRelease();
443                 return false;
444             }
445             return true;
446         }
447 
        /**
         * Stops and releases the decoder, tolerating codecs left in a bad state.
         * Safe to call when the decoder was never created.
         */
        private void decoderRelease() {
            if (decoder == null) {
                return;
            }
            try {
                decoder.stop();
            } catch (IllegalStateException exception) {
                decoder.reset();
                // IllegalStateException happens when decoder fail to start.
                Log.e(TAG, "IllegalStateException during decoder stop", exception);
            } finally {
                try {
                    decoder.release();
                } catch (IllegalStateException exception) {
                    Log.e(TAG, "IllegalStateException during decoder release", exception);
                }
                // Clear the reference so repeated release() calls are no-ops.
                decoder = null;
            }
        }
467 
extractorRelease()468         private void extractorRelease() {
469             if (extractor == null) {
470                 return;
471             }
472             try {
473                 extractor.release();
474             } catch (IllegalStateException exception) {
475                 Log.e(TAG, "IllegalStateException during extractor release", exception);
476             }
477         }
478 
        /**
         * Copies track metadata (MIME, width, height) into {@code videoFormat}
         * and writes the (possibly overridden) size back into the codec format.
         * When ABR is enabled, also sets KEY_MAX_WIDTH/KEY_MAX_HEIGHT so the
         * codec can prepare for resolution switches.
         */
        private static void configureVideoFormat(MediaFormat mediaFormat, VideoFormat videoFormat) {
            checkNotNull(mediaFormat);
            checkNotNull(videoFormat);
            videoFormat.setMimeType(mediaFormat.getString(MediaFormat.KEY_MIME));
            videoFormat.setWidth(mediaFormat.getInteger(MediaFormat.KEY_WIDTH));
            videoFormat.setHeight(mediaFormat.getInteger(MediaFormat.KEY_HEIGHT));
            mediaFormat.setInteger(MediaFormat.KEY_WIDTH, videoFormat.getWidth());
            mediaFormat.setInteger(MediaFormat.KEY_HEIGHT, videoFormat.getHeight());
            // Max-size keys are only honored on KITKAT and later.
            if (ApiLevelUtil.isBefore(Build.VERSION_CODES.KITKAT)) {
                return;
            }
            // Set KEY_MAX_WIDTH and KEY_MAX_HEIGHT when isAbrEnabled() is set.
            if (videoFormat.isAbrEnabled()) {
                try {
                    // Check for max resolution supported by the codec.
                    final MediaCodec decoder = MediaUtils.getDecoder(mediaFormat);
                    final VideoCapabilities videoCapabilities = MediaUtils.getVideoCapabilities(
                            decoder.getName(), videoFormat.getMimeType());
                    decoder.release();
                    final int maxWidth = videoCapabilities.getSupportedWidths().getUpper();
                    final int maxHeight =
                            videoCapabilities.getSupportedHeightsFor(maxWidth).getUpper();
                    if (maxWidth >= videoFormat.getWidth() && maxHeight >= videoFormat.getHeight()) {
                        mediaFormat.setInteger(MediaFormat.KEY_MAX_WIDTH, maxWidth);
                        mediaFormat.setInteger(MediaFormat.KEY_MAX_HEIGHT, maxHeight);
                        return;
                    }
                } catch (NullPointerException exception) { /* */ }
                // Set max width/height to current size if can't get codec's max supported
                // width/height or max is not greater than the current size.
                mediaFormat.setInteger(MediaFormat.KEY_MAX_WIDTH, videoFormat.getWidth());
                mediaFormat.setInteger(MediaFormat.KEY_MAX_HEIGHT, videoFormat.getHeight());
            }
        }
513 
514         /**
515          * The function returns the first track found based on the media type.
516          */
getFirstTrackIndexByType(String format)517         private int getFirstTrackIndexByType(String format) {
518             for (int i = 0; i < extractor.getTrackCount(); i++) {
519                 MediaFormat trackMediaFormat = extractor.getTrackFormat(i);
520                 if (trackMediaFormat.getString(MediaFormat.KEY_MIME).startsWith(format + "/")) {
521                     return i;
522                 }
523             }
524             Log.e(TAG, "couldn't get a " + format + " track");
525             return NO_TRACK_INDEX;
526         }
527 
528         /**
529          * Stores the result from SimplePlayer.
530          */
531         public static final class PlayerResult {
532 
533             public static final int UNSET = -1;
534             private final boolean configureSuccess;
535             private final boolean startSuccess;
536             private final boolean decodeSuccess;
537             private final long totalTime;
538 
PlayerResult( boolean configureSuccess, boolean startSuccess, boolean decodeSuccess, long totalTime)539             public PlayerResult(
540                     boolean configureSuccess, boolean startSuccess,
541                     boolean decodeSuccess, long totalTime) {
542                 this.configureSuccess = configureSuccess;
543                 this.startSuccess = startSuccess;
544                 this.decodeSuccess = decodeSuccess;
545                 this.totalTime = totalTime;
546             }
547 
PlayerResult(PlayerResult playerResult)548             public PlayerResult(PlayerResult playerResult) {
549                 this(playerResult.configureSuccess, playerResult.startSuccess,
550                         playerResult.decodeSuccess, playerResult.totalTime);
551             }
552 
PlayerResult()553             public PlayerResult() {
554                 // Fake PlayerResult.
555                 this(false, false, false, UNSET);
556             }
557 
failToStart()558             public static PlayerResult failToStart() {
559                 return new PlayerResult(true, false, false, UNSET);
560             }
561 
getFailureMessage()562             public String getFailureMessage() {
563                 if (!configureSuccess) {
564                     return "Failed to configure decoder.";
565                 } else if (!startSuccess) {
566                     return "Failed to start decoder.";
567                 } else if (!decodeSuccess) {
568                     return "Failed to decode the expected number of frames.";
569                 } else {
570                     return "Failed to finish decoding.";
571                 }
572             }
573 
isConfigureSuccess()574             public boolean isConfigureSuccess() {
575                 return configureSuccess;
576             }
577 
isSuccess()578             public boolean isSuccess() {
579                 return configureSuccess && startSuccess && decodeSuccess && getTotalTime() != UNSET;
580             }
581 
getTotalTime()582             public long getTotalTime() {
583                 return totalTime;
584             }
585 
586         }
587 
588     }
589 
590     /* Utility class for collecting common test case functionality. */
591     class TestHelper {
592 
593         private final String TAG =  TestHelper.class.getSimpleName();
594 
595         private final Context context;
596         private final Handler handler;
597         private final Activity activity;
598 
TestHelper(Context context, Activity activity)599         public TestHelper(Context context, Activity activity) {
600             this.context = checkNotNull(context);
601             this.handler = new Handler(Looper.getMainLooper());
602             this.activity = activity;
603         }
604 
generateBitmapFromImageResourceId(int resourceId)605         public Bitmap generateBitmapFromImageResourceId(int resourceId) {
606             return BitmapFactory.decodeStream(context.getResources().openRawResource(resourceId));
607         }
608 
getContext()609         public Context getContext() {
610             return context;
611         }
612 
rotateOrientation()613         public void rotateOrientation() {
614             handler.post(new Runnable() {
615                 @Override
616                 public void run() {
617                     final int orientation = context.getResources().getConfiguration().orientation;
618                     if (orientation == Configuration.ORIENTATION_PORTRAIT) {
619                         activity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
620                     } else {
621                         activity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
622                     }
623                 }
624             });
625         }
626 
unsetOrientation()627         public void unsetOrientation() {
628             handler.post(new Runnable() {
629                 @Override
630                 public void run() {
631                     activity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED);
632                 }
633             });
634         }
635 
generateView(View view)636         public void generateView(View view) {
637             RelativeLayout relativeLayout =
638                     (RelativeLayout) activity.findViewById(R.id.attach_view);
639             ViewGenerator viewGenerator = new ViewGenerator(relativeLayout, view);
640             handler.post(viewGenerator);
641         }
642 
cleanUpView(View view)643         public void cleanUpView(View view) {
644             ViewCleaner viewCleaner = new ViewCleaner(view);
645             handler.post(viewCleaner);
646         }
647 
generateBitmapFromVideoViewSnapshot(VideoViewSnapshot snapshot)648         public Bitmap generateBitmapFromVideoViewSnapshot(VideoViewSnapshot snapshot) {
649             handler.post(snapshot);
650             synchronized (snapshot.getSyncObject()) {
651                 try {
652                     snapshot.getSyncObject().wait(snapshot.SNAPSHOT_TIMEOUT_MS + 100);
653                 } catch (InterruptedException e) {
654                     e.printStackTrace();
655                     Log.e(TAG, "Unable to finish generateBitmapFromVideoViewSnapshot().");
656                     return null;
657                 }
658             }
659             if (!snapshot.isBitmapReady()) {
660                 Log.e(TAG, "Time out in generateBitmapFromVideoViewSnapshot().");
661                 return null;
662             }
663             return snapshot.getBitmap();
664         }
665 
666         private class ViewGenerator implements Runnable {
667 
668             private final View view;
669             private final RelativeLayout relativeLayout;
670 
ViewGenerator(RelativeLayout relativeLayout, View view)671             public ViewGenerator(RelativeLayout relativeLayout, View view) {
672                 this.view = checkNotNull(view);
673                 this.relativeLayout = checkNotNull(relativeLayout);
674             }
675 
676             @Override
run()677             public void run() {
678                 if (view.getParent() != null) {
679                     ((ViewGroup) view.getParent()).removeView(view);
680                 }
681                 RelativeLayout.LayoutParams params = new RelativeLayout.LayoutParams(
682                         VideoViewFactory.VIEW_WIDTH, VideoViewFactory.VIEW_HEIGHT);
683                 view.setLayoutParams(params);
684                 relativeLayout.addView(view);
685             }
686 
687         }
688 
689         private class ViewCleaner implements Runnable {
690 
691             private final View view;
692 
ViewCleaner(View view)693             public ViewCleaner(View view) {
694                 this.view = checkNotNull(view);
695             }
696 
697             @Override
run()698             public void run() {
699                 if (view.getParent() != null) {
700                     ((ViewGroup) view.getParent()).removeView(view);
701                 }
702             }
703 
704         }
705 
706     }
707 
708 }
709 
710 /* Factory for manipulating a {@link View}. */
711 abstract class VideoViewFactory {
712 
713     public static final long VIEW_WAITTIME_MS = TimeUnit.SECONDS.toMillis(1);
714     public static final long DEFAULT_VIEW_AVAILABLE_TIMEOUT_MS = TimeUnit.SECONDS.toMillis(3);
715     public static final int VIEW_WIDTH = 480;
716     public static final int VIEW_HEIGHT = 360;
717 
VideoViewFactory()718     public VideoViewFactory() {}
719 
release()720     public abstract void release();
721 
getName()722     public abstract String getName();
723 
createView(Context context)724     public abstract View createView(Context context);
725 
waitForViewIsAvailable()726     public void waitForViewIsAvailable() throws Exception {
727         waitForViewIsAvailable(DEFAULT_VIEW_AVAILABLE_TIMEOUT_MS);
728     };
729 
waitForViewIsAvailable(long timeOutMs)730     public abstract void waitForViewIsAvailable(long timeOutMs) throws Exception;
731 
getSurface()732     public abstract Surface getSurface();
733 
getVideoViewSnapshot()734     public abstract VideoViewSnapshot getVideoViewSnapshot();
735 
hasLooper()736     public boolean hasLooper() {
737         return Looper.myLooper() != null;
738     }
739 
740 }
741 
742 /* Factory for building a {@link TextureView}. */
@TargetApi(16)
class TextureViewFactory extends VideoViewFactory implements TextureView.SurfaceTextureListener {

    private static final String TAG = TextureViewFactory.class.getSimpleName();
    private static final String NAME = "TextureView";

    // Notified from onSurfaceTextureAvailable() to wake waitForViewIsAvailable().
    private final Object syncToken = new Object();
    private TextureView textureView;

    public TextureViewFactory() {}

    /** Creates a {@link TextureView} and registers this factory as its surface listener. */
    @Override
    public TextureView createView(Context context) {
        Log.i(TAG, "Creating a " + NAME);
        textureView = DecodeAccuracyTestBase.checkNotNull(new TextureView(context));
        textureView.setSurfaceTextureListener(this);
        return textureView;
    }

    @Override
    public void release() {
        textureView = null;
    }

    @Override
    public String getName() {
        return NAME;
    }

    // NOTE(review): constructs a fresh Surface on every call; presumably the caller
    // is responsible for releasing it — confirm against call sites.
    @Override
    public Surface getSurface() {
        return new Surface(textureView.getSurfaceTexture());
    }

    @Override
    public TextureViewSnapshot getVideoViewSnapshot() {
        return new TextureViewSnapshot(textureView);
    }

    /**
     * Blocks until the TextureView reports isAvailable() or {@code timeOutMs} elapses.
     * Waits in VIEW_WAITTIME_MS slices; onSurfaceTextureAvailable() wakes it early.
     *
     * @throws InterruptedException if interrupted, or if the view never became available.
     */
    @Override
    public void waitForViewIsAvailable(long timeOutMs) throws Exception {
        final long start = SystemClock.elapsedRealtime();
        while (SystemClock.elapsedRealtime() - start < timeOutMs && !textureView.isAvailable()) {
            synchronized (syncToken) {
                try {
                    syncToken.wait(VIEW_WAITTIME_MS);
                } catch (InterruptedException e) {
                    Log.e(TAG, "Exception occurred when attaching a TextureView to a window.", e);
                    throw new InterruptedException(e.getMessage());
                }
            }
        }
        if (!textureView.isAvailable()) {
            throw new InterruptedException("Taking too long to attach a TextureView to a window.");
        }
        Log.i(TAG, NAME + " is available.");
    }

    /** Wakes any thread blocked in waitForViewIsAvailable(). */
    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture, int width, int height) {
        synchronized (syncToken) {
            syncToken.notify();
        }
    }

    @Override
    public void onSurfaceTextureSizeChanged(
            SurfaceTexture surfaceTexture, int width, int height) {}

    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
        return false;
    }

    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {}

}
821 
822 /**
823  * Factory for building a {@link SurfaceView}
824  */
@TargetApi(24)
class SurfaceViewFactory extends VideoViewFactory implements SurfaceHolder.Callback {

    private static final String TAG = SurfaceViewFactory.class.getSimpleName();
    private static final String NAME = "SurfaceView";
    // Notified from surfaceCreated() to wake waitForViewIsAvailable().
    private final Object syncToken = new Object();

    private SurfaceView surfaceView;
    private SurfaceHolder surfaceHolder;

    public SurfaceViewFactory() {}

    @Override
    public void release() {
        surfaceView = null;
        surfaceHolder = null;
    }

    @Override
    public String getName() {
        return NAME;
    }

    /** Creates a {@link SurfaceView} and registers this factory for surface callbacks. */
    @Override
    public View createView(Context context) {
        Log.i(TAG, "Creating a " + NAME);
        // Make sure the current thread has a Looper before creating the view.
        if (!super.hasLooper()) {
            Looper.prepare();
        }
        surfaceView = new SurfaceView(context);
        surfaceHolder = surfaceView.getHolder();
        surfaceHolder.addCallback(this);
        return surfaceView;
    }

    /**
     * Blocks until the surface is valid or {@code timeOutMs} elapses.
     * Waits in VIEW_WAITTIME_MS slices; surfaceCreated() wakes it early.
     *
     * @throws InterruptedException if interrupted, or if the surface never became valid.
     */
    @Override
    public void waitForViewIsAvailable(long timeOutMs) throws Exception {
        final long start = SystemClock.elapsedRealtime();
        while (SystemClock.elapsedRealtime() - start < timeOutMs && !getSurface().isValid()) {
            synchronized (syncToken) {
                try {
                    syncToken.wait(VIEW_WAITTIME_MS);
                } catch (InterruptedException e) {
                    Log.e(TAG, "Exception occurred when attaching a SurfaceView to a window.", e);
                    throw new InterruptedException(e.getMessage());
                }
            }
        }
        if (!getSurface().isValid()) {
            throw new InterruptedException("Taking too long to attach a SurfaceView to a window.");
        }
        Log.i(TAG, NAME + " is available.");
    }

    /** Returns the holder's surface, or null if createView() has not run yet. */
    @Override
    public Surface getSurface() {
        return surfaceHolder == null ? null : surfaceHolder.getSurface();
    }

    @Override
    public VideoViewSnapshot getVideoViewSnapshot() {
        return new SurfaceViewSnapshot(surfaceView, VIEW_WIDTH, VIEW_HEIGHT);
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {}

    /** Wakes any thread blocked in waitForViewIsAvailable(). */
    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        synchronized (syncToken) {
            syncToken.notify();
        }
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {}

}
903 
904 /**
905  * Factory for building EGL and GLES that could render to GLSurfaceView.
906  * {@link GLSurfaceView} {@link EGL10} {@link GLES20}.
907  */
@TargetApi(16)
class GLSurfaceViewFactory extends VideoViewFactory {

    private static final String TAG = GLSurfaceViewFactory.class.getSimpleName();
    private static final String NAME = "GLSurfaceView";

    // Notified by the GL thread once its Surface has been created.
    private final Object surfaceSyncToken = new Object();

    private GLSurfaceViewThread glSurfaceViewThread;
    // Set true by saveFrame() on the GL thread after glReadPixels completes;
    // reset during teardown and at the start of each capture.
    private boolean byteBufferIsReady = false;

    public GLSurfaceViewFactory() {}

    /** Stops the GL thread, which tears down its EGL/GLES state on exit. */
    @Override
    public void release() {
        glSurfaceViewThread.release();
        glSurfaceViewThread = null;
    }

    @Override
    public String getName() {
        return NAME;
    }

    /** Starts the GL thread; rendering is off-screen (pbuffer), so no view is returned. */
    @Override
    public View createView(Context context) {
        Log.i(TAG, "Creating a " + NAME);
        // Do all GL rendering in the GL thread.
        glSurfaceViewThread = new GLSurfaceViewThread();
        glSurfaceViewThread.start();
        // No necessary view to display, return null.
        return null;
    }

    /**
     * Blocks until the GL thread has created its Surface or {@code timeOutMs} elapses.
     *
     * @throws InterruptedException if interrupted, or if the surface never appeared.
     */
    @Override
    public void waitForViewIsAvailable(long timeOutMs) throws Exception {
        final long start = SystemClock.elapsedRealtime();
        while (SystemClock.elapsedRealtime() - start < timeOutMs
                && glSurfaceViewThread.getSurface() == null) {
            synchronized (surfaceSyncToken) {
                try {
                    surfaceSyncToken.wait(VIEW_WAITTIME_MS);
                } catch (InterruptedException e) {
                    Log.e(TAG, "Exception occurred when waiting for the surface from"
                            + " GLSurfaceView to become available.", e);
                    throw new InterruptedException(e.getMessage());
                }
            }
        }
        if (glSurfaceViewThread.getSurface() == null) {
            throw new InterruptedException("Taking too long for the surface from"
                    + " GLSurfaceView to become available.");
        }
        Log.i(TAG, NAME + " is available.");
    }

    @Override
    public Surface getSurface() {
        return glSurfaceViewThread.getSurface();
    }

    @Override
    public VideoViewSnapshot getVideoViewSnapshot() {
        return new GLSurfaceViewSnapshot(this, VIEW_WIDTH, VIEW_HEIGHT);
    }

    /** True when getByteBuffer() holds a freshly captured frame. */
    public boolean byteBufferIsReady() {
        return byteBufferIsReady;
    }

    /** RGBA pixel data of the last captured frame (see saveFrame()). */
    public ByteBuffer getByteBuffer() {
        return glSurfaceViewThread.getByteBuffer();
    }

    /* Does all GL operations. */
    private class GLSurfaceViewThread extends Thread
            implements SurfaceTexture.OnFrameAvailableListener {

        private static final int FLOAT_SIZE_BYTES = 4;
        // Each vertex is 5 floats: X, Y, Z position + U, V texture coordinate.
        private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
        private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
        private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
        // Counted down at the end of run() so release() can wait for full teardown.
        private final CountDownLatch mDone = new CountDownLatch(1);
        private FloatBuffer triangleVertices;
        // Texture transform refreshed from SurfaceTexture on every frame.
        private float[] textureTransform = new float[16];

        // Full-screen quad rendered as a triangle strip.
        private float[] triangleVerticesData = {
            // X, Y, Z, U, V
            -1f, -1f,  0f,  0f,  1f,
             1f, -1f,  0f,  1f,  1f,
            -1f,  1f,  0f,  0f,  0f,
             1f,  1f,  0f,  1f,  0f,
        };
        // Make the top-left corner corresponds to texture coordinate
        // (0, 0). This complies with the transformation matrix obtained from
        // SurfaceTexture.getTransformMatrix.

        private static final String VERTEX_SHADER =
                "attribute vec4 aPosition;\n"
                + "attribute vec4 aTextureCoord;\n"
                + "uniform mat4 uTextureTransform;\n"
                + "varying vec2 vTextureCoord;\n"
                + "void main() {\n"
                + "    gl_Position = aPosition;\n"
                + "    vTextureCoord = (uTextureTransform * aTextureCoord).xy;\n"
                + "}\n";

        // Samples the external (SurfaceTexture-backed) texture.
        private static final String FRAGMENT_SHADER =
                "#extension GL_OES_EGL_image_external : require\n"
                + "precision mediump float;\n"      // highp here doesn't seem to matter
                + "varying vec2 vTextureCoord;\n"
                + "uniform samplerExternalOES sTexture;\n"
                + "void main() {\n"
                + "    gl_FragColor = texture2D(sTexture, vTextureCoord);\n"
                + "}\n";

        private int glProgram;
        private int textureID = -1;
        private int aPositionHandle;
        private int aTextureHandle;
        private int uTextureTransformHandle;
        private EGLDisplay eglDisplay = null;
        private EGLContext eglContext = null;
        private EGLSurface eglSurface = null;
        private EGL10 egl10;
        private Surface surface = null;
        private SurfaceTexture surfaceTexture;
        // Destination for glReadPixels; RGBA, VIEW_WIDTH x VIEW_HEIGHT.
        private ByteBuffer byteBuffer;
        private Looper looper;

        public GLSurfaceViewThread() {}

        /**
         * Sets up EGL/GLES, publishes the Surface, then loops until release() quits
         * the looper, after which all GL resources are torn down.
         */
        @Override
        public void run() {
            // A Looper is needed so this thread can receive frame callbacks.
            Looper.prepare();
            looper = Looper.myLooper();
            triangleVertices = ByteBuffer
                    .allocateDirect(triangleVerticesData.length * FLOAT_SIZE_BYTES)
                    .order(ByteOrder.nativeOrder()).asFloatBuffer();
            triangleVertices.put(triangleVerticesData).position(0);

            eglSetup();
            makeCurrent();
            eglSurfaceCreated();

            // Decoder output lands in surfaceTexture via this Surface.
            surfaceTexture = new SurfaceTexture(getTextureId());
            surfaceTexture.setOnFrameAvailableListener(this);
            surface = new Surface(surfaceTexture);
            // Wake waitForViewIsAvailable(): the surface now exists.
            synchronized (surfaceSyncToken) {
                surfaceSyncToken.notify();
            }
            // Store pixels from surface
            byteBuffer = ByteBuffer.allocateDirect(VIEW_WIDTH * VIEW_HEIGHT * 4);
            byteBuffer.order(ByteOrder.LITTLE_ENDIAN);
            Looper.loop();
            // release() quit the looper; tear everything down on this thread.
            surface.release();
            surfaceTexture.release();
            byteBufferIsReady = false;
            byteBuffer =  null;
            egl10.eglMakeCurrent(eglDisplay,
                EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
            egl10.eglDestroySurface(eglDisplay, eglSurface);
            egl10.eglDestroyContext(eglDisplay, eglContext);
            //TODO: uncomment following line after fixing crash in GL driver libGLESv2_adreno.so
            //TODO: see b/123755902
            //egl10.eglTerminate(eglDisplay);
            eglDisplay = EGL10.EGL_NO_DISPLAY;
            eglContext = EGL10.EGL_NO_CONTEXT;
            eglSurface = EGL10.EGL_NO_SURFACE;
            mDone.countDown();
        }

        // Latches the newest frame, renders it to the pbuffer, and captures the pixels.
        // NOTE(review): presumably delivered on this thread's looper since the
        // SurfaceTexture was created here — confirm.
        @Override
        public void onFrameAvailable(SurfaceTexture st) {
            checkGlError("before updateTexImage");
            surfaceTexture.updateTexImage();
            st.getTransformMatrix(textureTransform);
            drawFrame();
            saveFrame();
        }

        /* Prepares EGL to use GLES 2.0 context and a surface that supports pbuffer. */
        public void eglSetup() {
            egl10 = (EGL10) EGLContext.getEGL();
            eglDisplay = egl10.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
            if (eglDisplay == EGL10.EGL_NO_DISPLAY) {
                throw new RuntimeException("unable to get egl10 display");
            }
            int[] version = new int[2];
            if (!egl10.eglInitialize(eglDisplay, version)) {
                eglDisplay = null;
                throw new RuntimeException("unable to initialize egl10");
            }
            // Configure EGL for pbuffer and OpenGL ES 2.0, 24-bit RGB.
            int[] configAttribs = {
                EGL10.EGL_RED_SIZE, 8,
                EGL10.EGL_GREEN_SIZE, 8,
                EGL10.EGL_BLUE_SIZE, 8,
                EGL10.EGL_ALPHA_SIZE, 8,
                EGL10.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
                EGL10.EGL_SURFACE_TYPE, EGL10.EGL_PBUFFER_BIT,
                EGL10.EGL_NONE
            };
            EGLConfig[] configs = new EGLConfig[1];
            int[] numConfigs = new int[1];
            if (!egl10.eglChooseConfig(
                    eglDisplay, configAttribs, configs, configs.length, numConfigs)) {
                throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
            }
            // Configure EGL context for OpenGL ES 2.0.
            int[] contextAttribs = {
                EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
                EGL10.EGL_NONE
            };
            eglContext = egl10.eglCreateContext(
                    eglDisplay, configs[0], EGL10.EGL_NO_CONTEXT, contextAttribs);
            checkEglError("eglCreateContext");
            if (eglContext == null) {
                throw new RuntimeException("null context");
            }
            // Create a pbuffer surface.
            int[] surfaceAttribs = {
                EGL10.EGL_WIDTH, VIEW_WIDTH,
                EGL10.EGL_HEIGHT, VIEW_HEIGHT,
                EGL10.EGL_NONE
            };
            eglSurface = egl10.eglCreatePbufferSurface(eglDisplay, configs[0], surfaceAttribs);
            checkEglError("eglCreatePbufferSurface");
            if (eglSurface == null) {
                throw new RuntimeException("surface was null");
            }
        }

        /* Quits the looper and blocks until run() has finished its GL teardown. */
        public void release() {
            looper.quit();
            try{
                mDone.await();
            }
            catch(InterruptedException e) {
                Log.e(TAG, "Interrupted waiting in release");
            }
        }

        /* Makes our EGL context and surface current. */
        public void makeCurrent() {
            if (!egl10.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
                throw new RuntimeException("eglMakeCurrent failed");
            }
            checkEglError("eglMakeCurrent");
        }

        /* Call this after the EGL Surface is created and made current. */
        public void eglSurfaceCreated() {
            // Compile/link the shader program and look up its attribute/uniform handles.
            glProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
            if (glProgram == 0) {
                throw new RuntimeException("failed creating program");
            }
            aPositionHandle = GLES20.glGetAttribLocation(glProgram, "aPosition");
            checkLocation(aPositionHandle, "aPosition");
            aTextureHandle = GLES20.glGetAttribLocation(glProgram, "aTextureCoord");
            checkLocation(aTextureHandle, "aTextureCoord");
            uTextureTransformHandle = GLES20.glGetUniformLocation(glProgram, "uTextureTransform");
            checkLocation(uTextureTransformHandle, "uTextureTransform");

            // Create the external texture that will back the SurfaceTexture.
            int[] textures = new int[1];
            GLES20.glGenTextures(1, textures, 0);
            checkGlError("glGenTextures");
            textureID = textures[0];
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureID);
            checkGlError("glBindTexture");

            // Linear filtering, clamped edges — no mipmaps for external textures.
            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
                    GLES20.GL_LINEAR);
            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
                    GLES20.GL_LINEAR);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
                    GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
                    GLES20.GL_CLAMP_TO_EDGE);
            checkGlError("glTexParameter");
        }

        /* Draws the latched video frame as a full-screen quad into the pbuffer. */
        public void drawFrame() {
            GLES20.glUseProgram(glProgram);
            checkGlError("glUseProgram");
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
            checkGlError("glActiveTexture");
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureID);
            checkGlError("glBindTexture");

            // Positions: 3 floats starting at offset 0 of each 5-float vertex.
            triangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
            GLES20.glVertexAttribPointer(aPositionHandle, 3, GLES20.GL_FLOAT, false,
                    TRIANGLE_VERTICES_DATA_STRIDE_BYTES, triangleVertices);
            checkGlError("glVertexAttribPointer aPositionHandle");
            GLES20.glEnableVertexAttribArray(aPositionHandle);
            checkGlError("glEnableVertexAttribArray aPositionHandle");

            // Texture coords: 2 floats starting at offset 3 of each vertex.
            triangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
            GLES20.glVertexAttribPointer(aTextureHandle, 2, GLES20.GL_FLOAT, false,
                    TRIANGLE_VERTICES_DATA_STRIDE_BYTES, triangleVertices);
            checkGlError("glVertexAttribPointer aTextureHandle");
            GLES20.glEnableVertexAttribArray(aTextureHandle);
            checkGlError("glEnableVertexAttribArray aTextureHandle");

            // Apply the SurfaceTexture's transform captured in onFrameAvailable().
            GLES20.glUniformMatrix4fv(uTextureTransformHandle, 1, false, textureTransform, 0);
            checkGlError("glUniformMatrix uTextureTransformHandle");

            GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
            checkGlError("glDrawArrays");
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
        }

        /* Reads the pixels to a ByteBuffer. */
        public void saveFrame() {
            // Mark not-ready during the read so consumers never see a half-filled buffer.
            byteBufferIsReady = false;
            byteBuffer.clear();
            GLES20.glReadPixels(0, 0, VIEW_WIDTH, VIEW_HEIGHT, GLES20.GL_RGBA,
                    GLES20.GL_UNSIGNED_BYTE, byteBuffer);
            byteBufferIsReady = true;
        }

        public int getTextureId() {
            return textureID;
        }

        /* Null until run() has created the Surface. */
        public Surface getSurface() {
            return surface;
        }

        public ByteBuffer getByteBuffer() {
            return byteBuffer;
        }

        /* Compiles one shader; returns 0 (after logging) on compile failure. */
        private int loadShader(int shaderType, String source) {
            int shader = GLES20.glCreateShader(shaderType);
            checkGlError("glCreateShader type=" + shaderType);
            GLES20.glShaderSource(shader, source);
            GLES20.glCompileShader(shader);
            int[] compiled = new int[1];
            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);

            if (compiled[0] == 0) {
                Log.e(TAG, "Could not compile shader " + shaderType + ":");
                Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
                GLES20.glDeleteShader(shader);
                shader = 0;
            }
            return shader;
        }

        /* Compiles and links both shaders; returns 0 (after logging) on failure. */
        private int createProgram(String vertexSource, String fragmentSource) {
            int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
            if (vertexShader == 0) {
                return 0;
            }
            int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
            if (pixelShader == 0) {
                return 0;
            }
            int program = GLES20.glCreateProgram();
            if (program == 0) {
                Log.e(TAG, "Could not create program");
            }
            GLES20.glAttachShader(program, vertexShader);
            checkGlError("glAttachShader");
            GLES20.glAttachShader(program, pixelShader);
            checkGlError("glAttachShader");
            GLES20.glLinkProgram(program);
            int[] linkStatus = new int[1];
            GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);

            if (linkStatus[0] != GLES20.GL_TRUE) {
                Log.e(TAG, "Could not link program: ");
                Log.e(TAG, GLES20.glGetProgramInfoLog(program));
                GLES20.glDeleteProgram(program);
                program = 0;
            }
            return program;
        }

        /* Throws if the last EGL call left an error code. */
        private void checkEglError(String msg) {
            int error;
            if ((error = egl10.eglGetError()) != EGL10.EGL_SUCCESS) {
                throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
            }
        }

        /* Throws if the last GLES call left an error code. */
        public void checkGlError(String op) {
            int error;
            if ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
                Log.e(TAG, op + ": glError " + error);
                throw new RuntimeException(op + ": glError " + error);
            }
        }

        /* Throws if a shader attribute/uniform lookup failed. */
        public void checkLocation(int location, String label) {
            if (location < 0) {
                throw new RuntimeException("Unable to locate '" + label + "' in program");
            }
        }
    }

}
1311 
1312 /* Definition of a VideoViewSnapshot and a runnable to get a bitmap from a view. */
abstract class VideoViewSnapshot implements Runnable {

    // Upper bound on how long a snapshot may take before callers give up.
    public static final long SNAPSHOT_TIMEOUT_MS = TimeUnit.SECONDS.toMillis(30);
    // Polling interval used by snapshot implementations while waiting for a copy.
    public static final long SLEEP_TIME_MS = 30;
    // NOTE(review): this token is static, i.e. shared by ALL snapshot instances, so a
    // notify() from one snapshot can wake a waiter on another — confirm this is intended.
    public static final Object SYNC_TOKEN = new Object();

    /** Returns the captured bitmap, or null if capture has not completed. */
    public abstract Bitmap getBitmap();

    /** Returns true once a bitmap has been captured successfully. */
    public abstract boolean isBitmapReady();

    /** Returns the monitor that run() notifies when it finishes. */
    public abstract Object getSyncObject();

}
1326 
1327 /* Runnable to get a bitmap from a texture view on the UI thread via a handler.
1328  * This class is to be used together with
1329  * {@link TestHelper#generateBitmapFromVideoViewSnapshot(VideoViewSnapshot)}
1330  */
1331 class TextureViewSnapshot extends VideoViewSnapshot {
1332 
1333     private final TextureView tv;
1334     private Bitmap bitmap = null;
1335 
TextureViewSnapshot(TextureView tv)1336     public TextureViewSnapshot(TextureView tv) {
1337         this.tv = DecodeAccuracyTestBase.checkNotNull(tv);
1338     }
1339 
1340     @Override
run()1341     public void run() {
1342         bitmap = null;
1343         bitmap = tv.getBitmap();
1344         synchronized (SYNC_TOKEN) {
1345             SYNC_TOKEN.notify();
1346         }
1347     }
1348 
1349     @Override
getBitmap()1350     public Bitmap getBitmap() {
1351         return bitmap;
1352     }
1353 
1354     @Override
isBitmapReady()1355     public boolean isBitmapReady() {
1356         return bitmap != null;
1357     }
1358 
1359     @Override
getSyncObject()1360     public Object getSyncObject() {
1361         return SYNC_TOKEN;
1362     }
1363 
1364 }
1365 
1366 /**
1367  * Method to get bitmap of a {@link SurfaceView}.
1368  * Note that PixelCopy does not have to be called in a runnable.
1369  * This class is to be used together with
1370  * {@link TestHelper#generateBitmapFromVideoViewSnapshot(VideoViewSnapshot)}
1371  */
1372 class SurfaceViewSnapshot extends VideoViewSnapshot  {
1373 
1374     private static final String TAG = SurfaceViewSnapshot.class.getSimpleName();
1375     private static final int PIXELCOPY_TIMEOUT_MS = 1000;
1376     private static final int INITIAL_STATE = -1;
1377 
1378     private final SurfaceView surfaceView;
1379     private final int width;
1380     private final int height;
1381 
1382     private Bitmap bitmap;
1383     private int copyResult;
1384 
SurfaceViewSnapshot(SurfaceView surfaceView, int width, int height)1385     public SurfaceViewSnapshot(SurfaceView surfaceView, int width, int height) {
1386         this.surfaceView = surfaceView;
1387         this.width = width;
1388         this.height = height;
1389         this.copyResult = INITIAL_STATE;
1390         this.bitmap = null;
1391     }
1392 
1393     @Override
run()1394     public void run() {
1395         final long start = SystemClock.elapsedRealtime();
1396         copyResult = INITIAL_STATE;
1397         final SynchronousPixelCopy copyHelper = new SynchronousPixelCopy();
1398         bitmap = Bitmap.createBitmap(width, height, Config.ARGB_8888);
1399         try {
1400             // Wait for PixelCopy to finish.
1401             while ((copyResult = copyHelper.request(surfaceView, bitmap)) != PixelCopy.SUCCESS
1402                     && (SystemClock.elapsedRealtime() - start) < SNAPSHOT_TIMEOUT_MS) {
1403                 Thread.sleep(SLEEP_TIME_MS);
1404             }
1405         } catch (InterruptedException e) {
1406             Log.e(TAG, "Pixel Copy is stopped/interrupted before it finishes.", e);
1407             bitmap = null;
1408         } finally {
1409             copyHelper.release();
1410             synchronized (SYNC_TOKEN) {
1411                 SYNC_TOKEN.notify();
1412             }
1413         }
1414     }
1415 
1416     @Override
getBitmap()1417     public Bitmap getBitmap() {
1418         return bitmap;
1419     }
1420 
1421     @Override
isBitmapReady()1422     public boolean isBitmapReady() {
1423         return bitmap != null && copyResult == PixelCopy.SUCCESS;
1424     }
1425 
1426     @Override
getSyncObject()1427     public Object getSyncObject() {
1428         return SYNC_TOKEN;
1429     }
1430 
1431     private static class SynchronousPixelCopy implements OnPixelCopyFinishedListener {
1432 
1433         private final Handler handler;
1434         private final HandlerThread thread;
1435 
1436         private int status = INITIAL_STATE;
1437 
SynchronousPixelCopy()1438         public SynchronousPixelCopy() {
1439             this.thread = new HandlerThread("PixelCopyHelper");
1440             thread.start();
1441             this.handler = new Handler(thread.getLooper());
1442         }
1443 
release()1444         public void release() {
1445             if (thread.isAlive()) {
1446                 thread.quit();
1447             }
1448         }
1449 
request(SurfaceView source, Bitmap dest)1450         public int request(SurfaceView source, Bitmap dest) {
1451             synchronized (this) {
1452                 try {
1453                     PixelCopy.request(source, dest, this, handler);
1454                     return getResultLocked();
1455                 } catch (Exception e) {
1456                     Log.e(TAG, "Exception occurred when copying a SurfaceView.", e);
1457                     return -1;
1458                 }
1459             }
1460         }
1461 
getResultLocked()1462         private int getResultLocked() {
1463             try {
1464                 this.wait(PIXELCOPY_TIMEOUT_MS);
1465             } catch (InterruptedException e) { /* PixelCopy request didn't complete within 1s */ }
1466             return status;
1467         }
1468 
1469         @Override
onPixelCopyFinished(int copyResult)1470         public void onPixelCopyFinished(int copyResult) {
1471             synchronized (this) {
1472                 status = copyResult;
1473                 this.notify();
1474             }
1475         }
1476 
1477     }
1478 
1479 }
1480 
1481 /**
1482  * Runnable to get a bitmap from a GLSurfaceView on the UI thread via a handler.
1483  * Note, because of how the bitmap is captured in GLSurfaceView,
1484  * this method does not have to be a runnable.
 * This class is to be used together with
1486  * {@link TestHelper#generateBitmapFromVideoViewSnapshot(VideoViewSnapshot)}
1487  */
1488 class GLSurfaceViewSnapshot extends VideoViewSnapshot {
1489 
1490     private static final String TAG = GLSurfaceViewSnapshot.class.getSimpleName();
1491 
1492     private final GLSurfaceViewFactory glSurfaceViewFactory;
1493     private final int width;
1494     private final int height;
1495 
1496     private Bitmap bitmap = null;
1497     private boolean bitmapIsReady = false;
1498 
GLSurfaceViewSnapshot(GLSurfaceViewFactory glSurfaceViewFactory, int width, int height)1499     public GLSurfaceViewSnapshot(GLSurfaceViewFactory glSurfaceViewFactory, int width, int height) {
1500         this.glSurfaceViewFactory = DecodeAccuracyTestBase.checkNotNull(glSurfaceViewFactory);
1501         this.width = width;
1502         this.height = height;
1503     }
1504 
1505     @Override
run()1506     public void run() {
1507         bitmapIsReady = false;
1508         bitmap = null;
1509         try {
1510             waitForByteBuffer();
1511         } catch (InterruptedException exception) {
1512             Log.e(TAG, exception.getMessage());
1513             bitmap = null;
1514             notifyObject();
1515             return;
1516         }
1517         try {
1518             final ByteBuffer byteBuffer = glSurfaceViewFactory.getByteBuffer();
1519             bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
1520             byteBuffer.rewind();
1521             bitmap.copyPixelsFromBuffer(byteBuffer);
1522             bitmapIsReady = true;
1523             byteBuffer.clear();
1524         } catch (NullPointerException exception) {
1525             Log.e(TAG, "glSurfaceViewFactory or byteBuffer may have been released", exception);
1526             bitmap = null;
1527         } finally {
1528             notifyObject();
1529         }
1530     }
1531 
1532     @Override
getBitmap()1533     public Bitmap getBitmap() {
1534         return bitmap;
1535     }
1536 
1537     @Override
isBitmapReady()1538     public boolean isBitmapReady() {
1539         return bitmapIsReady;
1540     }
1541 
1542     @Override
getSyncObject()1543     public Object getSyncObject() {
1544         return SYNC_TOKEN;
1545     }
1546 
notifyObject()1547     private void notifyObject() {
1548         synchronized (SYNC_TOKEN) {
1549             SYNC_TOKEN.notify();
1550         }
1551     }
1552 
waitForByteBuffer()1553     private void waitForByteBuffer() throws InterruptedException {
1554         // Wait for byte buffer to be ready.
1555         final long start = SystemClock.elapsedRealtime();
1556         while (SystemClock.elapsedRealtime() - start < SNAPSHOT_TIMEOUT_MS) {
1557             if (glSurfaceViewFactory.byteBufferIsReady()) {
1558                 return;
1559             }
1560             Thread.sleep(SLEEP_TIME_MS);
1561         }
1562         throw new InterruptedException("Taking too long to read pixels into a ByteBuffer.");
1563     }
1564 
1565 }
1566 
1567 /* Stores information of a video file. */
1568 class VideoFormat {
1569 
1570     public static final String STRING_UNSET = "UNSET";
1571     public static final int INT_UNSET = -1;
1572 
1573     private final String filename;
1574 
1575     private String mimeType = STRING_UNSET;
1576     private int width = INT_UNSET;
1577     private int height = INT_UNSET;
1578     private int maxWidth = INT_UNSET;
1579     private int maxHeight = INT_UNSET;
1580     private FilenameParser filenameParser;
1581 
VideoFormat(String filename)1582     public VideoFormat(String filename) {
1583         this.filename = filename;
1584     }
1585 
VideoFormat(VideoFormat videoFormat)1586     public VideoFormat(VideoFormat videoFormat) {
1587         this(videoFormat.filename);
1588     }
1589 
getParsedName()1590     private FilenameParser getParsedName() {
1591         if (filenameParser == null) {
1592             filenameParser = new FilenameParser(filename);
1593         }
1594         return filenameParser;
1595     }
1596 
getMediaFormat()1597     public String getMediaFormat() {
1598         return "video";
1599     }
1600 
setMimeType(String mimeType)1601     public void setMimeType(String mimeType) {
1602         this.mimeType = mimeType;
1603     }
1604 
getMimeType()1605     public String getMimeType() {
1606         if (mimeType.equals(STRING_UNSET)) {
1607             return getParsedName().getMimeType();
1608         }
1609         return mimeType;
1610     }
1611 
setWidth(int width)1612     public void setWidth(int width) {
1613         this.width = width;
1614     }
1615 
setMaxWidth(int maxWidth)1616     public void setMaxWidth(int maxWidth) {
1617         this.maxWidth = maxWidth;
1618     }
1619 
getWidth()1620     public int getWidth() {
1621         if (width == INT_UNSET) {
1622             return getParsedName().getWidth();
1623         }
1624         return width;
1625     }
1626 
getMaxWidth()1627     public int getMaxWidth() {
1628         return maxWidth;
1629     }
1630 
getOriginalWidth()1631     public int getOriginalWidth() {
1632         return getParsedName().getWidth();
1633     }
1634 
setHeight(int height)1635     public void setHeight(int height) {
1636         this.height = height;
1637     }
1638 
setMaxHeight(int maxHeight)1639     public void setMaxHeight(int maxHeight) {
1640         this.maxHeight = maxHeight;
1641     }
1642 
getHeight()1643     public int getHeight() {
1644         if (height == INT_UNSET) {
1645             return getParsedName().getHeight();
1646         }
1647         return height;
1648     }
1649 
getMaxHeight()1650     public int getMaxHeight() {
1651         return maxHeight;
1652     }
1653 
getOriginalHeight()1654     public int getOriginalHeight() {
1655         return getParsedName().getHeight();
1656     }
1657 
isAbrEnabled()1658     public boolean isAbrEnabled() {
1659         return false;
1660     }
1661 
getOriginalSize()1662     public String getOriginalSize() {
1663         if (width == INT_UNSET || height == INT_UNSET) {
1664             return getParsedName().getSize();
1665         }
1666         return width + "x" + height;
1667     }
1668 
getDescription()1669     public String getDescription() {
1670         return getParsedName().getDescription();
1671     }
1672 
toPrettyString()1673     public String toPrettyString() {
1674         return getParsedName().toPrettyString();
1675     }
1676 
getAssetFileDescriptor()1677     public AssetFileDescriptor getAssetFileDescriptor() throws FileNotFoundException {
1678         File inpFile = new File(WorkDir.getMediaDirString() + "assets/decode_accuracy/" + filename);
1679         ParcelFileDescriptor parcelFD =
1680                 ParcelFileDescriptor.open(inpFile, ParcelFileDescriptor.MODE_READ_ONLY);
1681         return new AssetFileDescriptor(parcelFD, 0, parcelFD.getStatSize());
1682     }
1683 
1684 }
1685 
1686 /* File parser for filenames with format of {description}-{mimeType}_{size}_{framerate}.{format} */
1687 class FilenameParser {
1688 
1689     static final String VP9 = "vp9";
1690     static final String H264 = "h264";
1691 
1692     private final String filename;
1693 
1694     private String codec = VideoFormat.STRING_UNSET;
1695     private String description = VideoFormat.STRING_UNSET;
1696     private int width = VideoFormat.INT_UNSET;
1697     private int height = VideoFormat.INT_UNSET;
1698 
FilenameParser(String filename)1699     FilenameParser(String filename) {
1700         this.filename = filename;
1701         parseFilename(filename);
1702     }
1703 
getCodec()1704     public String getCodec() {
1705         return codec;
1706     }
1707 
getMimeType()1708     public String getMimeType() {
1709         switch (codec) {
1710             case H264:
1711                 return MimeTypes.VIDEO_H264;
1712             case VP9:
1713                 return MimeTypes.VIDEO_VP9;
1714             default:
1715                 return null;
1716         }
1717     }
1718 
getWidth()1719     public int getWidth() {
1720         return width;
1721     }
1722 
getHeight()1723     public int getHeight() {
1724         return height;
1725     }
1726 
getSize()1727     public String getSize() {
1728         return width + "x" + height;
1729     }
1730 
getDescription()1731     public String getDescription() {
1732         return description;
1733     }
1734 
toPrettyString()1735     String toPrettyString() {
1736         if (codec != null) {
1737             return codec.toUpperCase() + " " + getSize();
1738         }
1739         return filename;
1740     }
1741 
parseFilename(String filename)1742     private void parseFilename(String filename) {
1743         final String descriptionDelimiter = "-";
1744         final String infoDelimiter = "_";
1745         final String sizeDelimiter = "x";
1746         try {
1747             this.description = filename.split(descriptionDelimiter)[0];
1748             final String[] fileInfo = filename.split(descriptionDelimiter)[1].split(infoDelimiter);
1749             this.codec = fileInfo[0];
1750             this.width = Integer.parseInt(fileInfo[1].split(sizeDelimiter)[0]);
1751             this.height = Integer.parseInt(fileInfo[1].split(sizeDelimiter)[1]);
1752         } catch (Exception exception) { /* Filename format does not match. */ }
1753     }
1754 
1755 }
1756 
1757 /**
1758  * Compares bitmaps to determine if they are similar.
1759  *
1760  * <p>To determine greatest pixel difference we transform each pixel into the
1761  * CIE L*a*b* color space. The euclidean distance formula is used to determine pixel differences.
1762  */
class BitmapCompare {
    private static final String TAG = "BitmapCompare";

    // Component indices for RGB arrays.
    private static final int RED = 0;
    private static final int GREEN = 1;
    private static final int BLUE = 2;
    // Component indices for XYZ / CIE-L*a*b* arrays.
    private static final int X = 0;
    private static final int Y = 1;
    private static final int Z = 2;

    // Utility class: no instances.
    private BitmapCompare() {}

    /**
     * Produces the greatest pixel difference between two bitmaps. Used to determine bitmap
     * similarity.
     *
     * This simplified variant does not ignore any edge pixels.
     *
     * @param bitmap1 A bitmap to compare to bitmap2.
     * @param bitmap2 A bitmap to compare to bitmap1.
     * @return A {@link Difference} with an integer describing the greatest pixel difference,
     *     using {@link Integer#MAX_VALUE} for completely different bitmaps, and an optional
     *     {@link Pair<Integer, Integer>} of the (col, row) pixel coordinate where it was first found.
     */
    @TargetApi(12)
    private static Difference computeDifference(Bitmap bitmap1, Bitmap bitmap2) {
        return computeDifference(bitmap1, bitmap2, 0);
    }

    /**
     * Produces the greatest pixel difference between two bitmaps. Used to determine bitmap
     * similarity.
     *
     * @param bitmap1 A bitmap to compare to bitmap2.
     * @param bitmap2 A bitmap to compare to bitmap1.
     * @param ignorePixels number of pixels at each edge where we ignore the scoring. This
     *     is used for mainline and older base systems to bypass an edge behavior in the
     *     GPU code on those systems.
     * @return A {@link Difference} with an integer describing the greatest pixel difference,
     *     using {@link Integer#MAX_VALUE} for completely different bitmaps, and an optional
     *     {@link Pair<Integer, Integer>} of the (col, row) pixel coordinate where it was
     *     first found.
     */
    @TargetApi(12)
    private static Difference computeDifference(Bitmap bitmap1, Bitmap bitmap2, int ignorePixels) {
        Log.i(TAG, "ignorePixels = " + ignorePixels);
        // Null or size-mismatched inputs are treated as maximally different.
        if (bitmap1 == null || bitmap2 == null) {
            return new Difference(Integer.MAX_VALUE);
        }
        if (bitmap1.equals(bitmap2) || bitmap1.sameAs(bitmap2)) {
            return new Difference(0);
        }
        if (bitmap1.getHeight() != bitmap2.getHeight() || bitmap1.getWidth() != bitmap2.getWidth()) {
            return new Difference(Integer.MAX_VALUE);
        }
        // Convert all pixels to CIE L*a*b* color space so we can do a direct color comparison using
        // euclidean distance formula.
        final double[][] pixels1 = convertRgbToCieLab(bitmap1);
        final double[][] pixels2 = convertRgbToCieLab(bitmap2);
        int greatestDifference = -1;    // forces a legal index later...
        int greatestDifferenceIndex = -1;
        for (int i = 0; i < pixels1.length; i++) {
            // pixels within 'ignorePixels' of the edge are to be ignored for
            // scoring purposes.
            int x = i % bitmap1.getWidth();
            int y = i / bitmap1.getWidth();
            if (x < ignorePixels || x >= bitmap1.getWidth() - ignorePixels
                    || y < ignorePixels || y >= bitmap1.getHeight() - ignorePixels) {
                continue;
            }

            final int difference = euclideanDistance(pixels1[i], pixels2[i]);

            if (difference > greatestDifference) {
                greatestDifference = difference;
                greatestDifferenceIndex = i;
            }
        }

        // huge ignorePixels values can get here without checking any pixels
        if (greatestDifferenceIndex == -1) {
            greatestDifferenceIndex = 0;
            greatestDifference = 0;
        }
        return new Difference(greatestDifference, Pair.create(
            greatestDifferenceIndex % bitmap1.getWidth(),
            greatestDifferenceIndex / bitmap1.getWidth()));
    }

    /**
     * Converts every pixel of the bitmap to CIE-L*a*b*, returned row-major as one triple
     * per pixel. Identical packed ARGB values are transformed once and cached.
     */
    @SuppressLint("UseSparseArrays")
    private static double[][] convertRgbToCieLab(Bitmap bitmap) {
        final HashMap<Integer, double[]> pixelTransformCache = new HashMap<>();
        final double[][] result = new double[bitmap.getHeight() * bitmap.getWidth()][3];
        final int[] pixels = new int[bitmap.getHeight() * bitmap.getWidth()];
        bitmap.getPixels(pixels, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
        for (int i = 0; i < pixels.length; i++) {
            final double[] transformedColor = pixelTransformCache.get(pixels[i]);
            if (transformedColor != null) {
                // Cached triples are shared between result entries; they are never
                // mutated after insertion, so sharing is safe.
                result[i] = transformedColor;
            } else {
                result[i] = convertXyzToCieLab(convertRgbToXyz(pixels[i]));
                pixelTransformCache.put(pixels[i], result[i]);
            }
        }
        return result;
    }

    /**
     * Conversion from RGB to XYZ based algorithm as defined by:
     * http://www.easyrgb.com/index.php?X=MATH&H=02#text2
     *
     * <p><pre>{@code
     *   var_R = ( R / 255 )        //R from 0 to 255
     *   var_G = ( G / 255 )        //G from 0 to 255
     *   var_B = ( B / 255 )        //B from 0 to 255
     *
     *   if ( var_R > 0.04045 ) var_R = ( ( var_R + 0.055 ) / 1.055 ) ^ 2.4
     *   else                   var_R = var_R / 12.92
     *   if ( var_G > 0.04045 ) var_G = ( ( var_G + 0.055 ) / 1.055 ) ^ 2.4
     *   else                   var_G = var_G / 12.92
     *   if ( var_B > 0.04045 ) var_B = ( ( var_B + 0.055 ) / 1.055 ) ^ 2.4
     *   else                   var_B = var_B / 12.92
     *
     *   var_R = var_R * 100
     *   var_G = var_G * 100
     *   var_B = var_B * 100
     *
     *   // Observer. = 2°, Illuminant = D65
     *   X = var_R * 0.4124 + var_G * 0.3576 + var_B * 0.1805
     *   Y = var_R * 0.2126 + var_G * 0.7152 + var_B * 0.0722
     *   Z = var_R * 0.0193 + var_G * 0.1192 + var_B * 0.9505
     * }</pre>
     *
     * @param rgbColor A packed int made up of 4 bytes: alpha, red, green, blue.
     * @return An array of doubles where each value is a component of the XYZ color space.
     */
    private static double[] convertRgbToXyz(int rgbColor) {
        final double[] comp = {Color.red(rgbColor), Color.green(rgbColor), Color.blue(rgbColor)};
        for (int i = 0; i < comp.length; i++) {
            comp[i] /= 255.0;
            if (comp[i] > 0.04045) {
                comp[i] = Math.pow((comp[i] + 0.055) / 1.055, 2.4);
            } else {
                comp[i] /= 12.92;
            }
            comp[i] *= 100;
        }
        final double x = (comp[RED] * 0.4124) + (comp[GREEN] * 0.3576) + (comp[BLUE] * 0.1805);
        final double y = (comp[RED] * 0.2126) + (comp[GREEN] * 0.7152) + (comp[BLUE] * 0.0722);
        final double z = (comp[RED] * 0.0193) + (comp[GREEN] * 0.1192) + (comp[BLUE] * 0.9505);
        return new double[] {x, y, z};
    }

    /**
     * Conversion from XYZ to CIE-L*a*b* based algorithm as defined by:
     * http://www.easyrgb.com/index.php?X=MATH&H=07#text7
     *
     * <p><pre>
     * {@code
     *   var_X = X / ref_X          //ref_X =  95.047   Observer= 2°, Illuminant= D65
     *   var_Y = Y / ref_Y          //ref_Y = 100.000
     *   var_Z = Z / ref_Z          //ref_Z = 108.883
     *
     *   if ( var_X > 0.008856 ) var_X = var_X ^ ( 1/3 )
     *   else                    var_X = ( 7.787 * var_X ) + ( 16 / 116 )
     *   if ( var_Y > 0.008856 ) var_Y = var_Y ^ ( 1/3 )
     *   else                    var_Y = ( 7.787 * var_Y ) + ( 16 / 116 )
     *   if ( var_Z > 0.008856 ) var_Z = var_Z ^ ( 1/3 )
     *   else                    var_Z = ( 7.787 * var_Z ) + ( 16 / 116 )
     *
     *   CIE-L* = ( 116 * var_Y ) - 16
     *   CIE-a* = 500 * ( var_X - var_Y )
     *   CIE-b* = 200 * ( var_Y - var_Z )
     * }
     * </pre>
     *
     * <p>Note: mutates its argument in place; callers pass a freshly allocated array.
     *
     * @param comp An array of doubles where each value is a component of the XYZ color space.
     * @return An array of doubles where each value is a component of the CIE-L*a*b* color space.
     */
    private static double[] convertXyzToCieLab(double[] comp) {
        comp[X] /= 95.047;
        comp[Y] /= 100.0;
        comp[Z] /= 108.883;
        for (int i = 0; i < comp.length; i++) {
            if (comp[i] > 0.008856) {
                comp[i] = Math.pow(comp[i], (1.0 / 3.0));
            } else {
                comp[i] = (7.787 * comp[i]) + (16.0 / 116.0);
            }
        }
        final double l = (116 * comp[Y]) - 16;
        final double a = 500 * (comp[X] - comp[Y]);
        final double b = 200 * (comp[Y] - comp[Z]);
        return new double[] {l, a, b};
    }

    /** Rounded euclidean distance between two equal-length vectors; MAX_VALUE on mismatch. */
    private static int euclideanDistance(double[] p1, double[] p2) {
        if (p1.length != p2.length) {
            return Integer.MAX_VALUE;
        }
        double result = 0;
        for (int i = 0; i < p1.length; i++) {
            result += Math.pow(p1[i] - p2[i], 2);
        }
        return (int) Math.round(Math.sqrt(result));
    }

    /**
     * Crops the border of the array representing an image by hBorderSize
     * pixels on the left and right borders, and by vBorderSize pixels on the
     * top and bottom borders (so the width is 2 * hBorderSize smaller and
     * the height is 2 * vBorderSize smaller), then scales the image up to
     * match the original size using bilinear interpolation.
     */
    private static Bitmap shrinkAndScaleBilinear(
            Bitmap input, double hBorderSize, double vBorderSize) {

        int width = input.getWidth();
        int height = input.getHeight();

        // Compute the proper step sizes
        double xInc = ((double) width - 1 - hBorderSize * 2) / (double) (width - 1);
        double yInc = ((double) height - 1 - vBorderSize * 2) / (double) (height - 1);

        // Read the input bitmap into RGB arrays.
        int[] inputPixels = new int[width * height];
        input.getPixels(inputPixels, 0, width, 0, 0, width, height);
        int[][] inputRgb = new int[width * height][3];
        for (int i = 0; i < width * height; ++i) {
            inputRgb[i][0] = Color.red(inputPixels[i]);
            inputRgb[i][1] = Color.green(inputPixels[i]);
            inputRgb[i][2] = Color.blue(inputPixels[i]);
        }
        inputPixels = null;

        // Prepare the output buffer.
        int[] outputPixels = new int[width * height];

        // Start the iteration. The first y coordinate is vBorderSize.
        double y = vBorderSize;
        for (int yIndex = 0; yIndex < height; ++yIndex) {
            // The first x coordinate is hBorderSize.
            double x = hBorderSize;
            for (int xIndex = 0; xIndex < width; ++xIndex) {
                // Determine the square of interest.
                int left = (int)x;    // This is floor(x).
                int top = (int)y;     // This is floor(y).
                int right = left + 1;
                int bottom = top + 1;

                // (u, v) is the fractional part of (x, y).
                double u = x - (double)left;
                double v = y - (double)top;

                // Precompute necessary products to save time.
                double p00 = (1.0 - u) * (1.0 - v);
                double p01 = (1.0 - u) * v;
                double p10 = u * (1.0 - v);
                double p11 = u * v;

                // Clamp the indices to prevent out-of-bound that may be caused
                // by round-off error.
                if (left >= width) left = width - 1;
                if (top >= height) top = height - 1;
                if (right >= width) right = width - 1;
                if (bottom >= height) bottom = height - 1;

                // Sample RGB values from the four corners.
                int[] rgb00 = inputRgb[top * width + left];
                int[] rgb01 = inputRgb[bottom * width + left];
                int[] rgb10 = inputRgb[top * width + right];
                int[] rgb11 = inputRgb[bottom * width + right];

                // Interpolate each component of RGB separately.
                int[] mixedColor = new int[3];
                for (int k = 0; k < 3; ++k) {
                    mixedColor[k] = (int)Math.round(
                            p00 * (double) rgb00[k] + p01 * (double) rgb01[k]
                            + p10 * (double) rgb10[k] + p11 * (double) rgb11[k]);
                }
                // Convert RGB to bitmap Color format and store.
                outputPixels[yIndex * width + xIndex] = Color.rgb(
                        mixedColor[0], mixedColor[1], mixedColor[2]);
                x += xInc;
            }
            y += yInc;
        }
        // Assemble the output buffer into a Bitmap object.
        return Bitmap.createBitmap(outputPixels, width, height, input.getConfig());
    }

    /**
     * Calls computeDifference on multiple cropped-and-scaled versions of
     * bitmap2.
     */
    @TargetApi(12)
    public static Difference computeMinimumDifference(
            Bitmap bitmap1, Bitmap bitmap2, int ignorePixels, Pair<Double, Double>[] borderCrops) {

        // Compute the difference with the original image (bitmap2) first.
        Difference minDiff = computeDifference(bitmap1, bitmap2, ignorePixels);
        // Then go through the list of borderCrops.
        for (Pair<Double, Double> borderCrop : borderCrops) {
            // Compute the difference between bitmap1 and a transformed
            // version of bitmap2.
            Bitmap bitmap2s = shrinkAndScaleBilinear(bitmap2, borderCrop.first, borderCrop.second);
            Difference d = computeDifference(bitmap1, bitmap2s, ignorePixels);
            // Keep the minimum difference.
            if (d.greatestPixelDifference < minDiff.greatestPixelDifference) {
                minDiff = d;
                minDiff.bestMatchBorderCrop = borderCrop;
            }
        }
        return minDiff;
    }

    /**
     * Calls computeMinimumDifference on a default list of borderCrop.
     */
    @TargetApi(12)
    public static Difference computeMinimumDifference(
            Bitmap bitmap1, Bitmap bitmap2, int ignorePixels, int trueWidth, int trueHeight) {

        double hBorder = (double) bitmap1.getWidth() / (double) trueWidth;
        double vBorder = (double) bitmap1.getHeight() / (double) trueHeight;
        double hBorderH = 0.5 * hBorder; // Half-texel horizontal border
        double vBorderH = 0.5 * vBorder; // Half-texel vertical border
        return computeMinimumDifference(
                bitmap1,
                bitmap2,
                ignorePixels,
                new Pair[] {
                    Pair.create(hBorderH, 0.0),
                    Pair.create(hBorderH, vBorderH),
                    Pair.create(0.0, vBorderH),
                    Pair.create(hBorder, 0.0),
                    Pair.create(hBorder, vBorder),
                    Pair.create(0.0, vBorder)
                });
        // This default list of borderCrop comes from the behavior of
        // GLConsumer.computeTransformMatrix().
    }

    /* Describes the difference between two {@link Bitmap} instances. */
    public static final class Difference {

        public final int greatestPixelDifference;
        // (col, row) of the first pixel where the greatest difference was found; may be null.
        public final Pair<Integer, Integer> greatestPixelDifferenceCoordinates;
        // Border crop that produced the best match; mutable, updated by computeMinimumDifference.
        public Pair<Double, Double> bestMatchBorderCrop;

        private Difference(int greatestPixelDifference) {
            this(greatestPixelDifference, null, Pair.create(0.0, 0.0));
        }

        private Difference(
                int greatestPixelDifference,
                Pair<Integer, Integer> greatestPixelDifferenceCoordinates) {
            this(greatestPixelDifference, greatestPixelDifferenceCoordinates,
                    Pair.create(0.0, 0.0));
        }

        private Difference(
                int greatestPixelDifference,
                Pair<Integer, Integer> greatestPixelDifferenceCoordinates,
                Pair<Double, Double> bestMatchBorderCrop) {
            this.greatestPixelDifference = greatestPixelDifference;
            this.greatestPixelDifferenceCoordinates = greatestPixelDifferenceCoordinates;
            this.bestMatchBorderCrop = bestMatchBorderCrop;
        }
    }

}
2133 
2134 /* Wrapper for MIME types. */
final class MimeTypes {

    public static final String VIDEO_VP9 = "video/x-vnd.on2.vp9";
    public static final String VIDEO_H264 = "video/avc";

    // Utility class: no instances.
    private MimeTypes() {}

    /** Returns true when the given MIME type belongs to the video family. */
    public static boolean isVideo(String mimeType) {
        final String videoPrefix = "video";
        return mimeType.startsWith(videoPrefix);
    }

}
2147