1 /*
2  * Copyright (C) 2011 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License"); you may not
5  * use this file except in compliance with the License. You may obtain a copy of
6  * the License at
7  *
8  * http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
12  * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
13  * License for the specific language governing permissions and limitations under
14  * the License.
15  */
16 
17 package com.android.camera;
18 
19 import android.annotation.TargetApi;
20 import android.content.Context;
21 import android.graphics.SurfaceTexture;
22 import android.hardware.Camera;
23 import android.media.CamcorderProfile;
24 import android.media.MediaRecorder;
25 import android.os.Handler;
26 import android.os.Looper;
27 import android.util.Log;
28 
29 import com.android.gallery3d.R;
30 import com.android.gallery3d.common.ApiHelper;
31 
32 import java.io.FileDescriptor;
33 import java.io.IOException;
34 import java.io.Serializable;
35 import java.lang.reflect.Constructor;
36 import java.lang.reflect.InvocationHandler;
37 import java.lang.reflect.Method;
38 import java.lang.reflect.Proxy;
39 
40 
41 /**
42  * Encapsulates the mobile filter framework components needed to record video
43  * with effects applied. Modeled after MediaRecorder.
44  */
45 @TargetApi(ApiHelper.VERSION_CODES.HONEYCOMB) // uses SurfaceTexture
46 public class EffectsRecorder {
47     private static final String TAG = "EffectsRecorder";
48 
49     private static Class<?> sClassFilter;
50     private static Method sFilterIsAvailable;
51     private static EffectsRecorder sEffectsRecorder;
52     // The index of the current effects recorder.
53     private static int sEffectsRecorderIndex;
54 
55     private static boolean sReflectionInited = false;
56 
57     private static Class<?> sClsLearningDoneListener;
58     private static Class<?> sClsOnRunnerDoneListener;
59     private static Class<?> sClsOnRecordingDoneListener;
60     private static Class<?> sClsSurfaceTextureSourceListener;
61 
62     private static Method sFilterSetInputValue;
63 
64     private static Constructor<?> sCtPoint;
65     private static Constructor<?> sCtQuad;
66 
67     private static Method sLearningDoneListenerOnLearningDone;
68 
69     private static Method sObjectEquals;
70     private static Method sObjectToString;
71 
72     private static Class<?> sClsGraphRunner;
73     private static Method sGraphRunnerGetGraph;
74     private static Method sGraphRunnerSetDoneCallback;
75     private static Method sGraphRunnerRun;
76     private static Method sGraphRunnerGetError;
77     private static Method sGraphRunnerStop;
78 
79     private static Method sFilterGraphGetFilter;
80     private static Method sFilterGraphTearDown;
81 
82     private static Method sOnRunnerDoneListenerOnRunnerDone;
83 
84     private static Class<?> sClsGraphEnvironment;
85     private static Constructor<?> sCtGraphEnvironment;
86     private static Method sGraphEnvironmentCreateGLEnvironment;
87     private static Method sGraphEnvironmentGetRunner;
88     private static Method sGraphEnvironmentAddReferences;
89     private static Method sGraphEnvironmentLoadGraph;
90     private static Method sGraphEnvironmentGetContext;
91 
92     private static Method sFilterContextGetGLEnvironment;
93     private static Method sGLEnvironmentIsActive;
94     private static Method sGLEnvironmentActivate;
95     private static Method sGLEnvironmentDeactivate;
96     private static Method sSurfaceTextureTargetDisconnect;
97     private static Method sOnRecordingDoneListenerOnRecordingDone;
98     private static Method sSurfaceTextureSourceListenerOnSurfaceTextureSourceReady;
99 
100     private Object mLearningDoneListener;
101     private Object mRunnerDoneCallback;
102     private Object mSourceReadyCallback;
103     // A callback to finalize the media after the recording is done.
104     private Object mRecordingDoneListener;
105 
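    // The filter framework is accessed purely via reflection. Filter.isAvailable()
    // is looked up here; if the framework classes are missing on this device,
    // sFilterIsAvailable stays null and isEffectSupported() reports false.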
106     static {
107         try {
108             sClassFilter = Class.forName("android.filterfw.core.Filter");
109             sFilterIsAvailable = sClassFilter.getMethod("isAvailable",
110                     String.class);
111         } catch (ClassNotFoundException ex) {
112             Log.v(TAG, "Can't find the class android.filterfw.core.Filter");
113         } catch (NoSuchMethodException e) {
114             Log.v(TAG, "Can't find the method Filter.isAvailable");
115         }
116     }
117 
118     public static final int  EFFECT_NONE        = 0;
119     public static final int  EFFECT_GOOFY_FACE  = 1;
120     public static final int  EFFECT_BACKDROPPER = 2;
121 
122     public static final int  EFFECT_GF_SQUEEZE     = 0;
123     public static final int  EFFECT_GF_BIG_EYES    = 1;
124     public static final int  EFFECT_GF_BIG_MOUTH   = 2;
125     public static final int  EFFECT_GF_SMALL_MOUTH = 3;
126     public static final int  EFFECT_GF_BIG_NOSE    = 4;
127     public static final int  EFFECT_GF_SMALL_EYES  = 5;
128     public static final int  NUM_OF_GF_EFFECTS = EFFECT_GF_SMALL_EYES + 1;
129 
130     public static final int  EFFECT_MSG_STARTED_LEARNING = 0;
131     public static final int  EFFECT_MSG_DONE_LEARNING    = 1;
132     public static final int  EFFECT_MSG_SWITCHING_EFFECT = 2;
133     public static final int  EFFECT_MSG_EFFECTS_STOPPED  = 3;
134     public static final int  EFFECT_MSG_RECORDING_DONE   = 4;
135     public static final int  EFFECT_MSG_PREVIEW_RUNNING  = 5;
136 
137     private Context mContext;
138     private Handler mHandler;
139 
140     private CameraManager.CameraProxy mCameraDevice;
141     private CamcorderProfile mProfile;
142     private double mCaptureRate = 0;
143     private SurfaceTexture mPreviewSurfaceTexture;
144     private int mPreviewWidth;
145     private int mPreviewHeight;
146     private MediaRecorder.OnInfoListener mInfoListener;
147     private MediaRecorder.OnErrorListener mErrorListener;
148 
149     private String mOutputFile;
150     private FileDescriptor mFd;
151     private int mOrientationHint = 0;
152     private long mMaxFileSize = 0;
153     private int mMaxDurationMs = 0;
154     private int mCameraFacing = Camera.CameraInfo.CAMERA_FACING_BACK;
155     private int mCameraDisplayOrientation;
156 
157     private int mEffect = EFFECT_NONE;
158     private int mCurrentEffect = EFFECT_NONE;
159     private EffectsListener mEffectsListener;
160 
161     private Object mEffectParameter;
162 
163     private Object mGraphEnv;
164     private int mGraphId;
165     private Object mRunner = null;
166     private Object mOldRunner = null;
167 
168     private SurfaceTexture mTextureSource;
169 
170     private static final int STATE_CONFIGURE              = 0;
171     private static final int STATE_WAITING_FOR_SURFACE    = 1;
172     private static final int STATE_STARTING_PREVIEW       = 2;
173     private static final int STATE_PREVIEW                = 3;
174     private static final int STATE_RECORD                 = 4;
175     private static final int STATE_RELEASED               = 5;
176     private int mState = STATE_CONFIGURE;
177 
178     private boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
179     private SoundClips.Player mSoundPlayer;
180 
181     /** Determines whether a given effect is supported at runtime.
182      * Some effects require libraries that are not available on all devices.
183      */
184     public static boolean isEffectSupported(int effectId) {
185         if (sFilterIsAvailable == null)  return false;
186 
187         try {
188             switch (effectId) {
189                 case EFFECT_GOOFY_FACE:
190                     return (Boolean) sFilterIsAvailable.invoke(null,
191                             "com.google.android.filterpacks.facedetect.GoofyRenderFilter");
192                 case EFFECT_BACKDROPPER:
193                     return (Boolean) sFilterIsAvailable.invoke(null,
194                             "android.filterpacks.videoproc.BackDropperFilter");
195                 default:
196                     return false;
197             }
198         } catch (Exception ex) {
199             Log.e(TAG, "Failed to check filter", ex);
200         }
201         return false;
202     }
203 
204     public EffectsRecorder(Context context) {
205         if (mLogVerbose) Log.v(TAG, "EffectsRecorder created (" + this + ")");
206 
207         if (!sReflectionInited) {
208             try {
209                 sFilterSetInputValue = sClassFilter.getMethod("setInputValue",
210                         new Class[] {String.class, Object.class});
211 
212                 Class<?> clsPoint = Class.forName("android.filterfw.geometry.Point");
213                 sCtPoint = clsPoint.getConstructor(new Class[] {float.class,
214                         float.class});
215 
216                 Class<?> clsQuad = Class.forName("android.filterfw.geometry.Quad");
217                 sCtQuad = clsQuad.getConstructor(new Class[] {clsPoint, clsPoint,
218                         clsPoint, clsPoint});
219 
220                 Class<?> clsBackDropperFilter = Class.forName(
221                         "android.filterpacks.videoproc.BackDropperFilter");
222                 sClsLearningDoneListener = Class.forName(
223                         "android.filterpacks.videoproc.BackDropperFilter$LearningDoneListener");
224                 sLearningDoneListenerOnLearningDone = sClsLearningDoneListener
225                         .getMethod("onLearningDone", new Class[] {clsBackDropperFilter});
226 
227                 sObjectEquals = Object.class.getMethod("equals", new Class[] {Object.class});
228                 sObjectToString = Object.class.getMethod("toString");
229 
230                 sClsOnRunnerDoneListener = Class.forName(
231                         "android.filterfw.core.GraphRunner$OnRunnerDoneListener");
232                 sOnRunnerDoneListenerOnRunnerDone = sClsOnRunnerDoneListener.getMethod(
233                         "onRunnerDone", new Class[] {int.class});
234 
235                 sClsGraphRunner = Class.forName("android.filterfw.core.GraphRunner");
236                 sGraphRunnerGetGraph = sClsGraphRunner.getMethod("getGraph");
237                 sGraphRunnerSetDoneCallback = sClsGraphRunner.getMethod(
238                         "setDoneCallback", new Class[] {sClsOnRunnerDoneListener});
239                 sGraphRunnerRun = sClsGraphRunner.getMethod("run");
240                 sGraphRunnerGetError = sClsGraphRunner.getMethod("getError");
241                 sGraphRunnerStop = sClsGraphRunner.getMethod("stop");
242 
243                 Class<?> clsFilterContext = Class.forName("android.filterfw.core.FilterContext");
244                 sFilterContextGetGLEnvironment = clsFilterContext.getMethod(
245                         "getGLEnvironment");
246 
247                 Class<?> clsFilterGraph = Class.forName("android.filterfw.core.FilterGraph");
248                 sFilterGraphGetFilter = clsFilterGraph.getMethod("getFilter",
249                         new Class[] {String.class});
250                 sFilterGraphTearDown = clsFilterGraph.getMethod("tearDown",
251                         new Class[] {clsFilterContext});
252 
253                 sClsGraphEnvironment = Class.forName("android.filterfw.GraphEnvironment");
254                 sCtGraphEnvironment = sClsGraphEnvironment.getConstructor();
255                 sGraphEnvironmentCreateGLEnvironment = sClsGraphEnvironment.getMethod(
256                         "createGLEnvironment");
257                 sGraphEnvironmentGetRunner = sClsGraphEnvironment.getMethod(
258                         "getRunner", new Class[] {int.class, int.class});
259                 sGraphEnvironmentAddReferences = sClsGraphEnvironment.getMethod(
260                         "addReferences", new Class[] {Object[].class});
261                 sGraphEnvironmentLoadGraph = sClsGraphEnvironment.getMethod(
262                         "loadGraph", new Class[] {Context.class, int.class});
263                 sGraphEnvironmentGetContext = sClsGraphEnvironment.getMethod(
264                         "getContext");
265 
266                 Class<?> clsGLEnvironment = Class.forName("android.filterfw.core.GLEnvironment");
267                 sGLEnvironmentIsActive = clsGLEnvironment.getMethod("isActive");
268                 sGLEnvironmentActivate = clsGLEnvironment.getMethod("activate");
269                 sGLEnvironmentDeactivate = clsGLEnvironment.getMethod("deactivate");
270 
271                 Class<?> clsSurfaceTextureTarget = Class.forName(
272                         "android.filterpacks.videosrc.SurfaceTextureTarget");
273                 sSurfaceTextureTargetDisconnect = clsSurfaceTextureTarget.getMethod(
274                         "disconnect", new Class[] {clsFilterContext});
275 
276                 sClsOnRecordingDoneListener = Class.forName(
277                         "android.filterpacks.videosink.MediaEncoderFilter$OnRecordingDoneListener");
278                 sOnRecordingDoneListenerOnRecordingDone =
279                         sClsOnRecordingDoneListener.getMethod("onRecordingDone");
280 
281                 sClsSurfaceTextureSourceListener = Class.forName(
282                         "android.filterpacks.videosrc.SurfaceTextureSource$SurfaceTextureSourceListener");
283                 sSurfaceTextureSourceListenerOnSurfaceTextureSourceReady =
284                         sClsSurfaceTextureSourceListener.getMethod(
285                                 "onSurfaceTextureSourceReady",
286                                 new Class[] {SurfaceTexture.class});
287             } catch (Exception ex) {
288                 throw new RuntimeException(ex);
289             }
290 
291             sReflectionInited = true;
292         }
293 
294         sEffectsRecorderIndex++;
295         Log.v(TAG, "Current effects recorder index is " + sEffectsRecorderIndex);
296         sEffectsRecorder = this;
297         SerializableInvocationHandler sih = new SerializableInvocationHandler(
298                 sEffectsRecorderIndex);
299         mLearningDoneListener = Proxy.newProxyInstance(
300                 sClsLearningDoneListener.getClassLoader(),
301                 new Class[] {sClsLearningDoneListener}, sih);
302         mRunnerDoneCallback = Proxy.newProxyInstance(
303                 sClsOnRunnerDoneListener.getClassLoader(),
304                 new Class[] {sClsOnRunnerDoneListener}, sih);
305         mSourceReadyCallback = Proxy.newProxyInstance(
306                 sClsSurfaceTextureSourceListener.getClassLoader(),
307                 new Class[] {sClsSurfaceTextureSourceListener}, sih);
308         mRecordingDoneListener =  Proxy.newProxyInstance(
309                 sClsOnRecordingDoneListener.getClassLoader(),
310                 new Class[] {sClsOnRecordingDoneListener}, sih);
311 
312         mContext = context;
313         mHandler = new Handler(Looper.getMainLooper());
314         mSoundPlayer = SoundClips.getPlayer(context);
315     }
316 
317     public synchronized void setCamera(CameraManager.CameraProxy cameraDevice) {
318         switch (mState) {
319             case STATE_PREVIEW:
320                 throw new RuntimeException("setCamera cannot be called while previewing!");
321             case STATE_RECORD:
322                 throw new RuntimeException("setCamera cannot be called while recording!");
323             case STATE_RELEASED:
324                 throw new RuntimeException("setCamera called on an already released recorder!");
325             default:
326                 break;
327         }
328 
329         mCameraDevice = cameraDevice;
330     }
331 
332     public void setProfile(CamcorderProfile profile) {
333         switch (mState) {
334             case STATE_RECORD:
335                 throw new RuntimeException("setProfile cannot be called while recording!");
336             case STATE_RELEASED:
337                 throw new RuntimeException("setProfile called on an already released recorder!");
338             default:
339                 break;
340         }
341         mProfile = profile;
342     }
343 
344     public void setOutputFile(String outputFile) {
345         switch (mState) {
346             case STATE_RECORD:
347                 throw new RuntimeException("setOutputFile cannot be called while recording!");
348             case STATE_RELEASED:
349                 throw new RuntimeException("setOutputFile called on an already released recorder!");
350             default:
351                 break;
352         }
353 
354         mOutputFile = outputFile;
355         mFd = null;
356     }
357 
358     public void setOutputFile(FileDescriptor fd) {
359         switch (mState) {
360             case STATE_RECORD:
361                 throw new RuntimeException("setOutputFile cannot be called while recording!");
362             case STATE_RELEASED:
363                 throw new RuntimeException("setOutputFile called on an already released recorder!");
364             default:
365                 break;
366         }
367 
368         mOutputFile = null;
369         mFd = fd;
370     }
371 
372     /**
373      * Sets the maximum file size (in bytes) of the recording session.
374      * This is passed on to the MediaEncoderFilter and ultimately to the
375      * MediaRecorder. If zero or negative, the MediaRecorder will
376      * disable the limit.
377      */
378     public synchronized void setMaxFileSize(long maxFileSize) {
379         switch (mState) {
380             case STATE_RECORD:
381                 throw new RuntimeException("setMaxFileSize cannot be called while recording!");
382             case STATE_RELEASED:
383                 throw new RuntimeException(
384                     "setMaxFileSize called on an already released recorder!");
385             default:
386                 break;
387         }
388         mMaxFileSize = maxFileSize;
389     }
390 
391     /**
392      * Sets the maximum recording duration (in ms) for the next recording session.
393      * Setting it to zero (the default) disables the limit.
394      */
395     public synchronized void setMaxDuration(int maxDurationMs) {
396         switch (mState) {
397             case STATE_RECORD:
398                 throw new RuntimeException("setMaxDuration cannot be called while recording!");
399             case STATE_RELEASED:
400                 throw new RuntimeException(
401                     "setMaxDuration called on an already released recorder!");
402             default:
403                 break;
404         }
405         mMaxDurationMs = maxDurationMs;
406     }
407 
408 
409     public void setCaptureRate(double fps) {
410         switch (mState) {
411             case STATE_RECORD:
412                 throw new RuntimeException("setCaptureRate cannot be called while recording!");
413             case STATE_RELEASED:
414                 throw new RuntimeException(
415                     "setCaptureRate called on an already released recorder!");
416             default:
417                 break;
418         }
419 
420         if (mLogVerbose) Log.v(TAG, "Setting time lapse capture rate to " + fps + " fps");
421         mCaptureRate = fps;
422     }
423 
424     public void setPreviewSurfaceTexture(SurfaceTexture previewSurfaceTexture,
425                                   int previewWidth,
426                                   int previewHeight) {
427         if (mLogVerbose) Log.v(TAG, "setPreviewSurfaceTexture(" + this + ")");
428         switch (mState) {
429             case STATE_RECORD:
430                 throw new RuntimeException(
431                     "setPreviewSurfaceTexture cannot be called while recording!");
432             case STATE_RELEASED:
433                 throw new RuntimeException(
434                     "setPreviewSurfaceTexture called on an already released recorder!");
435             default:
436                 break;
437         }
438 
439         mPreviewSurfaceTexture = previewSurfaceTexture;
440         mPreviewWidth = previewWidth;
441         mPreviewHeight = previewHeight;
442 
443         switch (mState) {
444             case STATE_WAITING_FOR_SURFACE:
445                 startPreview();
446                 break;
447             case STATE_STARTING_PREVIEW:
448             case STATE_PREVIEW:
449                 initializeEffect(true);
450                 break;
451         }
452     }
453 
454     public void setEffect(int effect, Object effectParameter) {
455         if (mLogVerbose) Log.v(TAG,
456                                "setEffect: effect ID " + effect +
457                                ", parameter " + effectParameter.toString());
458         switch (mState) {
459             case STATE_RECORD:
460                 throw new RuntimeException("setEffect cannot be called while recording!");
461             case STATE_RELEASED:
462                 throw new RuntimeException("setEffect called on an already released recorder!");
463             default:
464                 break;
465         }
466 
467         mEffect = effect;
468         mEffectParameter = effectParameter;
469 
470         if (mState == STATE_PREVIEW ||
471                 mState == STATE_STARTING_PREVIEW) {
472             initializeEffect(false);
473         }
474     }
475 
476     public interface EffectsListener {
477         public void onEffectsUpdate(int effectId, int effectMsg);
478         public void onEffectsError(Exception exception, String filePath);
479     }
480 
481     public void setEffectsListener(EffectsListener listener) {
482         mEffectsListener = listener;
483     }
484 
485     private void setFaceDetectOrientation() {
486         if (mCurrentEffect == EFFECT_GOOFY_FACE) {
487             Object rotateFilter = getGraphFilter(mRunner, "rotate");
488             Object metaRotateFilter = getGraphFilter(mRunner, "metarotate");
489             setInputValue(rotateFilter, "rotation", mOrientationHint);
490             int reverseDegrees = (360 - mOrientationHint) % 360;
491             setInputValue(metaRotateFilter, "rotation", reverseDegrees);
492         }
493     }
494 
495     private void setRecordingOrientation() {
496         if (mState != STATE_RECORD && mRunner != null) {
497             Object bl = newInstance(sCtPoint, new Object[] {0, 0});
498             Object br = newInstance(sCtPoint, new Object[] {1, 0});
499             Object tl = newInstance(sCtPoint, new Object[] {0, 1});
500             Object tr = newInstance(sCtPoint, new Object[] {1, 1});
501             Object recordingRegion;
502             if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_BACK) {
503                 // The back camera is not mirrored, so use an identity transform
504                 recordingRegion = newInstance(sCtQuad, new Object[] {bl, br, tl, tr});
505             } else {
506                 // Recording region needs to be tweaked for front cameras, since they
507                 // mirror their preview
508                 if (mOrientationHint == 0 || mOrientationHint == 180) {
509                     // Horizontal flip in landscape
510                     recordingRegion = newInstance(sCtQuad, new Object[] {br, bl, tr, tl});
511                 } else {
512                     // Horizontal flip in portrait
513                     recordingRegion = newInstance(sCtQuad, new Object[] {tl, tr, bl, br});
514                 }
515             }
516             Object recorder = getGraphFilter(mRunner, "recorder");
517             setInputValue(recorder, "inputRegion", recordingRegion);
518         }
519     }
520     public void setOrientationHint(int degrees) {
521         switch (mState) {
522             case STATE_RELEASED:
523                 throw new RuntimeException(
524                         "setOrientationHint called on an already released recorder!");
525             default:
526                 break;
527         }
528         if (mLogVerbose) Log.v(TAG, "Setting orientation hint to: " + degrees);
529         mOrientationHint = degrees;
530         setFaceDetectOrientation();
531         setRecordingOrientation();
532     }
533 
534     public void setCameraDisplayOrientation(int orientation) {
535         if (mState != STATE_CONFIGURE) {
536             throw new RuntimeException(
537                 "setCameraDisplayOrientation called after configuration!");
538         }
539         mCameraDisplayOrientation = orientation;
540     }
541 
542     public void setCameraFacing(int facing) {
543         switch (mState) {
544             case STATE_RELEASED:
545                 throw new RuntimeException(
546                     "setCameraFacing called on an already released recorder!");
547             default:
548                 break;
549         }
550         mCameraFacing = facing;
551         setRecordingOrientation();
552     }
553 
554     public void setOnInfoListener(MediaRecorder.OnInfoListener infoListener) {
555         switch (mState) {
556             case STATE_RECORD:
557                 throw new RuntimeException("setInfoListener cannot be called while recording!");
558             case STATE_RELEASED:
559                 throw new RuntimeException(
560                     "setInfoListener called on an already released recorder!");
561             default:
562                 break;
563         }
564         mInfoListener = infoListener;
565     }
566 
567     public void setOnErrorListener(MediaRecorder.OnErrorListener errorListener) {
568         switch (mState) {
569             case STATE_RECORD:
570                 throw new RuntimeException("setErrorListener cannot be called while recording!");
571             case STATE_RELEASED:
572                 throw new RuntimeException(
573                     "setErrorListener called on an already released recorder!");
574             default:
575                 break;
576         }
577         mErrorListener = errorListener;
578     }
579 
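    // Creates a fresh GraphEnvironment with its own GL environment and registers
    // the references (callbacks, recording size, profile) shared by all effect
    // graphs; recording dimensions are swapped for 90/270-degree display orientations.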
580     private void initializeFilterFramework() {
581         mGraphEnv = newInstance(sCtGraphEnvironment);
582         invoke(mGraphEnv, sGraphEnvironmentCreateGLEnvironment);
583 
584         int videoFrameWidth = mProfile.videoFrameWidth;
585         int videoFrameHeight = mProfile.videoFrameHeight;
586         if (mCameraDisplayOrientation == 90 || mCameraDisplayOrientation == 270) {
587             int tmp = videoFrameWidth;
588             videoFrameWidth = videoFrameHeight;
589             videoFrameHeight = tmp;
590         }
591 
592         invoke(mGraphEnv, sGraphEnvironmentAddReferences,
593                 new Object[] {new Object[] {
594                 "textureSourceCallback", mSourceReadyCallback,
595                 "recordingWidth", videoFrameWidth,
596                 "recordingHeight", videoFrameHeight,
597                 "recordingProfile", mProfile,
598                 "learningDoneListener", mLearningDoneListener,
599                 "recordingDoneListener", mRecordingDoneListener}});
600         mRunner = null;
601         mGraphId = -1;
602         mCurrentEffect = EFFECT_NONE;
603     }
604 
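    // Loads the filter graph for the currently selected effect, obtains an
    // asynchronous runner for it and configures the effect-specific filter inputs.
    // If a preview is already running, the old runner is stopped here and the new
    // one is started from the runner-done callback.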
605     private synchronized void initializeEffect(boolean forceReset) {
606         if (forceReset ||
607             mCurrentEffect != mEffect ||
608             mCurrentEffect == EFFECT_BACKDROPPER) {
609 
610             invoke(mGraphEnv, sGraphEnvironmentAddReferences,
611                     new Object[] {new Object[] {
612                     "previewSurfaceTexture", mPreviewSurfaceTexture,
613                     "previewWidth", mPreviewWidth,
614                     "previewHeight", mPreviewHeight,
615                     "orientation", mOrientationHint}});
616             if (mState == STATE_PREVIEW ||
617                     mState == STATE_STARTING_PREVIEW) {
618                 // Switching effects while running. Inform video camera.
619                 sendMessage(mCurrentEffect, EFFECT_MSG_SWITCHING_EFFECT);
620             }
621 
622             switch (mEffect) {
623                 case EFFECT_GOOFY_FACE:
624                     mGraphId = (Integer) invoke(mGraphEnv,
625                             sGraphEnvironmentLoadGraph,
626                             new Object[] {mContext, R.raw.goofy_face});
627                     break;
628                 case EFFECT_BACKDROPPER:
629                     sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_STARTED_LEARNING);
630                     mGraphId = (Integer) invoke(mGraphEnv,
631                             sGraphEnvironmentLoadGraph,
632                             new Object[] {mContext, R.raw.backdropper});
633                     break;
634                 default:
635                     throw new RuntimeException("Unknown effect ID " + mEffect + "!");
636             }
637             mCurrentEffect = mEffect;
638 
639             mOldRunner = mRunner;
640             mRunner = invoke(mGraphEnv, sGraphEnvironmentGetRunner,
641                     new Object[] {mGraphId,
642                     getConstant(sClsGraphEnvironment, "MODE_ASYNCHRONOUS")});
643             invoke(mRunner, sGraphRunnerSetDoneCallback, new Object[] {mRunnerDoneCallback});
644             if (mLogVerbose) {
645                 Log.v(TAG, "New runner: " + mRunner
646                       + ". Old runner: " + mOldRunner);
647             }
648             if (mState == STATE_PREVIEW ||
649                     mState == STATE_STARTING_PREVIEW) {
650                 // Switching effects while running. Stop existing runner.
651                 // The stop callback will take care of starting new runner.
652                 mCameraDevice.stopPreview();
653                 mCameraDevice.setPreviewTextureAsync(null);
654                 invoke(mOldRunner, sGraphRunnerStop);
655             }
656         }
657 
658         switch (mCurrentEffect) {
659             case EFFECT_GOOFY_FACE:
660                 tryEnableVideoStabilization(true);
661                 Object goofyFilter = getGraphFilter(mRunner, "goofyrenderer");
662                 setInputValue(goofyFilter, "currentEffect",
663                         ((Integer) mEffectParameter).intValue());
664                 break;
665             case EFFECT_BACKDROPPER:
666                 tryEnableVideoStabilization(false);
667                 Object backgroundSrc = getGraphFilter(mRunner, "background");
668                 if (ApiHelper.HAS_EFFECTS_RECORDING_CONTEXT_INPUT) {
669                     // Set the context first, before setting sourceUrl, to
670                     // guarantee that the content URI gets resolved properly.
671                     setInputValue(backgroundSrc, "context", mContext);
672                 }
673                 setInputValue(backgroundSrc, "sourceUrl", mEffectParameter);
674                 // For front camera, the background video needs to be mirrored in the
675                 // backdropper filter
676                 if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
677                     Object replacer = getGraphFilter(mRunner, "replacer");
678                     setInputValue(replacer, "mirrorBg", true);
679                     if (mLogVerbose) Log.v(TAG, "Setting the background to be mirrored");
680                 }
681                 break;
682             default:
683                 break;
684         }
685         setFaceDetectOrientation();
686         setRecordingOrientation();
687     }
688 
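    // Builds the filter framework and effect graph, then starts the graph runner.
    // The camera preview itself is connected in the SurfaceTexture-ready callback.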
689     public synchronized void startPreview() {
690         if (mLogVerbose) Log.v(TAG, "Starting preview (" + this + ")");
691 
692         switch (mState) {
693             case STATE_STARTING_PREVIEW:
694             case STATE_PREVIEW:
695                 // Already running preview
696                 Log.w(TAG, "startPreview called when already running preview");
697                 return;
698             case STATE_RECORD:
699                 throw new RuntimeException("Cannot start preview when already recording!");
700             case STATE_RELEASED:
701                 throw new RuntimeException("startPreview called on an already released recorder!");
702             default:
703                 break;
704         }
705 
706         if (mEffect == EFFECT_NONE) {
707             throw new RuntimeException("No effect selected!");
708         }
709         if (mEffectParameter == null) {
710             throw new RuntimeException("No effect parameter provided!");
711         }
712         if (mProfile == null) {
713             throw new RuntimeException("No recording profile provided!");
714         }
715         if (mPreviewSurfaceTexture == null) {
716             if (mLogVerbose) Log.v(TAG, "Passed a null surface; waiting for valid one");
717             mState = STATE_WAITING_FOR_SURFACE;
718             return;
719         }
720         if (mCameraDevice == null) {
721             throw new RuntimeException("No camera to record from!");
722         }
723 
724         if (mLogVerbose) Log.v(TAG, "Initializing filter framework and running the graph.");
725         initializeFilterFramework();
726 
727         initializeEffect(true);
728 
729         mState = STATE_STARTING_PREVIEW;
730         invoke(mRunner, sGraphRunnerRun);
731         // Rest of preview startup handled in mSourceReadyCallback
732     }
733 
734     private Object invokeObjectEquals(Object proxy, Object[] args) {
735         return Boolean.valueOf(proxy == args[0]);
736     }
737 
738     private Object invokeObjectToString() {
739         return "Proxy-" + toString();
740     }
741 
742     private void invokeOnLearningDone() {
743         if (mLogVerbose) Log.v(TAG, "Learning done callback triggered");
744         // Called in a processing thread, so have to post message back to UI
745         // thread
746         sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_DONE_LEARNING);
747         enable3ALocks(true);
748     }
749 
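    // Invoked (through the listener proxy) when a graph runner finishes: reports
    // any runner error, tears down the old graph, and then either restarts the
    // graph (effect switch), restores direct preview, or does nothing if the
    // recorder has already been released.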
750     private void invokeOnRunnerDone(Object[] args) {
751         int runnerDoneResult = (Integer) args[0];
752         synchronized (EffectsRecorder.this) {
753             if (mLogVerbose) {
754                 Log.v(TAG,
755                       "Graph runner done (" + EffectsRecorder.this
756                       + ", mRunner " + mRunner
757                       + ", mOldRunner " + mOldRunner + ")");
758             }
759             if (runnerDoneResult ==
760                     (Integer) getConstant(sClsGraphRunner, "RESULT_ERROR")) {
761                 // Handle error case
762                 Log.e(TAG, "Error running filter graph!");
763                 Exception e = null;
764                 if (mRunner != null) {
765                     e = (Exception) invoke(mRunner, sGraphRunnerGetError);
766                 } else if (mOldRunner != null) {
767                     e = (Exception) invoke(mOldRunner, sGraphRunnerGetError);
768                 }
769                 raiseError(e);
770             }
771             if (mOldRunner != null) {
772                 // Tear down old graph if available
773                 if (mLogVerbose) Log.v(TAG, "Tearing down old graph.");
774                 Object glEnv = getContextGLEnvironment(mGraphEnv);
775                 if (glEnv != null && !(Boolean) invoke(glEnv, sGLEnvironmentIsActive)) {
776                     invoke(glEnv, sGLEnvironmentActivate);
777                 }
778                 getGraphTearDown(mOldRunner,
779                         invoke(mGraphEnv, sGraphEnvironmentGetContext));
780                 if (glEnv != null && (Boolean) invoke(glEnv, sGLEnvironmentIsActive)) {
781                     invoke(glEnv, sGLEnvironmentDeactivate);
782                 }
783                 mOldRunner = null;
784             }
785             if (mState == STATE_PREVIEW ||
786                     mState == STATE_STARTING_PREVIEW) {
787                 // Switching effects, start up the new runner
788                 if (mLogVerbose) {
789                     Log.v(TAG, "Previous effect halted. Running graph again. state: "
790                             + mState);
791                 }
792                 tryEnable3ALocks(false);
793                 // In case of an error, the graph restarts from the beginning and, in
794                 // the case of the BACKDROPPER effect, the learner re-learns the background.
795                 // Hence, we need to show the learning dialog to the user
796                 // to avoid recording before the learning is done. Otherwise, the user
797                 // could start recording before the learning is done and the new
798                 // background would only come up later, leading to a final video
799                 // with a heterogeneous background.
800                 // For the BACKDROPPER effect, this path is also executed sometimes at
801                 // the end of a normal recording session. In that case the graph
802                 // does not restart, the learner does not re-learn, and we
803                 // do not want to show the learning dialog.
804                 if (runnerDoneResult == (Integer) getConstant(
805                         sClsGraphRunner, "RESULT_ERROR")
806                         && mCurrentEffect == EFFECT_BACKDROPPER) {
807                     sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_STARTED_LEARNING);
808                 }
809                 invoke(mRunner, sGraphRunnerRun);
810             } else if (mState != STATE_RELEASED) {
811                 // Shutting down effects
812                 if (mLogVerbose) Log.v(TAG, "Runner halted, restoring direct preview");
813                 tryEnable3ALocks(false);
814                 sendMessage(EFFECT_NONE, EFFECT_MSG_EFFECTS_STOPPED);
815             } else {
816                 // STATE_RELEASED - camera will be/has been released as well, do nothing.
817             }
818         }
819     }
820 
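    // Invoked (through the listener proxy) when the graph's SurfaceTexture source
    // is ready: connects the camera preview to it, unless preview was stopped or
    // the recorder released in the meantime.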
821     private void invokeOnSurfaceTextureSourceReady(Object[] args) {
822         SurfaceTexture source = (SurfaceTexture) args[0];
823         if (mLogVerbose) Log.v(TAG, "SurfaceTexture ready callback received");
824         synchronized (EffectsRecorder.this) {
825             mTextureSource = source;
826 
827             if (mState == STATE_CONFIGURE) {
828                 // Stop preview happened while the runner was doing startup tasks
829                 // Since we haven't started anything up, don't do anything
830                 // Rest of cleanup will happen in onRunnerDone
831                 if (mLogVerbose) Log.v(TAG, "Ready callback: Already stopped, skipping.");
832                 return;
833             }
834             if (mState == STATE_RELEASED) {
835                 // EffectsRecorder has been released, so don't touch the camera device
836                 // or anything else
837                 if (mLogVerbose) Log.v(TAG, "Ready callback: Already released, skipping.");
838                 return;
839             }
840             if (source == null) {
841                 if (mLogVerbose) {
842                     Log.v(TAG, "Ready callback: source null! Looks like graph was closed!");
843                 }
844                 if (mState == STATE_PREVIEW ||
845                         mState == STATE_STARTING_PREVIEW ||
846                         mState == STATE_RECORD) {
847                     // A null source here means the graph is shutting down
848                     // unexpectedly, so we need to turn off preview before
849                     // the surface texture goes away.
850                     if (mLogVerbose) {
851                         Log.v(TAG, "Ready callback: State: " + mState
852                                 + ". stopCameraPreview");
853                     }
854 
855                     stopCameraPreview();
856                 }
857                 return;
858             }
859 
860             // Lock AE/AWB to reduce transition flicker
861             tryEnable3ALocks(true);
862 
863             mCameraDevice.stopPreview();
864             if (mLogVerbose) Log.v(TAG, "Runner active, connecting effects preview");
865             mCameraDevice.setPreviewTextureAsync(mTextureSource);
866 
867             mCameraDevice.startPreviewAsync();
868 
869             // Unlock AE/AWB after preview started
870             tryEnable3ALocks(false);
871 
872             mState = STATE_PREVIEW;
873 
874             if (mLogVerbose) Log.v(TAG, "Start preview/effect switch complete");
875 
876             // Sending a message to listener that preview is complete
877             sendMessage(mCurrentEffect, EFFECT_MSG_PREVIEW_RUNNING);
878         }
879     }
880 
881     private void invokeOnRecordingDone() {
882         // Forward the callback to the VideoModule object (as an asynchronous event).
883         if (mLogVerbose) Log.v(TAG, "Recording done callback triggered");
884         sendMessage(EFFECT_NONE, EFFECT_MSG_RECORDING_DONE);
885     }
886 
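    // Configures the graph's "recorder" filter (output target, audio source,
    // limits, time lapse interval) and starts encoding frames from the running graph.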
887     public synchronized void startRecording() {
888         if (mLogVerbose) Log.v(TAG, "Starting recording (" + this + ")");
889 
890         switch (mState) {
891             case STATE_RECORD:
892                 throw new RuntimeException("Already recording, cannot begin anew!");
893             case STATE_RELEASED:
894                 throw new RuntimeException(
895                     "startRecording called on an already released recorder!");
896             default:
897                 break;
898         }
899 
900         if ((mOutputFile == null) && (mFd == null)) {
901             throw new RuntimeException("No output file name or descriptor provided!");
902         }
903 
904         if (mState == STATE_CONFIGURE) {
905             startPreview();
906         }
907 
908         Object recorder = getGraphFilter(mRunner, "recorder");
909         if (mFd != null) {
910             setInputValue(recorder, "outputFileDescriptor", mFd);
911         } else {
912             setInputValue(recorder, "outputFile", mOutputFile);
913         }
914         // It is OK to set the audio source without checking for time lapse here,
915         // since that check is done in the MediaEncoderFilter itself.
916         setInputValue(recorder, "audioSource", MediaRecorder.AudioSource.CAMCORDER);
917         setInputValue(recorder, "recordingProfile", mProfile);
918         setInputValue(recorder, "orientationHint", mOrientationHint);
919         // It is important to set the time lapse interval to 0 if the capture rate is
920         // not > 0, since the recorder does not get created every time recording starts.
921         // The recorder infers whether the capture is time lapse based on the value of
922         // this interval.
923         boolean captureTimeLapse = mCaptureRate > 0;
924         if (captureTimeLapse) {
925             double timeBetweenFrameCapture = 1 / mCaptureRate;
926             setInputValue(recorder, "timelapseRecordingIntervalUs",
927                     (long) (1000000 * timeBetweenFrameCapture));
928 
929         } else {
930             setInputValue(recorder, "timelapseRecordingIntervalUs", 0L);
931         }
932 
933         if (mInfoListener != null) {
934             setInputValue(recorder, "infoListener", mInfoListener);
935         }
936         if (mErrorListener != null) {
937             setInputValue(recorder, "errorListener", mErrorListener);
938         }
939         setInputValue(recorder, "maxFileSize", mMaxFileSize);
940         setInputValue(recorder, "maxDurationMs", mMaxDurationMs);
941         setInputValue(recorder, "recording", true);
942         mSoundPlayer.play(SoundClips.START_VIDEO_RECORDING);
943         mState = STATE_RECORD;
944     }
945 
946     public synchronized void stopRecording() {
947         if (mLogVerbose) Log.v(TAG, "Stop recording (" + this + ")");
948 
949         switch (mState) {
950             case STATE_CONFIGURE:
951             case STATE_STARTING_PREVIEW:
952             case STATE_PREVIEW:
953                 Log.w(TAG, "StopRecording called when recording not active!");
954                 return;
955             case STATE_RELEASED:
956                 throw new RuntimeException("stopRecording called on released EffectsRecorder!");
957             default:
958                 break;
959         }
960         Object recorder = getGraphFilter(mRunner, "recorder");
961         setInputValue(recorder, "recording", false);
962         mSoundPlayer.play(SoundClips.STOP_VIDEO_RECORDING);
963         mState = STATE_PREVIEW;
964     }
965 
966     // Called to tell the filter graph that the display SurfaceTexture is no longer valid,
967     // so the filter graph should not hold any reference to the surface created from it.
968     public synchronized void disconnectDisplay() {
969         if (mLogVerbose) Log.v(TAG, "Disconnecting the graph from the " +
970             "SurfaceTexture");
971         Object display = getGraphFilter(mRunner, "display");
972         invoke(display, sSurfaceTextureTargetDisconnect, new Object[] {
973                 invoke(mGraphEnv, sGraphEnvironmentGetContext)});
974     }
975 
976     // The VideoModule will call this to notify that the camera is being
977     // released to the outside world. This call should happen after the
978     // stopRecording call. Else, the effects may throw an exception.
979     // With the recording stopped, the stopPreview call will not try to
980     // release the camera again.
981     // This must be called in onPause() if the effects are ON.
982     public synchronized void disconnectCamera() {
983         if (mLogVerbose) Log.v(TAG, "Disconnecting the effects from Camera");
984         stopCameraPreview();
985         mCameraDevice = null;
986     }
987 
988     // In the normal case, when disconnect is not called, we should not
989     // set the camera device to null, since the runner-done callback still tries to
990     // enable the 3A locks, which need the camera device.
991     public synchronized void stopCameraPreview() {
992         if (mLogVerbose) Log.v(TAG, "Stopping camera preview.");
993         if (mCameraDevice == null) {
994             Log.d(TAG, "Camera already null. Nothing to disconnect");
995             return;
996         }
997         mCameraDevice.stopPreview();
998         mCameraDevice.setPreviewTextureAsync(null);
999     }
1000 
1001     // Stop and release effect resources
1002     public synchronized void stopPreview() {
1003         if (mLogVerbose) Log.v(TAG, "Stopping preview (" + this + ")");
1004         switch (mState) {
1005             case STATE_CONFIGURE:
1006                 Log.w(TAG, "StopPreview called when preview not active!");
1007                 return;
1008             case STATE_RELEASED:
1009                 throw new RuntimeException("stopPreview called on released EffectsRecorder!");
1010             default:
1011                 break;
1012         }
1013 
1014         if (mState == STATE_RECORD) {
1015             stopRecording();
1016         }
1017 
1018         mCurrentEffect = EFFECT_NONE;
1019 
1020         // This will not do anything if the camera has already been disconnected.
1021         stopCameraPreview();
1022 
1023         mState = STATE_CONFIGURE;
1024         mOldRunner = mRunner;
1025         invoke(mRunner, sGraphRunnerStop);
1026         mRunner = null;
1027         // Rest of stop and release handled in mRunnerDoneCallback
1028     }
1029 
1030     // Try to enable/disable video stabilization if supported; otherwise return false
1031     // It is called from a synchronized block.
1032     boolean tryEnableVideoStabilization(boolean toggle) {
1033         if (mLogVerbose) Log.v(TAG, "tryEnableVideoStabilization.");
1034         if (mCameraDevice == null) {
1035             Log.d(TAG, "Camera already null. Not enabling video stabilization.");
1036             return false;
1037         }
1038         Camera.Parameters params = mCameraDevice.getParameters();
1039 
1040         String vstabSupported = params.get("video-stabilization-supported");
1041         if ("true".equals(vstabSupported)) {
1042             if (mLogVerbose) Log.v(TAG, "Setting video stabilization to " + toggle);
1043             params.set("video-stabilization", toggle ? "true" : "false");
1044             mCameraDevice.setParameters(params);
1045             return true;
1046         }
1047         if (mLogVerbose) Log.v(TAG, "Video stabilization not supported");
1048         return false;
1049     }
1050 
1051     // Try to enable/disable 3A locks if supported; otherwise return false
1052     @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH)
1053     synchronized boolean tryEnable3ALocks(boolean toggle) {
1054         if (mLogVerbose) Log.v(TAG, "tryEnable3ALocks");
1055         if (mCameraDevice == null) {
1056             Log.d(TAG, "Camera already null. Not enabling 3A locks.");
1057             return false;
1058         }
1059         Camera.Parameters params = mCameraDevice.getParameters();
1060         if (Util.isAutoExposureLockSupported(params) &&
1061             Util.isAutoWhiteBalanceLockSupported(params)) {
1062             params.setAutoExposureLock(toggle);
1063             params.setAutoWhiteBalanceLock(toggle);
1064             mCameraDevice.setParameters(params);
1065             return true;
1066         }
1067         return false;
1068     }
1069 
1070     // Try to enable/disable 3A locks if supported; otherwise, throw error
1071     // Use this when locks are essential to success
1072     synchronized void enable3ALocks(boolean toggle) {
1073         if (mLogVerbose) Log.v(TAG, "Enable3ALocks");
1074         if (mCameraDevice == null) {
1075             Log.d(TAG, "Camera already null. Not enabling 3A locks.");
1076             return;
1077         }
1078         Camera.Parameters params = mCameraDevice.getParameters();
1079         if (!tryEnable3ALocks(toggle)) {
1080             throw new RuntimeException("Attempt to lock 3A on camera with no locking support!");
1081         }
1082     }
1083 
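    // All filter framework listener interfaces are implemented as dynamic proxies
    // backed by this handler. Callbacks are forwarded to the current
    // EffectsRecorder; callbacks carrying a stale recorder index are ignored.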
1084     static class SerializableInvocationHandler
1085             implements InvocationHandler, Serializable {
1086         private final int mEffectsRecorderIndex;
1087         public SerializableInvocationHandler(int index) {
1088             mEffectsRecorderIndex = index;
1089         }
1090 
1091         @Override
1092         public Object invoke(Object proxy, Method method, Object[] args)
1093                 throws Throwable {
1094             if (sEffectsRecorder == null) return null;
1095             if (mEffectsRecorderIndex != sEffectsRecorderIndex) {
1096                 Log.v(TAG, "Ignore old callback " + mEffectsRecorderIndex);
1097                 return null;
1098             }
1099             if (method.equals(sObjectEquals)) {
1100                 return sEffectsRecorder.invokeObjectEquals(proxy, args);
1101             } else if (method.equals(sObjectToString)) {
1102                 return sEffectsRecorder.invokeObjectToString();
1103             } else if (method.equals(sLearningDoneListenerOnLearningDone)) {
1104                 sEffectsRecorder.invokeOnLearningDone();
1105             } else if (method.equals(sOnRunnerDoneListenerOnRunnerDone)) {
1106                 sEffectsRecorder.invokeOnRunnerDone(args);
1107             } else if (method.equals(
1108                     sSurfaceTextureSourceListenerOnSurfaceTextureSourceReady)) {
1109                 sEffectsRecorder.invokeOnSurfaceTextureSourceReady(args);
1110             } else if (method.equals(sOnRecordingDoneListenerOnRecordingDone)) {
1111                 sEffectsRecorder.invokeOnRecordingDone();
1112             }
1113             return null;
1114         }
1115     }
1116 
1117     // Indicates that all camera/recording activity needs to halt
1118     public synchronized void release() {
1119         if (mLogVerbose) Log.v(TAG, "Releasing (" + this + ")");
1120 
1121         switch (mState) {
1122             case STATE_RECORD:
1123             case STATE_STARTING_PREVIEW:
1124             case STATE_PREVIEW:
1125                 stopPreview();
1126                 // Fall-through
1127             default:
1128                 if (mSoundPlayer != null) {
1129                     mSoundPlayer.release();
1130                     mSoundPlayer = null;
1131                 }
1132                 mState = STATE_RELEASED;
1133                 break;
1134         }
1135         sEffectsRecorder = null;
1136     }
1137 
1138     private void sendMessage(final int effect, final int msg) {
1139         if (mEffectsListener != null) {
1140             mHandler.post(new Runnable() {
1141                 @Override
1142                 public void run() {
1143                     mEffectsListener.onEffectsUpdate(effect, msg);
1144                 }
1145             });
1146         }
1147     }
1148 
1149     private void raiseError(final Exception exception) {
1150         if (mEffectsListener != null) {
1151             mHandler.post(new Runnable() {
1152                 @Override
1153                 public void run() {
1154                     if (mFd != null) {
1155                         mEffectsListener.onEffectsError(exception, null);
1156                     } else {
1157                         mEffectsListener.onEffectsError(exception, mOutputFile);
1158                     }
1159                 }
1160             });
1161         }
1162     }
1163 
1164     // invoke method on receiver with no arguments
1165     private Object invoke(Object receiver, Method method) {
1166         try {
1167             return method.invoke(receiver);
1168         } catch (Exception ex) {
1169             throw new RuntimeException(ex);
1170         }
1171     }
1172 
1173     // invoke method on receiver with arguments
1174     private Object invoke(Object receiver, Method method, Object[] args) {
1175         try {
1176             return method.invoke(receiver, args);
1177         } catch (Exception ex) {
1178             throw new RuntimeException(ex);
1179         }
1180     }
1181 
1182     private void setInputValue(Object receiver, String key, Object value) {
1183         try {
1184             sFilterSetInputValue.invoke(receiver, new Object[] {key, value});
1185         } catch (Exception ex) {
1186             throw new RuntimeException(ex);
1187         }
1188     }
1189 
1190     private Object newInstance(Constructor<?> ct, Object[] initArgs) {
1191         try {
1192             return ct.newInstance(initArgs);
1193         } catch (Exception ex) {
1194             throw new RuntimeException(ex);
1195         }
1196     }
1197 
1198     private Object newInstance(Constructor<?> ct) {
1199         try {
1200             return ct.newInstance();
1201         } catch (Exception ex) {
1202             throw new RuntimeException(ex);
1203         }
1204     }
1205 
1206     private Object getGraphFilter(Object receiver, String name) {
1207         try {
1208             return sFilterGraphGetFilter.invoke(sGraphRunnerGetGraph
1209                     .invoke(receiver), new Object[] {name});
1210         } catch (Exception ex) {
1211             throw new RuntimeException(ex);
1212         }
1213     }
1214 
1215     private Object getContextGLEnvironment(Object receiver) {
1216         try {
1217             return sFilterContextGetGLEnvironment
1218                     .invoke(sGraphEnvironmentGetContext.invoke(receiver));
1219         } catch (Exception ex) {
1220             throw new RuntimeException(ex);
1221         }
1222     }
1223 
1224     private void getGraphTearDown(Object receiver, Object filterContext) {
1225         try {
1226             sFilterGraphTearDown.invoke(sGraphRunnerGetGraph.invoke(receiver),
1227                     new Object[]{filterContext});
1228         } catch (Exception ex) {
1229             throw new RuntimeException(ex);
1230         }
1231     }
1232 
1233     private Object getConstant(Class<?> cls, String name) {
1234         try {
1235             return cls.getDeclaredField(name).get(null);
1236         } catch (Exception ex) {
1237             throw new RuntimeException(ex);
1238         }
1239     }
1240 }
1241