// NOTE(review): code-browser navigation chrome removed from scraped source.
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16 
17 package androidx.media.filterfw;
18 
19 import android.annotation.TargetApi;
20 import android.graphics.SurfaceTexture;
21 import android.hardware.Camera;
22 import android.hardware.Camera.CameraInfo;
23 import android.hardware.Camera.PreviewCallback;
24 import android.media.CamcorderProfile;
25 import android.media.MediaRecorder;
26 import android.opengl.GLES20;
27 import android.os.Build.VERSION;
28 import android.util.Log;
29 import android.view.Display;
30 import android.view.Surface;
31 import android.view.SurfaceView;
32 
33 import java.io.IOException;
34 import java.nio.ByteBuffer;
35 import java.util.HashMap;
36 import java.util.HashSet;
37 import java.util.List;
38 import java.util.Set;
39 import java.util.Vector;
40 import java.util.concurrent.LinkedBlockingQueue;
41 import java.util.concurrent.TimeUnit;
42 import java.util.concurrent.atomic.AtomicInteger;
43 import java.util.concurrent.locks.Condition;
44 import java.util.concurrent.locks.ReentrantLock;
45 
46 import javax.microedition.khronos.egl.EGLContext;
47 
/**
 * The CameraStreamer streams Frames from a camera to connected clients.
 *
 * There is one centralized CameraStreamer object per MffContext, and only one stream can be
 * active at any time. The CameraStreamer acts as a Camera "server" that streams frames to any
 * number of connected clients. Typically, these are CameraSource filters that are part of a
 * graph, but other clients can be written as well.
 */
56 public class CameraStreamer {
57 
58     /** Camera Facing: Don't Care: Picks any available camera. */
59     public static final int FACING_DONTCARE = 0;
60     /** Camera Facing: Front: Use the front facing camera. */
61     public static final int FACING_FRONT = 1;
62     /** Camera Facing: Back: Use the rear facing camera. */
63     public static final int FACING_BACK = 2;
64 
65     /** How long the streamer should wait to acquire the camera before giving up. */
66     public static long MAX_CAMERA_WAIT_TIME = 5;
67 
68     /**
69      * The global camera lock, that is closed when the camera is acquired by any CameraStreamer,
70      * and opened when a streamer is done using the camera.
71      */
72     static ReentrantLock mCameraLock = new ReentrantLock();
73 
74     /** The Camera thread that grabs frames from the camera */
75     private CameraRunnable mCameraRunner = null;
76 
77     private abstract class CamFrameHandler {
78         protected int mCameraWidth;
79         protected int mCameraHeight;
80         protected int mOutWidth;
81         protected int mOutHeight;
82         protected CameraRunnable mRunner;
83 
84         /** Map of GLSL shaders (one for each target context) */
85         protected HashMap<EGLContext, ImageShader> mTargetShaders
86             = new HashMap<EGLContext, ImageShader>();
87 
88         /** Map of target textures (one for each target context) */
89         protected HashMap<EGLContext, TextureSource> mTargetTextures
90             = new HashMap<EGLContext, TextureSource>();
91 
92         /** Map of set of clients (one for each target context) */
93         protected HashMap<EGLContext, Set<FrameClient>> mContextClients
94             = new HashMap<EGLContext, Set<FrameClient>>();
95 
96         /** List of clients that are consuming camera frames. */
97         protected Vector<FrameClient> mClients = new Vector<FrameClient>();
98 
initWithRunner(CameraRunnable camRunner)99         public void initWithRunner(CameraRunnable camRunner) {
100             mRunner = camRunner;
101         }
102 
setCameraSize(int width, int height)103         public void setCameraSize(int width, int height) {
104             mCameraWidth = width;
105             mCameraHeight = height;
106         }
107 
registerClient(FrameClient client)108         public void registerClient(FrameClient client) {
109             EGLContext context = RenderTarget.currentContext();
110             Set<FrameClient> clientTargets = clientsForContext(context);
111             clientTargets.add(client);
112             mClients.add(client);
113             onRegisterClient(client, context);
114         }
115 
unregisterClient(FrameClient client)116         public void unregisterClient(FrameClient client) {
117             EGLContext context = RenderTarget.currentContext();
118             Set<FrameClient> clientTargets = clientsForContext(context);
119             clientTargets.remove(client);
120             if (clientTargets.isEmpty()) {
121                 onCleanupContext(context);
122             }
123             mClients.remove(client);
124         }
125 
setupServerFrame()126         public abstract void setupServerFrame();
updateServerFrame()127         public abstract void updateServerFrame();
grabFrame(FrameImage2D targetFrame)128         public abstract void grabFrame(FrameImage2D targetFrame);
release()129         public abstract void release();
130 
onUpdateCameraOrientation(int orientation)131         public void onUpdateCameraOrientation(int orientation) {
132             if (orientation % 180 != 0) {
133                 mOutWidth = mCameraHeight;
134                 mOutHeight = mCameraWidth;
135             } else {
136                 mOutWidth = mCameraWidth;
137                 mOutHeight = mCameraHeight;
138             }
139         }
140 
clientsForContext(EGLContext context)141         protected Set<FrameClient> clientsForContext(EGLContext context) {
142             Set<FrameClient> clients = mContextClients.get(context);
143             if (clients == null) {
144                 clients = new HashSet<FrameClient>();
145                 mContextClients.put(context, clients);
146             }
147             return clients;
148         }
149 
onRegisterClient(FrameClient client, EGLContext context)150         protected void onRegisterClient(FrameClient client, EGLContext context) {
151         }
152 
onCleanupContext(EGLContext context)153         protected void onCleanupContext(EGLContext context) {
154             TextureSource texture = mTargetTextures.get(context);
155             ImageShader shader = mTargetShaders.get(context);
156             if (texture != null) {
157                 texture.release();
158                 mTargetTextures.remove(context);
159             }
160             if (shader != null) {
161                 mTargetShaders.remove(context);
162             }
163         }
164 
textureForContext(EGLContext context)165         protected TextureSource textureForContext(EGLContext context) {
166             TextureSource texture = mTargetTextures.get(context);
167             if (texture == null) {
168                 texture = createClientTexture();
169                 mTargetTextures.put(context, texture);
170             }
171             return texture;
172         }
173 
shaderForContext(EGLContext context)174         protected ImageShader shaderForContext(EGLContext context) {
175             ImageShader shader = mTargetShaders.get(context);
176             if (shader == null) {
177                 shader = createClientShader();
178                 mTargetShaders.put(context, shader);
179             }
180             return shader;
181         }
182 
createClientShader()183         protected ImageShader createClientShader() {
184             return null;
185         }
186 
createClientTexture()187         protected TextureSource createClientTexture() {
188             return null;
189         }
190 
isFrontMirrored()191         public boolean isFrontMirrored() {
192             return true;
193         }
194     }
195 
    // Jellybean (and later) back-end
    /**
     * Back-end for API 16+. Uses SurfaceTexture.attachToGLContext() /
     * detachFromGLContext() (added in API 16) to move the camera's preview
     * SurfaceTexture between GL contexts, so each client samples the camera frame
     * directly instead of going through the per-context server copy that the ICS
     * back-end performs.
     */
    @TargetApi(16)
    private class CamFrameHandlerJB extends CamFrameHandlerICS {

        @Override
        public void setupServerFrame() {
            // Binds the preview SurfaceTexture to the camera; the overridden
            // setupPreviewTexture below leaves it detached from any GL context.
            setupPreviewTexture(mRunner.mCamera);
        }

        @Override
        public synchronized void updateServerFrame() {
            // Latch the newest camera image, then notify all registered clients.
            updateSurfaceTexture();
            informClients();
        }

        @Override
        public synchronized void grabFrame(FrameImage2D targetFrame) {
            // Temporarily attach the shared preview SurfaceTexture to a fresh
            // external texture in the caller's GL context, copy it into the target
            // frame, then detach again so another context can attach later.
            TextureSource targetTex = TextureSource.newExternalTexture();
            ImageShader copyShader = shaderForContext(RenderTarget.currentContext());
            if (targetTex == null || copyShader == null) {
                throw new RuntimeException("Attempting to grab camera frame from unknown "
                    + "thread: " + Thread.currentThread() + "!");
            }
            mPreviewSurfaceTexture.attachToGLContext(targetTex.getTextureId());
            updateTransform(copyShader);
            updateShaderTargetRect(copyShader);
            targetFrame.resize(new int[] { mOutWidth, mOutHeight });
            copyShader.process(targetTex,
                               targetFrame.lockRenderTarget(),
                               mOutWidth,
                               mOutHeight);
            // Timestamp comes straight from the camera's SurfaceTexture.
            targetFrame.setTimestamp(mPreviewSurfaceTexture.getTimestamp());
            targetFrame.unlock();
            mPreviewSurfaceTexture.detachFromGLContext();
            targetTex.release();
        }

        @Override
        protected void updateShaderTargetRect(ImageShader shader) {
            // Negative height flips vertically in both cases; a front-facing camera
            // with flipping enabled is additionally mirrored horizontally (negative
            // width starting at x=1).
            if ((mRunner.mActualFacing == FACING_FRONT) && mRunner.mFlipFront) {
                shader.setTargetRect(1f, 1f, -1f, -1f);
            } else {
                shader.setTargetRect(0f, 1f, 1f, -1f);
            }
        }

        @Override
        protected void setupPreviewTexture(Camera camera) {
            // Create the preview texture as on ICS, then detach it so client
            // contexts may attach to it on demand (see grabFrame()).
            super.setupPreviewTexture(camera);
            mPreviewSurfaceTexture.detachFromGLContext();
        }

        /** Latches the most recent camera image while attached to the server context. */
        protected void updateSurfaceTexture() {
            mPreviewSurfaceTexture.attachToGLContext(mPreviewTexture.getTextureId());
            mPreviewSurfaceTexture.updateTexImage();
            mPreviewSurfaceTexture.detachFromGLContext();
        }

        /** Notifies every registered client that a new camera frame is available. */
        protected void informClients() {
            synchronized (mClients) {
                for (FrameClient client : mClients) {
                    client.onCameraFrameAvailable();
                }
            }
        }
    }
262 
    // ICS (and later) back-end
    /**
     * Back-end for API 15+. The camera streams into a single server-side
     * SurfaceTexture; on every new frame the server copies it into one
     * SurfaceTexture per client EGL context (distributeFrames()), and each client
     * then copies from its per-context SurfaceTexture into its target frame
     * (grabFrame()).
     */
    @TargetApi(15)
    private class CamFrameHandlerICS extends CamFrameHandler  {

        /** Fragment shader that samples the external (camera) texture. */
        protected static final String mCopyShaderSource =
            "#extension GL_OES_EGL_image_external : require\n" +
            "precision mediump float;\n" +
            "uniform samplerExternalOES tex_sampler_0;\n" +
            "varying vec2 v_texcoord;\n" +
            "void main() {\n" +
            "  gl_FragColor = texture2D(tex_sampler_0, v_texcoord);\n" +
            "}\n";

        /** The camera transform matrix (filled from the preview SurfaceTexture). */
        private float[] mCameraTransform = new float[16];

        /** The texture the camera streams to, and its wrapping SurfaceTexture. */
        protected TextureSource mPreviewTexture = null;
        protected SurfaceTexture mPreviewSurfaceTexture = null;

        /** Map of target surface textures (one for each target context) */
        protected HashMap<EGLContext, SurfaceTexture> mTargetSurfaceTextures
            = new HashMap<EGLContext, SurfaceTexture>();

        /** Map of RenderTargets for client SurfaceTextures */
        protected HashMap<SurfaceTexture, RenderTarget> mClientRenderTargets
            = new HashMap<SurfaceTexture, RenderTarget>();

        /** Server side copy shader, created lazily by getCopyShader(). */
        protected ImageShader mCopyShader = null;

        @Override
        public synchronized void setupServerFrame() {
            setupPreviewTexture(mRunner.mCamera);
        }

        @Override
        public synchronized void updateServerFrame() {
            // Latch the newest camera image, then fan it out to all client contexts.
            mPreviewSurfaceTexture.updateTexImage();
            distributeFrames();
        }

        @Override
        public void onUpdateCameraOrientation(int orientation) {
            super.onUpdateCameraOrientation(orientation);
            mRunner.mCamera.setDisplayOrientation(orientation);
            // Output dimensions may have swapped; resize the client surface textures.
            updateSurfaceTextureSizes();
        }

        @Override
        public synchronized void onRegisterClient(FrameClient client, EGLContext context) {
            final Set<FrameClient> clientTargets = clientsForContext(context);

            // Make sure we have texture, shader, and surfacetexture setup for this context.
            TextureSource clientTex = textureForContext(context);
            ImageShader copyShader = shaderForContext(context);
            SurfaceTexture surfTex = surfaceTextureForContext(context);

            // Listen to client-side surface texture updates
            surfTex.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
                @Override
                public void onFrameAvailable(SurfaceTexture surfaceTexture) {
                    // clientTargets is the live per-context set, so clients added to
                    // this context later are notified as well.
                    for (FrameClient clientTarget : clientTargets) {
                        clientTarget.onCameraFrameAvailable();
                    }
                }
            });
        }

        @Override
        public synchronized void grabFrame(FrameImage2D targetFrame) {
            // Get the GL objects for the receiver's context
            EGLContext clientContext = RenderTarget.currentContext();
            TextureSource clientTex = textureForContext(clientContext);
            ImageShader copyShader = shaderForContext(clientContext);
            SurfaceTexture surfTex = surfaceTextureForContext(clientContext);
            if (clientTex == null || copyShader == null || surfTex == null) {
                throw new RuntimeException("Attempting to grab camera frame from unknown "
                    + "thread: " + Thread.currentThread() + "!");
            }

            // Copy from client ST to client tex
            surfTex.updateTexImage();
            targetFrame.resize(new int[] { mOutWidth, mOutHeight });
            copyShader.process(clientTex,
                               targetFrame.lockRenderTarget(),
                               mOutWidth,
                               mOutHeight);

            // Timestamp is taken from the server-side texture, not the client copy.
            targetFrame.setTimestamp(mPreviewSurfaceTexture.getTimestamp());
            targetFrame.unlock();
        }

        @Override
        public synchronized void release() {
            if (mPreviewTexture != null) {
                mPreviewTexture.release();
                mPreviewTexture = null;
            }
            if (mPreviewSurfaceTexture != null) {
                mPreviewSurfaceTexture.release();
                mPreviewSurfaceTexture = null;
            }
        }

        @Override
        protected ImageShader createClientShader() {
            return new ImageShader(mCopyShaderSource);
        }

        @Override
        protected TextureSource createClientTexture() {
            return TextureSource.newExternalTexture();
        }

        /** Copies the latest server frame into every client context's SurfaceTexture. */
        protected void distributeFrames() {
            updateTransform(getCopyShader());
            updateShaderTargetRect(getCopyShader());

            for (SurfaceTexture clientTexture : mTargetSurfaceTextures.values()) {
                RenderTarget clientTarget = renderTargetFor(clientTexture);
                clientTarget.focus();
                getCopyShader().process(mPreviewTexture,
                                        clientTarget,
                                        mOutWidth,
                                        mOutHeight);
                GLToolbox.checkGlError("distribute frames");
                clientTarget.swapBuffers();
            }
        }

        /** Returns (creating and caching if needed) a RenderTarget drawing into surfaceTex. */
        protected RenderTarget renderTargetFor(SurfaceTexture surfaceTex) {
            RenderTarget target = mClientRenderTargets.get(surfaceTex);
            if (target == null) {
                target = RenderTarget.currentTarget().forSurfaceTexture(surfaceTex);
                mClientRenderTargets.put(surfaceTex, target);
            }
            return target;
        }

        /** Creates the server-side preview texture pair and binds it to the camera. */
        protected void setupPreviewTexture(Camera camera) {
            if (mPreviewTexture == null) {
                mPreviewTexture = TextureSource.newExternalTexture();
            }
            if (mPreviewSurfaceTexture == null) {
                mPreviewSurfaceTexture = new SurfaceTexture(mPreviewTexture.getTextureId());
                try {
                    camera.setPreviewTexture(mPreviewSurfaceTexture);
                } catch (IOException e) {
                    throw new RuntimeException("Could not bind camera surface texture: " +
                                               e.getMessage() + "!");
                }
                // Wake the camera runner whenever the camera pushes a new frame.
                mPreviewSurfaceTexture.setOnFrameAvailableListener(mOnCameraFrameListener);
            }
        }

        /** Returns the server-side copy shader, creating it on first use. */
        protected ImageShader getCopyShader() {
            if (mCopyShader == null) {
                mCopyShader = new ImageShader(mCopyShaderSource);
            }
            return mCopyShader;
        }

        /** Returns (creating if needed) the per-context SurfaceTexture backed by that
         *  context's client texture. May return null if no texture exists for the context. */
        protected SurfaceTexture surfaceTextureForContext(EGLContext context) {
            SurfaceTexture surfTex = mTargetSurfaceTextures.get(context);
            if (surfTex == null) {
                TextureSource texture = textureForContext(context);
                if (texture != null) {
                    surfTex = new SurfaceTexture(texture.getTextureId());
                    surfTex.setDefaultBufferSize(mOutWidth, mOutHeight);
                    mTargetSurfaceTextures.put(context, surfTex);
                }
            }
            return surfTex;
        }

        /** Mirrors the target rect horizontally for a flipped front-facing camera. */
        protected void updateShaderTargetRect(ImageShader shader) {
            if ((mRunner.mActualFacing == FACING_FRONT) && mRunner.mFlipFront) {
                shader.setTargetRect(1f, 0f, -1f, 1f);
            } else {
                shader.setTargetRect(0f, 0f, 1f, 1f);
            }
        }

        /** Applies the current output size to all client SurfaceTextures. */
        protected synchronized void updateSurfaceTextureSizes() {
            for (SurfaceTexture clientTexture : mTargetSurfaceTextures.values()) {
                clientTexture.setDefaultBufferSize(mOutWidth, mOutHeight);
            }
        }

        /** Feeds the preview SurfaceTexture's transform matrix into the given shader. */
        protected void updateTransform(ImageShader shader) {
            mPreviewSurfaceTexture.getTransformMatrix(mCameraTransform);
            shader.setSourceTransform(mCameraTransform);
        }

        @Override
        protected void onCleanupContext(EGLContext context) {
            super.onCleanupContext(context);
            SurfaceTexture surfaceTex = mTargetSurfaceTextures.get(context);
            if (surfaceTex != null) {
                surfaceTex.release();
                mTargetSurfaceTextures.remove(context);
            }
        }

        /** Fired on each new camera frame; signals the camera runner thread. */
        protected SurfaceTexture.OnFrameAvailableListener mOnCameraFrameListener =
                new SurfaceTexture.OnFrameAvailableListener() {
            @Override
            public void onFrameAvailable(SurfaceTexture surfaceTexture) {
                mRunner.signalNewFrame();
            }
        };
    }
476 
477     // Gingerbread (and later) back-end
478     @TargetApi(9)
479     private final class CamFrameHandlerGB extends CamFrameHandler  {
480 
481         private SurfaceView mSurfaceView;
482         private byte[] mFrameBufferFront;
483         private byte[] mFrameBufferBack;
484         private boolean mWriteToBack = true;
485         private float[] mTargetCoords = new float[] { 0f, 0f, 1f, 0f, 0f, 1f, 1f, 1f };
486         final Object mBufferLock = new Object();
487 
488         private String mNV21ToRGBAFragment =
489             "precision mediump float;\n" +
490             "\n" +
491             "uniform sampler2D tex_sampler_0;\n" +
492             "varying vec2 v_y_texcoord;\n" +
493             "varying vec2 v_vu_texcoord;\n" +
494             "varying vec2 v_pixcoord;\n" +
495             "\n" +
496             "vec3 select(vec4 yyyy, vec4 vuvu, int s) {\n" +
497             "  if (s == 0) {\n" +
498             "    return vec3(yyyy.r, vuvu.g, vuvu.r);\n" +
499             "  } else if (s == 1) {\n" +
500             "    return vec3(yyyy.g, vuvu.g, vuvu.r);\n" +
501             " } else if (s == 2) {\n" +
502             "    return vec3(yyyy.b, vuvu.a, vuvu.b);\n" +
503             "  } else  {\n" +
504             "    return vec3(yyyy.a, vuvu.a, vuvu.b);\n" +
505             "  }\n" +
506             "}\n" +
507             "\n" +
508             "vec3 yuv2rgb(vec3 yuv) {\n" +
509             "  mat4 conversion = mat4(1.0,  0.0,    1.402, -0.701,\n" +
510             "                         1.0, -0.344, -0.714,  0.529,\n" +
511             "                         1.0,  1.772,  0.0,   -0.886,\n" +
512             "                         0, 0, 0, 0);" +
513             "  return (vec4(yuv, 1.0) * conversion).rgb;\n" +
514             "}\n" +
515             "\n" +
516             "void main() {\n" +
517             "  vec4 yyyy = texture2D(tex_sampler_0, v_y_texcoord);\n" +
518             "  vec4 vuvu = texture2D(tex_sampler_0, v_vu_texcoord);\n" +
519             "  int s = int(mod(floor(v_pixcoord.x), 4.0));\n" +
520             "  vec3 yuv = select(yyyy, vuvu, s);\n" +
521             "  vec3 rgb = yuv2rgb(yuv);\n" +
522             "  gl_FragColor = vec4(rgb, 1.0);\n" +
523             "}";
524 
525         private String mNV21ToRGBAVertex =
526             "attribute vec4 a_position;\n" +
527             "attribute vec2 a_y_texcoord;\n" +
528             "attribute vec2 a_vu_texcoord;\n" +
529             "attribute vec2 a_pixcoord;\n" +
530             "varying vec2 v_y_texcoord;\n" +
531             "varying vec2 v_vu_texcoord;\n" +
532             "varying vec2 v_pixcoord;\n" +
533             "void main() {\n" +
534             "  gl_Position = a_position;\n" +
535             "  v_y_texcoord = a_y_texcoord;\n" +
536             "  v_vu_texcoord = a_vu_texcoord;\n" +
537             "  v_pixcoord = a_pixcoord;\n" +
538             "}\n";
539 
readBuffer()540         private byte[] readBuffer() {
541             synchronized (mBufferLock) {
542                 return mWriteToBack ? mFrameBufferFront : mFrameBufferBack;
543             }
544         }
545 
writeBuffer()546         private byte[] writeBuffer() {
547             synchronized (mBufferLock) {
548                 return mWriteToBack ? mFrameBufferBack : mFrameBufferFront;
549             }
550         }
551 
swapBuffers()552         private synchronized void swapBuffers() {
553             synchronized (mBufferLock) {
554                 mWriteToBack = !mWriteToBack;
555             }
556         }
557 
558         private PreviewCallback mPreviewCallback = new PreviewCallback() {
559 
560             @Override
561             public void onPreviewFrame(byte[] data, Camera camera) {
562                 swapBuffers();
563                 camera.addCallbackBuffer(writeBuffer());
564                 mRunner.signalNewFrame();
565             }
566 
567         };
568 
569         @Override
setupServerFrame()570         public void setupServerFrame() {
571             checkCameraDimensions();
572             Camera camera = mRunner.mCamera;
573             int bufferSize = mCameraWidth * (mCameraHeight + mCameraHeight/2);
574             mFrameBufferFront = new byte[bufferSize];
575             mFrameBufferBack = new byte[bufferSize];
576             camera.addCallbackBuffer(writeBuffer());
577             camera.setPreviewCallbackWithBuffer(mPreviewCallback);
578             SurfaceView previewDisplay = getPreviewDisplay();
579             if (previewDisplay != null) {
580                 try {
581                     camera.setPreviewDisplay(previewDisplay.getHolder());
582                 } catch (IOException e) {
583                     throw new RuntimeException("Could not start camera with given preview " +
584                             "display!");
585                 }
586             }
587         }
588 
checkCameraDimensions()589         private void checkCameraDimensions() {
590             if (mCameraWidth % 4 != 0) {
591                 throw new RuntimeException("Camera width must be a multiple of 4!");
592             } else if (mCameraHeight % 2 != 0) {
593                 throw new RuntimeException("Camera height must be a multiple of 2!");
594             }
595         }
596 
597         @Override
updateServerFrame()598         public void updateServerFrame() {
599             // Server frame has been updated already, simply inform clients here.
600             informClients();
601         }
602 
603         @Override
grabFrame(FrameImage2D targetFrame)604         public void grabFrame(FrameImage2D targetFrame) {
605             EGLContext clientContext = RenderTarget.currentContext();
606 
607             // Copy camera data to the client YUV texture
608             TextureSource clientTex = textureForContext(clientContext);
609             int texWidth = mCameraWidth / 4;
610             int texHeight = mCameraHeight + mCameraHeight / 2;
611             synchronized(mBufferLock) {    // Don't swap buffers while we are reading
612                 ByteBuffer pixels = ByteBuffer.wrap(readBuffer());
613                 clientTex.allocateWithPixels(pixels, texWidth, texHeight);
614             }
615             clientTex.setParameter(GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
616             clientTex.setParameter(GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
617 
618             // Setup the YUV-2-RGBA shader
619             ImageShader transferShader = shaderForContext(clientContext);
620             transferShader.setTargetCoords(mTargetCoords);
621             updateShaderPixelSize(transferShader);
622 
623             // Convert pixels into target frame
624             targetFrame.resize(new int[] { mOutWidth, mOutHeight });
625             transferShader.process(clientTex,
626                     targetFrame.lockRenderTarget(),
627                     mOutWidth,
628                     mOutHeight);
629             targetFrame.unlock();
630         }
631 
632         @Override
onUpdateCameraOrientation(int orientation)633         public void onUpdateCameraOrientation(int orientation) {
634             super.onUpdateCameraOrientation(orientation);
635             if ((mRunner.mActualFacing == FACING_FRONT) && mRunner.mFlipFront) {
636                 switch (orientation) {
637                     case 0:
638                         mTargetCoords = new float[] { 1f, 0f, 0f, 0f, 1f, 1f, 0f, 1f };
639                         break;
640                     case 90:
641                         mTargetCoords = new float[] { 0f, 0f, 0f, 1f, 1f, 0f, 1f, 1f };
642                         break;
643                     case 180:
644                         mTargetCoords = new float[] { 0f, 1f, 1f, 1f, 0f, 0f, 1f, 0f };
645                         break;
646                     case 270:
647                         mTargetCoords = new float[] { 1f, 1f, 1f, 0f, 0f, 1f, 0f, 0f };
648                         break;
649                 }
650             } else {
651                 switch (orientation) {
652                     case 0:
653                         mTargetCoords = new float[] { 0f, 0f, 1f, 0f, 0f, 1f, 1f, 1f };
654                         break;
655                     case 90:
656                         mTargetCoords = new float[] { 1f, 0f, 1f, 1f, 0f, 0f, 0f, 1f };
657                         break;
658                     case 180:
659                         mTargetCoords = new float[] { 1f, 1f, 0f, 1f, 1f, 0f, 0f, 0f };
660                         break;
661                     case 270:
662                         mTargetCoords = new float[] { 0f, 1f, 0f, 0f, 1f, 1f, 1f, 0f };
663                         break;
664                 }
665             }
666         }
667 
668         @Override
release()669         public void release() {
670             mFrameBufferBack = null;
671             mFrameBufferFront = null;
672         }
673 
674         @Override
isFrontMirrored()675         public boolean isFrontMirrored() {
676             return false;
677         }
678 
679         @Override
createClientShader()680         protected ImageShader createClientShader() {
681             ImageShader shader = new ImageShader(mNV21ToRGBAVertex, mNV21ToRGBAFragment);
682             // TODO: Make this a VBO
683             float[] yCoords = new float[] {
684                     0f, 0f,
685                     1f, 0f,
686                     0f, 2f / 3f,
687                     1f, 2f / 3f };
688             float[] uvCoords = new float[] {
689                     0f, 2f / 3f,
690                     1f, 2f / 3f,
691                     0f, 1f,
692                     1f, 1f };
693             shader.setAttributeValues("a_y_texcoord", yCoords, 2);
694             shader.setAttributeValues("a_vu_texcoord", uvCoords, 2);
695             return shader;
696         }
697 
698         @Override
createClientTexture()699         protected TextureSource createClientTexture() {
700             TextureSource texture = TextureSource.newTexture();
701             texture.setParameter(GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
702             texture.setParameter(GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
703             return texture;
704         }
705 
updateShaderPixelSize(ImageShader shader)706         private void updateShaderPixelSize(ImageShader shader) {
707             float[] pixCoords = new float[] {
708                     0f, 0f,
709                     mCameraWidth, 0f,
710                     0f, mCameraHeight,
711                     mCameraWidth, mCameraHeight };
712             shader.setAttributeValues("a_pixcoord", pixCoords, 2);
713         }
714 
getPreviewDisplay()715         private SurfaceView getPreviewDisplay() {
716             if (mSurfaceView == null) {
717                 mSurfaceView = mRunner.getContext().getDummySurfaceView();
718             }
719             return mSurfaceView;
720         }
721 
informClients()722         private void informClients() {
723             synchronized (mClients) {
724                 for (FrameClient client : mClients) {
725                     client.onCameraFrameAvailable();
726                 }
727             }
728         }
729     }
730 
    /** Thread-safe holder for the camera runner's lifecycle state. */
    private static class State {
        public static final int STATE_RUNNING = 1;
        public static final int STATE_STOPPED = 2;
        public static final int STATE_HALTED = 3;

        // Atomic so the state can be read and written from multiple threads.
        private AtomicInteger mCurrent = new AtomicInteger(STATE_STOPPED);

        /** Returns the current state (one of the STATE_ constants). */
        public int current() {
            return mCurrent.get();
        }

        /** Atomically replaces the current state. */
        public void set(int newState) {
            mCurrent.set(newState);
        }
    }
746 
    /** An event processed by the CameraRunnable's event loop. */
    private static class Event {
        public static final int START = 1;     // Open the camera and start streaming.
        public static final int FRAME = 2;     // A new camera frame arrived.
        public static final int STOP = 3;      // Close the camera and stop.
        public static final int HALT = 4;      // Temporarily release the camera.
        public static final int RESTART = 5;   // Resume after a halt.
        public static final int UPDATE = 6;    // Re-apply changed parameters (stop + start).
        public static final int TEARDOWN = 7;  // Drop listeners; only valid when stopped.

        // One of the event codes above.
        public int code;

        public Event(int code) {
            this.code = code;
        }
    }
762 
763     private final class CameraRunnable implements Runnable {
764 
        /** On slower devices the event queue can easily fill up. We bound the queue to this. */
        private final static int MAX_EVENTS = 32;

        /** The runner's state */
        private State mState = new State();

        /** The CameraRunner's event queue (bounded to MAX_EVENTS). */
        private LinkedBlockingQueue<Event> mEventQueue = new LinkedBlockingQueue<Event>(MAX_EVENTS);

        /** The requested FPS */
        private int mRequestedFramesPerSec = 30;

        /** The actual FPS, derived from the chosen preview fps range. */
        private int mActualFramesPerSec = 0;

        /** The requested preview width and height */
        private int mRequestedPreviewWidth = 640;
        private int mRequestedPreviewHeight = 480;

        /** The requested picture width and height */
        private int mRequestedPictureWidth = 640;
        private int mRequestedPictureHeight = 480;

        /** The actual camera width and height (null until the camera is configured). */
        private int[] mActualDims = null;

        /** The requested facing */
        private int mRequestedFacing = FACING_DONTCARE;

        /** The actual facing */
        private int mActualFacing = FACING_DONTCARE;

        /** Whether to horizontally flip the front facing camera */
        private boolean mFlipFront = true;

        /** The display the camera streamer is bound to. */
        private Display mDisplay = null;

        /** The camera and screen orientation. */
        private int mCamOrientation = 0;
        private int mOrientation = -1;

        /** The camera rotation (used for capture). */
        private int mCamRotation = 0;

        /** The camera flash mode */
        private String mFlashMode = Camera.Parameters.FLASH_MODE_OFF;

        /** The camera object (null while the camera is closed). */
        private Camera mCamera = null;

        /** Recorder used for capturing video; null when not recording. */
        private MediaRecorder mRecorder = null;

        /** The ID of the currently used camera */
        int mCamId = 0;

        /** The platform-dependent camera frame handler. */
        private CamFrameHandler mCamFrameHandler = null;

        /** The set of camera listeners. */
        private Set<CameraListener> mCamListeners = new HashSet<CameraListener>();

        private ReentrantLock mCameraReadyLock = new ReentrantLock(true);
        // mCameraReady condition is used when waiting for the camera getting ready.
        private Condition mCameraReady = mCameraReadyLock.newCondition();
        // external camera lock used to provide the capability of external camera access.
        private ExternalCameraLock mExternalCameraLock = new ExternalCameraLock();

        // Render target used to hold the camera thread's GL context.
        private RenderTarget mRenderTarget;
        // The MffContext this runner belongs to.
        private MffContext mContext;
835 
836         /**
837          *  This provides the capability of locking and unlocking from different threads.
838          *  The thread will wait until the lock state is idle. Any thread can wake up
839          *  a waiting thread by calling unlock (i.e. signal), provided that unlock
840          *  are called using the same context when lock was called. Using context prevents
841          *  from rogue usage of unlock.
842          */
843         private class ExternalCameraLock {
844             public static final int IDLE = 0;
845             public static final int IN_USE = 1;
846             private int mLockState = IDLE;
847             private Object mLockContext;
848             private final ReentrantLock mLock = new ReentrantLock(true);
849             private final Condition mInUseLockCondition= mLock.newCondition();
850 
lock(Object context)851             public boolean lock(Object context) {
852                 if (context == null) {
853                     throw new RuntimeException("Null context when locking");
854                 }
855                 mLock.lock();
856                 if (mLockState == IN_USE) {
857                     try {
858                         mInUseLockCondition.await();
859                     } catch (InterruptedException e) {
860                         return false;
861                     }
862                 }
863                 mLockState = IN_USE;
864                 mLockContext = context;
865                 mLock.unlock();
866                 return true;
867             }
868 
unlock(Object context)869             public void unlock(Object context) {
870                 mLock.lock();
871                 if (mLockState != IN_USE) {
872                     throw new RuntimeException("Not in IN_USE state");
873                 }
874                 if (context != mLockContext) {
875                     throw new RuntimeException("Lock is not owned by this context");
876                 }
877                 mLockState = IDLE;
878                 mLockContext = null;
879                 mInUseLockCondition.signal();
880                 mLock.unlock();
881             }
882         }
883 
CameraRunnable(MffContext context)884         public CameraRunnable(MffContext context) {
885             mContext = context;
886             createCamFrameHandler();
887             mCamFrameHandler.initWithRunner(this);
888             launchThread();
889         }
890 
getContext()891         public MffContext getContext() {
892             return mContext;
893         }
894 
loop()895         public void loop() {
896             while (true) {
897                 try {
898                     Event event = nextEvent();
899                     if (event == null) continue;
900                     switch (event.code) {
901                         case Event.START:
902                             onStart();
903                             break;
904                         case Event.STOP:
905                             onStop();
906                             break;
907                         case Event.FRAME:
908                             onFrame();
909                             break;
910                         case Event.HALT:
911                             onHalt();
912                             break;
913                         case Event.RESTART:
914                             onRestart();
915                             break;
916                         case Event.UPDATE:
917                             onUpdate();
918                             break;
919                         case Event.TEARDOWN:
920                             onTearDown();
921                             break;
922                     }
923                 } catch (Exception e) {
924                     e.printStackTrace();
925                 }
926             }
927         }
928 
        @Override
        public void run() {
            // Thread entry point: run the event loop forever.
            loop();
        }
933 
signalNewFrame()934         public void signalNewFrame() {
935             pushEvent(Event.FRAME, false);
936         }
937 
pushEvent(int eventId, boolean required)938         public void pushEvent(int eventId, boolean required) {
939             try {
940                 if (required) {
941                     mEventQueue.put(new Event(eventId));
942                 } else {
943                     mEventQueue.offer(new Event(eventId));
944                 }
945             } catch (InterruptedException e) {
946                 // We should never get here (as we do not limit capacity in the queue), but if
947                 // we do, we log an error.
948                 Log.e("CameraStreamer", "Dropping event " + eventId + "!");
949             }
950         }
951 
launchThread()952         public void launchThread() {
953             Thread cameraThread = new Thread(this);
954             cameraThread.start();
955         }
956 
        /**
         * Returns the raw camera handle, which may be null (e.g. while the camera
         * is closed or being switched).
         *
         * @deprecated Use {@link #lockCamera(Object)} / {@link #unlockCamera(Object)}
         *     to obtain exclusive, non-null access to the camera.
         */
        @Deprecated
        public Camera getCamera() {
            synchronized (mState) {
                return mCamera;
            }
        }
963 
lockCamera(Object context)964         public Camera lockCamera(Object context) {
965             mExternalCameraLock.lock(context);
966             /**
967              * since lockCamera can happen right after closeCamera,
968              * the camera handle can be null, wait until valid handle
969              * is acquired.
970              */
971             while (mCamera == null) {
972                 mExternalCameraLock.unlock(context);
973                 mCameraReadyLock.lock();
974                 try {
975                     mCameraReady.await();
976                 } catch (InterruptedException e) {
977                     throw new RuntimeException("Condition interrupted", e);
978                 }
979                 mCameraReadyLock.unlock();
980                 mExternalCameraLock.lock(context);
981             }
982             return mCamera;
983         }
984 
unlockCamera(Object context)985         public void unlockCamera(Object context) {
986             mExternalCameraLock.unlock(context);
987         }
988 
getCurrentCameraId()989         public int getCurrentCameraId() {
990             synchronized (mState) {
991                 return mCamId;
992             }
993         }
994 
isRunning()995         public boolean isRunning() {
996             return mState.current() != State.STATE_STOPPED;
997         }
998 
addListener(CameraListener listener)999         public void addListener(CameraListener listener) {
1000             synchronized (mCamListeners) {
1001                 mCamListeners.add(listener);
1002             }
1003         }
1004 
removeListener(CameraListener listener)1005         public void removeListener(CameraListener listener) {
1006             synchronized (mCamListeners) {
1007                 mCamListeners.remove(listener);
1008             }
1009         }
1010 
bindToDisplay(Display display)1011         public synchronized void bindToDisplay(Display display) {
1012             mDisplay = display;
1013         }
1014 
setDesiredPreviewSize(int width, int height)1015         public synchronized void setDesiredPreviewSize(int width, int height) {
1016             if (width != mRequestedPreviewWidth || height != mRequestedPreviewHeight) {
1017                 mRequestedPreviewWidth = width;
1018                 mRequestedPreviewHeight = height;
1019                 onParamsUpdated();
1020             }
1021         }
1022 
setDesiredPictureSize(int width, int height)1023         public synchronized void setDesiredPictureSize(int width, int height) {
1024             if (width != mRequestedPictureWidth || height != mRequestedPictureHeight) {
1025                 mRequestedPictureWidth = width;
1026                 mRequestedPictureHeight = height;
1027                 onParamsUpdated();
1028             }
1029         }
1030 
setDesiredFrameRate(int fps)1031         public synchronized void setDesiredFrameRate(int fps) {
1032             if (fps != mRequestedFramesPerSec) {
1033                 mRequestedFramesPerSec = fps;
1034                 onParamsUpdated();
1035             }
1036         }
1037 
setFacing(int facing)1038         public synchronized void setFacing(int facing) {
1039             if (facing != mRequestedFacing) {
1040                 switch (facing) {
1041                     case FACING_DONTCARE:
1042                     case FACING_FRONT:
1043                     case FACING_BACK:
1044                         mRequestedFacing = facing;
1045                         break;
1046                     default:
1047                         throw new IllegalArgumentException("Unknown facing value '" + facing
1048                             + "' passed to setFacing!");
1049                 }
1050                 onParamsUpdated();
1051             }
1052         }
1053 
setFlipFrontCamera(boolean flipFront)1054         public synchronized void setFlipFrontCamera(boolean flipFront) {
1055             if (mFlipFront != flipFront) {
1056                 mFlipFront = flipFront;
1057             }
1058         }
1059 
setFlashMode(String flashMode)1060         public synchronized void setFlashMode(String flashMode) {
1061             if (!flashMode.equals(mFlashMode)) {
1062                 mFlashMode = flashMode;
1063                 onParamsUpdated();
1064             }
1065         }
1066 
getCameraFacing()1067         public synchronized int getCameraFacing() {
1068             return mActualFacing;
1069         }
1070 
getCameraRotation()1071         public synchronized int getCameraRotation() {
1072             return mCamRotation;
1073         }
1074 
supportsHardwareFaceDetection()1075         public synchronized boolean supportsHardwareFaceDetection() {
1076             //return mCamFrameHandler.supportsHardwareFaceDetection();
1077             // TODO
1078             return true;
1079         }
1080 
getCameraWidth()1081         public synchronized int getCameraWidth() {
1082             return (mActualDims != null) ? mActualDims[0] : 0;
1083         }
1084 
getCameraHeight()1085         public synchronized int getCameraHeight() {
1086             return (mActualDims != null) ? mActualDims[1] : 0;
1087         }
1088 
getCameraFrameRate()1089         public synchronized int getCameraFrameRate() {
1090             return mActualFramesPerSec;
1091         }
1092 
getFlashMode()1093         public synchronized String getFlashMode() {
1094             return mCamera.getParameters().getFlashMode();
1095         }
1096 
canStart()1097         public synchronized boolean canStart() {
1098             // If we can get a camera id without error we should be able to start.
1099             try {
1100                 getCameraId();
1101             } catch (RuntimeException e) {
1102                 return false;
1103             }
1104             return true;
1105         }
1106 
grabFrame(FrameImage2D targetFrame)1107         public boolean grabFrame(FrameImage2D targetFrame) {
1108             // Make sure we stay in state running while we are grabbing the frame.
1109             synchronized (mState) {
1110                 if (mState.current() != State.STATE_RUNNING) {
1111                     return false;
1112                 }
1113                 // we may not have the camera ready, this might happen when in the middle
1114                 // of switching camera.
1115                 if (mCamera == null) {
1116                     return false;
1117                 }
1118                 mCamFrameHandler.grabFrame(targetFrame);
1119                 return true;
1120             }
1121         }
1122 
getCamFrameHandler()1123         public CamFrameHandler getCamFrameHandler() {
1124             return mCamFrameHandler;
1125         }
1126 
onParamsUpdated()1127         private void onParamsUpdated() {
1128             pushEvent(Event.UPDATE, true);
1129         }
1130 
nextEvent()1131         private Event nextEvent() {
1132             try {
1133                 return mEventQueue.take();
1134             } catch (InterruptedException e) {
1135                 // Ignore and keep going.
1136                 Log.w("GraphRunner", "Event queue processing was interrupted.");
1137                 return null;
1138             }
1139         }
1140 
onStart()1141         private void onStart() {
1142             if (mState.current() == State.STATE_STOPPED) {
1143                 mState.set(State.STATE_RUNNING);
1144                 getRenderTarget().focus();
1145                 openCamera();
1146             }
1147         }
1148 
onStop()1149         private void onStop() {
1150             if (mState.current() == State.STATE_RUNNING) {
1151                 closeCamera();
1152                 RenderTarget.focusNone();
1153             }
1154             // Set state to stop (halted becomes stopped).
1155             mState.set(State.STATE_STOPPED);
1156         }
1157 
onHalt()1158         private void onHalt() {
1159             // Only halt if running. Stopped overrides halt.
1160             if (mState.current() == State.STATE_RUNNING) {
1161                 closeCamera();
1162                 RenderTarget.focusNone();
1163                 mState.set(State.STATE_HALTED);
1164             }
1165         }
1166 
onRestart()1167         private void onRestart() {
1168             // Only restart if halted
1169             if (mState.current() == State.STATE_HALTED) {
1170                 mState.set(State.STATE_RUNNING);
1171                 getRenderTarget().focus();
1172                 openCamera();
1173             }
1174         }
1175 
onUpdate()1176         private void onUpdate() {
1177             if (mState.current() == State.STATE_RUNNING) {
1178                 pushEvent(Event.STOP, true);
1179                 pushEvent(Event.START, true);
1180             }
1181         }
onFrame()1182         private void onFrame() {
1183             if (mState.current() == State.STATE_RUNNING) {
1184                 updateRotation();
1185                 mCamFrameHandler.updateServerFrame();
1186             }
1187         }
1188 
onTearDown()1189         private void onTearDown() {
1190             if (mState.current() == State.STATE_STOPPED) {
1191                 // Remove all listeners. This will release their resources
1192                 for (CameraListener listener : mCamListeners) {
1193                     removeListener(listener);
1194                 }
1195                 mCamListeners.clear();
1196             } else {
1197                 Log.e("CameraStreamer", "Could not tear-down CameraStreamer as camera still "
1198                         + "seems to be running!");
1199             }
1200         }
1201 
createCamFrameHandler()1202         private void createCamFrameHandler() {
1203             // TODO: For now we simply assert that OpenGL is supported. Later on, we should add
1204             // a CamFrameHandler that does not depend on OpenGL.
1205             getContext().assertOpenGLSupported();
1206             if (VERSION.SDK_INT >= 16) {
1207                 mCamFrameHandler = new CamFrameHandlerJB();
1208             } else if (VERSION.SDK_INT >= 15) {
1209                 mCamFrameHandler = new CamFrameHandlerICS();
1210             } else {
1211                 mCamFrameHandler = new CamFrameHandlerGB();
1212             }
1213         }
1214 
updateRotation()1215         private void updateRotation() {
1216             if (mDisplay != null) {
1217                 updateDisplayRotation(mDisplay.getRotation());
1218             }
1219         }
1220 
updateDisplayRotation(int rotation)1221         private synchronized void updateDisplayRotation(int rotation) {
1222             switch (rotation) {
1223                 case Surface.ROTATION_0:
1224                     onUpdateOrientation(0);
1225                     break;
1226                 case Surface.ROTATION_90:
1227                     onUpdateOrientation(90);
1228                     break;
1229                 case Surface.ROTATION_180:
1230                     onUpdateOrientation(180);
1231                     break;
1232                 case Surface.ROTATION_270:
1233                     onUpdateOrientation(270);
1234                     break;
1235                 default:
1236                     throw new IllegalArgumentException("Unsupported display rotation constant! Use "
1237                         + "one of the Surface.ROTATION_ constants!");
1238             }
1239         }
1240 
getRenderTarget()1241         private RenderTarget getRenderTarget() {
1242             if (mRenderTarget == null) {
1243                 mRenderTarget = RenderTarget.newTarget(1, 1);
1244             }
1245             return mRenderTarget;
1246         }
1247 
updateCamera()1248         private void updateCamera() {
1249             synchronized (mState) {
1250                 mCamId = getCameraId();
1251                 updateCameraOrientation(mCamId);
1252                 mCamera = Camera.open(mCamId);
1253                 initCameraParameters();
1254             }
1255         }
1256 
updateCameraOrientation(int camId)1257         private void updateCameraOrientation(int camId) {
1258             CameraInfo cameraInfo = new CameraInfo();
1259             Camera.getCameraInfo(camId, cameraInfo);
1260             mCamOrientation = cameraInfo.orientation;
1261             mOrientation = -1;  // Forces recalculation to match display
1262             mActualFacing = (cameraInfo.facing == CameraInfo.CAMERA_FACING_FRONT)
1263                 ? FACING_FRONT
1264                 : FACING_BACK;
1265         }
1266 
getCameraId()1267         private int getCameraId() {
1268             int camCount = Camera.getNumberOfCameras();
1269             if (camCount == 0) {
1270                 throw new RuntimeException("Device does not have any cameras!");
1271             } else if (mRequestedFacing == FACING_DONTCARE) {
1272                 // Simply return first camera if mRequestedFacing is don't care
1273                 return 0;
1274             }
1275 
1276             // Attempt to find requested camera
1277             boolean useFrontCam = (mRequestedFacing == FACING_FRONT);
1278             CameraInfo cameraInfo = new CameraInfo();
1279             for (int i = 0; i < camCount; ++i) {
1280                 Camera.getCameraInfo(i, cameraInfo);
1281                 if ((cameraInfo.facing == CameraInfo.CAMERA_FACING_FRONT) == useFrontCam) {
1282                     return i;
1283                 }
1284             }
1285             throw new RuntimeException("Could not find a camera facing (" + mRequestedFacing
1286                     + ")!");
1287         }
1288 
initCameraParameters()1289         private void initCameraParameters() {
1290             Camera.Parameters params = mCamera.getParameters();
1291 
1292             // Find closest preview size
1293             mActualDims =
1294                 findClosestPreviewSize(mRequestedPreviewWidth, mRequestedPreviewHeight, params);
1295             mCamFrameHandler.setCameraSize(mActualDims[0], mActualDims[1]);
1296             params.setPreviewSize(mActualDims[0], mActualDims[1]);
1297             // Find closest picture size
1298             int[] dims =
1299                 findClosestPictureSize(mRequestedPictureWidth, mRequestedPictureHeight, params);
1300             params.setPictureSize(dims[0], dims[1]);
1301 
1302             // Find closest FPS
1303             int closestRange[] = findClosestFpsRange(mRequestedFramesPerSec, params);
1304             params.setPreviewFpsRange(closestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
1305                                       closestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
1306 
1307             // Set flash mode (if supported)
1308             if (params.getFlashMode() != null) {
1309                 params.setFlashMode(mFlashMode);
1310             }
1311 
1312             mCamera.setParameters(params);
1313         }
1314 
findClosestPreviewSize(int width, int height, Camera.Parameters parameters)1315         private int[] findClosestPreviewSize(int width, int height, Camera.Parameters parameters) {
1316             List<Camera.Size> previewSizes = parameters.getSupportedPreviewSizes();
1317             return findClosestSizeFromList(width, height, previewSizes);
1318         }
1319 
findClosestPictureSize(int width, int height, Camera.Parameters parameters)1320         private int[] findClosestPictureSize(int width, int height, Camera.Parameters parameters) {
1321             List<Camera.Size> pictureSizes = parameters.getSupportedPictureSizes();
1322             return findClosestSizeFromList(width, height, pictureSizes);
1323         }
1324 
findClosestSizeFromList(int width, int height, List<Camera.Size> sizes)1325         private int[] findClosestSizeFromList(int width, int height, List<Camera.Size> sizes) {
1326             int closestWidth = -1;
1327             int closestHeight = -1;
1328             int smallestWidth = sizes.get(0).width;
1329             int smallestHeight =  sizes.get(0).height;
1330             for (Camera.Size size : sizes) {
1331                 // Best match defined as not being larger in either dimension than
1332                 // the requested size, but as close as possible. The below isn't a
1333                 // stable selection (reording the size list can give different
1334                 // results), but since this is a fallback nicety, that's acceptable.
1335                 if ( size.width <= width &&
1336                      size.height <= height &&
1337                      size.width >= closestWidth &&
1338                      size.height >= closestHeight) {
1339                     closestWidth = size.width;
1340                     closestHeight = size.height;
1341                 }
1342                 if ( size.width < smallestWidth &&
1343                      size.height < smallestHeight) {
1344                     smallestWidth = size.width;
1345                     smallestHeight = size.height;
1346                 }
1347             }
1348             if (closestWidth == -1) {
1349                 // Requested size is smaller than any listed size; match with smallest possible
1350                 closestWidth = smallestWidth;
1351                 closestHeight = smallestHeight;
1352             }
1353             int[] closestSize = {closestWidth, closestHeight};
1354             return closestSize;
1355         }
1356 
findClosestFpsRange(int fps, Camera.Parameters params)1357         private int[] findClosestFpsRange(int fps, Camera.Parameters params) {
1358             List<int[]> supportedFpsRanges = params.getSupportedPreviewFpsRange();
1359             int[] closestRange = supportedFpsRanges.get(0);
1360             int fpsk = fps * 1000;
1361             int minDiff = 1000000;
1362             for (int[] range : supportedFpsRanges) {
1363                 int low = range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
1364                 int high = range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
1365                 if (low <= fpsk && high >= fpsk) {
1366                     int diff = (fpsk - low) + (high - fpsk);
1367                     if (diff < minDiff) {
1368                         closestRange = range;
1369                         minDiff = diff;
1370                     }
1371                 }
1372             }
1373             mActualFramesPerSec = closestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] / 1000;
1374             return closestRange;
1375         }
1376 
onUpdateOrientation(int orientation)1377         private void onUpdateOrientation(int orientation) {
1378             // First we calculate the camera rotation.
1379             int rotation = (mActualFacing == FACING_FRONT)
1380                     ? (mCamOrientation + orientation) % 360
1381                     : (mCamOrientation - orientation + 360) % 360;
1382             if (rotation != mCamRotation) {
1383                 synchronized (this) {
1384                     mCamRotation = rotation;
1385                 }
1386             }
1387 
1388             // We compensate for mirroring in the orientation. This differs from the rotation,
1389             // where we are invariant to mirroring.
1390             int fixedOrientation = rotation;
1391             if (mActualFacing == FACING_FRONT && mCamFrameHandler.isFrontMirrored()) {
1392                 fixedOrientation = (360 - rotation) % 360;  // compensate the mirror
1393             }
1394             if (mOrientation != fixedOrientation) {
1395                 mOrientation = fixedOrientation;
1396                 mCamFrameHandler.onUpdateCameraOrientation(mOrientation);
1397             }
1398         }
1399 
openCamera()1400         private void openCamera() {
1401             // Acquire lock for camera
1402             try {
1403                 if (!mCameraLock.tryLock(MAX_CAMERA_WAIT_TIME, TimeUnit.SECONDS)) {
1404                     throw new RuntimeException("Timed out while waiting to acquire camera!");
1405                 }
1406             } catch (InterruptedException e) {
1407                 throw new RuntimeException("Interrupted while waiting to acquire camera!");
1408             }
1409 
1410             // Make sure external entities are not holding camera. We need to hold the lock until
1411             // the preview is started again.
1412             Object lockContext = new Object();
1413             mExternalCameraLock.lock(lockContext);
1414 
1415             // Need to synchronize this as many of the member values are modified during setup.
1416             synchronized (this) {
1417                 updateCamera();
1418                 updateRotation();
1419                 mCamFrameHandler.setupServerFrame();
1420             }
1421 
1422             mCamera.startPreview();
1423 
1424             // Inform listeners
1425             synchronized (mCamListeners) {
1426                 for (CameraListener listener : mCamListeners) {
1427                     listener.onCameraOpened(CameraStreamer.this);
1428                 }
1429             }
1430             mExternalCameraLock.unlock(lockContext);
1431             // New camera started
1432             mCameraReadyLock.lock();
1433             mCameraReady.signal();
1434             mCameraReadyLock.unlock();
1435         }
1436 
1437         /**
1438          * Creates an instance of MediaRecorder to be used for the streamer.
1439          * User should call the functions in the following sequence:<p>
1440          *   {@link #createRecorder}<p>
1441          *   {@link #startRecording}<p>
1442          *   {@link #stopRecording}<p>
1443          *   {@link #releaseRecorder}<p>
1444          * @param outputPath the output video path for the recorder
1445          * @param profile the recording {@link CamcorderProfile} which has parameters indicating
1446          *  the resolution, quality etc.
1447          */
createRecorder(String outputPath, CamcorderProfile profile)1448         public void createRecorder(String outputPath, CamcorderProfile profile) {
1449             lockCamera(this);
1450             mCamera.unlock();
1451             if (mRecorder != null) {
1452                 mRecorder.release();
1453             }
1454             mRecorder = new MediaRecorder();
1455             mRecorder.setCamera(mCamera);
1456             mRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
1457             mRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
1458             mRecorder.setProfile(profile);
1459             mRecorder.setOutputFile(outputPath);
1460             try {
1461                 mRecorder.prepare();
1462             } catch (Exception e) {
1463                 throw new RuntimeException(e);
1464             }
1465         }
1466 
1467         /**
1468          * Starts recording video using the created MediaRecorder object
1469          */
startRecording()1470         public void startRecording() {
1471             if (mRecorder == null) {
1472                 throw new RuntimeException("No recorder created");
1473             }
1474             mRecorder.start();
1475         }
1476 
1477         /**
1478          * Stops recording video
1479          */
stopRecording()1480         public void stopRecording() {
1481             if (mRecorder == null) {
1482                 throw new RuntimeException("No recorder created");
1483             }
1484             mRecorder.stop();
1485         }
1486 
        /**
         * Release the resources held by the MediaRecorder, call this after done recording.
         * Re-locks the Camera for the streamer and releases the external camera lock that
         * was acquired in {@link #createRecorder}.
         * @throws RuntimeException if no recorder has been created.
         */
        public void releaseRecorder() {
            if (mRecorder == null) {
                throw new RuntimeException("No recorder created");
            }
            mRecorder.release();
            mRecorder = null;
            // Hand the Camera back to the streamer (it was unlocked for the recorder).
            mCamera.lock();
            unlockCamera(this);
        }
1499 
        /**
         * Stops the preview and releases the Camera, then notifies listeners.
         * Holds the external camera lock while tearing down so clients holding the
         * Camera via lockCamera() are not interrupted mid-use.
         */
        private void closeCamera() {
            // Fresh context object to pair this lock/unlock on the external lock.
            Object lockContext = new Object();
            mExternalCameraLock.lock(lockContext);
            if (mCamera != null) {
                mCamera.stopPreview();
                mCamera.release();
                mCamera = null;
            }
            // NOTE(review): mCameraLock is unlocked unconditionally here; presumably it
            // is held for the whole time the camera is open (see stopAndWait(), which
            // waits on it) — confirm the acquire happens on the open path.
            mCameraLock.unlock();
            mCamFrameHandler.release();
            mExternalCameraLock.unlock(lockContext);
            // Inform listeners (outside the external lock, unlike the open path which
            // signals listeners while still holding it).
            synchronized (mCamListeners) {
                for (CameraListener listener : mCamListeners) {
                    listener.onCameraClosed(CameraStreamer.this);
                }
            }
        }
1518 
1519     }
1520 
1521     /**
1522      * The frame-client callback interface.
1523      * FrameClients, that wish to receive Frames from the camera must implement this callback
1524      * method.
1525      * Note, that this method is called on the Camera server thread. However, the
1526      * {@code getLatestFrame()} method must be called from the client thread.
1527      */
1528     public static interface FrameClient {
onCameraFrameAvailable()1529         public void onCameraFrameAvailable();
1530     }
1531 
1532     /**
1533      * The CameraListener callback interface.
1534      * This interface allows observers to monitor the CameraStreamer and respond to stream open
1535      * and close events.
1536      */
1537     public static interface CameraListener {
1538         /**
1539          * Called when the camera is opened and begins producing frames.
1540          * This is also called when settings have changed that caused the camera to be reopened.
1541          */
onCameraOpened(CameraStreamer camera)1542         public void onCameraOpened(CameraStreamer camera);
1543 
1544         /**
1545          * Called when the camera is closed and stops producing frames.
1546          */
onCameraClosed(CameraStreamer camera)1547         public void onCameraClosed(CameraStreamer camera);
1548     }
1549 
    /**
     * Manually update the display rotation.
     * You do not need to call this if the camera is bound to a display, or your app does not
     * support multiple orientations.
     * @param rotation the new display rotation (a {@code Surface.ROTATION_*} value —
     *  presumably; confirm against {@code updateRotation()} in the runner).
     */
    public void updateDisplayRotation(int rotation) {
        mCameraRunner.updateDisplayRotation(rotation);
    }
1558 
    /**
     * Bind the camera to your Activity's display.
     * Use this if your Activity supports multiple display orientations, and you would like the
     * camera to update accordingly when the orientation is changed.
     * @param display the {@link Display} whose rotation the camera should track.
     */
    public void bindToDisplay(Display display) {
        mCameraRunner.bindToDisplay(display);
    }
1567 
    /**
     * Sets the desired preview size.
     * Note that the actual width and height may vary; query {@link #getCameraWidth()} and
     * {@link #getCameraHeight()} for the values actually in effect.
     *
     * @param width The desired width of the preview camera stream.
     * @param height The desired height of the preview camera stream.
     */
    public void setDesiredPreviewSize(int width, int height) {
        mCameraRunner.setDesiredPreviewSize(width, height);
    }
1578 
    /**
     * Sets the desired picture size.
     * Note that the actual width and height may vary depending on what the camera supports.
     *
     * @param width The desired picture width.
     * @param height The desired picture height.
     */
    public void setDesiredPictureSize(int width, int height) {
        mCameraRunner.setDesiredPictureSize(width, height);
    }
1589 
    /**
     * Sets the desired camera frame-rate.
     * Note that the actual frame-rate may vary; query {@link #getCameraFrameRate()} for the
     * value actually in effect.
     *
     * @param fps The desired FPS.
     */
    public void setDesiredFrameRate(int fps) {
        mCameraRunner.setDesiredFrameRate(fps);
    }
1599 
    /**
     * Sets the camera facing direction.
     *
     * Specify {@code FACING_DONTCARE} (default) if you would like the CameraStreamer to choose
     * the direction. When specifying any other direction be sure to first check whether the
     * device supports the desired facing.
     *
     * @param facing The desired camera facing direction (one of the FACING_* constants).
     */
    public void setFacing(int facing) {
        mCameraRunner.setFacing(facing);
    }
1612 
    /**
     * Set whether to flip the camera image horizontally when using the front facing camera.
     * @param flipFront true to mirror frames from the front camera.
     */
    public void setFlipFrontCamera(boolean flipFront) {
        mCameraRunner.setFlipFrontCamera(flipFront);
    }
1619 
    /**
     * Sets the camera flash mode.
     *
     * This must be one of the {@code FLASH_MODE_*} String constants defined in the
     * Camera.Parameters class.
     *
     * @param flashMode A String constant specifying the flash mode.
     */
    public void setFlashMode(String flashMode) {
        mCameraRunner.setFlashMode(flashMode);
    }
1630 
    /**
     * Returns the current flash mode.
     *
     * This returns the currently running camera's flash-mode, or null if flash modes are not
     * supported on that camera.
     *
     * @return The flash mode String, or null if flash modes are not supported.
     */
    public String getFlashMode() {
        return mCameraRunner.getFlashMode();
    }
1642 
    /**
     * Get the actual camera facing.
     * @return the current facing (a FACING_* constant), or 0 if the actual facing is not
     *  yet known.
     */
    public int getCameraFacing() {
        return mCameraRunner.getCameraFacing();
    }
1650 
    /**
     * Get the current camera rotation.
     *
     * Use this rotation if you want to snap pictures from the camera and need to rotate the
     * picture to be up-right.
     *
     * @return the current camera rotation.
     */
    public int getCameraRotation() {
        return mCameraRunner.getCameraRotation();
    }
1662 
    /**
     * Specifies whether or not the camera supports hardware face detection.
     * @return true, if the camera supports hardware face detection.
     */
    public boolean supportsHardwareFaceDetection() {
        return mCameraRunner.supportsHardwareFaceDetection();
    }
1670 
1671     /**
1672      * Returns the camera facing that is chosen when DONT_CARE is specified.
1673      * Returns 0 if neither a front nor back camera could be found.
1674      */
getDefaultFacing()1675     public static int getDefaultFacing() {
1676         int camCount = Camera.getNumberOfCameras();
1677         if (camCount == 0) {
1678             return 0;
1679         } else {
1680             CameraInfo cameraInfo = new CameraInfo();
1681             Camera.getCameraInfo(0, cameraInfo);
1682             return (cameraInfo.facing == CameraInfo.CAMERA_FACING_FRONT)
1683                 ? FACING_FRONT
1684                 : FACING_BACK;
1685         }
1686     }
1687 
    /**
     * Get the actual camera width.
     * @return the camera frame width in pixels, or 0 if not yet known.
     */
    public int getCameraWidth() {
        return mCameraRunner.getCameraWidth();
    }
1695 
    /**
     * Get the actual camera height.
     * @return the camera frame height in pixels, or 0 if not yet known.
     */
    public int getCameraHeight() {
        return mCameraRunner.getCameraHeight();
    }
1703 
    /**
     * Get the actual camera frame-rate.
     * @return the camera frame-rate, or 0 if not yet known.
     */
    public int getCameraFrameRate() {
        return mCameraRunner.getCameraFrameRate();
    }
1711 
    /**
     * Returns true if the camera can be started at this point.
     * @return true if {@link #start()} may be called now.
     */
    public boolean canStart() {
        return mCameraRunner.canStart();
    }
1718 
    /**
     * Returns true if the camera is currently running.
     * @return true if the camera is running.
     */
    public boolean isRunning() {
        return mCameraRunner.isRunning();
    }
1725 
    /**
     * Starts the camera.
     * Asynchronous: posts a START event to the camera runner rather than opening inline.
     */
    public void start() {
        mCameraRunner.pushEvent(Event.START, true);
    }
1732 
    /**
     * Stops the camera.
     * Asynchronous: posts a STOP event; use {@link #stopAndWait()} to block until closed.
     */
    public void stop() {
        mCameraRunner.pushEvent(Event.STOP, true);
    }
1739 
1740     /**
1741      * Stops the camera and waits until it is completely closed. Generally, this should not be
1742      * called in the UI thread, but may be necessary if you need the camera to be closed before
1743      * performing subsequent steps.
1744      */
stopAndWait()1745     public void stopAndWait() {
1746         mCameraRunner.pushEvent(Event.STOP, true);
1747         try {
1748             if (!mCameraLock.tryLock(MAX_CAMERA_WAIT_TIME, TimeUnit.SECONDS)) {
1749                 Log.w("CameraStreamer", "Time-out waiting for camera to close!");
1750             }
1751         } catch (InterruptedException e) {
1752             Log.w("CameraStreamer", "Interrupted while waiting for camera to close!");
1753         }
1754         mCameraLock.unlock();
1755     }
1756 
    /**
     * Registers a listener to handle camera state changes.
     * @param listener the listener to notify on camera open/close events.
     */
    public void addListener(CameraListener listener) {
        mCameraRunner.addListener(listener);
    }
1763 
    /**
     * Unregisters a listener to handle camera state changes.
     * @param listener the previously registered listener to remove.
     */
    public void removeListener(CameraListener listener) {
        mCameraRunner.removeListener(listener);
    }
1770 
    /**
     * Registers the frame-client with the camera.
     * This MUST be called from the client thread!
     * @param client the client that will receive frame-available callbacks.
     */
    public void registerClient(FrameClient client) {
        mCameraRunner.getCamFrameHandler().registerClient(client);
    }
1778 
    /**
     * Unregisters the frame-client with the camera.
     * This MUST be called from the client thread!
     * @param client the previously registered client to remove.
     */
    public void unregisterClient(FrameClient client) {
        mCameraRunner.getCamFrameHandler().unregisterClient(client);
    }
1786 
    /**
     * Gets the latest camera frame for the client.
     *
     * This must be called from the same thread as the {@link #registerClient(FrameClient)} call!
     * The frame passed in will be resized by the camera streamer to fit the camera frame.
     * Returns false if the frame could not be grabbed. This may happen if the camera has been
     * closed in the meantime, and its resources let go.
     *
     * @param targetFrame the frame to fill with the latest camera image.
     * @return true, if the frame was grabbed successfully.
     */
    public boolean getLatestFrame(FrameImage2D targetFrame) {
        return mCameraRunner.grabFrame(targetFrame);
    }
1800 
    /**
     * Expose the underlying android.hardware.Camera object.
     * Use the returned object with care: some camera functions may break the functionality
     * of CameraStreamer.
     * @return the Camera object.
     * @deprecated use {@link #lockCamera(Object)} / {@link #unlockCamera(Object)} instead,
     *  which guard against the streamer releasing the Camera while you hold it.
     */
    @Deprecated
    public Camera getCamera() {
        return mCameraRunner.getCamera();
    }
1811 
    /**
     * Obtain access to the underlying android.hardware.Camera object.
     * This grants temporary access to the internal Camera handle. Once you are done using the
     * handle you must call {@link #unlockCamera(Object)}. While you are holding the Camera,
     * it will not be modified or released by the CameraStreamer. The Camera object returned is
     * guaranteed to have the preview running.
     *
     * The CameraStreamer does not account for changes you make to the Camera. That is, if you
     * change the Camera unexpectedly this may cause unintended behavior by the streamer.
     *
     * Note that the returned object may be null. This can happen when the CameraStreamer is not
     * running, or is just transitioning to another Camera, such as during a switch from front to
     * back Camera.
     * @param context an object used as a context for locking and unlocking. lockCamera and
     *   unlockCamera should use the same context object.
     * @return The Camera object, or null if unavailable.
     */
    public Camera lockCamera(Object context) {
        return mCameraRunner.lockCamera(context);
    }
1832 
    /**
     * Release the acquired Camera object.
     * @param context the same context object that was used when lockCamera was called.
     */
    public void unlockCamera(Object context) {
        mCameraRunner.unlockCamera(context);
    }
1840 
    /**
     * Creates an instance of MediaRecorder to be used for the streamer.
     * User should call the functions in the following sequence:<p>
     *   {@link #createRecorder}<p>
     *   {@link #startRecording}<p>
     *   {@link #stopRecording}<p>
     *   {@link #releaseRecorder}<p>
     * @param path the output video path for the recorder
     * @param profile the recording {@link CamcorderProfile} which has parameters indicating
     *  the resolution, quality etc.
     */
    public void createRecorder(String path, CamcorderProfile profile) {
        mCameraRunner.createRecorder(path, profile);
    }
1855 
    /**
     * Releases the resources held by the MediaRecorder; call this when done recording.
     * See {@link #createRecorder} for the required call sequence.
     */
    public void releaseRecorder() {
        mCameraRunner.releaseRecorder();
    }
1859 
    /**
     * Starts recording video with the recorder created by {@link #createRecorder}.
     */
    public void startRecording() {
        mCameraRunner.startRecording();
    }
1863 
    /**
     * Stops the video recording started by {@link #startRecording}.
     */
    public void stopRecording() {
        mCameraRunner.stopRecording();
    }
1867 
    /**
     * Retrieve the ID of the currently used camera.
     * @return the ID of the currently used camera.
     */
    public int getCameraId() {
        return mCameraRunner.getCurrentCameraId();
    }
1875 
    /**
     * @return The number of cameras available for streaming on this device.
     */
    public static int getNumberOfCameras() {
        // Currently, this is just the number of cameras that are available on the device.
        return Camera.getNumberOfCameras();
    }
1883 
    /**
     * Package-private constructor: instances are created per MffContext (see class docs),
     * not directly by clients.
     * @param context the MffContext this streamer belongs to.
     */
    CameraStreamer(MffContext context) {
        mCameraRunner = new CameraRunnable(context);
    }
1887 
    /** Halt is like stop, but may be resumed using restart(). */
    void halt() {
        mCameraRunner.pushEvent(Event.HALT, true);
    }
1892 
    /** Restart starts the camera only if it was previously halted via {@link #halt()}. */
    void restart() {
        mCameraRunner.pushEvent(Event.RESTART, true);
    }
1897 
    /**
     * Whether a dummy SurfaceView is required for the camera preview.
     * True on API levels below 15 (ICE_CREAM_SANDWICH_MR1) — presumably because older
     * releases need a real Surface for the preview; confirm against the preview setup code.
     */
    static boolean requireDummySurfaceView() {
        return VERSION.SDK_INT < 15;
    }
1901 
    /** Posts a TEARDOWN event to the camera runner, releasing the streamer's resources. */
    void tearDown() {
        mCameraRunner.pushEvent(Event.TEARDOWN, true);
    }
1905 }
1906 
1907