/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.android.devcamera;

import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.Face;
import android.hardware.camera2.params.InputConfiguration;
import android.media.Image;
import android.media.ImageReader;
import android.media.ImageWriter;
import android.media.MediaActionSound;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.SystemClock;
import android.util.Log;
import android.util.Size;
import android.view.Surface;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;

import javax.microedition.khronos.opengles.GL10;


/**
 * Api2Camera : a camera2 implementation
 *
 * The goal here is to make the simplest possible API2 camera,
 * where individual streams and capture options (e.g. edge enhancement,
 * noise reduction, face detection) can be toggled on and off.
 *
 */

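// Typical usage, as a rough sketch (the host Activity/Surface wiring, plus the
// myCameraCallback and previewSurface names, are assumed here, not part of this file):
//
//   Api2Camera camera = new Api2Camera(context, false /* rear camera */);
//   camera.setCallback(myCameraCallback);   // frame stats, JPEGs, etc.
//   camera.openCamera();
//   camera.startPreview(previewSurface);    // once the preview Surface exists
//   ...
//   camera.takePicture();                   // ZSL reprocess of the latest YUV frame
//   camera.closeCamera();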
public class Api2Camera implements CameraInterface, SurfaceTexture.OnFrameAvailableListener {
    private static final String TAG = "DevCamera_API2";

    // Nth frame to log; put 10^6 if you don't want logging.
    private static int LOG_NTH_FRAME = 30;
    // Log dropped frames. There are a lot on Angler MDA32.
    private static boolean LOG_DROPPED_FRAMES = true;

    // IMPORTANT: Only one of these can be true:
    private static boolean SECOND_YUV_IMAGEREADER_STREAM = true;
    private static boolean SECOND_SURFACE_TEXTURE_STREAM = false;

    // Enable raw stream if available.
    private static boolean RAW_STREAM_ENABLE = true;
    // Use JPEG ImageReader and YUV ImageWriter if reprocessing is available.
    private static final boolean USE_REPROCESSING_IF_AVAIL = true;

    // Whether we are continuously taking pictures, or not.
    boolean mIsBursting = false;
    // Last total capture result.
    TotalCaptureResult mLastTotalCaptureResult;

    // ImageReader/Writer buffer sizes.
    private static final int YUV1_IMAGEREADER_SIZE = 8;
    private static final int YUV2_IMAGEREADER_SIZE = 8;
    private static final int RAW_IMAGEREADER_SIZE = 8;
    private static final int IMAGEWRITER_SIZE = 2;

    private CameraInfoCache mCameraInfoCache;
    private CameraManager mCameraManager;
    private CameraCaptureSession mCurrentCaptureSession;
    private MediaActionSound mMediaActionSound = new MediaActionSound();

    MyCameraCallback mMyCameraCallback;

    // Generally everything running on this thread & this module is *not thread safe*.
    private HandlerThread mOpsThread;
    private Handler mOpsHandler;
    private HandlerThread mInitThread;
    private Handler mInitHandler;
    private HandlerThread mJpegListenerThread;
    private Handler mJpegListenerHandler;

    Context mContext;
    boolean mCameraIsFront;
    SurfaceTexture mSurfaceTexture;
    Surface mSurfaceTextureSurface;

    private boolean mFirstFrameArrived;
    private ImageReader mYuv1ImageReader;
    private int mYuv1ImageCounter;
    // Handle to last received Image: allows ZSL to be implemented.
    private Image mYuv1LastReceivedImage = null;
    // Time at which reprocessing request went in (right now we are doing one at a time).
    private long mReprocessingRequestNanoTime;

    private ImageReader mJpegImageReader;
    private ImageReader mYuv2ImageReader;
    private int mYuv2ImageCounter;
    private ImageReader mRawImageReader;
    private int mRawImageCounter;

    // Starting the preview requires each of these 3 to be true/non-null:
    volatile private Surface mPreviewSurface;
    volatile private CameraDevice mCameraDevice;
    volatile boolean mAllThingsInitialized = false;

    /**
     * Constructor.
     */
    public Api2Camera(Context context, boolean useFrontCamera) {
        mContext = context;
        mCameraIsFront = useFrontCamera;
        mCameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
        mCameraInfoCache = new CameraInfoCache(mCameraManager, useFrontCamera);

        // Create thread and handler for camera operations.
        mOpsThread = new HandlerThread("CameraOpsThread");
        mOpsThread.start();
        mOpsHandler = new Handler(mOpsThread.getLooper());

        // Create thread and handler for slow initialization operations.
        // Don't want to use camera operations thread because we want to time camera open carefully.
        mInitThread = new HandlerThread("CameraInitThread");
        mInitThread.start();
        mInitHandler = new Handler(mInitThread.getLooper());
        mInitHandler.post(new Runnable() {
            @Override
            public void run() {
                InitializeAllTheThings();
                mAllThingsInitialized = true;
                Log.v(TAG, "STARTUP_REQUIREMENT ImageReader initialization done.");
                tryToStartCaptureSession();
            }
        });

        // Set initial Noise and Edge modes.
        if (mCameraInfoCache.isHardwareLevelAtLeast(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_3)) {
            // YUV streams.
            if (mCameraInfoCache.supportedModesContains(mCameraInfoCache.noiseModes,
                    CameraCharacteristics.NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG)) {
                mCaptureNoiseMode = CameraCharacteristics.NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
            } else {
                mCaptureNoiseMode = CameraCharacteristics.NOISE_REDUCTION_MODE_FAST;
            }
            if (mCameraInfoCache.supportedModesContains(mCameraInfoCache.edgeModes,
                    CameraCharacteristics.EDGE_MODE_ZERO_SHUTTER_LAG)) {
                mCaptureEdgeMode = CameraCharacteristics.EDGE_MODE_ZERO_SHUTTER_LAG;
            } else {
                mCaptureEdgeMode = CameraCharacteristics.EDGE_MODE_FAST;
            }

            // Reprocessing.
            mReprocessingNoiseMode = CameraCharacteristics.NOISE_REDUCTION_MODE_HIGH_QUALITY;
            mReprocessingEdgeMode = CameraCharacteristics.EDGE_MODE_HIGH_QUALITY;
        }
    }

    // Ugh, why is this stuff so slow?
    private void InitializeAllTheThings() {

        // Thread to handle returned JPEGs.
        mJpegListenerThread = new HandlerThread("CameraJpegThread");
        mJpegListenerThread.start();
        mJpegListenerHandler = new Handler(mJpegListenerThread.getLooper());

        // Create ImageReader to receive JPEG image buffers via reprocessing.
        mJpegImageReader = ImageReader.newInstance(
                mCameraInfoCache.getYuvStream1Size().getWidth(),
                mCameraInfoCache.getYuvStream1Size().getHeight(),
                ImageFormat.JPEG,
                2);
        mJpegImageReader.setOnImageAvailableListener(mJpegImageListener, mJpegListenerHandler);

        // Create ImageReader to receive YUV image buffers.
        mYuv1ImageReader = ImageReader.newInstance(
                mCameraInfoCache.getYuvStream1Size().getWidth(),
                mCameraInfoCache.getYuvStream1Size().getHeight(),
                ImageFormat.YUV_420_888,
                YUV1_IMAGEREADER_SIZE);
        mYuv1ImageReader.setOnImageAvailableListener(mYuv1ImageListener, mOpsHandler);

        if (SECOND_YUV_IMAGEREADER_STREAM) {
            // Create ImageReader to receive YUV image buffers.
            mYuv2ImageReader = ImageReader.newInstance(
                    mCameraInfoCache.getYuvStream2Size().getWidth(),
                    mCameraInfoCache.getYuvStream2Size().getHeight(),
                    ImageFormat.YUV_420_888,
                    YUV2_IMAGEREADER_SIZE);
            mYuv2ImageReader.setOnImageAvailableListener(mYuv2ImageListener, mOpsHandler);
        }

        if (SECOND_SURFACE_TEXTURE_STREAM) {
            int[] textures = new int[1];
            // Generate one texture pointer and bind it as an external texture.
            GLES20.glGenTextures(1, textures, 0);
            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]);
            // No mip-mapping with camera source.
            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    GL10.GL_TEXTURE_MIN_FILTER,
                    GL10.GL_LINEAR);
            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
            // Clamp to edge is the only option.
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                    GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);

            int texture_id = textures[0];
            mSurfaceTexture = new SurfaceTexture(texture_id);
            mSurfaceTexture.setDefaultBufferSize(320, 240);
            mSurfaceTexture.setOnFrameAvailableListener(this);
            mSurfaceTextureSurface = new Surface(mSurfaceTexture);
        }

        if (RAW_STREAM_ENABLE && mCameraInfoCache.rawAvailable()) {
            // Create ImageReader to receive RAW image buffers.
            mRawImageReader = ImageReader.newInstance(
                    mCameraInfoCache.getRawStreamSize().getWidth(),
                    mCameraInfoCache.getRawStreamSize().getHeight(),
                    mCameraInfoCache.getRawFormat(),
                    RAW_IMAGEREADER_SIZE);
            mRawImageReader.setOnImageAvailableListener(mRawImageListener, mOpsHandler);
        }

        // Load click sound.
        mMediaActionSound.load(MediaActionSound.SHUTTER_CLICK);

    }

    public void setCallback(MyCameraCallback callback) {
        mMyCameraCallback = callback;
    }

    public void triggerAFScan() {
        Log.v(TAG, "AF trigger");
        issuePreviewCaptureRequest(true);
    }

    public void setCAF() {
        Log.v(TAG, "run CAF");
        issuePreviewCaptureRequest(false);
    }

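    // Shutter flow: play the click sound immediately, then hand the real work
    // to the camera ops thread, which reprocesses the most recently received
    // YUV1 frame into a JPEG (ZSL-style) via runReprocessing().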
    public void takePicture() {
        mMediaActionSound.play(MediaActionSound.SHUTTER_CLICK);
        mOpsHandler.post(new Runnable() {
            @Override
            public void run() {
                runReprocessing();
            }
        });
    }

    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        Log.v(TAG, " onFrameAvailable(SurfaceTexture)");
    }

    public void setBurst(boolean go) {
        // Take a picture on the false-to-true transition.
        if (go && !mIsBursting) {
            takePicture();
        }
        mIsBursting = go;
    }

    public boolean isRawAvailable() {
        return mCameraInfoCache.rawAvailable();
    }

    public boolean isReprocessingAvailable() {
        return mCameraInfoCache.isYuvReprocessingAvailable();
    }

    @Override
    public Size getPreviewSize() {
        return mCameraInfoCache.getPreviewSize();
    }

    @Override
    public float[] getFieldOfView() {
        return mCameraInfoCache.getFieldOfView();
    }

    @Override
    public int getOrientation() {
        return mCameraInfoCache.sensorOrientation();
    }

    @Override
    public void openCamera() {
        // If API2 FULL mode is not available, display a toast.
        if (!mCameraInfoCache.isCamera2FullModeAvailable()) {
            mMyCameraCallback.noCamera2Full();
        }

        Log.v(TAG, "Opening camera " + mCameraInfoCache.getCameraId());
        mOpsHandler.post(new Runnable() {
            @Override
            public void run() {
                CameraTimer.t_open_start = SystemClock.elapsedRealtime();
                try {
                    mCameraManager.openCamera(mCameraInfoCache.getCameraId(), mCameraStateCallback, null);
                } catch (CameraAccessException e) {
                    Log.e(TAG, "Unable to openCamera().");
                }
            }
        });
    }

    @Override
    public void closeCamera() {
        // TODO: We are stalling the main thread now, which is bad.
        Log.v(TAG, "Closing camera " + mCameraInfoCache.getCameraId());
        if (mCameraDevice != null) {
            try {
                mCurrentCaptureSession.abortCaptures();
            } catch (CameraAccessException e) {
                Log.e(TAG, "Could not abortCaptures().");
            }
            mCameraDevice.close();
        }
        mCurrentCaptureSession = null;
        Log.v(TAG, "Done closing camera " + mCameraInfoCache.getCameraId());
    }

    public void startPreview(final Surface surface) {
        Log.v(TAG, "STARTUP_REQUIREMENT preview Surface ready.");
        mPreviewSurface = surface;
        tryToStartCaptureSession();
    }

    private CameraDevice.StateCallback mCameraStateCallback = new LoggingCallbacks.DeviceStateCallback() {
        @Override
        public void onOpened(CameraDevice camera) {
            CameraTimer.t_open_end = SystemClock.elapsedRealtime();
            mCameraDevice = camera;
            Log.v(TAG, "STARTUP_REQUIREMENT Done opening camera " + mCameraInfoCache.getCameraId() +
                    ". HAL open took: (" + (CameraTimer.t_open_end - CameraTimer.t_open_start) + " ms)");

            super.onOpened(camera);
            tryToStartCaptureSession();
        }
    };

    private void tryToStartCaptureSession() {
        if (mCameraDevice != null && mAllThingsInitialized && mPreviewSurface != null) {
            mOpsHandler.post(new Runnable() {
                @Override
                public void run() {
                    // It used to be that this needed to be posted on a Handler.
                    startCaptureSession();
                }
            });
        }
    }

    // Create CameraCaptureSession. Callback will start repeating request with current parameters.
    private void startCaptureSession() {
        CameraTimer.t_session_go = SystemClock.elapsedRealtime();

        Log.v(TAG, "Configuring session..");
        List<Surface> outputSurfaces = new ArrayList<Surface>(3);

        outputSurfaces.add(mPreviewSurface);
        Log.v(TAG, "  .. added SurfaceView " + mCameraInfoCache.getPreviewSize().getWidth() +
                " x " + mCameraInfoCache.getPreviewSize().getHeight());

        outputSurfaces.add(mYuv1ImageReader.getSurface());
        Log.v(TAG, "  .. added YUV ImageReader " + mCameraInfoCache.getYuvStream1Size().getWidth() +
                " x " + mCameraInfoCache.getYuvStream1Size().getHeight());

        if (SECOND_YUV_IMAGEREADER_STREAM) {
            outputSurfaces.add(mYuv2ImageReader.getSurface());
            Log.v(TAG, "  .. added YUV ImageReader " + mCameraInfoCache.getYuvStream2Size().getWidth() +
                    " x " + mCameraInfoCache.getYuvStream2Size().getHeight());
        }

        if (SECOND_SURFACE_TEXTURE_STREAM) {
            outputSurfaces.add(mSurfaceTextureSurface);
            Log.v(TAG, "  .. added SurfaceTexture");
        }

        if (RAW_STREAM_ENABLE && mCameraInfoCache.rawAvailable()) {
            outputSurfaces.add(mRawImageReader.getSurface());
            Log.v(TAG, "  .. added Raw ImageReader " + mCameraInfoCache.getRawStreamSize().getWidth() +
                    " x " + mCameraInfoCache.getRawStreamSize().getHeight());
        }

        if (USE_REPROCESSING_IF_AVAIL && mCameraInfoCache.isYuvReprocessingAvailable()) {
            outputSurfaces.add(mJpegImageReader.getSurface());
            Log.v(TAG, "  .. added JPEG ImageReader " + mCameraInfoCache.getJpegStreamSize().getWidth() +
                    " x " + mCameraInfoCache.getJpegStreamSize().getHeight());
        }

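        // For reprocessing, the session input is configured to match the YUV1
        // stream, so frames captured into mYuv1ImageReader can be sent back to
        // the camera through an ImageWriter attached to the input Surface.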
        try {
            if (USE_REPROCESSING_IF_AVAIL && mCameraInfoCache.isYuvReprocessingAvailable()) {
                InputConfiguration inputConfig = new InputConfiguration(mCameraInfoCache.getYuvStream1Size().getWidth(),
                        mCameraInfoCache.getYuvStream1Size().getHeight(), ImageFormat.YUV_420_888);
                mCameraDevice.createReprocessableCaptureSession(inputConfig, outputSurfaces,
                        mSessionStateCallback, null);
                Log.v(TAG, "  Call to createReprocessableCaptureSession complete.");
            } else {
                mCameraDevice.createCaptureSession(outputSurfaces, mSessionStateCallback, null);
                Log.v(TAG, "  Call to createCaptureSession complete.");
            }

        } catch (CameraAccessException e) {
            Log.e(TAG, "Error configuring ISP.");
        }
    }

    ImageWriter mImageWriter;

    private CameraCaptureSession.StateCallback mSessionStateCallback = new LoggingCallbacks.SessionStateCallback() {
        @Override
        public void onReady(CameraCaptureSession session) {
            Log.v(TAG, "capture session onReady().  HAL capture session took: (" + (SystemClock.elapsedRealtime() - CameraTimer.t_session_go) + " ms)");
            mCurrentCaptureSession = session;
            issuePreviewCaptureRequest(false);

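            // The ImageWriter feeds the session's input Surface; queueing a
            // YUV Image into it is what makes a later reprocess request possible.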
            if (session.isReprocessable()) {
                mImageWriter = ImageWriter.newInstance(session.getInputSurface(), IMAGEWRITER_SIZE);
                mImageWriter.setOnImageReleasedListener(
                        new ImageWriter.OnImageReleasedListener() {
                            @Override
                            public void onImageReleased(ImageWriter writer) {
                                Log.v(TAG, "ImageWriter.OnImageReleasedListener onImageReleased()");
                            }
                        }, null);
                Log.v(TAG, "Created ImageWriter.");
            }
            super.onReady(session);
        }
    };

    // Variables to hold capture flow state.
    private boolean mCaptureYuv1 = false;
    private boolean mCaptureYuv2 = false;
    private boolean mCaptureRaw = false;
    private int mCaptureNoiseMode = CaptureRequest.NOISE_REDUCTION_MODE_FAST;
    private int mCaptureEdgeMode = CaptureRequest.EDGE_MODE_FAST;
    private boolean mCaptureFace = false;
    // Variables to hold reprocessing state.
    private int mReprocessingNoiseMode = CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY;
    private int mReprocessingEdgeMode = CaptureRequest.EDGE_MODE_HIGH_QUALITY;

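    // Parameter semantics: yuv1, yuv2, raw10 and face may be null, meaning
    // "leave unchanged". nr and edge are auto-unboxed below, so callers are
    // expected to pass non-null values; true cycles to the next supported mode.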
    public void setCaptureFlow(Boolean yuv1, Boolean yuv2, Boolean raw10, Boolean nr, Boolean edge, Boolean face) {
        if (yuv1 != null) mCaptureYuv1 = yuv1;
        if (yuv2 != null) mCaptureYuv2 = yuv2;
        if (raw10 != null) mCaptureRaw = raw10 && RAW_STREAM_ENABLE;
        if (nr) {
            mCaptureNoiseMode = getNextMode(mCaptureNoiseMode, mCameraInfoCache.noiseModes);
        }
        if (edge) {
            mCaptureEdgeMode = getNextMode(mCaptureEdgeMode, mCameraInfoCache.edgeModes);
        }
        if (face != null) mCaptureFace = face;
        mMyCameraCallback.setNoiseEdgeText(
                "NR " + noiseModeToString(mCaptureNoiseMode),
                "Edge " + edgeModeToString(mCaptureEdgeMode)
        );

        if (mCurrentCaptureSession != null) {
            issuePreviewCaptureRequest(false);
        }
    }

    public void setReprocessingFlow(Boolean nr, Boolean edge) {
        if (nr) {
            mReprocessingNoiseMode = getNextMode(mReprocessingNoiseMode, mCameraInfoCache.noiseModes);
        }
        if (edge) {
            mReprocessingEdgeMode = getNextMode(mReprocessingEdgeMode, mCameraInfoCache.edgeModes);
        }
        mMyCameraCallback.setNoiseEdgeTextForReprocessing(
                "NR " + noiseModeToString(mReprocessingNoiseMode),
                "Edge " + edgeModeToString(mReprocessingEdgeMode)
        );
    }

    public void issuePreviewCaptureRequest(boolean AFtrigger) {
        CameraTimer.t_burst = SystemClock.elapsedRealtime();
        Log.v(TAG, "issuePreviewCaptureRequest...");
        try {
            CaptureRequest.Builder b1 = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
            b1.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_USE_SCENE_MODE);
            b1.set(CaptureRequest.CONTROL_SCENE_MODE, CameraMetadata.CONTROL_SCENE_MODE_FACE_PRIORITY);
            if (AFtrigger) {
                b1.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_AUTO);
            } else {
                b1.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
            }

            b1.set(CaptureRequest.NOISE_REDUCTION_MODE, mCaptureNoiseMode);
            b1.set(CaptureRequest.EDGE_MODE, mCaptureEdgeMode);
            b1.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, mCaptureFace ? mCameraInfoCache.bestFaceDetectionMode() : CaptureRequest.STATISTICS_FACE_DETECT_MODE_OFF);

            Log.v(TAG, "  .. NR=" + mCaptureNoiseMode + "  Edge=" + mCaptureEdgeMode + "  Face=" + mCaptureFace);

            if (mCaptureYuv1) {
                b1.addTarget(mYuv1ImageReader.getSurface());
                Log.v(TAG, "  .. YUV1 on");
            }

            if (mCaptureRaw) {
                b1.addTarget(mRawImageReader.getSurface());
            }

            b1.addTarget(mPreviewSurface);

            if (mCaptureYuv2) {
                if (SECOND_SURFACE_TEXTURE_STREAM) {
                    b1.addTarget(mSurfaceTextureSurface);
                }
                if (SECOND_YUV_IMAGEREADER_STREAM) {
                    b1.addTarget(mYuv2ImageReader.getSurface());
                }
                Log.v(TAG, "  .. YUV2 on");
            }

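            // Fire the AF trigger exactly once: a single capture carries
            // CONTROL_AF_TRIGGER_START, then the builder is reset to
            // CONTROL_AF_TRIGGER_IDLE before installing the repeating request,
            // so the trigger is not re-sent on every preview frame.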
            if (AFtrigger) {
                b1.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
                mCurrentCaptureSession.capture(b1.build(), mCaptureCallback, mOpsHandler);
                b1.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
            }
            mCurrentCaptureSession.setRepeatingRequest(b1.build(), mCaptureCallback, mOpsHandler);
        } catch (CameraAccessException e) {
            Log.e(TAG, "Could not access camera for issuePreviewCaptureRequest.");
        }
    }

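    // ZSL-style reprocessing: queue the most recently received YUV1 frame into
    // the ImageWriter (the session's input), then submit a reprocess request
    // derived from that frame's TotalCaptureResult. The finished JPEG arrives
    // on mJpegImageReader.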
    void runReprocessing() {
        if (mYuv1LastReceivedImage == null) {
            Log.e(TAG, "No YUV Image available.");
            return;
        }
        mImageWriter.queueInputImage(mYuv1LastReceivedImage);
        Log.v(TAG, "  Sent YUV1 image to ImageWriter.queueInputImage()");
        try {
            CaptureRequest.Builder b1 = mCameraDevice.createReprocessCaptureRequest(mLastTotalCaptureResult);
            // TODO: Read current orientation instead of just assuming device is in native orientation.
            b1.set(CaptureRequest.JPEG_ORIENTATION, mCameraInfoCache.sensorOrientation());
            b1.set(CaptureRequest.JPEG_QUALITY, (byte) 95);
            b1.set(CaptureRequest.NOISE_REDUCTION_MODE, mReprocessingNoiseMode);
            b1.set(CaptureRequest.EDGE_MODE, mReprocessingEdgeMode);
            b1.addTarget(mJpegImageReader.getSurface());
            mCurrentCaptureSession.capture(b1.build(), mReprocessingCaptureCallback, mOpsHandler);
            mReprocessingRequestNanoTime = System.nanoTime();
        } catch (CameraAccessException e) {
            Log.e(TAG, "Could not access camera for runReprocessing.");
        }
        mYuv1LastReceivedImage = null;
        Log.v(TAG, "  Reprocessing request submitted.");
    }


    /*********************************
     * onImageAvailable() processing *
     *********************************/

    ImageReader.OnImageAvailableListener mYuv1ImageListener =
            new ImageReader.OnImageAvailableListener() {
                @Override
                public void onImageAvailable(ImageReader reader) {
                    Image img = reader.acquireLatestImage();
                    if (img == null) {
                        Log.e(TAG, "Null image returned YUV1");
                        return;
                    }
                    if (mYuv1LastReceivedImage != null) {
                        mYuv1LastReceivedImage.close();
                    }
                    mYuv1LastReceivedImage = img;
                    if (++mYuv1ImageCounter % LOG_NTH_FRAME == 0) {
                        Log.v(TAG, "YUV1 buffer available, Frame #=" + mYuv1ImageCounter + " w=" + img.getWidth() + " h=" + img.getHeight() + " time=" + img.getTimestamp());
                    }

                }
            };


    ImageReader.OnImageAvailableListener mJpegImageListener =
            new ImageReader.OnImageAvailableListener() {
                @Override
                public void onImageAvailable(ImageReader reader) {
                    Image img = reader.acquireLatestImage();
                    if (img == null) {
                        Log.e(TAG, "Null image returned JPEG");
                        return;
                    }
                    Image.Plane plane0 = img.getPlanes()[0];
                    final ByteBuffer buffer = plane0.getBuffer();
                    long dt = System.nanoTime() - mReprocessingRequestNanoTime;
                    Log.v(TAG, String.format("JPEG buffer available, w=%d h=%d time=%d size=%d dt=%.1f ms  ISO=%d",
                            img.getWidth(), img.getHeight(), img.getTimestamp(), buffer.capacity(), 0.000001 * dt, mLastIso));
                    // Save JPEG on the utility thread.
                    final byte[] jpegBuf;
                    if (buffer.hasArray()) {
                        jpegBuf = buffer.array();
                    } else {
                        jpegBuf = new byte[buffer.capacity()];
                        buffer.get(jpegBuf);
                    }
                    mMyCameraCallback.jpegAvailable(jpegBuf, img.getWidth(), img.getHeight());
                    img.close();

                    // Take (reprocess) another picture right away if bursting.
                    if (mIsBursting) {
                        takePicture();
                    }
                }
            };


    ImageReader.OnImageAvailableListener mYuv2ImageListener =
            new ImageReader.OnImageAvailableListener() {
                @Override
                public void onImageAvailable(ImageReader reader) {
                    Image img = reader.acquireLatestImage();
                    if (img == null) {
                        Log.e(TAG, "Null image returned YUV2");
                    } else {
                        if (++mYuv2ImageCounter % LOG_NTH_FRAME == 0) {
                            Log.v(TAG, "YUV2 buffer available, Frame #=" + mYuv2ImageCounter + " w=" + img.getWidth() + " h=" + img.getHeight() + " time=" + img.getTimestamp());
                        }
                        img.close();
                    }
                }
            };


    ImageReader.OnImageAvailableListener mRawImageListener =
            new ImageReader.OnImageAvailableListener() {
                @Override
                public void onImageAvailable(ImageReader reader) {
                    final Image img = reader.acquireLatestImage();
                    if (img == null) {
                        Log.e(TAG, "Null image returned RAW");
                    } else {
                        if (++mRawImageCounter % LOG_NTH_FRAME == 0) {
                            Image.Plane plane0 = img.getPlanes()[0];
                            final ByteBuffer buffer = plane0.getBuffer();
                            Log.v(TAG, "Raw buffer available, Frame #=" + mRawImageCounter + " w=" + img.getWidth()
667                                     + " h=" + img.getHeight()
668                                     + " format=" + CameraDeviceReport.getFormatName(img.getFormat())
669                                     + " time=" + img.getTimestamp()
670                                     + " size=" + buffer.capacity()
671                                     + " getRowStride()=" + plane0.getRowStride());
672                         }
673                         img.close();
674                     }
675                 }
676             };
677 
678     /*************************************
679      * CaptureResult metadata processing *
680      *************************************/
681 
682     private CameraCaptureSession.CaptureCallback mCaptureCallback = new LoggingCallbacks.SessionCaptureCallback() {
683         @Override
684         public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
685             if (!mFirstFrameArrived) {
686                 mFirstFrameArrived = true;
687                 long now = SystemClock.elapsedRealtime();
688                 long dt = now - CameraTimer.t0;
689                 long camera_dt = now - CameraTimer.t_session_go + CameraTimer.t_open_end - CameraTimer.t_open_start;
690                 long repeating_req_dt = now - CameraTimer.t_burst;
691                 Log.v(TAG, "App control to first frame: (" + dt + " ms)");
692                 Log.v(TAG, "HAL request to first frame: (" + repeating_req_dt + " ms) " + " Total HAL wait: (" + camera_dt + " ms)");
693                 mMyCameraCallback.receivedFirstFrame();
694                 mMyCameraCallback.performanceDataAvailable((int) dt, (int) camera_dt, null);
695             }
696             publishFrameData(result);
697             // Used for reprocessing.
698             mLastTotalCaptureResult = result;
699             super.onCaptureCompleted(session, request, result);
700         }
701     };
702 
703     // Reprocessing capture completed.
704     private CameraCaptureSession.CaptureCallback mReprocessingCaptureCallback = new LoggingCallbacks.SessionCaptureCallback() {
705         @Override
706         public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
707             Log.v(TAG, "Reprocessing onCaptureCompleted()");
708         }
709     };
710 
711     private static double SHORT_LOG_EXPOSURE = Math.log10(1000000000 / 10000); // 1/10000 second
712     private static double LONG_LOG_EXPOSURE = Math.log10(1000000000 / 10); // 1/10 second
713     public int FPS_CALC_LOOKBACK = 15;
714     private LinkedList<Long> mFrameTimes = new LinkedList<Long>();
715 
publishFrameData(TotalCaptureResult result)716     private void publishFrameData(TotalCaptureResult result) {
717         // Faces.
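        // Face rectangles arrive in sensor active-array coordinates.
        // NormalizedFace (a DevCamera helper) rescales them against the active
        // area minus the face offsets; front-camera results are then mirrored
        // below to match the mirrored preview, based on sensor orientation.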
        final Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
        NormalizedFace[] newFaces = new NormalizedFace[faces.length];
        if (faces.length > 0) {
            int offX = mCameraInfoCache.faceOffsetX();
            int offY = mCameraInfoCache.faceOffsetY();
            int dX = mCameraInfoCache.activeAreaWidth() - 2 * offX;
            int dY = mCameraInfoCache.activeAreaHeight() - 2 * offY;
            if (mCameraInfoCache.IS_NEXUS_6 && mCameraIsFront) {
                // Front camera on Nexus 6 is currently 16 x 9 cropped to 4 x 3.
                // TODO: Generalize this.
                int cropOffset = dX / 8;
                dX -= 2 * cropOffset;
                offX += cropOffset;
            }
            int orientation = mCameraInfoCache.sensorOrientation();
            for (int i = 0; i < faces.length; ++i) {
                newFaces[i] = new NormalizedFace(faces[i], dX, dY, offX, offY);
                if (mCameraIsFront && orientation == 90) {
                    newFaces[i].mirrorInY();
                }
                if (mCameraIsFront && orientation == 270) {
                    newFaces[i].mirrorInX();
                }
                if (!mCameraIsFront && orientation == 270) {
                    newFaces[i].mirrorInX();
                    newFaces[i].mirrorInY();
                }
            }
        }

        // Normalized lens and exposure coordinates.
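        // Exposure is normalized on a log10 scale between SHORT_LOG_EXPOSURE
        // (1/10000 s) and LONG_LOG_EXPOSURE (1/10 s); lens position is
        // normalized between the camera's high and low diopter limits.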
        double rm = Math.log10(result.get(CaptureResult.SENSOR_EXPOSURE_TIME));
        float normExposure = (float) ((rm - SHORT_LOG_EXPOSURE) / (LONG_LOG_EXPOSURE - SHORT_LOG_EXPOSURE));
        float normLensPos = (mCameraInfoCache.getDiopterHi() - result.get(CaptureResult.LENS_FOCUS_DISTANCE)) / (mCameraInfoCache.getDiopterHi() - mCameraInfoCache.getDiopterLow());
        mLastIso = result.get(CaptureResult.SENSOR_SENSITIVITY);

        // Update frame arrival history.
        mFrameTimes.add(result.get(CaptureResult.SENSOR_TIMESTAMP));
        if (mFrameTimes.size() > FPS_CALC_LOOKBACK) {
            mFrameTimes.removeFirst();
        }

        // Frame drop detector.
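        // A gap between consecutive sensor timestamps larger than 1.5x the
        // reported frame duration implies at least one dropped frame;
        // drops = dt / frameDuration - 1, so a 100 ms gap at 33 ms/frame
        // reports roughly 2 dropped frames.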
        {
            float frameDuration = result.get(CaptureResult.SENSOR_FRAME_DURATION);
            if (mFrameTimes.size() > 1) {
                long dt = result.get(CaptureResult.SENSOR_TIMESTAMP) - mFrameTimes.get(mFrameTimes.size() - 2);
                if (dt > 3 * frameDuration / 2 && LOG_DROPPED_FRAMES) {
                    float drops = (dt * 1f / frameDuration) - 1f;
                    Log.e(TAG, String.format("dropped %.2f frames", drops));
                    mMyCameraCallback.performanceDataAvailable(null, null, drops);
                }
            }
        }

        // FPS calc.
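        // FPS is computed over a sliding window of up to FPS_CALC_LOOKBACK
        // sensor timestamps (nanoseconds): (n - 1) frame intervals divided by
        // the elapsed time between the first and last timestamps in the window.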
        float fps = 0;
        if (mFrameTimes.size() > 1) {
            long dt = mFrameTimes.getLast() - mFrameTimes.getFirst();
            fps = (mFrameTimes.size() - 1) * 1000000000f / dt;
            fps = (float) Math.floor(fps + 0.1); // round to nearest whole number, ish.
        }

        // Do callback.
        if (mMyCameraCallback != null) {
            mMyCameraCallback.frameDataAvailable(newFaces, normExposure, normLensPos, fps,
                    (int) mLastIso, result.get(CaptureResult.CONTROL_AF_STATE), result.get(CaptureResult.CONTROL_AE_STATE), result.get(CaptureResult.CONTROL_AWB_STATE));
        } else {
            Log.v(TAG, "mMyCameraCallback is null!");
        }
    }

    long mLastIso = 0;

    /*********************
     * UTILITY FUNCTIONS *
     *********************/

    /**
     * Return the next mode after currentMode in supportedModes, wrapping to
     * the start of the mode list if currentMode is last. Returns currentMode
     * if it is not found in supportedModes.
     *
     * @param currentMode
     * @param supportedModes
     * @return next mode after currentMode in supportedModes
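     *
     * Example: with supportedModes = {OFF, FAST, HIGH_QUALITY},
     * getNextMode(FAST, supportedModes) returns HIGH_QUALITY and
     * getNextMode(HIGH_QUALITY, supportedModes) wraps around to OFF.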
     */
    private int getNextMode(int currentMode, int[] supportedModes) {
        boolean getNext = false;
        for (int m : supportedModes) {
            if (getNext) {
                return m;
            }
            if (m == currentMode) {
                getNext = true;
            }
        }
        if (getNext) {
            return supportedModes[0];
        }
        // Can't find mode in list.
        return currentMode;
    }

    private static String edgeModeToString(int mode) {
        switch (mode) {
            case CaptureRequest.EDGE_MODE_OFF:
                return "OFF";
            case CaptureRequest.EDGE_MODE_FAST:
                return "FAST";
            case CaptureRequest.EDGE_MODE_HIGH_QUALITY:
                return "HiQ";
            case CaptureRequest.EDGE_MODE_ZERO_SHUTTER_LAG:
                return "ZSL";
        }
        return Integer.toString(mode);
    }

    private static String noiseModeToString(int mode) {
        switch (mode) {
            case CaptureRequest.NOISE_REDUCTION_MODE_OFF:
                return "OFF";
            case CaptureRequest.NOISE_REDUCTION_MODE_FAST:
                return "FAST";
            case CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY:
                return "HiQ";
            case CaptureRequest.NOISE_REDUCTION_MODE_MINIMAL:
                return "MIN";
            case CaptureRequest.NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG:
                return "ZSL";
        }
        return Integer.toString(mode);
    }
}