/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.camera2.its;

import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.graphics.ImageFormat;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.DngCreator;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.MeteringRectangle;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.media.Image;
import android.media.ImageReader;
import android.net.Uri;
import android.os.ConditionVariable;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.IBinder;
import android.os.Message;
import android.os.Vibrator;
import android.util.Log;
import android.util.Rational;
import android.util.Size;
import android.view.Surface;

import com.android.ex.camera2.blocking.BlockingCameraManager;
import com.android.ex.camera2.blocking.BlockingCameraManager.BlockingOpenException;
import com.android.ex.camera2.blocking.BlockingStateCallback;
import com.android.ex.camera2.blocking.BlockingSessionCallback;

import org.json.JSONArray;
import org.json.JSONObject;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.math.BigInteger;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketTimeoutException;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.security.MessageDigest;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

public class ItsService extends Service implements SensorEventListener {
    public static final String TAG = ItsService.class.getSimpleName();

    // Timeouts, in seconds.
    public static final int TIMEOUT_CALLBACK = 3;
    public static final int TIMEOUT_3A = 10;

    // State transition timeouts, in ms.
    private static final long TIMEOUT_IDLE_MS = 2000;
    private static final long TIMEOUT_STATE_MS = 500;

    // Timeout to wait for a capture result after the capture buffer has arrived, in ms.
    private static final long TIMEOUT_CAP_RES = 2000;

    private static final int MAX_CONCURRENT_READER_BUFFERS = 8;

    // Supports at most RAW+YUV+JPEG, one surface each.
    private static final int MAX_NUM_OUTPUT_SURFACES = 3;

    public static final int SERVERPORT = 6000;

    public static final String REGION_KEY = "regions";
    public static final String REGION_AE_KEY = "ae";
    public static final String REGION_AWB_KEY = "awb";
    public static final String REGION_AF_KEY = "af";
    public static final String LOCK_AE_KEY = "aeLock";
    public static final String LOCK_AWB_KEY = "awbLock";
    public static final String TRIGGER_KEY = "triggers";
    public static final String TRIGGER_AE_KEY = "ae";
    public static final String TRIGGER_AF_KEY = "af";
    public static final String VIB_PATTERN_KEY = "pattern";

    private CameraManager mCameraManager = null;
    private HandlerThread mCameraThread = null;
    private Handler mCameraHandler = null;
    private BlockingCameraManager mBlockingCameraManager = null;
    private BlockingStateCallback mCameraListener = null;
    private CameraDevice mCamera = null;
    private CameraCaptureSession mSession = null;
    private ImageReader[] mCaptureReaders = null;
    private CameraCharacteristics mCameraCharacteristics = null;

    private Vibrator mVibrator = null;

    private HandlerThread mSaveThreads[] = new HandlerThread[MAX_NUM_OUTPUT_SURFACES];
    private Handler mSaveHandlers[] = new Handler[MAX_NUM_OUTPUT_SURFACES];
    private HandlerThread mResultThread = null;
    private Handler mResultHandler = null;

    private volatile boolean mThreadExitFlag = false;

    private volatile ServerSocket mSocket = null;
    private volatile SocketRunnable mSocketRunnableObj = null;
    private volatile BlockingQueue<ByteBuffer> mSocketWriteQueue =
            new LinkedBlockingDeque<ByteBuffer>();
    private final Object mSocketWriteEnqueueLock = new Object();
    private final Object mSocketWriteDrainLock = new Object();

    private volatile BlockingQueue<Object[]> mSerializerQueue =
            new LinkedBlockingDeque<Object[]>();

    private AtomicInteger mCountCallbacksRemaining = new AtomicInteger();
    private AtomicInteger mCountRawOrDng = new AtomicInteger();
    private AtomicInteger mCountRaw10 = new AtomicInteger();
    private AtomicInteger mCountJpg = new AtomicInteger();
    private AtomicInteger mCountYuv = new AtomicInteger();
    private AtomicInteger mCountCapRes = new AtomicInteger();
    private boolean mCaptureRawIsDng;
    private CaptureResult mCaptureResults[] = null;

    private volatile ConditionVariable mInterlock3A = new ConditionVariable(true);
    private volatile boolean mIssuedRequest3A = false;
    private volatile boolean mConvergedAE = false;
    private volatile boolean mConvergedAF = false;
    private volatile boolean mConvergedAWB = false;
    private volatile boolean mLockedAE = false;
    private volatile boolean mLockedAWB = false;
    private volatile boolean mNeedsLockedAE = false;
    private volatile boolean mNeedsLockedAWB = false;

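    // Plain holder for the fields of a SensorEvent, so events can be buffered in a list and
    // later serialized back to the host script.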
    class MySensorEvent {
        public Sensor sensor;
        public int accuracy;
        public long timestamp;
        public float values[];
    }

    // For capturing motion sensor traces.
    private SensorManager mSensorManager = null;
    private Sensor mAccelSensor = null;
    private Sensor mMagSensor = null;
    private Sensor mGyroSensor = null;
    private volatile LinkedList<MySensorEvent> mEvents = null;
    private volatile Object mEventLock = new Object();
    private volatile boolean mEventsEnabled = false;

    public interface CaptureCallback {
        void onCaptureAvailable(Image capture);
    }

    public abstract class CaptureResultListener extends CameraCaptureSession.CaptureCallback {}

    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }

    @Override
    public void onCreate() {
        try {
            mThreadExitFlag = false;

            // Get handle to camera manager.
            mCameraManager = (CameraManager) this.getSystemService(Context.CAMERA_SERVICE);
            if (mCameraManager == null) {
                throw new ItsException("Failed to connect to camera manager");
            }
            mBlockingCameraManager = new BlockingCameraManager(mCameraManager);
            mCameraListener = new BlockingStateCallback();

            // Register for motion events.
            mEvents = new LinkedList<MySensorEvent>();
            mSensorManager = (SensorManager)getSystemService(Context.SENSOR_SERVICE);
            mAccelSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
            mMagSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD);
            mGyroSensor = mSensorManager.getDefaultSensor(Sensor.TYPE_GYROSCOPE);
            mSensorManager.registerListener(this, mAccelSensor, SensorManager.SENSOR_DELAY_FASTEST);
            mSensorManager.registerListener(this, mMagSensor, SensorManager.SENSOR_DELAY_FASTEST);
            mSensorManager.registerListener(this, mGyroSensor, SensorManager.SENSOR_DELAY_FASTEST);

            // Get a handle to the system vibrator.
            mVibrator = (Vibrator)getSystemService(Context.VIBRATOR_SERVICE);

            // Create threads to receive images and save them.
            for (int i = 0; i < MAX_NUM_OUTPUT_SURFACES; i++) {
                mSaveThreads[i] = new HandlerThread("SaveThread" + i);
                mSaveThreads[i].start();
                mSaveHandlers[i] = new Handler(mSaveThreads[i].getLooper());
            }

            // Create a thread to handle object serialization.
            (new Thread(new SerializerRunnable())).start();

            // Create a thread to receive capture results and process them.
            mResultThread = new HandlerThread("ResultThread");
            mResultThread.start();
            mResultHandler = new Handler(mResultThread.getLooper());

            // Create a thread for the camera device.
            mCameraThread = new HandlerThread("ItsCameraThread");
            mCameraThread.start();
            mCameraHandler = new Handler(mCameraThread.getLooper());

            // Create a thread to process commands, listening on a TCP socket.
            mSocketRunnableObj = new SocketRunnable();
            (new Thread(mSocketRunnableObj)).start();
        } catch (ItsException e) {
            Logt.e(TAG, "Service failed to start: ", e);
        }
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        try {
            // Just log a message indicating that the service is running and is able to accept
            // socket connections.
            while (!mThreadExitFlag && mSocket==null) {
                Thread.sleep(1);
            }
            if (!mThreadExitFlag){
                Logt.i(TAG, "ItsService ready");
            } else {
                Logt.e(TAG, "Starting ItsService in bad state");
            }
        } catch (java.lang.InterruptedException e) {
            Logt.e(TAG, "Error starting ItsService (interrupted)", e);
        }
        return START_STICKY;
    }

    @Override
    public void onDestroy() {
        mThreadExitFlag = true;
        for (int i = 0; i < MAX_NUM_OUTPUT_SURFACES; i++) {
            if (mSaveThreads[i] != null) {
                mSaveThreads[i].quit();
                mSaveThreads[i] = null;
            }
        }
        if (mResultThread != null) {
            mResultThread.quitSafely();
            mResultThread = null;
        }
        if (mCameraThread != null) {
            mCameraThread.quitSafely();
            mCameraThread = null;
        }
    }

    public void openCameraDevice(int cameraId) throws ItsException {
        Logt.i(TAG, String.format("Opening camera %d", cameraId));

        String[] devices;
        try {
            devices = mCameraManager.getCameraIdList();
            if (devices == null || devices.length == 0) {
                throw new ItsException("No camera devices");
            }
        } catch (CameraAccessException e) {
            throw new ItsException("Failed to get device ID list", e);
        }

        try {
            mCamera = mBlockingCameraManager.openCamera(devices[cameraId],
                    mCameraListener, mCameraHandler);
            mCameraCharacteristics = mCameraManager.getCameraCharacteristics(
                    devices[cameraId]);
        } catch (CameraAccessException e) {
            throw new ItsException("Failed to open camera", e);
        } catch (BlockingOpenException e) {
            throw new ItsException("Failed to open camera (after blocking)", e);
        }
        mSocketRunnableObj.sendResponse("cameraOpened", "");
    }

    public void closeCameraDevice() throws ItsException {
        try {
            if (mCamera != null) {
                Logt.i(TAG, "Closing camera");
                mCamera.close();
                mCamera = null;
            }
        } catch (Exception e) {
            throw new ItsException("Failed to close device");
        }
        mSocketRunnableObj.sendResponse("cameraClosed", "");
    }

    class SerializerRunnable implements Runnable {
        // Use a separate thread to perform JSON serialization (since this can be slow due to
        // the reflection).
        @Override
        public void run() {
            Logt.i(TAG, "Serializer thread starting");
            while (! mThreadExitFlag) {
                try {
                    Object objs[] = mSerializerQueue.take();
                    JSONObject jsonObj = new JSONObject();
                    String tag = null;
                    for (int i = 0; i < objs.length; i++) {
                        Object obj = objs[i];
                        if (obj instanceof String) {
                            if (tag != null) {
                                throw new ItsException("Multiple tags for socket response");
                            }
                            tag = (String)obj;
                        } else if (obj instanceof CameraCharacteristics) {
                            jsonObj.put("cameraProperties", ItsSerializer.serialize(
                                    (CameraCharacteristics)obj));
                        } else if (obj instanceof CaptureRequest) {
                            jsonObj.put("captureRequest", ItsSerializer.serialize(
                                    (CaptureRequest)obj));
                        } else if (obj instanceof CaptureResult) {
                            jsonObj.put("captureResult", ItsSerializer.serialize(
                                    (CaptureResult)obj));
                        } else if (obj instanceof JSONArray) {
                            jsonObj.put("outputs", (JSONArray)obj);
                        } else {
                            throw new ItsException("Invalid object received for serialization");
                        }
                    }
                    if (tag == null) {
                        throw new ItsException("No tag provided for socket response");
                    }
                    mSocketRunnableObj.sendResponse(tag, null, jsonObj, null);
                    Logt.i(TAG, String.format("Serialized %s", tag));
                } catch (org.json.JSONException e) {
                    Logt.e(TAG, "Error serializing object", e);
                    break;
                } catch (ItsException e) {
                    Logt.e(TAG, "Error serializing object", e);
                    break;
                } catch (java.lang.InterruptedException e) {
                    Logt.e(TAG, "Error serializing object (interrupted)", e);
                    break;
                }
            }
            Logt.i(TAG, "Serializer thread terminated");
        }
    }

    class SocketWriteRunnable implements Runnable {

        // Use a separate thread to service a queue of objects to be written to the socket,
        // writing each sequentially in order. This is needed since different handler functions
        // (called on different threads) will need to send data back to the host script.

        public Socket mOpenSocket = null;

        public SocketWriteRunnable(Socket openSocket) {
            mOpenSocket = openSocket;
        }

        public void setOpenSocket(Socket openSocket) {
            mOpenSocket = openSocket;
        }

        @Override
        public void run() {
            Logt.i(TAG, "Socket writer thread starting");
            while (true) {
                try {
                    ByteBuffer b = mSocketWriteQueue.take();
                    synchronized(mSocketWriteDrainLock) {
                        if (mOpenSocket == null) {
                            continue;
                        }
                        if (b.hasArray()) {
                            mOpenSocket.getOutputStream().write(b.array());
                        } else {
                            byte[] barray = new byte[b.capacity()];
                            b.get(barray);
                            mOpenSocket.getOutputStream().write(barray);
                        }
                        mOpenSocket.getOutputStream().flush();
                        Logt.i(TAG, String.format("Wrote to socket: %d bytes", b.capacity()));
                    }
                } catch (IOException e) {
                    Logt.e(TAG, "Error writing to socket", e);
                    break;
                } catch (java.lang.InterruptedException e) {
                    Logt.e(TAG, "Error writing to socket (interrupted)", e);
                    break;
                }
            }
            Logt.i(TAG, "Socket writer thread terminated");
        }
    }

    class SocketRunnable implements Runnable {

        // Format of sent messages (over the socket):
        // * Serialized JSON object on a single line (newline-terminated)
        // * For byte buffers, the binary data then follows
        //
        // Format of received messages (from the socket):
        // * Serialized JSON object on a single line (newline-terminated)
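        //
        // For example, a received "open" command is a single JSON line such as:
        //   {"cmdName": "open", "cameraId": 0}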

        private Socket mOpenSocket = null;
        private SocketWriteRunnable mSocketWriteRunnable = null;

        @Override
        public void run() {
            Logt.i(TAG, "Socket thread starting");
            try {
                mSocket = new ServerSocket(SERVERPORT);
            } catch (IOException e) {
                Logt.e(TAG, "Failed to create socket", e);
            }

            // Create a new thread to handle writes to this socket.
            mSocketWriteRunnable = new SocketWriteRunnable(null);
            (new Thread(mSocketWriteRunnable)).start();

            while (!mThreadExitFlag) {
                // Receive the socket-open request from the host.
                try {
                    Logt.i(TAG, "Waiting for client to connect to socket");
                    mOpenSocket = mSocket.accept();
                    if (mOpenSocket == null) {
                        Logt.e(TAG, "Socket connection error");
                        break;
                    }
                    mSocketWriteQueue.clear();
                    mSocketWriteRunnable.setOpenSocket(mOpenSocket);
                    Logt.i(TAG, "Socket connected");
                } catch (IOException e) {
                    Logt.e(TAG, "Socket open error: ", e);
                    break;
                }

                // Process commands over the open socket.
                while (!mThreadExitFlag) {
                    try {
                        BufferedReader input = new BufferedReader(
                                new InputStreamReader(mOpenSocket.getInputStream()));
                        if (input == null) {
                            Logt.e(TAG, "Failed to get socket input stream");
                            break;
                        }
                        String line = input.readLine();
                        if (line == null) {
                            Logt.i(TAG, "Socket readline returned null (host disconnected)");
                            break;
                        }
                        processSocketCommand(line);
                    } catch (IOException e) {
                        Logt.e(TAG, "Socket read error: ", e);
                        break;
                    } catch (ItsException e) {
                        Logt.e(TAG, "Script error: ", e);
                        break;
                    }
                }

                // Close socket and go back to waiting for a new connection.
                try {
                    synchronized(mSocketWriteDrainLock) {
                        mSocketWriteQueue.clear();
                        mOpenSocket.close();
                        mOpenSocket = null;
                        Logt.i(TAG, "Socket disconnected");
                    }
                } catch (java.io.IOException e) {
                    Logt.e(TAG, "Exception closing socket");
                }
            }

            // It's an overall error state if the code gets here; no recovery.
            // Try to do some cleanup, but the service probably needs to be restarted.
            Logt.i(TAG, "Socket server loop exited");
            mThreadExitFlag = true;
            try {
                if (mOpenSocket != null) {
                    mOpenSocket.close();
                    mOpenSocket = null;
                }
            } catch (java.io.IOException e) {
                Logt.w(TAG, "Exception closing socket");
            }
            try {
                if (mSocket != null) {
                    mSocket.close();
                    mSocket = null;
                }
            } catch (java.io.IOException e) {
                Logt.w(TAG, "Exception closing socket");
            }
        }

        public void processSocketCommand(String cmd)
                throws ItsException {
            // Each command is a serialized JSON object.
            try {
                JSONObject cmdObj = new JSONObject(cmd);
                if ("open".equals(cmdObj.getString("cmdName"))) {
                    int cameraId = cmdObj.getInt("cameraId");
                    openCameraDevice(cameraId);
                } else if ("close".equals(cmdObj.getString("cmdName"))) {
                    closeCameraDevice();
                } else if ("getCameraProperties".equals(cmdObj.getString("cmdName"))) {
                    doGetProps();
                } else if ("startSensorEvents".equals(cmdObj.getString("cmdName"))) {
                    doStartSensorEvents();
                } else if ("getSensorEvents".equals(cmdObj.getString("cmdName"))) {
                    doGetSensorEvents();
                } else if ("do3A".equals(cmdObj.getString("cmdName"))) {
                    do3A(cmdObj);
                } else if ("doCapture".equals(cmdObj.getString("cmdName"))) {
                    doCapture(cmdObj);
                } else if ("doVibrate".equals(cmdObj.getString("cmdName"))) {
                    doVibrate(cmdObj);
                } else {
                    throw new ItsException("Unknown command: " + cmd);
                }
            } catch (org.json.JSONException e) {
                Logt.e(TAG, "Invalid command: ", e);
            }
        }

        public void sendResponse(String tag, String str, JSONObject obj, ByteBuffer bbuf)
                throws ItsException {
            try {
                JSONObject jsonObj = new JSONObject();
                jsonObj.put("tag", tag);
                if (str != null) {
                    jsonObj.put("strValue", str);
                }
                if (obj != null) {
                    jsonObj.put("objValue", obj);
                }
                if (bbuf != null) {
                    jsonObj.put("bufValueSize", bbuf.capacity());
                }
                ByteBuffer bstr = ByteBuffer.wrap(
                        (jsonObj.toString()+"\n").getBytes(Charset.defaultCharset()));
                synchronized(mSocketWriteEnqueueLock) {
                    if (bstr != null) {
                        mSocketWriteQueue.put(bstr);
                    }
                    if (bbuf != null) {
                        mSocketWriteQueue.put(bbuf);
                    }
                }
            } catch (org.json.JSONException e) {
                throw new ItsException("JSON error: ", e);
            } catch (java.lang.InterruptedException e) {
                throw new ItsException("Socket error: ", e);
            }
        }

        public void sendResponse(String tag, String str)
                throws ItsException {
            sendResponse(tag, str, null, null);
        }

        public void sendResponse(String tag, JSONObject obj)
                throws ItsException {
            sendResponse(tag, null, obj, null);
        }

        public void sendResponseCaptureBuffer(String tag, ByteBuffer bbuf)
                throws ItsException {
            sendResponse(tag, null, null, bbuf);
        }

        public void sendResponse(LinkedList<MySensorEvent> events)
                throws ItsException {
            try {
                JSONArray accels = new JSONArray();
                JSONArray mags = new JSONArray();
                JSONArray gyros = new JSONArray();
                for (MySensorEvent event : events) {
                    JSONObject obj = new JSONObject();
                    obj.put("time", event.timestamp);
                    obj.put("x", event.values[0]);
                    obj.put("y", event.values[1]);
                    obj.put("z", event.values[2]);
                    if (event.sensor.getType() == Sensor.TYPE_ACCELEROMETER) {
                        accels.put(obj);
                    } else if (event.sensor.getType() == Sensor.TYPE_MAGNETIC_FIELD) {
                        mags.put(obj);
                    } else if (event.sensor.getType() == Sensor.TYPE_GYROSCOPE) {
                        gyros.put(obj);
                    }
                }
                JSONObject obj = new JSONObject();
                obj.put("accel", accels);
                obj.put("mag", mags);
                obj.put("gyro", gyros);
                sendResponse("sensorEvents", null, obj, null);
            } catch (org.json.JSONException e) {
                throw new ItsException("JSON error: ", e);
            }
        }

        public void sendResponse(CameraCharacteristics props)
                throws ItsException {
            try {
                Object objs[] = new Object[2];
                objs[0] = "cameraProperties";
                objs[1] = props;
                mSerializerQueue.put(objs);
            } catch (InterruptedException e) {
                throw new ItsException("Interrupted: ", e);
            }
        }

        public void sendResponseCaptureResult(CameraCharacteristics props,
                                              CaptureRequest request,
                                              CaptureResult result,
                                              ImageReader[] readers)
                throws ItsException {
            try {
                JSONArray jsonSurfaces = new JSONArray();
                for (int i = 0; i < readers.length; i++) {
                    JSONObject jsonSurface = new JSONObject();
                    jsonSurface.put("width", readers[i].getWidth());
                    jsonSurface.put("height", readers[i].getHeight());
                    int format = readers[i].getImageFormat();
                    if (format == ImageFormat.RAW_SENSOR) {
                        jsonSurface.put("format", "raw");
                    } else if (format == ImageFormat.RAW10) {
                        jsonSurface.put("format", "raw10");
                    } else if (format == ImageFormat.JPEG) {
                        jsonSurface.put("format", "jpeg");
                    } else if (format == ImageFormat.YUV_420_888) {
                        jsonSurface.put("format", "yuv");
                    } else {
                        throw new ItsException("Invalid format");
                    }
                    jsonSurfaces.put(jsonSurface);
                }

                Object objs[] = new Object[5];
                objs[0] = "captureResults";
                objs[1] = props;
                objs[2] = request;
                objs[3] = result;
                objs[4] = jsonSurfaces;
                mSerializerQueue.put(objs);
            } catch (org.json.JSONException e) {
                throw new ItsException("JSON error: ", e);
            } catch (InterruptedException e) {
                throw new ItsException("Interrupted: ", e);
            }
        }
    }

    public ImageReader.OnImageAvailableListener
            createAvailableListener(final CaptureCallback listener) {
        return new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                Image i = null;
                try {
                    i = reader.acquireNextImage();
                    listener.onCaptureAvailable(i);
                } finally {
                    if (i != null) {
                        i.close();
                    }
                }
            }
        };
    }

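    // Returns a listener that simply drains and discards incoming images; used by do3A(),
    // where only the capture result metadata matters.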
    private ImageReader.OnImageAvailableListener
            createAvailableListenerDropper(final CaptureCallback listener) {
        return new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                Image i = reader.acquireNextImage();
                i.close();
            }
        };
    }

    private void doStartSensorEvents() throws ItsException {
        synchronized(mEventLock) {
            mEventsEnabled = true;
        }
        mSocketRunnableObj.sendResponse("sensorEventsStarted", "");
    }

    private void doGetSensorEvents() throws ItsException {
        synchronized(mEventLock) {
            mSocketRunnableObj.sendResponse(mEvents);
            mEvents.clear();
            mEventsEnabled = false;
        }
    }

    private void doGetProps() throws ItsException {
        mSocketRunnableObj.sendResponse(mCameraCharacteristics);
    }

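    // Close any previously allocated capture readers and create new ImageReaders for the
    // requested sizes and formats, one per output surface.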
    private void prepareCaptureReader(int[] widths, int[] heights, int formats[], int numSurfaces) {
        if (mCaptureReaders != null) {
            for (int i = 0; i < mCaptureReaders.length; i++) {
                if (mCaptureReaders[i] != null) {
                    mCaptureReaders[i].close();
                }
            }
        }
        mCaptureReaders = new ImageReader[numSurfaces];
        for (int i = 0; i < numSurfaces; i++) {
            mCaptureReaders[i] = ImageReader.newInstance(widths[i], heights[i], formats[i],
                    MAX_CONCURRENT_READER_BUFFERS);
        }
    }

    private void do3A(JSONObject params) throws ItsException {
        try {
            // Start a 3A action, and wait for it to converge.
            // Get the converged values for each "A", and package into JSON result for caller.

            // 3A happens on full-res frames.
            Size sizes[] = ItsUtils.getYuvOutputSizes(mCameraCharacteristics);
            int widths[] = new int[1];
            int heights[] = new int[1];
            int formats[] = new int[1];
            widths[0] = sizes[0].getWidth();
            heights[0] = sizes[0].getHeight();
            formats[0] = ImageFormat.YUV_420_888;
            int width = widths[0];
            int height = heights[0];

            prepareCaptureReader(widths, heights, formats, 1);
            List<Surface> outputSurfaces = new ArrayList<Surface>(1);
            outputSurfaces.add(mCaptureReaders[0].getSurface());
            BlockingSessionCallback sessionListener = new BlockingSessionCallback();
            mCamera.createCaptureSession(outputSurfaces, sessionListener, mCameraHandler);
            mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS);

            // Add a listener that just recycles buffers; they aren't saved anywhere.
            ImageReader.OnImageAvailableListener readerListener =
                    createAvailableListenerDropper(mCaptureCallback);
            mCaptureReaders[0].setOnImageAvailableListener(readerListener, mSaveHandlers[0]);

            // Get the user-specified regions for AE, AWB, AF.
            // Note that the user specifies normalized [x,y,w,h], which is converted below
            // to an [x0,y0,x1,y1] region in sensor coords. The capture request region
            // also has a fifth "weight" element: [x0,y0,x1,y1,w].
            MeteringRectangle[] regionAE = new MeteringRectangle[]{
                    new MeteringRectangle(0,0,width,height,1)};
            MeteringRectangle[] regionAF = new MeteringRectangle[]{
                    new MeteringRectangle(0,0,width,height,1)};
            MeteringRectangle[] regionAWB = new MeteringRectangle[]{
                    new MeteringRectangle(0,0,width,height,1)};
            if (params.has(REGION_KEY)) {
                JSONObject regions = params.getJSONObject(REGION_KEY);
                if (regions.has(REGION_AE_KEY)) {
                    regionAE = ItsUtils.getJsonWeightedRectsFromArray(
                            regions.getJSONArray(REGION_AE_KEY), true, width, height);
                }
                if (regions.has(REGION_AF_KEY)) {
                    regionAF = ItsUtils.getJsonWeightedRectsFromArray(
                            regions.getJSONArray(REGION_AF_KEY), true, width, height);
                }
                if (regions.has(REGION_AWB_KEY)) {
                    regionAWB = ItsUtils.getJsonWeightedRectsFromArray(
                            regions.getJSONArray(REGION_AWB_KEY), true, width, height);
                }
            }

            // If AE or AWB lock is specified, then the 3A will converge first and then lock these
            // values, waiting until the HAL has reported that the lock was successful.
            mNeedsLockedAE = params.optBoolean(LOCK_AE_KEY, false);
            mNeedsLockedAWB = params.optBoolean(LOCK_AWB_KEY, false);

            // By default, AE and AF both get triggered, but the user can optionally override this.
            // Also, AF won't get triggered if the lens is fixed-focus.
            boolean doAE = true;
            boolean doAF = true;
            if (params.has(TRIGGER_KEY)) {
                JSONObject triggers = params.getJSONObject(TRIGGER_KEY);
                if (triggers.has(TRIGGER_AE_KEY)) {
                    doAE = triggers.getBoolean(TRIGGER_AE_KEY);
                }
                if (triggers.has(TRIGGER_AF_KEY)) {
                    doAF = triggers.getBoolean(TRIGGER_AF_KEY);
                }
            }
            if (doAF && mCameraCharacteristics.get(
                            CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE) == 0) {
                // Send a dummy result back for the code that is waiting for this message to see
                // that AF has converged.
                Logt.i(TAG, "Ignoring request for AF on fixed-focus camera");
                mSocketRunnableObj.sendResponse("afResult", "0.0");
                doAF = false;
            }

            mInterlock3A.open();
            mIssuedRequest3A = false;
            mConvergedAE = false;
            mConvergedAWB = false;
            mConvergedAF = false;
            mLockedAE = false;
            mLockedAWB = false;
            long tstart = System.currentTimeMillis();
            boolean triggeredAE = false;
            boolean triggeredAF = false;

            Logt.i(TAG, String.format("Initiating 3A: AE:%d, AF:%d, AWB:1, AELOCK:%d, AWBLOCK:%d",
                    doAE?1:0, doAF?1:0, mNeedsLockedAE?1:0, mNeedsLockedAWB?1:0));

            // Keep issuing capture requests until 3A has converged.
            while (true) {

                // Block until can take the next 3A frame. Only want one outstanding frame
                // at a time, to simplify the logic here.
                if (!mInterlock3A.block(TIMEOUT_3A * 1000) ||
                        System.currentTimeMillis() - tstart > TIMEOUT_3A * 1000) {
                    throw new ItsException("3A failed to converge (timeout)");
                }
                mInterlock3A.close();

                // If not converged yet, issue another capture request.
                if (       (doAE && (!triggeredAE || !mConvergedAE))
                        || !mConvergedAWB
                        || (doAF && (!triggeredAF || !mConvergedAF))
                        || (doAE && mNeedsLockedAE && !mLockedAE)
                        || (mNeedsLockedAWB && !mLockedAWB)) {

                    // Baseline capture request for 3A.
                    CaptureRequest.Builder req = mCamera.createCaptureRequest(
                            CameraDevice.TEMPLATE_PREVIEW);
                    req.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
                    req.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
                    req.set(CaptureRequest.CONTROL_CAPTURE_INTENT,
                            CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW);
                    req.set(CaptureRequest.CONTROL_AE_MODE,
                            CaptureRequest.CONTROL_AE_MODE_ON);
                    req.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0);
                    req.set(CaptureRequest.CONTROL_AE_LOCK, false);
                    req.set(CaptureRequest.CONTROL_AE_REGIONS, regionAE);
                    req.set(CaptureRequest.CONTROL_AF_MODE,
                            CaptureRequest.CONTROL_AF_MODE_AUTO);
                    req.set(CaptureRequest.CONTROL_AF_REGIONS, regionAF);
                    req.set(CaptureRequest.CONTROL_AWB_MODE,
                            CaptureRequest.CONTROL_AWB_MODE_AUTO);
                    req.set(CaptureRequest.CONTROL_AWB_LOCK, false);
                    req.set(CaptureRequest.CONTROL_AWB_REGIONS, regionAWB);

                    if (mConvergedAE && mNeedsLockedAE) {
                        req.set(CaptureRequest.CONTROL_AE_LOCK, true);
                    }
                    if (mConvergedAWB && mNeedsLockedAWB) {
                        req.set(CaptureRequest.CONTROL_AWB_LOCK, true);
                    }

                    // Trigger AE first.
                    if (doAE && !triggeredAE) {
                        Logt.i(TAG, "Triggering AE");
                        req.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                                CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
                        triggeredAE = true;
                    }

                    // After AE has converged, trigger AF.
                    if (doAF && !triggeredAF && (!doAE || (triggeredAE && mConvergedAE))) {
                        Logt.i(TAG, "Triggering AF");
                        req.set(CaptureRequest.CONTROL_AF_TRIGGER,
                                CaptureRequest.CONTROL_AF_TRIGGER_START);
                        triggeredAF = true;
                    }

                    req.addTarget(mCaptureReaders[0].getSurface());

                    mIssuedRequest3A = true;
                    mSession.capture(req.build(), mCaptureResultListener, mResultHandler);
                } else {
                    mSocketRunnableObj.sendResponse("3aConverged", "");
                    Logt.i(TAG, "3A converged");
                    break;
                }
            }
        } catch (android.hardware.camera2.CameraAccessException e) {
            throw new ItsException("Access error: ", e);
        } catch (org.json.JSONException e) {
            throw new ItsException("JSON error: ", e);
        } finally {
            mSocketRunnableObj.sendResponse("3aDone", "");
        }
    }

    private void doVibrate(JSONObject params) throws ItsException {
        try {
            if (mVibrator == null) {
                throw new ItsException("Unable to start vibrator");
            }
            JSONArray patternArray = params.getJSONArray(VIB_PATTERN_KEY);
            int len = patternArray.length();
            long pattern[] = new long[len];
            for (int i = 0; i < len; i++) {
                pattern[i] = patternArray.getLong(i);
            }
            Logt.i(TAG, String.format("Starting vibrator, pattern length %d",len));
            mVibrator.vibrate(pattern, -1);
            mSocketRunnableObj.sendResponse("vibrationStarted", "");
        } catch (org.json.JSONException e) {
            throw new ItsException("JSON error: ", e);
        }
    }

    private void doCapture(JSONObject params) throws ItsException {
        try {
            // Parse the JSON to get the list of capture requests.
            List<CaptureRequest.Builder> requests = ItsSerializer.deserializeRequestList(
                    mCamera, params);

            // Set the output surface(s) and listeners.
            int widths[] = new int[MAX_NUM_OUTPUT_SURFACES];
            int heights[] = new int[MAX_NUM_OUTPUT_SURFACES];
            int formats[] = new int[MAX_NUM_OUTPUT_SURFACES];
            int numSurfaces = 0;
            try {
                mCountRawOrDng.set(0);
                mCountJpg.set(0);
                mCountYuv.set(0);
                mCountRaw10.set(0);
                mCountCapRes.set(0);
                mCaptureRawIsDng = false;
                mCaptureResults = new CaptureResult[requests.size()];

                JSONArray jsonOutputSpecs = ItsUtils.getOutputSpecs(params);
                if (jsonOutputSpecs != null) {
                    numSurfaces = jsonOutputSpecs.length();
                    if (numSurfaces > MAX_NUM_OUTPUT_SURFACES) {
                        throw new ItsException("Too many output surfaces");
                    }
                    for (int i = 0; i < numSurfaces; i++) {
                        // Get the specified surface.
                        JSONObject surfaceObj = jsonOutputSpecs.getJSONObject(i);
                        String sformat = surfaceObj.optString("format");
                        Size sizes[];
                        if ("yuv".equals(sformat) || "".equals(sformat)) {
                            // Default to YUV if no format is specified.
                            formats[i] = ImageFormat.YUV_420_888;
                            sizes = ItsUtils.getYuvOutputSizes(mCameraCharacteristics);
                        } else if ("jpg".equals(sformat) || "jpeg".equals(sformat)) {
                            formats[i] = ImageFormat.JPEG;
                            sizes = ItsUtils.getJpegOutputSizes(mCameraCharacteristics);
                        } else if ("raw".equals(sformat)) {
                            formats[i] = ImageFormat.RAW_SENSOR;
                            sizes = ItsUtils.getRawOutputSizes(mCameraCharacteristics);
                        } else if ("raw10".equals(sformat)) {
                            formats[i] = ImageFormat.RAW10;
                            sizes = ItsUtils.getRawOutputSizes(mCameraCharacteristics);
                        } else if ("dng".equals(sformat)) {
                            formats[i] = ImageFormat.RAW_SENSOR;
                            sizes = ItsUtils.getRawOutputSizes(mCameraCharacteristics);
                            mCaptureRawIsDng = true;
                        } else {
                            throw new ItsException("Unsupported format: " + sformat);
                        }
                        // If the size is omitted, then default to the largest allowed size for the
                        // format.
                        widths[i] = surfaceObj.optInt("width");
                        heights[i] = surfaceObj.optInt("height");
                        if (widths[i] <= 0) {
                            if (sizes == null || sizes.length == 0) {
                                throw new ItsException(String.format(
                                        "Zero stream configs available for requested format: %s",
                                        sformat));
                            }
                            widths[i] = sizes[0].getWidth();
                        }
                        if (heights[i] <= 0) {
                            heights[i] = sizes[0].getHeight();
                        }
                    }
                } else {
                    // No surface(s) specified at all.
                    // Default: a single output surface which is full-res YUV.
                    Size sizes[] =
                            ItsUtils.getYuvOutputSizes(mCameraCharacteristics);
                    numSurfaces = 1;
                    widths[0] = sizes[0].getWidth();
                    heights[0] = sizes[0].getHeight();
                    formats[0] = ImageFormat.YUV_420_888;
                }

                prepareCaptureReader(widths, heights, formats, numSurfaces);
                List<Surface> outputSurfaces = new ArrayList<Surface>(numSurfaces);
                for (int i = 0; i < numSurfaces; i++) {
                    outputSurfaces.add(mCaptureReaders[i].getSurface());
                }
                BlockingSessionCallback sessionListener = new BlockingSessionCallback();
                mCamera.createCaptureSession(outputSurfaces, sessionListener, mCameraHandler);
                mSession = sessionListener.waitAndGetSession(TIMEOUT_IDLE_MS);

                for (int i = 0; i < numSurfaces; i++) {
                    ImageReader.OnImageAvailableListener readerListener =
                            createAvailableListener(mCaptureCallback);
                    mCaptureReaders[i].setOnImageAvailableListener(readerListener,mSaveHandlers[i]);
                }

                // Plan for how many callbacks need to be received throughout the duration of this
                // sequence of capture requests. There is one callback per image surface, and one
                // callback for the CaptureResult, for each capture.
                int numCaptures = requests.size();
                mCountCallbacksRemaining.set(numCaptures * (numSurfaces + 1));

            } catch (CameraAccessException e) {
                throw new ItsException("Error configuring outputs", e);
            } catch (org.json.JSONException e) {
                throw new ItsException("JSON error", e);
            }

            // Initiate the captures.
            for (int i = 0; i < requests.size(); i++) {
                // For DNG captures, need the LSC map to be available.
                if (mCaptureRawIsDng) {
                    requests.get(i).set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE, 1);
                }

                CaptureRequest.Builder req = requests.get(i);
                for (int j = 0; j < numSurfaces; j++) {
                    req.addTarget(mCaptureReaders[j].getSurface());
                }
                mSession.capture(req.build(), mCaptureResultListener, mResultHandler);
            }

            // Make sure all callbacks have been hit (wait until captures are done).
            // If no new callbacks are received within the timeout period, then fail.
            int currentCount = mCountCallbacksRemaining.get();
            while (currentCount > 0) {
                try {
                    Thread.sleep(TIMEOUT_CALLBACK*1000);
                } catch (InterruptedException e) {
                    throw new ItsException("Timeout failure", e);
                }
                int newCount = mCountCallbacksRemaining.get();
                if (newCount == currentCount) {
                    throw new ItsException(
                            "No callback received within timeout");
                }
                currentCount = newCount;
            }
        } catch (android.hardware.camera2.CameraAccessException e) {
            throw new ItsException("Access error: ", e);
        }
    }

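    // Buffer motion sensor events (while enabled via startSensorEvents) so they can be
    // returned to the host script by getSensorEvents.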
    @Override
    public final void onSensorChanged(SensorEvent event) {
        synchronized(mEventLock) {
            if (mEventsEnabled) {
                MySensorEvent ev2 = new MySensorEvent();
                ev2.sensor = event.sensor;
                ev2.accuracy = event.accuracy;
                ev2.timestamp = event.timestamp;
                ev2.values = new float[event.values.length];
                System.arraycopy(event.values, 0, ev2.values, 0, event.values.length);
                mEvents.add(ev2);
            }
        }
    }

    @Override
    public final void onAccuracyChanged(Sensor sensor, int accuracy) {
    }

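    // Receives captured images from the ImageReaders and sends the pixel data back over the
    // socket, either directly (JPEG/YUV/RAW10/RAW16) or packaged as a DNG built from the
    // corresponding capture result.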
1099     private final CaptureCallback mCaptureCallback = new CaptureCallback() {
1100         @Override
1101         public void onCaptureAvailable(Image capture) {
1102             try {
1103                 int format = capture.getFormat();
1104                 if (format == ImageFormat.JPEG) {
1105                     Logt.i(TAG, "Received JPEG capture");
1106                     byte[] img = ItsUtils.getDataFromImage(capture);
1107                     ByteBuffer buf = ByteBuffer.wrap(img);
1108                     int count = mCountJpg.getAndIncrement();
1109                     mSocketRunnableObj.sendResponseCaptureBuffer("jpegImage", buf);
1110                 } else if (format == ImageFormat.YUV_420_888) {
1111                     Logt.i(TAG, "Received YUV capture");
1112                     byte[] img = ItsUtils.getDataFromImage(capture);
1113                     ByteBuffer buf = ByteBuffer.wrap(img);
1114                     int count = mCountYuv.getAndIncrement();
1115                     mSocketRunnableObj.sendResponseCaptureBuffer("yuvImage", buf);
1116                 } else if (format == ImageFormat.RAW10) {
1117                     Logt.i(TAG, "Received RAW10 capture");
1118                     byte[] img = ItsUtils.getDataFromImage(capture);
1119                     ByteBuffer buf = ByteBuffer.wrap(img);
1120                     int count = mCountRaw10.getAndIncrement();
1121                     mSocketRunnableObj.sendResponseCaptureBuffer("raw10Image", buf);
1122                 } else if (format == ImageFormat.RAW_SENSOR) {
                    Logt.i(TAG, "Received RAW16 capture");
                    int count = mCountRawOrDng.getAndIncrement();
                    if (! mCaptureRawIsDng) {
                        byte[] img = ItsUtils.getDataFromImage(capture);
                        ByteBuffer buf = ByteBuffer.wrap(img);
                        mSocketRunnableObj.sendResponseCaptureBuffer("rawImage", buf);
                    } else {
                        // Wait until the corresponding capture result is ready, up to a timeout.
                        long t0 = android.os.SystemClock.elapsedRealtime();
                        while (! mThreadExitFlag
                                && android.os.SystemClock.elapsedRealtime()-t0 < TIMEOUT_CAP_RES) {
                            if (mCaptureResults[count] != null) {
                                Logt.i(TAG, "Writing capture as DNG");
                                DngCreator dngCreator = new DngCreator(
                                        mCameraCharacteristics, mCaptureResults[count]);
                                ByteArrayOutputStream dngStream = new ByteArrayOutputStream();
                                dngCreator.writeImage(dngStream, capture);
                                byte[] dngArray = dngStream.toByteArray();
                                ByteBuffer dngBuf = ByteBuffer.wrap(dngArray);
                                mSocketRunnableObj.sendResponseCaptureBuffer("dngImage", dngBuf);
                                break;
                            } else {
                                Thread.sleep(1);
                            }
                        }
                    }
                } else {
                    throw new ItsException("Unsupported image format: " + format);
                }
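                // Mark this image callback as handled for the current capture sequence.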
                mCountCallbacksRemaining.decrementAndGet();
            } catch (IOException e) {
                Logt.e(TAG, "Script error: ", e);
            } catch (InterruptedException e) {
                Logt.e(TAG, "Script error: ", e);
            } catch (ItsException e) {
                Logt.e(TAG, "Script error: ", e);
            }
        }
    };

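    // Convert a Rational metadata value to a float, for logging and serialization.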
    private static float r2f(Rational r) {
        return (float)r.getNumerator() / (float)r.getDenominator();
    }

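    // Handles capture-result metadata: logs a summary of each result, tracks 3A convergence and
    // lock state, reports converged AE/AF/AWB values to the script, and stores full results so
    // the image callback can build DNG files.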
    private final CaptureResultListener mCaptureResultListener = new CaptureResultListener() {
        @Override
        public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request,
                long timestamp, long frameNumber) {
        }

        @Override
        public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
                TotalCaptureResult result) {
            try {
                // A missing request or result cannot be processed; bail out early.
                if (request == null || result == null) {
                    throw new ItsException("Request/result is invalid");
                }

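                // Build a one-line summary of the key 3A, exposure, and color fields for logcat.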
                StringBuilder logMsg = new StringBuilder();
                logMsg.append(String.format(
                        "Capt result: AE=%d, AF=%d, AWB=%d, sens=%d, exp=%.1fms, dur=%.1fms, ",
                        result.get(CaptureResult.CONTROL_AE_STATE),
                        result.get(CaptureResult.CONTROL_AF_STATE),
                        result.get(CaptureResult.CONTROL_AWB_STATE),
                        result.get(CaptureResult.SENSOR_SENSITIVITY),
                        result.get(CaptureResult.SENSOR_EXPOSURE_TIME).longValue() / 1000000.0f,
                        result.get(CaptureResult.SENSOR_FRAME_DURATION).longValue() / 1000000.0f));
                if (result.get(CaptureResult.COLOR_CORRECTION_GAINS) != null) {
                    logMsg.append(String.format(
                            "gains=[%.1f, %.1f, %.1f, %.1f], ",
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS).getRed(),
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenEven(),
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenOdd(),
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS).getBlue()));
                } else {
                    logMsg.append("gains=[], ");
                }
                if (result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) != null) {
                    logMsg.append(String.format(
                            "xform=[%.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f], ",
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,0)),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,0)),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,0)),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,1)),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,1)),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,1)),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,2)),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,2)),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,2))));
                } else {
                    logMsg.append("xform=[], ");
                }
                logMsg.append(String.format(
                        "foc=%.1f",
                        result.get(CaptureResult.LENS_FOCUS_DISTANCE)));
                Logt.i(TAG, logMsg.toString());

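                // Update the 3A convergence/lock flags from this result's state fields.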
                if (result.get(CaptureResult.CONTROL_AE_STATE) != null) {
                    mConvergedAE = result.get(CaptureResult.CONTROL_AE_STATE) ==
                                              CaptureResult.CONTROL_AE_STATE_CONVERGED ||
                                   result.get(CaptureResult.CONTROL_AE_STATE) ==
                                              CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED ||
                                   result.get(CaptureResult.CONTROL_AE_STATE) ==
                                              CaptureResult.CONTROL_AE_STATE_LOCKED;
                    mLockedAE = result.get(CaptureResult.CONTROL_AE_STATE) ==
                                           CaptureResult.CONTROL_AE_STATE_LOCKED;
                }
                if (result.get(CaptureResult.CONTROL_AF_STATE) != null) {
                    mConvergedAF = result.get(CaptureResult.CONTROL_AF_STATE) ==
                                              CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED;
                }
                if (result.get(CaptureResult.CONTROL_AWB_STATE) != null) {
                    mConvergedAWB = result.get(CaptureResult.CONTROL_AWB_STATE) ==
                                               CaptureResult.CONTROL_AWB_STATE_CONVERGED ||
                                    result.get(CaptureResult.CONTROL_AWB_STATE) ==
                                               CaptureResult.CONTROL_AWB_STATE_LOCKED;
                    mLockedAWB = result.get(CaptureResult.CONTROL_AWB_STATE) ==
                                            CaptureResult.CONTROL_AWB_STATE_LOCKED;
                }

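                // Once AE has converged (and locked, if a lock was requested), report the
                // sensitivity (ISO) and exposure time (nanoseconds) back to the script.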
                if (mConvergedAE && (!mNeedsLockedAE || mLockedAE)) {
                    if (result.get(CaptureResult.SENSOR_SENSITIVITY) != null
                            && result.get(CaptureResult.SENSOR_EXPOSURE_TIME) != null) {
                        mSocketRunnableObj.sendResponse("aeResult", String.format("%d %d",
                                result.get(CaptureResult.SENSOR_SENSITIVITY).intValue(),
                                result.get(CaptureResult.SENSOR_EXPOSURE_TIME).longValue()
                                ));
                    } else {
                        Logt.i(TAG, String.format(
                                "AE converged but NULL exposure values, sensitivity:%b, expTime:%b",
                                result.get(CaptureResult.SENSOR_SENSITIVITY) == null,
                                result.get(CaptureResult.SENSOR_EXPOSURE_TIME) == null));
                    }
                }

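                // Once AF has converged, report the lens focus distance (in diopters).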
                if (mConvergedAF) {
                    if (result.get(CaptureResult.LENS_FOCUS_DISTANCE) != null) {
                        mSocketRunnableObj.sendResponse("afResult", String.format("%f",
                                result.get(CaptureResult.LENS_FOCUS_DISTANCE)
                                ));
                    } else {
                        Logt.i(TAG, "AF converged but NULL focus distance values");
                    }
                }

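                // Once AWB has converged (and locked, if requested), report the white-balance
                // gains followed by the 3x3 color correction transform, one row at a time.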
                if (mConvergedAWB && (!mNeedsLockedAWB || mLockedAWB)) {
                    if (result.get(CaptureResult.COLOR_CORRECTION_GAINS) != null
                            && result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) != null) {
                        mSocketRunnableObj.sendResponse("awbResult", String.format(
                                "%f %f %f %f %f %f %f %f %f %f %f %f %f",
                                result.get(CaptureResult.COLOR_CORRECTION_GAINS).getRed(),
                                result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenEven(),
                                result.get(CaptureResult.COLOR_CORRECTION_GAINS).getGreenOdd(),
                                result.get(CaptureResult.COLOR_CORRECTION_GAINS).getBlue(),
                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,0)),
                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,0)),
                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,0)),
                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,1)),
                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,1)),
                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,1)),
                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(0,2)),
                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(1,2)),
                                r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM).getElement(2,2))
                                ));
                    } else {
                        Logt.i(TAG, String.format(
                                "AWB converged but NULL color correction values, gains:%b, ccm:%b",
                                result.get(CaptureResult.COLOR_CORRECTION_GAINS) == null,
                                result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) == null));
                    }
                }

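                // A 3A-routine result simply releases the waiting 3A loop; a normal capture
                // result is cached for DNG creation and serialized back to the script.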
                if (mIssuedRequest3A) {
                    mIssuedRequest3A = false;
                    mInterlock3A.open();
                } else {
                    int count = mCountCapRes.getAndIncrement();
                    mCaptureResults[count] = result;
                    mSocketRunnableObj.sendResponseCaptureResult(mCameraCharacteristics,
                            request, result, mCaptureReaders);
                    mCountCallbacksRemaining.decrementAndGet();
                }
            } catch (ItsException e) {
                Logt.e(TAG, "Script error: ", e);
            } catch (Exception e) {
                Logt.e(TAG, "Script error: ", e);
            }
        }

        @Override
        public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request,
                CaptureFailure failure) {
            Logt.e(TAG, "Script error: capture failed");
        }
    };
}