/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2-FrameProcessor"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <utils/Log.h>
#include <utils/Trace.h>

#include "common/CameraDeviceBase.h"
#include "api1/Camera2Client.h"
#include "api1/client2/FrameProcessor.h"

namespace android {
namespace camera2 {
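
/**
 * Build the frame processor for an API1 client: record whether 3A notify
 * callbacks must be synthesized from capture results (only needed when the
 * device won't emit them itself), and seed the starting 3A state from the
 * client's current parameters.
 */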
FrameProcessor::FrameProcessor(wp<CameraDeviceBase> device,
                               sp<Camera2Client> client) :
    FrameProcessorBase(device),
    mClient(client),
    mLastFrameNumberOfFaces(0),
    mLast3AFrameNumber(-1),
    mLastAEFrameNumber(-1),
    mLastAFrameNumber(-1),
    mLastAWBFrameNumber(-1) {
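
    // If the HAL won't emit 3A notify callbacks itself, synthesize them
    // from the 3A metadata in each capture result.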
    sp<CameraDeviceBase> d = device.promote();
    mSynthesize3ANotify = !(d->willNotify3A());

    {
        SharedParameters::Lock l(client->getParameters());

        mUsePartialResult = (mNumPartialResults > 1);

        // Initialize starting 3A state
        m3aState.afTriggerId = l.mParameters.afTriggerCounter;
        m3aState.aeTriggerId = l.mParameters.precaptureTriggerCounter;
        // Check if lens is fixed-focus
        if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED) {
            m3aState.afMode = ANDROID_CONTROL_AF_MODE_OFF;
        } else {
            m3aState.afMode = ANDROID_CONTROL_AF_MODE_AUTO;
        }
        m3aState.awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
        m3aState.aeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
        m3aState.afState = ANDROID_CONTROL_AF_STATE_INACTIVE;
        m3aState.awbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
    }
}

FrameProcessor::~FrameProcessor() {
}
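
/**
 * Per-frame entry point. Face detection runs only on complete (non-partial)
 * results, 3A notify callbacks are synthesized when the device requires it,
 * and request ID changes are forwarded to the client before the frame is
 * passed on to the base class implementation.
 */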
bool FrameProcessor::processSingleFrame(CaptureResult &frame,
                                        const sp<FrameProducer> &device) {

    sp<Camera2Client> client = mClient.promote();
    if (!client.get()) {
        return false;
    }

    bool isPartialResult = false;
    if (mUsePartialResult) {
        isPartialResult = frame.mResultExtras.partialResultCount < mNumPartialResults;
    }

    if (!isPartialResult && processFaceDetect(frame.mMetadata, client) != OK) {
        return false;
    }

    if (mSynthesize3ANotify) {
        process3aState(frame, client);
    }

    if (mCurrentRequestId != frame.mResultExtras.requestId) {
        mCurrentRequestId = frame.mResultExtras.requestId;

        client->notifyRequestId(mCurrentRequestId);
    }

    return FrameProcessorBase::processSingleFrame(frame, device);
}
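
/**
 * Convert face-detect metadata from a capture result into the camera1
 * callback format. Face rectangles and landmarks arrive in active-array
 * coordinates and are mapped into the normalized [-1000, 1000] range used
 * by camera_face_t, relative to the current scaler crop region.
 */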
status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame,
        const sp<Camera2Client> &client) {
    status_t res = BAD_VALUE;
    ATRACE_CALL();
    camera_metadata_ro_entry_t entry;
    bool enableFaceDetect;

    {
        SharedParameters::Lock l(client->getParameters());
        enableFaceDetect = l.mParameters.enableFaceDetect;
    }
    entry = frame.find(ANDROID_STATISTICS_FACE_DETECT_MODE);

    // TODO: This should be an error once implementations are compliant
    if (entry.count == 0) {
        return OK;
    }

    uint8_t faceDetectMode = entry.data.u8[0];

    camera_frame_metadata metadata;
    Vector<camera_face_t> faces;
    metadata.number_of_faces = 0;

    if (enableFaceDetect &&
        faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {

        SharedParameters::Lock l(client->getParameters());
        entry = frame.find(ANDROID_STATISTICS_FACE_RECTANGLES);
        if (entry.count == 0) {
            // No faces this frame
            /* warning: locks SharedCameraCallbacks */
            callbackFaceDetection(client, metadata);
            return OK;
        }
        metadata.number_of_faces = entry.count / 4;
        if (metadata.number_of_faces >
                l.mParameters.fastInfo.maxFaces) {
            ALOGE("%s: Camera %d: More faces than expected! (Got %d, max %d)",
                    __FUNCTION__, client->getCameraId(),
                    metadata.number_of_faces, l.mParameters.fastInfo.maxFaces);
            return res;
        }
        const int32_t *faceRects = entry.data.i32;

        entry = frame.find(ANDROID_STATISTICS_FACE_SCORES);
        if (entry.count == 0) {
            ALOGE("%s: Camera %d: Unable to read face scores",
                    __FUNCTION__, client->getCameraId());
            return res;
        }
        const uint8_t *faceScores = entry.data.u8;

        const int32_t *faceLandmarks = NULL;
        const int32_t *faceIds = NULL;

        if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
            entry = frame.find(ANDROID_STATISTICS_FACE_LANDMARKS);
            if (entry.count == 0) {
                ALOGE("%s: Camera %d: Unable to read face landmarks",
                        __FUNCTION__, client->getCameraId());
                return res;
            }
            faceLandmarks = entry.data.i32;

            entry = frame.find(ANDROID_STATISTICS_FACE_IDS);

            if (entry.count == 0) {
                ALOGE("%s: Camera %d: Unable to read face IDs",
                        __FUNCTION__, client->getCameraId());
                return res;
            }
            faceIds = entry.data.i32;
        }
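
        // The scaler crop region is needed to map active-array coordinates
        // into the normalized space used by the camera1 face callbacks.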
        entry = frame.find(ANDROID_SCALER_CROP_REGION);
        if (entry.count < 4) {
            ALOGE("%s: Camera %d: Unable to read crop region (count = %zu)",
                    __FUNCTION__, client->getCameraId(), entry.count);
            return res;
        }

        Parameters::CropRegion scalerCrop = {
            static_cast<float>(entry.data.i32[0]),
            static_cast<float>(entry.data.i32[1]),
            static_cast<float>(entry.data.i32[2]),
            static_cast<float>(entry.data.i32[3])};

        faces.setCapacity(metadata.number_of_faces);

        size_t maxFaces = metadata.number_of_faces;
        for (size_t i = 0; i < maxFaces; i++) {
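            // A score of 0 marks an invalid face entry; skip it and shrink
            // the face count reported to the client.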
            if (faceScores[i] == 0) {
                metadata.number_of_faces--;
                continue;
            }
            if (faceScores[i] > 100) {
                ALOGW("%s: Face index %zu with out of range score %d",
                        __FUNCTION__, i, faceScores[i]);
            }

            camera_face_t face;

            face.rect[0] = l.mParameters.arrayXToNormalizedWithCrop(
                                faceRects[i*4 + 0], scalerCrop);
            face.rect[1] = l.mParameters.arrayYToNormalizedWithCrop(
                                faceRects[i*4 + 1], scalerCrop);
            face.rect[2] = l.mParameters.arrayXToNormalizedWithCrop(
                                faceRects[i*4 + 2], scalerCrop);
            face.rect[3] = l.mParameters.arrayYToNormalizedWithCrop(
                                faceRects[i*4 + 3], scalerCrop);
            face.score = faceScores[i];
            if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
                face.id = faceIds[i];
                face.left_eye[0] = l.mParameters.arrayXToNormalizedWithCrop(
                        faceLandmarks[i*6 + 0], scalerCrop);
                face.left_eye[1] = l.mParameters.arrayYToNormalizedWithCrop(
                        faceLandmarks[i*6 + 1], scalerCrop);
                face.right_eye[0] = l.mParameters.arrayXToNormalizedWithCrop(
                        faceLandmarks[i*6 + 2], scalerCrop);
                face.right_eye[1] = l.mParameters.arrayYToNormalizedWithCrop(
                        faceLandmarks[i*6 + 3], scalerCrop);
                face.mouth[0] = l.mParameters.arrayXToNormalizedWithCrop(
                        faceLandmarks[i*6 + 4], scalerCrop);
                face.mouth[1] = l.mParameters.arrayYToNormalizedWithCrop(
                        faceLandmarks[i*6 + 5], scalerCrop);
            } else {
                // Landmarks are only available in FULL mode; a coordinate of
                // -2000, outside the valid [-1000, 1000] range, marks them
                // as unsupported.
                face.id = 0;
                face.left_eye[0] = face.left_eye[1] = -2000;
                face.right_eye[0] = face.right_eye[1] = -2000;
                face.mouth[0] = face.mouth[1] = -2000;
            }
            faces.push_back(face);
        }

        metadata.faces = faces.editArray();
    }

    /* warning: locks SharedCameraCallbacks */
    callbackFaceDetection(client, metadata);

    return OK;
}
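
/**
 * Synthesize camera1-style 3A (AE/AF/AWB) notifications from result
 * metadata. With partial results the 3A tags for one frame may be spread
 * across several callbacks, so incomplete states are parked in
 * mPending3AStates until every field has arrived.
 */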
status_t FrameProcessor::process3aState(const CaptureResult &frame,
        const sp<Camera2Client> &client) {

    ATRACE_CALL();
    const CameraMetadata &metadata = frame.mMetadata;
    camera_metadata_ro_entry_t entry;
    int cameraId = client->getCameraId();

    entry = metadata.find(ANDROID_REQUEST_FRAME_COUNT);
    int32_t frameNumber = entry.data.i32[0];

    // Don't send 3A notifications for the same frame number twice
    if (frameNumber <= mLast3AFrameNumber) {
        ALOGV("%s: Already sent 3A for frame number %d, skipping",
                __FUNCTION__, frameNumber);

        // Remove any pending entry for this frame number from mPending3AStates.
        mPending3AStates.removeItem(frameNumber);
        return OK;
    }

    AlgState pendingState;

    ssize_t index = mPending3AStates.indexOfKey(frameNumber);
    if (index != NAME_NOT_FOUND) {
        pendingState = mPending3AStates.valueAt(index);
    }

    // Update 3A states from the result.
    bool gotAllStates = true;

    // TODO: Also use AE mode, AE trigger ID
    bool gotAFState = updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AF_MODE,
            &pendingState.afMode, frameNumber, cameraId);

    bool gotAWBState = updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AWB_MODE,
            &pendingState.awbMode, frameNumber, cameraId);

    bool gotAEState = updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AE_STATE,
            &pendingState.aeState, frameNumber, cameraId);

    gotAFState &= updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AF_STATE,
            &pendingState.afState, frameNumber, cameraId);

    gotAWBState &= updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AWB_STATE,
            &pendingState.awbState, frameNumber, cameraId);

    pendingState.afTriggerId = frame.mResultExtras.afTriggerId;
    pendingState.aeTriggerId = frame.mResultExtras.precaptureTriggerId;
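
    // Notify the client for each 3A block whose state, mode, or trigger has
    // changed since the last notification sent for that block.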
    if (gotAEState && (frameNumber > mLastAEFrameNumber)) {
        if (pendingState.aeState != m3aState.aeState ||
                pendingState.aeTriggerId > m3aState.aeTriggerId) {
            ALOGV("%s: Camera %d: AE state %d->%d",
                    __FUNCTION__, cameraId,
                    m3aState.aeState, pendingState.aeState);
            client->notifyAutoExposure(pendingState.aeState, pendingState.aeTriggerId);

            m3aState.aeState = pendingState.aeState;
            m3aState.aeTriggerId = pendingState.aeTriggerId;
            mLastAEFrameNumber = frameNumber;
        }
    }

    if (gotAFState && (frameNumber > mLastAFrameNumber)) {
        if (pendingState.afState != m3aState.afState ||
                pendingState.afMode != m3aState.afMode ||
                pendingState.afTriggerId != m3aState.afTriggerId) {
            ALOGV("%s: Camera %d: AF state %d->%d. AF mode %d->%d. Trigger %d->%d",
                    __FUNCTION__, cameraId,
                    m3aState.afState, pendingState.afState,
                    m3aState.afMode, pendingState.afMode,
                    m3aState.afTriggerId, pendingState.afTriggerId);
            client->notifyAutoFocus(pendingState.afState, pendingState.afTriggerId);

            m3aState.afState = pendingState.afState;
            m3aState.afMode = pendingState.afMode;
            m3aState.afTriggerId = pendingState.afTriggerId;
            mLastAFrameNumber = frameNumber;
        }
    }

    if (gotAWBState && (frameNumber > mLastAWBFrameNumber)) {
        if (pendingState.awbState != m3aState.awbState ||
                pendingState.awbMode != m3aState.awbMode) {
            ALOGV("%s: Camera %d: AWB state %d->%d. AWB mode %d->%d",
                    __FUNCTION__, cameraId,
                    m3aState.awbState, pendingState.awbState,
                    m3aState.awbMode, pendingState.awbMode);
            client->notifyAutoWhitebalance(pendingState.awbState,
                    pendingState.aeTriggerId);

            m3aState.awbMode = pendingState.awbMode;
            m3aState.awbState = pendingState.awbState;
            mLastAWBFrameNumber = frameNumber;
        }
    }

    gotAllStates &= gotAEState & gotAFState & gotAWBState;
    if (!gotAllStates) {
        // Not all states have arrived yet; park the pending state in
        // mPending3AStates until the rest come in.
        if (index == NAME_NOT_FOUND) {
            mPending3AStates.add(frameNumber, pendingState);
        } else {
            mPending3AStates.replaceValueAt(index, pendingState);
        }
        return NOT_ENOUGH_DATA;
    }

    if (index != NAME_NOT_FOUND) {
        mPending3AStates.removeItemsAt(index);
    }

    mLast3AFrameNumber = frameNumber;

    return OK;
}
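
/**
 * Copy a single 3A metadata value into *value, unless an earlier partial
 * result already supplied it. Returns false when the tag is missing from
 * this result or cannot be read.
 */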
template<typename Src, typename T>
bool FrameProcessor::updatePendingState(const CameraMetadata& result, int32_t tag,
        T* value, int32_t frameNumber, int cameraId) {
    camera_metadata_ro_entry_t entry;
    if (value == NULL) {
        ALOGE("%s: Camera %d: Value to write to is NULL",
                __FUNCTION__, cameraId);
        return false;
    }

    // Already got the value for this tag.
    if (*value != static_cast<T>(NOT_SET)) {
        return true;
    }

    entry = result.find(tag);
    if (entry.count == 0) {
        const camera_metadata *metaBuffer = result.getAndLock();
        ALOGV("%s: Camera %d: No %s provided by HAL for frame %d in this result!",
                __FUNCTION__, cameraId,
                get_local_camera_metadata_tag_name(tag, metaBuffer),
                frameNumber);
        result.unlock(metaBuffer);
        return false;
    } else {
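        // Select the metadata union field by the size of the source type.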
        switch (sizeof(Src)) {
            case sizeof(uint8_t):
                *value = static_cast<T>(entry.data.u8[0]);
                break;
            case sizeof(int32_t):
                *value = static_cast<T>(entry.data.i32[0]);
                break;
            default:
                ALOGE("%s: Camera %d: Unsupported source",
                        __FUNCTION__, cameraId);
                return false;
        }
    }
    return true;
}
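
/**
 * Deliver face metadata to the client's remote callback, suppressing
 * repeated zero-face frames so idle previews don't flood the app with
 * empty callbacks.
 */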
void FrameProcessor::callbackFaceDetection(const sp<Camera2Client>& client,
                                     const camera_frame_metadata &metadata) {

    camera_frame_metadata *metadata_ptr =
        const_cast<camera_frame_metadata*>(&metadata);

    /**
     * Filter out repeated 0-face callbacks,
     * but not when the last frame was >0
     */
    if (metadata.number_of_faces != 0 ||
        mLastFrameNumberOfFaces != metadata.number_of_faces) {

        Camera2Client::SharedCameraCallbacks::Lock
            l(client->mSharedCameraCallbacks);
        if (l.mRemoteCallback != NULL) {
            l.mRemoteCallback->dataCallback(CAMERA_MSG_PREVIEW_METADATA,
                                            NULL,
                                            metadata_ptr);
        }
    }

    mLastFrameNumberOfFaces = metadata.number_of_faces;
}

} // namespace camera2
} // namespace android