1 /*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #define LOG_TAG "Camera2-FrameProcessor"
18 #define ATRACE_TAG ATRACE_TAG_CAMERA
19 //#define LOG_NDEBUG 0
20
21 #include <utils/Log.h>
22 #include <utils/Trace.h>
23
24 #include "common/CameraDeviceBase.h"
25 #include "api1/Camera2Client.h"
26 #include "api1/client2/FrameProcessor.h"
27
28 namespace android {
29 namespace camera2 {
30
FrameProcessor(wp<CameraDeviceBase> device,sp<Camera2Client> client)31 FrameProcessor::FrameProcessor(wp<CameraDeviceBase> device,
32 sp<Camera2Client> client) :
33 FrameProcessorBase(device),
34 mClient(client),
35 mLastFrameNumberOfFaces(0),
36 mLast3AFrameNumber(-1),
37 mLastAEFrameNumber(-1),
38 mLastAFrameNumber(-1),
39 mLastAWBFrameNumber(-1) {
40
41 sp<CameraDeviceBase> d = device.promote();
42 mSynthesize3ANotify = !(d->willNotify3A());
43
44 {
45 SharedParameters::Lock l(client->getParameters());
46
47 mUsePartialResult = (mNumPartialResults > 1);
48
49 // Initialize starting 3A state
50 m3aState.afTriggerId = l.mParameters.afTriggerCounter;
51 m3aState.aeTriggerId = l.mParameters.precaptureTriggerCounter;
52 // Check if lens is fixed-focus
53 if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED) {
54 m3aState.afMode = ANDROID_CONTROL_AF_MODE_OFF;
55 } else {
56 m3aState.afMode = ANDROID_CONTROL_AF_MODE_AUTO;
57 }
58 m3aState.awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
59 m3aState.aeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
60 m3aState.afState = ANDROID_CONTROL_AF_STATE_INACTIVE;
61 m3aState.awbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
62 }
63 }
64
// Nothing to release explicitly; members are smart pointers / plain values
// and the base class handles its own teardown.
FrameProcessor::~FrameProcessor() {
}
67
processSingleFrame(CaptureResult & frame,const sp<CameraDeviceBase> & device)68 bool FrameProcessor::processSingleFrame(CaptureResult &frame,
69 const sp<CameraDeviceBase> &device) {
70
71 sp<Camera2Client> client = mClient.promote();
72 if (!client.get()) {
73 return false;
74 }
75
76 bool isPartialResult = false;
77 if (mUsePartialResult) {
78 isPartialResult = frame.mResultExtras.partialResultCount < mNumPartialResults;
79 }
80
81 if (!isPartialResult && processFaceDetect(frame.mMetadata, client) != OK) {
82 return false;
83 }
84
85 if (mSynthesize3ANotify) {
86 process3aState(frame, client);
87 }
88
89 return FrameProcessorBase::processSingleFrame(frame, device);
90 }
91
/**
 * Extract face-detection results from a completed capture's metadata and
 * forward them to the client via the preview-metadata callback.
 *
 * Returns OK on success (including when face detect is disabled, or the HAL
 * omitted the face-detect mode tag entirely), or BAD_VALUE when the result
 * metadata is internally inconsistent (e.g. rectangles without scores).
 */
status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame,
        const sp<Camera2Client> &client) {
    status_t res = BAD_VALUE;
    ATRACE_CALL();
    camera_metadata_ro_entry_t entry;
    bool enableFaceDetect;

    {
        // Snapshot the user-facing face-detect setting under the param lock.
        SharedParameters::Lock l(client->getParameters());
        enableFaceDetect = l.mParameters.enableFaceDetect;
    }
    entry = frame.find(ANDROID_STATISTICS_FACE_DETECT_MODE);

    // TODO: This should be an error once implementations are compliant
    if (entry.count == 0) {
        return OK;
    }

    uint8_t faceDetectMode = entry.data.u8[0];

    camera_frame_metadata metadata;
    Vector<camera_face_t> faces;
    metadata.number_of_faces = 0;

    if (enableFaceDetect &&
        faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {

        // Hold the parameter lock for the coordinate conversions below.
        SharedParameters::Lock l(client->getParameters());
        entry = frame.find(ANDROID_STATISTICS_FACE_RECTANGLES);
        if (entry.count == 0) {
            // No faces this frame
            /* warning: locks SharedCameraCallbacks */
            callbackFaceDetection(client, metadata);
            return OK;
        }
        // Each face rectangle is four int32 values, hence count / 4 faces.
        metadata.number_of_faces = entry.count / 4;
        if (metadata.number_of_faces >
                l.mParameters.fastInfo.maxFaces) {
            ALOGE("%s: Camera %d: More faces than expected! (Got %d, max %d)",
                    __FUNCTION__, client->getCameraId(),
                    metadata.number_of_faces, l.mParameters.fastInfo.maxFaces);
            return res;
        }
        const int32_t *faceRects = entry.data.i32;

        entry = frame.find(ANDROID_STATISTICS_FACE_SCORES);
        if (entry.count == 0) {
            ALOGE("%s: Camera %d: Unable to read face scores",
                    __FUNCTION__, client->getCameraId());
            return res;
        }
        const uint8_t *faceScores = entry.data.u8;

        const int32_t *faceLandmarks = NULL;
        const int32_t *faceIds = NULL;

        // Landmarks and IDs are only reported in FULL face-detect mode.
        if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
            entry = frame.find(ANDROID_STATISTICS_FACE_LANDMARKS);
            if (entry.count == 0) {
                ALOGE("%s: Camera %d: Unable to read face landmarks",
                        __FUNCTION__, client->getCameraId());
                return res;
            }
            faceLandmarks = entry.data.i32;

            entry = frame.find(ANDROID_STATISTICS_FACE_IDS);

            if (entry.count == 0) {
                ALOGE("%s: Camera %d: Unable to read face IDs",
                        __FUNCTION__, client->getCameraId());
                return res;
            }
            faceIds = entry.data.i32;
        }

        // Crop region is needed to map sensor-array coordinates into the
        // normalized [-1000, 1000] coordinate space of the API1 callback.
        entry = frame.find(ANDROID_SCALER_CROP_REGION);
        if (entry.count < 4) {
            ALOGE("%s: Camera %d: Unable to read crop region (count = %zu)",
                    __FUNCTION__, client->getCameraId(), entry.count);
            return res;
        }

        Parameters::CropRegion scalerCrop = {
            static_cast<float>(entry.data.i32[0]),
            static_cast<float>(entry.data.i32[1]),
            static_cast<float>(entry.data.i32[2]),
            static_cast<float>(entry.data.i32[3])};

        faces.setCapacity(metadata.number_of_faces);

        size_t maxFaces = metadata.number_of_faces;
        for (size_t i = 0; i < maxFaces; i++) {
            // A zero score marks an invalid/empty face slot; drop it and
            // shrink the reported face count accordingly.
            if (faceScores[i] == 0) {
                metadata.number_of_faces--;
                continue;
            }
            // Scores are defined on [1, 100]; warn but still pass through.
            if (faceScores[i] > 100) {
                ALOGW("%s: Face index %zu with out of range score %d",
                        __FUNCTION__, i, faceScores[i]);
            }

            camera_face_t face;

            face.rect[0] = l.mParameters.arrayXToNormalizedWithCrop(
                    faceRects[i*4 + 0], scalerCrop);
            face.rect[1] = l.mParameters.arrayYToNormalizedWithCrop(
                    faceRects[i*4 + 1], scalerCrop);
            face.rect[2] = l.mParameters.arrayXToNormalizedWithCrop(
                    faceRects[i*4 + 2], scalerCrop);
            face.rect[3] = l.mParameters.arrayYToNormalizedWithCrop(
                    faceRects[i*4 + 3], scalerCrop);
            face.score = faceScores[i];
            if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
                // Landmarks come as 6 int32s per face: left eye (x, y),
                // right eye (x, y), mouth (x, y).
                face.id = faceIds[i];
                face.left_eye[0] = l.mParameters.arrayXToNormalizedWithCrop(
                        faceLandmarks[i*6 + 0], scalerCrop);
                face.left_eye[1] = l.mParameters.arrayYToNormalizedWithCrop(
                        faceLandmarks[i*6 + 1], scalerCrop);
                face.right_eye[0] = l.mParameters.arrayXToNormalizedWithCrop(
                        faceLandmarks[i*6 + 2], scalerCrop);
                face.right_eye[1] = l.mParameters.arrayYToNormalizedWithCrop(
                        faceLandmarks[i*6 + 3], scalerCrop);
                face.mouth[0] = l.mParameters.arrayXToNormalizedWithCrop(
                        faceLandmarks[i*6 + 4], scalerCrop);
                face.mouth[1] = l.mParameters.arrayYToNormalizedWithCrop(
                        faceLandmarks[i*6 + 5], scalerCrop);
            } else {
                // SIMPLE mode: no IDs or landmarks; -2000 marks "unsupported"
                // (outside the valid [-1000, 1000] normalized range).
                face.id = 0;
                face.left_eye[0] = face.left_eye[1] = -2000;
                face.right_eye[0] = face.right_eye[1] = -2000;
                face.mouth[0] = face.mouth[1] = -2000;
            }
            faces.push_back(face);
        }

        // NOTE: faces is stack-local; metadata.faces is only valid for the
        // duration of the synchronous callback below.
        metadata.faces = faces.editArray();
    }

    /* warning: locks SharedCameraCallbacks */
    callbackFaceDetection(client, metadata);

    return OK;
}
235
process3aState(const CaptureResult & frame,const sp<Camera2Client> & client)236 status_t FrameProcessor::process3aState(const CaptureResult &frame,
237 const sp<Camera2Client> &client) {
238
239 ATRACE_CALL();
240 const CameraMetadata &metadata = frame.mMetadata;
241 camera_metadata_ro_entry_t entry;
242 int cameraId = client->getCameraId();
243
244 entry = metadata.find(ANDROID_REQUEST_FRAME_COUNT);
245 int32_t frameNumber = entry.data.i32[0];
246
247 // Don't send 3A notifications for the same frame number twice
248 if (frameNumber <= mLast3AFrameNumber) {
249 ALOGV("%s: Already sent 3A for frame number %d, skipping",
250 __FUNCTION__, frameNumber);
251
252 // Remove the entry if there is one for this frame number in mPending3AStates.
253 mPending3AStates.removeItem(frameNumber);
254 return OK;
255 }
256
257 AlgState pendingState;
258
259 ssize_t index = mPending3AStates.indexOfKey(frameNumber);
260 if (index != NAME_NOT_FOUND) {
261 pendingState = mPending3AStates.valueAt(index);
262 }
263
264 // Update 3A states from the result.
265 bool gotAllStates = true;
266
267 // TODO: Also use AE mode, AE trigger ID
268 bool gotAFState = updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AF_MODE,
269 &pendingState.afMode, frameNumber, cameraId);
270
271 bool gotAWBState = updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AWB_MODE,
272 &pendingState.awbMode, frameNumber, cameraId);
273
274 bool gotAEState = updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AE_STATE,
275 &pendingState.aeState, frameNumber, cameraId);
276
277 gotAFState &= updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AF_STATE,
278 &pendingState.afState, frameNumber, cameraId);
279
280 gotAWBState &= updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AWB_STATE,
281 &pendingState.awbState, frameNumber, cameraId);
282
283 pendingState.afTriggerId = frame.mResultExtras.afTriggerId;
284 pendingState.aeTriggerId = frame.mResultExtras.precaptureTriggerId;
285
286 if (gotAEState && (frameNumber > mLastAEFrameNumber)) {
287 if (pendingState.aeState != m3aState.aeState ||
288 pendingState.aeTriggerId > m3aState.aeTriggerId) {
289 ALOGV("%s: Camera %d: AE state %d->%d",
290 __FUNCTION__, cameraId,
291 m3aState.aeState, pendingState.aeState);
292 client->notifyAutoExposure(pendingState.aeState, pendingState.aeTriggerId);
293
294 m3aState.aeState = pendingState.aeState;
295 m3aState.aeTriggerId = pendingState.aeTriggerId;
296 mLastAEFrameNumber = frameNumber;
297 }
298 }
299
300 if (gotAFState && (frameNumber > mLastAFrameNumber)) {
301 if (pendingState.afState != m3aState.afState ||
302 pendingState.afMode != m3aState.afMode ||
303 pendingState.afTriggerId != m3aState.afTriggerId) {
304 ALOGV("%s: Camera %d: AF state %d->%d. AF mode %d->%d. Trigger %d->%d",
305 __FUNCTION__, cameraId,
306 m3aState.afState, pendingState.afState,
307 m3aState.afMode, pendingState.afMode,
308 m3aState.afTriggerId, pendingState.afTriggerId);
309 client->notifyAutoFocus(pendingState.afState, pendingState.afTriggerId);
310
311 m3aState.afState = pendingState.afState;
312 m3aState.afMode = pendingState.afMode;
313 m3aState.afTriggerId = pendingState.afTriggerId;
314 mLastAFrameNumber = frameNumber;
315 }
316 }
317
318 if (gotAWBState && (frameNumber > mLastAWBFrameNumber)) {
319 if (pendingState.awbState != m3aState.awbState ||
320 pendingState.awbMode != m3aState.awbMode) {
321 ALOGV("%s: Camera %d: AWB state %d->%d. AWB mode %d->%d",
322 __FUNCTION__, cameraId,
323 m3aState.awbState, pendingState.awbState,
324 m3aState.awbMode, pendingState.awbMode);
325 client->notifyAutoWhitebalance(pendingState.awbState,
326 pendingState.aeTriggerId);
327
328 m3aState.awbMode = pendingState.awbMode;
329 m3aState.awbState = pendingState.awbState;
330 mLastAWBFrameNumber = frameNumber;
331 }
332 }
333
334 gotAllStates &= gotAEState & gotAFState & gotAWBState;
335 if (!gotAllStates) {
336 // If not all states are received, put the pending state to mPending3AStates.
337 if (index == NAME_NOT_FOUND) {
338 mPending3AStates.add(frameNumber, pendingState);
339 } else {
340 mPending3AStates.replaceValueAt(index, pendingState);
341 }
342 return NOT_ENOUGH_DATA;
343 }
344
345 if (index != NAME_NOT_FOUND) {
346 mPending3AStates.removeItemsAt(index);
347 }
348
349 mLast3AFrameNumber = frameNumber;
350
351 return OK;
352 }
353
354 template<typename Src, typename T>
updatePendingState(const CameraMetadata & result,int32_t tag,T * value,int32_t frameNumber,int cameraId)355 bool FrameProcessor::updatePendingState(const CameraMetadata& result, int32_t tag,
356 T* value, int32_t frameNumber, int cameraId) {
357 camera_metadata_ro_entry_t entry;
358 if (value == NULL) {
359 ALOGE("%s: Camera %d: Value to write to is NULL",
360 __FUNCTION__, cameraId);
361 return false;
362 }
363
364 // Already got the value for this tag.
365 if (*value != static_cast<T>(NOT_SET)) {
366 return true;
367 }
368
369 entry = result.find(tag);
370 if (entry.count == 0) {
371 const camera_metadata *metaBuffer = result.getAndLock();
372 ALOGV("%s: Camera %d: No %s provided by HAL for frame %d in this result!",
373 __FUNCTION__, cameraId,
374 get_local_camera_metadata_tag_name(tag, metaBuffer),
375 frameNumber);
376 result.unlock(metaBuffer);
377 return false;
378 } else {
379 switch(sizeof(Src)){
380 case sizeof(uint8_t):
381 *value = static_cast<T>(entry.data.u8[0]);
382 break;
383 case sizeof(int32_t):
384 *value = static_cast<T>(entry.data.i32[0]);
385 break;
386 default:
387 ALOGE("%s: Camera %d: Unsupported source",
388 __FUNCTION__, cameraId);
389 return false;
390 }
391 }
392 return true;
393 }
394
395
callbackFaceDetection(const sp<Camera2Client> & client,const camera_frame_metadata & metadata)396 void FrameProcessor::callbackFaceDetection(const sp<Camera2Client>& client,
397 const camera_frame_metadata &metadata) {
398
399 camera_frame_metadata *metadata_ptr =
400 const_cast<camera_frame_metadata*>(&metadata);
401
402 /**
403 * Filter out repeated 0-face callbacks,
404 * but not when the last frame was >0
405 */
406 if (metadata.number_of_faces != 0 ||
407 mLastFrameNumberOfFaces != metadata.number_of_faces) {
408
409 Camera2Client::SharedCameraCallbacks::Lock
410 l(client->mSharedCameraCallbacks);
411 if (l.mRemoteCallback != NULL) {
412 l.mRemoteCallback->dataCallback(CAMERA_MSG_PREVIEW_METADATA,
413 NULL,
414 metadata_ptr);
415 }
416 }
417
418 mLastFrameNumberOfFaces = metadata.number_of_faces;
419 }
420
421 }; // namespace camera2
422 }; // namespace android
423