/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2-FrameProcessor"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <utils/Log.h>
#include <utils/Trace.h>

#include "common/CameraDeviceBase.h"
#include "api1/Camera2Client.h"
#include "api1/client2/FrameProcessor.h"

namespace android {
namespace camera2 {

FrameProcessor::FrameProcessor(wp<CameraDeviceBase> device,
        sp<Camera2Client> client) :
        FrameProcessorBase(device),
        mClient(client),
        mLastFrameNumberOfFaces(0),
        mLast3AFrameNumber(-1),
        mLastAEFrameNumber(-1),
        mLastAFrameNumber(-1),
        mLastAWBFrameNumber(-1) {

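    // If the device will not emit AE/AF/AWB notify callbacks on its own,
    // synthesize them here from per-frame result metadata (see
    // process3aState() below).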
    sp<CameraDeviceBase> d = device.promote();
    mSynthesize3ANotify = !(d->willNotify3A());

    {
        SharedParameters::Lock l(client->getParameters());

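        // A HAL may split one capture's metadata across several partial
        // results; mNumPartialResults (inherited from FrameProcessorBase) is
        // the expected total, so anything above 1 means partial results are
        // in use.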
        mUsePartialResult = (mNumPartialResults > 1);

        // Initialize starting 3A state
        m3aState.afTriggerId = l.mParameters.afTriggerCounter;
        m3aState.aeTriggerId = l.mParameters.precaptureTriggerCounter;
        // Check if lens is fixed-focus
        if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED) {
            m3aState.afMode = ANDROID_CONTROL_AF_MODE_OFF;
        } else {
            m3aState.afMode = ANDROID_CONTROL_AF_MODE_AUTO;
        }
        m3aState.awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
        m3aState.aeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
        m3aState.afState = ANDROID_CONTROL_AF_STATE_INACTIVE;
        m3aState.awbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;

        mLastFocalLength = l.mParameters.params.getFloat(
                CameraParameters::KEY_FOCAL_LENGTH);
    }
}

FrameProcessor::~FrameProcessor() {
}

bool FrameProcessor::processSingleFrame(CaptureResult &frame,
        const sp<FrameProducer> &device) {

    sp<Camera2Client> client = mClient.promote();
    if (!client.get()) {
        return false;
    }

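    // A result is partial until its partialResultCount reaches the expected
    // total; only the final result carries complete metadata.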
    bool isPartialResult = false;
    if (mUsePartialResult) {
        isPartialResult = frame.mResultExtras.partialResultCount < mNumPartialResults;
    }

    if (!isPartialResult && processFaceDetect(frame.mMetadata, client) != OK) {
        return false;
    }

    if (mSynthesize3ANotify) {
        process3aState(frame, client);
    }

    if (mCurrentRequestId != frame.mResultExtras.requestId) {
        mCurrentRequestId = frame.mResultExtras.requestId;

        client->notifyRequestId(mCurrentRequestId);
    }

    processLensState(frame.mMetadata, client);

    return FrameProcessorBase::processSingleFrame(frame, device);
}

void FrameProcessor::processLensState(const CameraMetadata &frame,
        const sp<Camera2Client> &client) {
    ATRACE_CALL();
    camera_metadata_ro_entry_t entry;

    entry = frame.find(ANDROID_LENS_FOCAL_LENGTH);
    if (entry.count == 0) {
        return;
    }

    if (fabs(entry.data.f[0] - mLastFocalLength) > 0.001f) {
        SharedParameters::Lock l(client->getParameters());
        l.mParameters.params.setFloat(
                CameraParameters::KEY_FOCAL_LENGTH,
                entry.data.f[0]);
        l.mParameters.paramsFlattened = l.mParameters.params.flatten();

        mLastFocalLength = entry.data.f[0];
    }
}

status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame,
        const sp<Camera2Client> &client) {
    status_t res = BAD_VALUE;
    ATRACE_CALL();
    camera_metadata_ro_entry_t entry;
    bool enableFaceDetect;

    {
        SharedParameters::Lock l(client->getParameters());
        enableFaceDetect = l.mParameters.enableFaceDetect;
    }
    entry = frame.find(ANDROID_STATISTICS_FACE_DETECT_MODE);

    // TODO: This should be an error once implementations are compliant
    if (entry.count == 0) {
        return OK;
    }

    uint8_t faceDetectMode = entry.data.u8[0];

    camera_frame_metadata metadata;
    Vector<camera_face_t> faces;
    metadata.number_of_faces = 0;

    if (enableFaceDetect &&
            faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {

        SharedParameters::Lock l(client->getParameters());
        entry = frame.find(ANDROID_STATISTICS_FACE_RECTANGLES);
        if (entry.count == 0) {
            // No faces this frame
            /* warning: locks SharedCameraCallbacks */
            callbackFaceDetection(client, metadata);
            return OK;
        }
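        // Each face is reported as four int32 coordinates
        // (left, top, right, bottom), hence entry.count / 4.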
        metadata.number_of_faces = entry.count / 4;
        if (metadata.number_of_faces >
                l.mParameters.fastInfo.maxFaces) {
            ALOGE("%s: Camera %d: More faces than expected! (Got %d, max %d)",
                    __FUNCTION__, client->getCameraId(),
                    metadata.number_of_faces, l.mParameters.fastInfo.maxFaces);
            return res;
        }
        const int32_t *faceRects = entry.data.i32;

        entry = frame.find(ANDROID_STATISTICS_FACE_SCORES);
        if (entry.count == 0) {
            ALOGE("%s: Camera %d: Unable to read face scores",
                    __FUNCTION__, client->getCameraId());
            return res;
        }
        const uint8_t *faceScores = entry.data.u8;

        const int32_t *faceLandmarks = NULL;
        const int32_t *faceIds = NULL;

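        // In FULL face-detect mode the HAL additionally reports stable face
        // IDs and landmarks (six int32s per face: left eye x/y, right eye
        // x/y, mouth x/y), consumed in the loop below.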
        if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
            entry = frame.find(ANDROID_STATISTICS_FACE_LANDMARKS);
            if (entry.count == 0) {
                ALOGE("%s: Camera %d: Unable to read face landmarks",
                        __FUNCTION__, client->getCameraId());
                return res;
            }
            faceLandmarks = entry.data.i32;

            entry = frame.find(ANDROID_STATISTICS_FACE_IDS);

            if (entry.count == 0) {
                ALOGE("%s: Camera %d: Unable to read face IDs",
                        __FUNCTION__, client->getCameraId());
                return res;
            }
            faceIds = entry.data.i32;
        }

        entry = frame.find(ANDROID_SCALER_CROP_REGION);
        if (entry.count < 4) {
            ALOGE("%s: Camera %d: Unable to read crop region (count = %zu)",
                    __FUNCTION__, client->getCameraId(), entry.count);
            return res;
        }

        Parameters::CropRegion scalerCrop = {
            static_cast<float>(entry.data.i32[0]),
            static_cast<float>(entry.data.i32[1]),
            static_cast<float>(entry.data.i32[2]),
            static_cast<float>(entry.data.i32[3])};

        faces.setCapacity(metadata.number_of_faces);

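        // Faces with score 0 are placeholder entries and are dropped; valid
        // scores are expected in [1, 100]. Coordinates arrive in active-array
        // pixels and are normalized to the API's [-1000, 1000] space relative
        // to the current crop region.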
        size_t maxFaces = metadata.number_of_faces;
        for (size_t i = 0; i < maxFaces; i++) {
            if (faceScores[i] == 0) {
                metadata.number_of_faces--;
                continue;
            }
            if (faceScores[i] > 100) {
                ALOGW("%s: Face index %zu with out of range score %d",
                        __FUNCTION__, i, faceScores[i]);
            }

            camera_face_t face;

            face.rect[0] = l.mParameters.arrayXToNormalizedWithCrop(
                    faceRects[i*4 + 0], scalerCrop);
            face.rect[1] = l.mParameters.arrayYToNormalizedWithCrop(
                    faceRects[i*4 + 1], scalerCrop);
            face.rect[2] = l.mParameters.arrayXToNormalizedWithCrop(
                    faceRects[i*4 + 2], scalerCrop);
            face.rect[3] = l.mParameters.arrayYToNormalizedWithCrop(
                    faceRects[i*4 + 3], scalerCrop);
            face.score = faceScores[i];
            if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
                face.id = faceIds[i];
                face.left_eye[0] = l.mParameters.arrayXToNormalizedWithCrop(
                        faceLandmarks[i*6 + 0], scalerCrop);
                face.left_eye[1] = l.mParameters.arrayYToNormalizedWithCrop(
                        faceLandmarks[i*6 + 1], scalerCrop);
                face.right_eye[0] = l.mParameters.arrayXToNormalizedWithCrop(
                        faceLandmarks[i*6 + 2], scalerCrop);
                face.right_eye[1] = l.mParameters.arrayYToNormalizedWithCrop(
                        faceLandmarks[i*6 + 3], scalerCrop);
                face.mouth[0] = l.mParameters.arrayXToNormalizedWithCrop(
                        faceLandmarks[i*6 + 4], scalerCrop);
                face.mouth[1] = l.mParameters.arrayYToNormalizedWithCrop(
                        faceLandmarks[i*6 + 5], scalerCrop);
            } else {
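                // No landmarks in SIMPLE mode: (-2000, -2000) is the camera
                // HAL's documented "not supported" sentinel, well outside the
                // valid [-1000, 1000] coordinate range.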
                face.id = 0;
                face.left_eye[0] = face.left_eye[1] = -2000;
                face.right_eye[0] = face.right_eye[1] = -2000;
                face.mouth[0] = face.mouth[1] = -2000;
            }
            faces.push_back(face);
        }

        metadata.faces = faces.editArray();
    }

    /* warning: locks SharedCameraCallbacks */
    callbackFaceDetection(client, metadata);

    return OK;
}

status_t FrameProcessor::process3aState(const CaptureResult &frame,
        const sp<Camera2Client> &client) {

    ATRACE_CALL();
    const CameraMetadata &metadata = frame.mMetadata;
    camera_metadata_ro_entry_t entry;
    int cameraId = client->getCameraId();

    entry = metadata.find(ANDROID_REQUEST_FRAME_COUNT);
    int32_t frameNumber = entry.data.i32[0];

    // Don't send 3A notifications for the same frame number twice
    if (frameNumber <= mLast3AFrameNumber) {
        ALOGV("%s: Already sent 3A for frame number %d, skipping",
                __FUNCTION__, frameNumber);

        // Remove the entry if there is one for this frame number in mPending3AStates.
        mPending3AStates.removeItem(frameNumber);
        return OK;
    }

    AlgState pendingState;

    ssize_t index = mPending3AStates.indexOfKey(frameNumber);
    if (index != NAME_NOT_FOUND) {
        pendingState = mPending3AStates.valueAt(index);
    }
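    // 3A values may arrive spread over several partial results; accumulate
    // them per frame number until AE, AF, and AWB state have all been seen.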

    // Update 3A states from the result.
    bool gotAllStates = true;

    // TODO: Also use AE mode, AE trigger ID
    bool gotAFState = updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AF_MODE,
            &pendingState.afMode, frameNumber, cameraId);

    bool gotAWBState = updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AWB_MODE,
            &pendingState.awbMode, frameNumber, cameraId);

    bool gotAEState = updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AE_STATE,
            &pendingState.aeState, frameNumber, cameraId);

    gotAFState &= updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AF_STATE,
            &pendingState.afState, frameNumber, cameraId);

    gotAWBState &= updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AWB_STATE,
            &pendingState.awbState, frameNumber, cameraId);

    pendingState.afTriggerId = frame.mResultExtras.afTriggerId;
    pendingState.aeTriggerId = frame.mResultExtras.precaptureTriggerId;

    if (gotAEState && (frameNumber > mLastAEFrameNumber)) {
        if (pendingState.aeState != m3aState.aeState ||
                pendingState.aeTriggerId > m3aState.aeTriggerId) {
            ALOGV("%s: Camera %d: AE state %d->%d",
                    __FUNCTION__, cameraId,
                    m3aState.aeState, pendingState.aeState);
            client->notifyAutoExposure(pendingState.aeState, pendingState.aeTriggerId);

            m3aState.aeState = pendingState.aeState;
            m3aState.aeTriggerId = pendingState.aeTriggerId;
            mLastAEFrameNumber = frameNumber;
        }
    }

    if (gotAFState && (frameNumber > mLastAFrameNumber)) {
        if (pendingState.afState != m3aState.afState ||
                pendingState.afMode != m3aState.afMode ||
                pendingState.afTriggerId != m3aState.afTriggerId) {
            ALOGV("%s: Camera %d: AF state %d->%d. AF mode %d->%d. Trigger %d->%d",
                    __FUNCTION__, cameraId,
                    m3aState.afState, pendingState.afState,
                    m3aState.afMode, pendingState.afMode,
                    m3aState.afTriggerId, pendingState.afTriggerId);
            client->notifyAutoFocus(pendingState.afState, pendingState.afTriggerId);

            m3aState.afState = pendingState.afState;
            m3aState.afMode = pendingState.afMode;
            m3aState.afTriggerId = pendingState.afTriggerId;
            mLastAFrameNumber = frameNumber;
        }
    }

    if (gotAWBState && (frameNumber > mLastAWBFrameNumber)) {
        if (pendingState.awbState != m3aState.awbState ||
                pendingState.awbMode != m3aState.awbMode) {
            ALOGV("%s: Camera %d: AWB state %d->%d. AWB mode %d->%d",
                    __FUNCTION__, cameraId,
                    m3aState.awbState, pendingState.awbState,
                    m3aState.awbMode, pendingState.awbMode);
            client->notifyAutoWhitebalance(pendingState.awbState,
                    pendingState.aeTriggerId);

            m3aState.awbMode = pendingState.awbMode;
            m3aState.awbState = pendingState.awbState;
            mLastAWBFrameNumber = frameNumber;
        }
    }

    gotAllStates &= gotAEState & gotAFState & gotAWBState;
    if (!gotAllStates) {
        // If not all states were received, save the pending state in mPending3AStates.
        if (index == NAME_NOT_FOUND) {
            mPending3AStates.add(frameNumber, pendingState);
        } else {
            mPending3AStates.replaceValueAt(index, pendingState);
        }
        return NOT_ENOUGH_DATA;
    }

    if (index != NAME_NOT_FOUND) {
        mPending3AStates.removeItemsAt(index);
    }

    mLast3AFrameNumber = frameNumber;

    return OK;
}

template<typename Src, typename T>
bool FrameProcessor::updatePendingState(const CameraMetadata& result, int32_t tag,
        T* value, int32_t frameNumber, int cameraId) {
    camera_metadata_ro_entry_t entry;
    if (value == NULL) {
        ALOGE("%s: Camera %d: Value to write to is NULL",
                __FUNCTION__, cameraId);
        return false;
    }

    // Already got the value for this tag.
    if (*value != static_cast<T>(NOT_SET)) {
        return true;
    }

    entry = result.find(tag);
    if (entry.count == 0) {
        const camera_metadata *metaBuffer = result.getAndLock();
        ALOGV("%s: Camera %d: No %s provided by HAL for frame %d in this result!",
                __FUNCTION__, cameraId,
                get_local_camera_metadata_tag_name(tag, metaBuffer),
                frameNumber);
        result.unlock(metaBuffer);
        return false;
    } else {
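        // Dispatch on the source type's size: metadata entries expose their
        // payload through a typed union, so the matching accessor has to be
        // chosen explicitly.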
        switch (sizeof(Src)) {
            case sizeof(uint8_t):
                *value = static_cast<T>(entry.data.u8[0]);
                break;
            case sizeof(int32_t):
                *value = static_cast<T>(entry.data.i32[0]);
                break;
            default:
                ALOGE("%s: Camera %d: Unsupported source",
                        __FUNCTION__, cameraId);
                return false;
        }
    }
    return true;
}


void FrameProcessor::callbackFaceDetection(const sp<Camera2Client>& client,
        const camera_frame_metadata &metadata) {

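    // The legacy dataCallback takes a non-const metadata pointer, so cast
    // away constness here; the callee is not expected to modify it.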
    camera_frame_metadata *metadata_ptr =
        const_cast<camera_frame_metadata*>(&metadata);

    /**
     * Filter out repeated 0-face callbacks,
     * but not when the last frame was >0
     */
    if (metadata.number_of_faces != 0 ||
            mLastFrameNumberOfFaces != metadata.number_of_faces) {

        Camera2Client::SharedCameraCallbacks::Lock
            l(client->mSharedCameraCallbacks);
        if (l.mRemoteCallback != NULL) {
            l.mRemoteCallback->dataCallback(CAMERA_MSG_PREVIEW_METADATA,
                    NULL,
                    metadata_ptr);
        }
    }

    mLastFrameNumberOfFaces = metadata.number_of_faces;
}

} // namespace camera2
} // namespace android