/*
 * Copyright (C) Texas Instruments - http://www.ti.com/
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * @file OMXFD.cpp
 *
 * This file contains functionality for handling face detection.
 *
 */
23
24 #undef LOG_TAG
25
26 #define LOG_TAG "CameraHAL"
27
28 #include "CameraHal.h"
29 #include "OMXCameraAdapter.h"
30
// Minimum h/w confidence score for a detected face to be reported to the
// application layer. Real faces are observed to score >= 80, while false
// positives occasionally reach 70, so anything at or below this value is
// dropped (see the filtering in encodeFaceCoordinates()).
#define FACE_DETECTION_THRESHOLD 80

// Constants used for face smooth filtering: a face whose center moved less
// than the filter thresholds and whose rectangle size changed less than the
// size thresholds (all in Android face-coordinate units) is considered the
// same face as last frame, and last frame's rectangle is reused verbatim.
static const int HorizontalFilterThreshold = 40;
static const int VerticalFilterThreshold = 40;
static const int HorizontalFaceSizeThreshold = 30;
static const int VerticalFaceSizeThreshold = 30;
38
39
40 namespace android {
41
setParametersFD(const CameraParameters & params,BaseCameraAdapter::AdapterState state)42 status_t OMXCameraAdapter::setParametersFD(const CameraParameters ¶ms,
43 BaseCameraAdapter::AdapterState state)
44 {
45 status_t ret = NO_ERROR;
46
47 LOG_FUNCTION_NAME;
48
49 LOG_FUNCTION_NAME_EXIT;
50
51 return ret;
52 }
53
startFaceDetection()54 status_t OMXCameraAdapter::startFaceDetection()
55 {
56 status_t ret = NO_ERROR;
57
58 Mutex::Autolock lock(mFaceDetectionLock);
59
60 ret = setFaceDetection(true, mDeviceOrientation);
61 if (ret != NO_ERROR) {
62 goto out;
63 }
64
65 if ( mFaceDetectionRunning )
66 {
67 //Disable region priority and enable face priority for AF
68 setAlgoPriority(REGION_PRIORITY, FOCUS_ALGO, false);
69 setAlgoPriority(FACE_PRIORITY, FOCUS_ALGO , true);
70
71 //Disable Region priority and enable Face priority
72 setAlgoPriority(REGION_PRIORITY, EXPOSURE_ALGO, false);
73 setAlgoPriority(FACE_PRIORITY, EXPOSURE_ALGO, true);
74 }
75
76 // Note: White balance will not be face prioritized, since
77 // the algorithm needs full frame statistics, and not face
78 // regions alone.
79
80 faceDetectionNumFacesLastOutput = 0;
81 out:
82 return ret;
83 }
84
stopFaceDetection()85 status_t OMXCameraAdapter::stopFaceDetection()
86 {
87 status_t ret = NO_ERROR;
88 const char *str = NULL;
89 BaseCameraAdapter::AdapterState state;
90 BaseCameraAdapter::getState(state);
91
92 Mutex::Autolock lock(mFaceDetectionLock);
93
94 ret = setFaceDetection(false, mDeviceOrientation);
95 if (ret != NO_ERROR) {
96 goto out;
97 }
98
99 // Reset 3A settings
100 ret = setParameters3A(mParams, state);
101 if (ret != NO_ERROR) {
102 goto out;
103 }
104
105 if (mPending3Asettings) {
106 apply3Asettings(mParameters3A);
107 }
108
109 faceDetectionNumFacesLastOutput = 0;
110 out:
111 return ret;
112 }
113
pauseFaceDetection(bool pause)114 void OMXCameraAdapter::pauseFaceDetection(bool pause)
115 {
116 Mutex::Autolock lock(mFaceDetectionLock);
117 // pausing will only take affect if fd is already running
118 if (mFaceDetectionRunning) {
119 mFaceDetectionPaused = pause;
120 faceDetectionNumFacesLastOutput = 0;
121 }
122 }
123
setFaceDetection(bool enable,OMX_U32 orientation)124 status_t OMXCameraAdapter::setFaceDetection(bool enable, OMX_U32 orientation)
125 {
126 status_t ret = NO_ERROR;
127 OMX_ERRORTYPE eError = OMX_ErrorNone;
128 OMX_CONFIG_EXTRADATATYPE extraDataControl;
129 OMX_CONFIG_OBJDETECTIONTYPE objDetection;
130
131 LOG_FUNCTION_NAME;
132
133 if ( OMX_StateInvalid == mComponentState )
134 {
135 CAMHAL_LOGEA("OMX component is in invalid state");
136 ret = -EINVAL;
137 }
138
139 if ( NO_ERROR == ret )
140 {
141 if ( orientation > 270 ) {
142 orientation = 0;
143 }
144
145 OMX_INIT_STRUCT_PTR (&objDetection, OMX_CONFIG_OBJDETECTIONTYPE);
146 objDetection.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
147 objDetection.nDeviceOrientation = orientation;
148 if ( enable )
149 {
150 objDetection.bEnable = OMX_TRUE;
151 }
152 else
153 {
154 objDetection.bEnable = OMX_FALSE;
155 }
156
157 eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
158 ( OMX_INDEXTYPE ) OMX_IndexConfigImageFaceDetection,
159 &objDetection);
160 if ( OMX_ErrorNone != eError )
161 {
162 CAMHAL_LOGEB("Error while configuring face detection 0x%x", eError);
163 ret = -1;
164 }
165 else
166 {
167 CAMHAL_LOGDA("Face detection configured successfully");
168 }
169 }
170
171 if ( NO_ERROR == ret )
172 {
173 OMX_INIT_STRUCT_PTR (&extraDataControl, OMX_CONFIG_EXTRADATATYPE);
174 extraDataControl.nPortIndex = mCameraAdapterParameters.mPrevPortIndex;
175 extraDataControl.eExtraDataType = OMX_FaceDetection;
176 extraDataControl.eCameraView = OMX_2D;
177 if ( enable )
178 {
179 extraDataControl.bEnable = OMX_TRUE;
180 }
181 else
182 {
183 extraDataControl.bEnable = OMX_FALSE;
184 }
185
186 eError = OMX_SetConfig(mCameraAdapterParameters.mHandleComp,
187 ( OMX_INDEXTYPE ) OMX_IndexConfigOtherExtraDataControl,
188 &extraDataControl);
189 if ( OMX_ErrorNone != eError )
190 {
191 CAMHAL_LOGEB("Error while configuring face detection extra data 0x%x",
192 eError);
193 ret = -1;
194 }
195 else
196 {
197 CAMHAL_LOGDA("Face detection extra data configured successfully");
198 }
199 }
200
201 if ( NO_ERROR == ret )
202 {
203 mFaceDetectionRunning = enable;
204 mFaceDetectionPaused = !enable;
205 }
206
207 LOG_FUNCTION_NAME_EXIT;
208
209 return ret;
210 }
211
detectFaces(OMX_BUFFERHEADERTYPE * pBuffHeader,sp<CameraFDResult> & result,size_t previewWidth,size_t previewHeight)212 status_t OMXCameraAdapter::detectFaces(OMX_BUFFERHEADERTYPE* pBuffHeader,
213 sp<CameraFDResult> &result,
214 size_t previewWidth,
215 size_t previewHeight)
216 {
217 status_t ret = NO_ERROR;
218 OMX_ERRORTYPE eError = OMX_ErrorNone;
219 OMX_TI_FACERESULT *faceResult;
220 OMX_OTHER_EXTRADATATYPE *extraData;
221 OMX_FACEDETECTIONTYPE *faceData;
222 OMX_TI_PLATFORMPRIVATE *platformPrivate;
223 camera_frame_metadata_t *faces;
224
225 LOG_FUNCTION_NAME;
226
227 if ( OMX_StateExecuting != mComponentState ) {
228 CAMHAL_LOGEA("OMX component is not in executing state");
229 return NO_INIT;
230 }
231
232 if ( NULL == pBuffHeader ) {
233 CAMHAL_LOGEA("Invalid Buffer header");
234 return-EINVAL;
235 }
236
237 platformPrivate = (OMX_TI_PLATFORMPRIVATE *) (pBuffHeader->pPlatformPrivate);
238 if ( NULL != platformPrivate ) {
239 if ( sizeof(OMX_TI_PLATFORMPRIVATE) == platformPrivate->nSize ) {
240 CAMHAL_LOGVB("Size = %d, sizeof = %d, pAuxBuf = 0x%x, pAuxBufSize= %d, pMetaDataBufer = 0x%x, nMetaDataSize = %d",
241 platformPrivate->nSize,
242 sizeof(OMX_TI_PLATFORMPRIVATE),
243 platformPrivate->pAuxBuf1,
244 platformPrivate->pAuxBufSize1,
245 platformPrivate->pMetaDataBuffer,
246 platformPrivate->nMetaDataSize);
247 } else {
248 CAMHAL_LOGEB("OMX_TI_PLATFORMPRIVATE size mismatch: expected = %d, received = %d",
249 ( unsigned int ) sizeof(OMX_TI_PLATFORMPRIVATE),
250 ( unsigned int ) platformPrivate->nSize);
251 ret = -EINVAL;
252 }
253 } else {
254 CAMHAL_LOGEA("Invalid OMX_TI_PLATFORMPRIVATE");
255 return-EINVAL;
256 }
257
258
259 if ( 0 >= platformPrivate->nMetaDataSize ) {
260 CAMHAL_LOGEB("OMX_TI_PLATFORMPRIVATE nMetaDataSize is size is %d",
261 ( unsigned int ) platformPrivate->nMetaDataSize);
262 return -EINVAL;
263 }
264
265 extraData = (OMX_OTHER_EXTRADATATYPE *) (platformPrivate->pMetaDataBuffer);
266 if ( NULL != extraData ) {
267 CAMHAL_LOGVB("Size = %d, sizeof = %d, eType = 0x%x, nDataSize= %d, nPortIndex = 0x%x, nVersion = 0x%x",
268 extraData->nSize,
269 sizeof(OMX_OTHER_EXTRADATATYPE),
270 extraData->eType,
271 extraData->nDataSize,
272 extraData->nPortIndex,
273 extraData->nVersion);
274 } else {
275 CAMHAL_LOGEA("Invalid OMX_OTHER_EXTRADATATYPE");
276 return -EINVAL;
277 }
278
279 faceData = ( OMX_FACEDETECTIONTYPE * ) extraData->data;
280 if ( NULL != faceData ) {
281 if ( sizeof(OMX_FACEDETECTIONTYPE) == faceData->nSize ) {
282 CAMHAL_LOGVB("Faces detected %d",
283 faceData->ulFaceCount,
284 faceData->nSize,
285 sizeof(OMX_FACEDETECTIONTYPE),
286 faceData->eCameraView,
287 faceData->nPortIndex,
288 faceData->nVersion);
289 } else {
290 CAMHAL_LOGEB("OMX_FACEDETECTIONTYPE size mismatch: expected = %d, received = %d",
291 ( unsigned int ) sizeof(OMX_FACEDETECTIONTYPE),
292 ( unsigned int ) faceData->nSize);
293 return -EINVAL;
294 }
295 } else {
296 CAMHAL_LOGEA("Invalid OMX_FACEDETECTIONTYPE");
297 return -EINVAL;
298 }
299
300 ret = encodeFaceCoordinates(faceData, &faces, previewWidth, previewHeight);
301
302 if ( NO_ERROR == ret ) {
303 result = new CameraFDResult(faces);
304 } else {
305 result.clear();
306 result = NULL;
307 }
308
309 LOG_FUNCTION_NAME_EXIT;
310
311 return ret;
312 }
313
encodeFaceCoordinates(const OMX_FACEDETECTIONTYPE * faceData,camera_frame_metadata_t ** pFaces,size_t previewWidth,size_t previewHeight)314 status_t OMXCameraAdapter::encodeFaceCoordinates(const OMX_FACEDETECTIONTYPE *faceData,
315 camera_frame_metadata_t **pFaces,
316 size_t previewWidth,
317 size_t previewHeight)
318 {
319 status_t ret = NO_ERROR;
320 camera_face_t *faces;
321 camera_frame_metadata_t *faceResult;
322 size_t hRange, vRange;
323 double tmp;
324
325 LOG_FUNCTION_NAME;
326
327 if ( NULL == faceData ) {
328 CAMHAL_LOGEA("Invalid OMX_FACEDETECTIONTYPE parameter");
329 return EINVAL;
330 }
331
332 LOG_FUNCTION_NAME
333
334 hRange = CameraFDResult::RIGHT - CameraFDResult::LEFT;
335 vRange = CameraFDResult::BOTTOM - CameraFDResult::TOP;
336
337 faceResult = ( camera_frame_metadata_t * ) malloc(sizeof(camera_frame_metadata_t));
338 if ( NULL == faceResult ) {
339 return -ENOMEM;
340 }
341
342 if ( 0 < faceData->ulFaceCount ) {
343 int orient_mult;
344 int trans_left, trans_top, trans_right, trans_bot;
345
346 faces = ( camera_face_t * ) malloc(sizeof(camera_face_t)*faceData->ulFaceCount);
347 if ( NULL == faces ) {
348 return -ENOMEM;
349 }
350
351 /**
352 / * When device is 180 degrees oriented to the sensor, need to translate
353 / * the output from Ducati to what Android expects
354 / * Ducati always gives face coordinates in this form, irrespective of
355 / * rotation, i.e (l,t) always represents the point towards the left eye
356 / * and top of hair.
357 / * (l, t)
358 / * ---------------
359 / * - ,,,,,,, -
360 / * - | | -
361 / * - |<a <a| -
362 / * - (| ^ |) -
363 / * - | -=- | -
364 / * - \_____/ -
365 / * ---------------
366 / * (r, b)
367 / *
368 / * However, Android expects the coords to be in respect with what the
369 / * sensor is viewing, i.e Android expects sensor to see this with (l,t)
370 / * and (r,b) like so:
371 / * (l, t)
372 / * ---------------
373 / * - _____ -
374 / * - / \ -
375 / * - | -=- | -
376 / * - (| ^ |) -
377 / * - |a> a>| -
378 / * - | | -
379 / * - ,,,,,,, -
380 / * ---------------
381 / * (r, b)
382 */
383
384 if (mDeviceOrientation == 180) {
385 orient_mult = -1;
386 trans_left = 2; // right is now left
387 trans_top = 3; // bottom is now top
388 trans_right = 0; // left is now right
389 trans_bot = 1; // top is not bottom
390 } else {
391 orient_mult = 1;
392 trans_left = 0; // left
393 trans_top = 1; // top
394 trans_right = 2; // right
395 trans_bot = 3; // bottom
396 }
397
398 int j = 0, i = 0;
399 for ( ; j < faceData->ulFaceCount ; j++)
400 {
401 OMX_S32 nLeft = 0;
402 OMX_S32 nTop = 0;
403 //Face filtering
404 //For real faces, it is seen that the h/w passes a score >=80
405 //For false faces, we seem to get even a score of 70 sometimes.
406 //In order to avoid any issue at application level, we filter
407 //<=70 score here.
408 if(faceData->tFacePosition[j].nScore <= FACE_DETECTION_THRESHOLD)
409 continue;
410
411 if (mDeviceOrientation == 180) {
412 // from sensor pov, the left pos is the right corner of the face in pov of frame
413 nLeft = faceData->tFacePosition[j].nLeft + faceData->tFacePosition[j].nWidth;
414 nTop = faceData->tFacePosition[j].nTop + faceData->tFacePosition[j].nHeight;
415 } else {
416 nLeft = faceData->tFacePosition[j].nLeft;
417 nTop = faceData->tFacePosition[j].nTop;
418 }
419
420 tmp = ( double ) nLeft / ( double ) previewWidth;
421 tmp *= hRange;
422 tmp -= hRange/2;
423 faces[i].rect[trans_left] = tmp;
424
425 tmp = ( double ) nTop / ( double )previewHeight;
426 tmp *= vRange;
427 tmp -= vRange/2;
428 faces[i].rect[trans_top] = tmp;
429
430 tmp = ( double ) faceData->tFacePosition[j].nWidth / ( double ) previewWidth;
431 tmp *= hRange;
432 tmp *= orient_mult;
433 faces[i].rect[trans_right] = faces[i].rect[trans_left] + tmp;
434
435 tmp = ( double ) faceData->tFacePosition[j].nHeight / ( double ) previewHeight;
436 tmp *= vRange;
437 tmp *= orient_mult;
438 faces[i].rect[trans_bot] = faces[i].rect[trans_top] + tmp;
439
440 faces[i].score = faceData->tFacePosition[j].nScore;
441 faces[i].id = 0;
442 faces[i].left_eye[0] = CameraFDResult::INVALID_DATA;
443 faces[i].left_eye[1] = CameraFDResult::INVALID_DATA;
444 faces[i].right_eye[0] = CameraFDResult::INVALID_DATA;
445 faces[i].right_eye[1] = CameraFDResult::INVALID_DATA;
446 faces[i].mouth[0] = CameraFDResult::INVALID_DATA;
447 faces[i].mouth[1] = CameraFDResult::INVALID_DATA;
448 i++;
449 }
450
451 faceResult->number_of_faces = i;
452 faceResult->faces = faces;
453
454 for (int i = 0; i < faceResult->number_of_faces; i++)
455 {
456 int centerX = (faces[i].rect[trans_left] + faces[i].rect[trans_right] ) / 2;
457 int centerY = (faces[i].rect[trans_top] + faces[i].rect[trans_bot] ) / 2;
458
459 int sizeX = (faces[i].rect[trans_right] - faces[i].rect[trans_left] ) ;
460 int sizeY = (faces[i].rect[trans_bot] - faces[i].rect[trans_top] ) ;
461
462 for (int j = 0; j < faceDetectionNumFacesLastOutput; j++)
463 {
464 int tempCenterX = (faceDetectionLastOutput[j].rect[trans_left] +
465 faceDetectionLastOutput[j].rect[trans_right] ) / 2;
466 int tempCenterY = (faceDetectionLastOutput[j].rect[trans_top] +
467 faceDetectionLastOutput[j].rect[trans_bot] ) / 2;
468 int tempSizeX = (faceDetectionLastOutput[j].rect[trans_right] -
469 faceDetectionLastOutput[j].rect[trans_left] ) ;
470 int tempSizeY = (faceDetectionLastOutput[j].rect[trans_bot] -
471 faceDetectionLastOutput[j].rect[trans_top] ) ;
472
473 if ( (abs(tempCenterX - centerX) < HorizontalFilterThreshold) &&
474 (abs(tempCenterY - centerY) < VerticalFilterThreshold) )
475 {
476 // Found Face. It did not move too far.
477 // Now check size of rectangle compare to last output
478 if ( (abs (tempSizeX -sizeX) < HorizontalFaceSizeThreshold) &&
479 (abs (tempSizeY -sizeY) < VerticalFaceSizeThreshold) )
480 {
481 // Rectangle is almost same as last time
482 // Output exactly what was done for this face last time.
483 faces[i] = faceDetectionLastOutput[j];
484 }
485 else
486 {
487 // TODO(XXX): Rectangle size changed but position is same.
488 // Possibly we can apply just positional correctness.
489 }
490 }
491 }
492 }
493
494 // Save this output for next iteration
495 for (int i = 0; i < faceResult->number_of_faces; i++)
496 {
497 faceDetectionLastOutput[i] = faces[i];
498 }
499 faceDetectionNumFacesLastOutput = faceResult->number_of_faces;
500 } else {
501 faceResult->number_of_faces = 0;
502 faceResult->faces = NULL;
503 }
504
505 *pFaces = faceResult;
506
507 LOG_FUNCTION_NAME_EXIT;
508
509 return ret;
510 }
511
512 };
513