/*
OpenCV for Android NDK
Copyright (c) 2006-2009 SIProp Project http://www.siprop.org/

This software is provided 'as-is', without any express or implied warranty.
In no event will the authors be held liable for any damages arising from the use of this software.
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it freely,
subject to the following restrictions:

1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required.
2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
3. This notice may not be removed or altered from any source distribution.
*/
#include "cvjni.h"
#include <time.h>


#define THRESHOLD 10
#define THRESHOLD_MAX_VALUE 255

#define CONTOUR_MAX_LEVEL 1
#define LINE_THICKNESS 2
#define LINE_TYPE 8

#define HAAR_SCALE (1.4)
#define IMAGE_SCALE (2)
#define MIN_NEIGHBORS (2)
#define HAAR_FLAGS_SINGLE_FACE (0 | CV_HAAR_FIND_BIGGEST_OBJECT | CV_HAAR_DO_ROUGH_SEARCH)
#define HAAR_FLAGS_ALL_FACES (0)
// Other options we dropped:
// CV_HAAR_DO_CANNY_PRUNING | CV_HAAR_SCALE_IMAGE
#define MIN_SIZE_WIDTH (20)
#define MIN_SIZE_HEIGHT (20)
#define PAD_FACE_SIZE (10)
#define PAD_FACE_AREA (40)
#define PAD_FACE_AREA_2 (PAD_FACE_AREA * 2)

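// Module-level state shared by the functions below. It is assumed to be
// declared in cvjni.h; a rough sketch only (names taken from this file,
// types inferred from how they are used here):
//
//   CvCapture*               m_capture;           // socket capture handle
//   IplImage*                m_sourceImage;       // current source frame
//   IplImage*                m_grayImage;         // grayscale working image
//   IplImage*                m_smallImage;        // downscaled detection image
//   CvMemStorage*            m_storage;           // detection result storage
//   CvHaarClassifierCascade* m_cascade;           // loaded Haar cascade
//   CvSeq*                   m_facesFound;        // faces from the last detection
//   CvRect                   m_faceCropArea;      // cropped search area (small-image coords)
//   CvSize                   m_smallestFaceSize;  // minimum face size for detection
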
// Initialize a socket capture to grab images from a socket connection.
JNIEXPORT
jboolean
JNICALL
Java_org_siprop_opencv_OpenCV_createSocketCapture(JNIEnv* env,
                                                  jobject thiz,
                                                  jstring address_str,
                                                  jstring port_str,
                                                  jint width,
                                                  jint height) {
    const char *address_chars = env->GetStringUTFChars(address_str, 0);
    if (address_chars == 0) {
        LOGV("Error loading socket address.");
        return false;
    }

    const char *port_chars = env->GetStringUTFChars(port_str, 0);
    if (port_chars == 0) {
        env->ReleaseStringUTFChars(address_str, address_chars);
        LOGV("Error loading socket port.");
        return false;
    }

    m_capture = cvCreateSocketCapture(address_chars, port_chars, width, height);
    env->ReleaseStringUTFChars(address_str, address_chars);
    env->ReleaseStringUTFChars(port_str, port_chars);
    if (m_capture == 0) {
        LOGV("Error creating socket capture.");
        return false;
    }

    return true;
}
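
// Java-side counterpart (an assumption inferred from the JNI naming convention,
// not taken from this file): the class org.siprop.opencv.OpenCV is expected to
// declare matching native methods, e.g.
//   public native boolean createSocketCapture(String address, String port, int width, int height);
//   public native void releaseSocketCapture();
// with the same pattern for the other Java_org_siprop_opencv_OpenCV_* entry
// points defined below.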

JNIEXPORT
void
JNICALL
Java_org_siprop_opencv_OpenCV_releaseSocketCapture(JNIEnv* env,
                                                   jobject thiz) {
    if (m_capture) {
        cvReleaseCapture(&m_capture);
        m_capture = 0;
    }
}

JNIEXPORT
jboolean
JNICALL
Java_org_siprop_opencv_OpenCV_grabSourceImageFromCapture(JNIEnv* env,
                                                         jobject thiz) {
    if (m_capture == 0) {
        LOGE("Capture was never initialized.");
        return false;
    }

    if (cvGrabFrame(m_capture) == 0) {
        LOGE("Failed to grab frame from the capture.");
        return false;
    }

    IplImage *frame = cvRetrieveFrame(m_capture);
    if (frame == 0) {
        LOGE("Failed to retrieve frame from the capture.");
        return false;
    }

    if (m_sourceImage) {
        cvReleaseImage(&m_sourceImage);
        m_sourceImage = 0;
    }

    m_sourceImage = cvCreateImage(cvGetSize(frame), IPL_DEPTH_8U,
                                  frame->nChannels);

    // Check the origin of the image. If it is top-left, copy the frame into
    // m_sourceImage as-is; otherwise flip it vertically while copying.
    if (frame->origin == IPL_ORIGIN_TL) {
        cvCopy(frame, m_sourceImage, 0);
    } else {
        cvFlip(frame, m_sourceImage, 0);
    }

    return true;
}

// Generate and return a boolean array from the source image.
// Return 0 if a failure occurs or if the source image is undefined.
JNIEXPORT
jbooleanArray
JNICALL
Java_org_siprop_opencv_OpenCV_getSourceImage(JNIEnv* env,
                                             jobject thiz) {
    if (m_sourceImage == 0) {
        LOGE("Error: source image was not set.");
        return 0;
    }

    CvMat stub;
    CvMat *mat_image = cvGetMat(m_sourceImage, &stub);
    int channels = CV_MAT_CN( mat_image->type );
    int ipl_depth = cvCvToIplDepth(mat_image->type);

    WLNonFileByteStream *strm = new WLNonFileByteStream();
    loadImageBytes(mat_image->data.ptr, mat_image->step, mat_image->width,
                   mat_image->height, ipl_depth, channels, strm);

    int imageSize = strm->GetSize();
    jbooleanArray res_array = env->NewBooleanArray(imageSize);
    if (res_array == 0) {
        LOGE("Unable to allocate a new boolean array for the source image.");
        strm->Close();
        SAFE_DELETE(strm);
        return 0;
    }
    env->SetBooleanArrayRegion(res_array, 0, imageSize, (jboolean*)strm->GetByte());

    strm->Close();
    SAFE_DELETE(strm);

    return res_array;
}
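
// Note on the return type above: the jbooleanArray is used purely as a byte
// container (JNI's jboolean is one byte), so the Java side is expected to
// reinterpret the returned array as the raw image bytes written by
// loadImageBytes rather than as real booleans.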

// Given an integer array of image data, load an IplImage.
// It is the responsibility of the caller to release the IplImage.
IplImage* getIplImageFromIntArray(JNIEnv* env, jintArray array_data,
                                  jint width, jint height) {
    // Load Image
    int *pixels = env->GetIntArrayElements(array_data, 0);
    if (pixels == 0) {
        LOGE("Error getting int array of pixels.");
        return 0;
    }

    IplImage *image = loadPixels(pixels, width, height);
    env->ReleaseIntArrayElements(array_data, pixels, 0);
    if (image == 0) {
        LOGE("Error loading pixel array.");
        return 0;
    }

    return image;
}
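
// For illustration only: a minimal sketch of what a loadPixels-style helper
// might look like, assuming the Java side passes 32-bit ARGB pixels and the
// rest of this file expects a 3-channel BGR IplImage. The real loadPixels
// used above is defined elsewhere in this project and may differ.
#if 0
static IplImage* loadPixelsSketch(const int* pixels, int width, int height) {
    IplImage* img = cvCreateImage(cvSize(width, height), IPL_DEPTH_8U, 3);
    for (int y = 0; y < height; y++) {
        uchar* row = (uchar*)(img->imageData + y * img->widthStep);
        for (int x = 0; x < width; x++) {
            int argb = pixels[y * width + x];
            row[x * 3 + 0] = (uchar)(argb & 0xFF);         // blue
            row[x * 3 + 1] = (uchar)((argb >> 8) & 0xFF);  // green
            row[x * 3 + 2] = (uchar)((argb >> 16) & 0xFF); // red
        }
    }
    return img;
}
#endif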

// Set the source image and return true if successful or false otherwise.
JNIEXPORT
jboolean
JNICALL
Java_org_siprop_opencv_OpenCV_setSourceImage(JNIEnv* env,
                                             jobject thiz,
                                             jintArray photo_data,
                                             jint width,
                                             jint height) {
    // Release the image if it hasn't already been released.
    if (m_sourceImage) {
        cvReleaseImage(&m_sourceImage);
        m_sourceImage = 0;
    }
    m_facesFound = 0;

    m_sourceImage = getIplImageFromIntArray(env, photo_data, width, height);
    if (m_sourceImage == 0) {
        LOGE("Error: source image could not be created.");
        return false;
    }

    return true;
}
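
// Typical call order from the Java side (inferred from the checks in the
// functions below, not from this file's documentation): set a frame with
// setSourceImage() or grabSourceImageFromCapture(), run findContours(),
// findAllFaces() or findSingleFace(), then read back the (possibly annotated)
// image with getSourceImage().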

JNIEXPORT
jbooleanArray
JNICALL
Java_org_siprop_opencv_OpenCV_findContours(JNIEnv* env,
                                           jobject thiz,
                                           jint width,
                                           jint height) {
    IplImage *grayImage = cvCreateImage( cvGetSize(m_sourceImage), IPL_DEPTH_8U, 1 );    // grayscale image
    IplImage *binaryImage = cvCreateImage( cvGetSize(m_sourceImage), IPL_DEPTH_8U, 1 );  // binary image
    IplImage *contourImage = cvCreateImage( cvGetSize(m_sourceImage), IPL_DEPTH_8U, 3 ); // contour image

    // Convert from BGR to grayscale.
    cvCvtColor( m_sourceImage, grayImage, CV_BGR2GRAY );

    // Convert the grayscale image to a binary image.
    cvThreshold( grayImage, binaryImage, THRESHOLD, THRESHOLD_MAX_VALUE, CV_THRESH_BINARY );

    // Allocate memory for contour extraction.
    CvMemStorage* storage = cvCreateMemStorage( 0 );  // area that holds the extracted contours
    CvSeq* find_contour = 0;                          // pointer to the contours

    // Find the contours in the binary image and return their count.
    int find_contour_num = cvFindContours(
        binaryImage,          // input image (8-bit single channel)
        storage,              // area that holds the extracted contours
        &find_contour,        // pointer to the pointer to the outermost contour
        sizeof( CvContour ),  // size of the sequence header
        CV_RETR_LIST,         // retrieval mode
        CV_CHAIN_APPROX_NONE, // approximation method
        cvPoint( 0, 0 )       // offset
    );

    // Draw the object contours in red.
    CvScalar red = CV_RGB( 255, 0, 0 );
    cvDrawContours(
        m_sourceImage,     // image on which to draw the contours
        find_contour,      // pointer to the first contour
        red,               // color of the external contour lines
        red,               // color of the internal (hole) contour lines
        CONTOUR_MAX_LEVEL, // maximum level of contours to draw
        LINE_THICKNESS,    // thickness of the drawn contour lines
        LINE_TYPE,         // line type
        cvPoint( 0, 0 )    // offset
    );

    int imageSize;
    CvMat stub, *mat_image;
    int channels, ipl_depth;
    mat_image = cvGetMat( m_sourceImage, &stub );
    channels = CV_MAT_CN( mat_image->type );

    ipl_depth = cvCvToIplDepth(mat_image->type);

    LOGV("Load loadImageBytes.");
    WLNonFileByteStream* strm = new WLNonFileByteStream();
    loadImageBytes(mat_image->data.ptr, mat_image->step, mat_image->width,
                   mat_image->height, ipl_depth, channels, strm);

    imageSize = strm->GetSize();
    jbooleanArray res_array = env->NewBooleanArray(imageSize);
    LOGV("Load NewBooleanArray.");
    if (res_array == 0) {
        return 0;
    }
    env->SetBooleanArrayRegion(res_array, 0, imageSize, (jboolean*)strm->GetByte());
    LOGV("Load SetBooleanArrayRegion.");

    LOGV("Release sourceImage");
    if (m_sourceImage) {
        cvReleaseImage(&m_sourceImage);
        m_sourceImage = 0;
    }
    LOGV("Release binaryImage");
    cvReleaseImage( &binaryImage );
    LOGV("Release grayImage");
    cvReleaseImage( &grayImage );
    LOGV("Release contourImage");
    cvReleaseImage( &contourImage );
    LOGV("Release storage");
    cvReleaseMemStorage( &storage );
    LOGV("Delete strm");
    strm->Close();
    SAFE_DELETE(strm);

    return res_array;
}
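
// Note: findContours() above releases m_sourceImage after encoding the result,
// so the Java side must supply a fresh source image before the next call that
// needs one (e.g. another findContours() or a face-detection call).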

JNIEXPORT
jboolean
JNICALL
Java_org_siprop_opencv_OpenCV_initFaceDetection(JNIEnv* env,
                                                jobject thiz,
                                                jstring cascade_path_str) {

    // First call release to ensure the memory is empty.
    Java_org_siprop_opencv_OpenCV_releaseFaceDetection(env, thiz);

    char buffer[100];
    clock_t total_time_start = clock();

    m_smallestFaceSize.width = MIN_SIZE_WIDTH;
    m_smallestFaceSize.height = MIN_SIZE_HEIGHT;

    const char *cascade_path_chars = env->GetStringUTFChars(cascade_path_str, 0);
    if (cascade_path_chars == 0) {
        LOGE("Error getting cascade string.");
        return false;
    }

    m_cascade = (CvHaarClassifierCascade*)cvLoad(cascade_path_chars);
    env->ReleaseStringUTFChars(cascade_path_str, cascade_path_chars);
    if (m_cascade == 0) {
        LOGE("Error loading cascade.");
        return false;
    }

    m_storage = cvCreateMemStorage(0);

    clock_t total_time_finish = clock() - total_time_start;
    sprintf(buffer, "Total Time to init: %f", (double)total_time_finish / (double)CLOCKS_PER_SEC);
    LOGV(buffer);

    return true;
}
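
// The cascade path is supplied by the Java caller. Typically (an assumption,
// not specified in this file) it points at a frontal-face Haar cascade XML
// such as haarcascade_frontalface_alt.xml that the application has copied to
// device storage before calling initFaceDetection().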

// Release all of the memory used by face tracking.
JNIEXPORT
void
JNICALL
Java_org_siprop_opencv_OpenCV_releaseFaceDetection(JNIEnv* env,
                                                   jobject thiz) {

    m_facesFound = 0;
    m_faceCropArea.width = m_faceCropArea.height = 0;

    if (m_cascade) {
        cvReleaseHaarClassifierCascade(&m_cascade);
        m_cascade = 0;
    }

    if (m_sourceImage) {
        cvReleaseImage(&m_sourceImage);
        m_sourceImage = 0;
    }

    if (m_grayImage) {
        cvReleaseImage(&m_grayImage);
        m_grayImage = 0;
    }

    if (m_smallImage) {
        cvReleaseImage(&m_smallImage);
        m_smallImage = 0;
    }

    if (m_storage) {
        cvReleaseMemStorage(&m_storage);
        m_storage = 0;
    }
}

// Initialize the small image and the gray image using the input source image.
// If a previous face was specified, we will limit the ROI to that face.
void initFaceDetectionImages(IplImage *sourceImage, double scale = 1.0) {
    if (m_grayImage == 0) {
        m_grayImage = cvCreateImage(cvGetSize(sourceImage), IPL_DEPTH_8U, 1);
    }

    if (m_smallImage == 0) {
        m_smallImage = cvCreateImage(cvSize(cvRound(sourceImage->width / scale),
                                     cvRound(sourceImage->height / scale)), IPL_DEPTH_8U, 1);
    }

    if (m_faceCropArea.width > 0 && m_faceCropArea.height > 0) {
        // m_faceCropArea is stored in small-image coordinates, so it is applied
        // directly to m_smallImage and scaled back up for the full-size images.
        cvSetImageROI(m_smallImage, m_faceCropArea);

        CvRect tPrev = cvRect(m_faceCropArea.x * scale, m_faceCropArea.y * scale,
                              m_faceCropArea.width * scale, m_faceCropArea.height * scale);
        cvSetImageROI(sourceImage, tPrev);
        cvSetImageROI(m_grayImage, tPrev);
    } else {
        cvResetImageROI(m_smallImage);
        cvResetImageROI(m_grayImage);
    }

    cvCvtColor(sourceImage, m_grayImage, CV_BGR2GRAY);
    cvResize(m_grayImage, m_smallImage, CV_INTER_LINEAR);
    cvEqualizeHist(m_smallImage, m_smallImage);
    cvClearMemStorage(m_storage);

    cvResetImageROI(sourceImage);
}

// Given a sequence of rectangles, return an array of Android Rect objects
// or null if any errors occur.
jobjectArray seqRectsToAndroidRects(JNIEnv* env, CvSeq *rects) {
    if (rects == 0 || rects->total <= 0) {
        LOGE("No rectangles were specified!");
        return 0;
    }

    jclass jcls = env->FindClass("android/graphics/Rect");
    if (jcls == 0) {
        LOGE("Unable to find class android.graphics.Rect");
        return 0;
    }

    jmethodID jconstruct = env->GetMethodID(jcls, "<init>", "(IIII)V");
    if (jconstruct == 0) {
        LOGE("Unable to find constructor Rect(int, int, int, int)");
        return 0;
    }

    jobjectArray ary = env->NewObjectArray(rects->total, jcls, 0);
    if (ary == 0) {
        LOGE("Unable to create Rect array");
        return 0;
    }

    for (int i = 0; i < rects->total; i++) {
        char buffer[100];
        CvRect *rect = (CvRect*)cvGetSeqElem(rects, i);
        if (rect == 0) {
            sprintf(buffer, "Invalid Rectangle #%d", i);
            LOGE(buffer);
            return 0;
        }

        jobject jrect = env->NewObject(jcls, jconstruct, rect->x, rect->y,
                                       rect->width, rect->height);
        if (jrect == 0) {
            sprintf(buffer, "Unable to create Android Rect object for rectangle #%d", i);
            LOGE(buffer);
            return 0;
        }

        env->SetObjectArrayElement(ary, i, jrect);
        env->DeleteLocalRef(jrect);
    }

    return ary;
}

// Identify all of the faces in the source image and return an array
// of Android Rect objects with the face coordinates. If any errors
// occur, 0 is returned.
JNIEXPORT
jobjectArray
JNICALL
Java_org_siprop_opencv_OpenCV_findAllFaces(JNIEnv* env,
                                           jobject thiz) {
    char buffer[100];
    clock_t total_time_start = clock();

    if (m_cascade == 0 || m_storage == 0) {
        LOGE("Error: face detection was not initialized.");
        return 0;
    }

    if (m_sourceImage == 0) {
        LOGE("Error: source image was not set.");
        return 0;
    }

    initFaceDetectionImages(m_sourceImage, IMAGE_SCALE);

    clock_t haar_detect_time_start = clock();
    m_facesFound = mycvHaarDetectObjects(m_smallImage, m_cascade, m_storage, HAAR_SCALE,
        MIN_NEIGHBORS, HAAR_FLAGS_ALL_FACES, cvSize(MIN_SIZE_WIDTH, MIN_SIZE_HEIGHT));

    clock_t haar_detect_time_finish = clock() - haar_detect_time_start;
    sprintf(buffer, "Total Time to cvHaarDetectObjects in findAllFaces: %f", (double)haar_detect_time_finish / (double)CLOCKS_PER_SEC);
    LOGV(buffer);

    jobjectArray faceRects = 0;
    if (m_facesFound == 0 || m_facesFound->total <= 0) {
        LOGV("FACES_DETECTED 0");
    } else {
        sprintf(buffer, "FACES_DETECTED %d", m_facesFound->total);
        LOGV(buffer);
        m_faceCropArea.width = m_faceCropArea.height = 0;
        faceRects = seqRectsToAndroidRects(env, m_facesFound);
    }

    clock_t total_time_finish = clock() - total_time_start;
    sprintf(buffer, "Total Time to findAllFaces: %f", (double)total_time_finish / (double)CLOCKS_PER_SEC);
    LOGV(buffer);

    return faceRects;
}

// Store the previous face found in the scene.
void storePreviousFace(CvRect* face) {
    char buffer[100];
    if (m_faceCropArea.width > 0 && m_faceCropArea.height > 0) {
        face->x += m_faceCropArea.x;
        face->y += m_faceCropArea.y;
        sprintf(buffer, "Face rect + m_faceCropArea: (%d, %d) to (%d, %d)", face->x, face->y,
                face->x + face->width, face->y + face->height);
        LOGV(buffer);
    }

    int startX = MAX(face->x - PAD_FACE_AREA, 0);
    int startY = MAX(face->y - PAD_FACE_AREA, 0);
    int w = m_smallImage->width - startX - face->width - PAD_FACE_AREA_2;
    int h = m_smallImage->height - startY - face->height - PAD_FACE_AREA_2;
    int sw = face->x - PAD_FACE_AREA, sh = face->y - PAD_FACE_AREA;
    m_faceCropArea = cvRect(startX, startY,
        face->width + PAD_FACE_AREA_2 + ((w < 0) ? w : 0) + ((sw < 0) ? sw : 0),
        face->height + PAD_FACE_AREA_2 + ((h < 0) ? h : 0) + ((sh < 0) ? sh : 0));
    sprintf(buffer, "m_faceCropArea: (%d, %d) to (%d, %d)", m_faceCropArea.x, m_faceCropArea.y,
            m_faceCropArea.x + m_faceCropArea.width, m_faceCropArea.y + m_faceCropArea.height);
    LOGV(buffer);
}
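
// Worked example of the clamping above (with PAD_FACE_AREA = 40 and a 160x120
// small image): a 24x24 face at (130, 20) gives startX = 90, startY = 0,
// w = -34 and sh = -20, so the padded 104x104 candidate is trimmed to
// m_faceCropArea = (90, 0, 70, 84), which stays inside the image bounds.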

// Given a rectangle, return an Android Rect object or null if any
// errors occur.
jobject rectToAndroidRect(JNIEnv* env, CvRect *rect) {
    if (rect == 0) {
        LOGE("No rectangle was specified!");
        return 0;
    }

    jclass jcls = env->FindClass("android/graphics/Rect");
    if (jcls == 0) {
        LOGE("Unable to find class android.graphics.Rect");
        return 0;
    }

    jmethodID jconstruct = env->GetMethodID(jcls, "<init>", "(IIII)V");
    if (jconstruct == 0) {
        LOGE("Unable to find constructor Rect(int, int, int, int)");
        return 0;
    }

    return env->NewObject(jcls, jconstruct, rect->x, rect->y,
                          rect->width, rect->height);
}

// Identify a single face in the source image and return an Android
// Rect object with the face coordinates. This method is optimized by
// focusing on a single face and cropping the detection region to the
// area where the face is located plus some additional padding to
// account for slight head movements. If any errors occur, 0 is returned.
JNIEXPORT
jobject
JNICALL
Java_org_siprop_opencv_OpenCV_findSingleFace(JNIEnv* env,
                                             jobject thiz) {
    char buffer[100];
    clock_t total_time_start = clock();

    if (m_cascade == 0 || m_storage == 0) {
        LOGE("Error: face detection was not initialized.");
        return 0;
    }

    if (m_sourceImage == 0) {
        LOGE("Error: source image was not set.");
        return 0;
    }

    initFaceDetectionImages(m_sourceImage, IMAGE_SCALE);

    clock_t haar_detect_time_start = clock();
    m_facesFound = mycvHaarDetectObjects(m_smallImage, m_cascade, m_storage, HAAR_SCALE,
        MIN_NEIGHBORS, HAAR_FLAGS_SINGLE_FACE, m_smallestFaceSize);

    clock_t haar_detect_time_finish = clock() - haar_detect_time_start;
    sprintf(buffer, "Total Time to cvHaarDetectObjects in findSingleFace: %f", (double)haar_detect_time_finish / (double)CLOCKS_PER_SEC);
    LOGV(buffer);

    jobject faceRect = 0;
    if (m_facesFound == 0 || m_facesFound->total <= 0) {
        LOGV("FACES_DETECTED 0");
        m_faceCropArea.width = m_faceCropArea.height = 0;
        m_smallestFaceSize.width = MIN_SIZE_WIDTH;
        m_smallestFaceSize.height = MIN_SIZE_HEIGHT;
    } else {
        LOGV("FACES_DETECTED 1");
        CvRect *face = (CvRect*)cvGetSeqElem(m_facesFound, 0);
        if (face == 0) {
            LOGE("Invalid rectangle detected");
            return 0;
        }
        m_smallestFaceSize.width = MAX(face->width - PAD_FACE_SIZE, MIN_SIZE_WIDTH);
        m_smallestFaceSize.height = MAX(face->height - PAD_FACE_SIZE, MIN_SIZE_HEIGHT);
        faceRect = rectToAndroidRect(env, face);
        storePreviousFace(face);
    }

    clock_t total_time_finish = clock() - total_time_start;
    sprintf(buffer, "Total Time to findSingleFace: %f", (double)total_time_finish / (double)CLOCKS_PER_SEC);
    LOGV(buffer);

    return faceRect;
}

// Draw a rectangle on the source image around the specified face rectangle.
// Scale the face area to the draw area based on the specified scale.
void highlightFace(IplImage *sourceImage, CvRect *face, double scale = 1.0) {
    char buffer[100];
    sprintf(buffer, "Face Rectangle: (x: %d, y: %d) to (w: %d, h: %d)",
            face->x, face->y, face->width, face->height);
    LOGV(buffer);
    CvPoint pt1 = cvPoint(int(face->x * scale), int(face->y * scale));
    CvPoint pt2 = cvPoint(int((face->x + face->width) * scale),
                          int((face->y + face->height) * scale));
    sprintf(buffer, "Draw Rectangle: (%d, %d) to (%d, %d)", pt1.x, pt1.y, pt2.x, pt2.y);
    LOGV(buffer);
    cvRectangle(sourceImage, pt1, pt2, CV_RGB(255, 0, 0), 3, 8, 0);
}
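
// Note on coordinates: face rectangles from the detection functions are in
// small-image coordinates (detection runs on m_smallImage, the source image
// scaled down by IMAGE_SCALE), so callers pass IMAGE_SCALE here to map them
// back onto the full-size source image before drawing.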

// Draw rectangles on the source image around each face that was found.
// Scale the face area to the draw area based on the specified scale.
// Return true if at least one face was highlighted and false otherwise.
bool highlightFaces(IplImage *sourceImage, CvSeq *faces, double scale = 1.0) {
    if (faces == 0 || faces->total <= 0) {
        LOGV("No faces were highlighted!");
        return false;
    } else {
        LOGV("Drawing rectangles on each face");
        CvRect* face;
        for (int i = 0; i < faces->total; i++) {
            face = (CvRect*)cvGetSeqElem(faces, i);
            highlightFace(sourceImage, face, scale);
        }
    }

    return true;
}

// Highlight the faces that were detected in the source image.
// Return true if one or more faces are highlighted or false otherwise.
JNIEXPORT
jboolean
JNICALL
Java_org_siprop_opencv_OpenCV_highlightFaces(JNIEnv* env,
                                             jobject thiz) {
    if (m_facesFound == 0 || m_facesFound->total <= 0) {
        LOGV("No faces found to highlight!");
        return false;
    } else {
        highlightFaces(m_sourceImage, m_facesFound, IMAGE_SCALE);
    }

    return true;
}

#if 0

JNIEXPORT
jbooleanArray
JNICALL
Java_org_siprop_opencv_OpenCV_faceDetect(JNIEnv* env,
                                         jobject thiz,
                                         jintArray photo_data1,
                                         jintArray photo_data2,
                                         jint width,
                                         jint height) {
    LOGV("Load desp.");

    int i, x, y;
    int* pixels;
    IplImage *frameImage;

    IplImage *backgroundImage = cvCreateImage( cvSize(width, height), IPL_DEPTH_8U, 1 );
    IplImage *grayImage = cvCreateImage( cvSize(width, height), IPL_DEPTH_8U, 1 );
    IplImage *differenceImage = cvCreateImage( cvSize(width, height), IPL_DEPTH_8U, 1 );

    IplImage *hsvImage = cvCreateImage( cvSize(width, height), IPL_DEPTH_8U, 3 );
    IplImage *hueImage = cvCreateImage( cvSize(width, height), IPL_DEPTH_8U, 1 );
    IplImage *saturationImage = cvCreateImage( cvSize(width, height), IPL_DEPTH_8U, 1 );
    IplImage *valueImage = cvCreateImage( cvSize(width, height), IPL_DEPTH_8U, 1 );
    IplImage *thresholdImage1 = cvCreateImage( cvSize(width, height), IPL_DEPTH_8U, 1 );
    IplImage *thresholdImage2 = cvCreateImage( cvSize(width, height), IPL_DEPTH_8U, 1 );
    IplImage *thresholdImage3 = cvCreateImage( cvSize(width, height), IPL_DEPTH_8U, 1 );
    IplImage *faceImage = cvCreateImage( cvSize(width, height), IPL_DEPTH_8U, 1 );

    CvMoments moment;
    double m_00;
    double m_10;
    double m_01;
    int gravityX;
    int gravityY;

    jbooleanArray res_array;
    int imageSize;

    // Load Image
    pixels = env->GetIntArrayElements(photo_data1, 0);
    frameImage = loadPixels(pixels, width, height);
    if (frameImage == 0) {
        LOGV("Error loadPixels.");
        return 0;
    }

    cvCvtColor( frameImage, backgroundImage, CV_BGR2GRAY );

    pixels = env->GetIntArrayElements(photo_data2, 0);
    frameImage = loadPixels(pixels, width, height);
    if (frameImage == 0) {
        LOGV("Error loadPixels.");
        return 0;
    }
    cvCvtColor( frameImage, grayImage, CV_BGR2GRAY );
    cvAbsDiff( grayImage, backgroundImage, differenceImage );

    cvCvtColor( frameImage, hsvImage, CV_BGR2HSV );
    LOGV("Load cvCvtColor.");
    cvSplit( hsvImage, hueImage, saturationImage, valueImage, 0 );
    LOGV("Load cvSplit.");
    cvThreshold( hueImage, thresholdImage1, THRESH_BOTTOM, THRESHOLD_MAX_VALUE, CV_THRESH_BINARY );
    cvThreshold( hueImage, thresholdImage2, THRESH_TOP, THRESHOLD_MAX_VALUE, CV_THRESH_BINARY_INV );
    cvAnd( thresholdImage1, thresholdImage2, thresholdImage3, 0 );
    LOGV("Load cvAnd.");

    cvAnd( differenceImage, thresholdImage3, faceImage, 0 );

    cvMoments( faceImage, &moment, 0 );
    m_00 = cvGetSpatialMoment( &moment, 0, 0 );
    m_10 = cvGetSpatialMoment( &moment, 1, 0 );
    m_01 = cvGetSpatialMoment( &moment, 0, 1 );
    gravityX = m_10 / m_00;
    gravityY = m_01 / m_00;
    LOGV("Load cvMoments.");

    cvCircle( frameImage, cvPoint( gravityX, gravityY ), CIRCLE_RADIUS,
              CV_RGB( 255, 0, 0 ), LINE_THICKNESS, LINE_TYPE, 0 );

    CvMat stub, *mat_image;
    int channels, ipl_depth;
    mat_image = cvGetMat( frameImage, &stub );
    channels = CV_MAT_CN( mat_image->type );

    ipl_depth = cvCvToIplDepth(mat_image->type);

    WLNonFileByteStream* m_strm = new WLNonFileByteStream();
    loadImageBytes(mat_image->data.ptr, mat_image->step, mat_image->width,
                   mat_image->height, ipl_depth, channels, m_strm);
    LOGV("Load loadImageBytes.");

    imageSize = m_strm->GetSize();
    res_array = env->NewBooleanArray(imageSize);
    LOGV("Load NewByteArray.");
    if (res_array == 0) {
        return 0;
    }
    env->SetBooleanArrayRegion(res_array, 0, imageSize, (jboolean*)m_strm->GetByte());
    LOGV("Load SetBooleanArrayRegion.");

    cvReleaseImage( &backgroundImage );
    cvReleaseImage( &grayImage );
    cvReleaseImage( &differenceImage );
    cvReleaseImage( &hsvImage );
    cvReleaseImage( &hueImage );
    cvReleaseImage( &saturationImage );
    cvReleaseImage( &valueImage );
    cvReleaseImage( &thresholdImage1 );
    cvReleaseImage( &thresholdImage2 );
    cvReleaseImage( &thresholdImage3 );
    cvReleaseImage( &faceImage );
    cvReleaseImage( &frameImage );
    m_strm->Close();
    SAFE_DELETE(m_strm);

    return res_array;
}
#endif