1 /*M///////////////////////////////////////////////////////////////////////////////////////
2 //
3 // IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
4 //
5 // By downloading, copying, installing or using the software you agree to this license.
6 // If you do not agree to this license, do not download, install,
7 // copy or use the software.
8 //
9 //
10 // Intel License Agreement
11 // For Open Source Computer Vision Library
12 //
13 // Copyright (C) 2000, Intel Corporation, all rights reserved.
14 // Third party copyrights are property of their respective owners.
15 //
16 // Redistribution and use in source and binary forms, with or without modification,
17 // are permitted provided that the following conditions are met:
18 //
19 // * Redistribution's of source code must retain the above copyright notice,
20 // this list of conditions and the following disclaimer.
21 //
22 // * Redistribution's in binary form must reproduce the above copyright notice,
23 // this list of conditions and the following disclaimer in the documentation
24 // and/or other materials provided with the distribution.
25 //
26 // * The name of Intel Corporation may not be used to endorse or promote products
27 // derived from this software without specific prior written permission.
28 //
29 // This software is provided by the copyright holders and contributors "as is" and
30 // any express or implied warranties, including, but not limited to, the implied
31 // warranties of merchantability and fitness for a particular purpose are disclaimed.
32 // In no event shall the Intel Corporation or contributors be liable for any direct,
33 // indirect, incidental, special, exemplary, or consequential damages
34 // (including, but not limited to, procurement of substitute goods or services;
35 // loss of use, data, or profits; or business interruption) however caused
36 // and on any theory of liability, whether in contract, strict liability,
37 // or tort (including negligence or otherwise) arising in any way out of
38 // the use of this software, even if advised of the possibility of such damage.
39 //
40 //M*/
41
42 /* Haar features calculation */
43
44 #include "_cv.h"
45 #include <stdio.h>
46
47 /* these settings affect the quality of detection: change with care */
48 #define CV_ADJUST_FEATURES 1
49 #define CV_ADJUST_WEIGHTS 0
50
51 typedef int sumtype;
52 typedef double sqsumtype;
53
54 typedef struct CvHidHaarFeature
55 {
56 struct
57 {
58 sumtype *p0, *p1, *p2, *p3;
59 float weight;
60 }
61 rect[CV_HAAR_FEATURE_MAX];
62 }
63 CvHidHaarFeature;
64
65
66 typedef struct CvHidHaarTreeNode
67 {
68 CvHidHaarFeature feature;
69 float threshold;
70 int left;
71 int right;
72 }
73 CvHidHaarTreeNode;
74
75
76 typedef struct CvHidHaarClassifier
77 {
78 int count;
79 //CvHaarFeature* orig_feature;
80 CvHidHaarTreeNode* node;
81 float* alpha;
82 }
83 CvHidHaarClassifier;
84
85
86 typedef struct CvHidHaarStageClassifier
87 {
88 int count;
89 float threshold;
90 CvHidHaarClassifier* classifier;
91 int two_rects;
92
93 struct CvHidHaarStageClassifier* next;
94 struct CvHidHaarStageClassifier* child;
95 struct CvHidHaarStageClassifier* parent;
96 }
97 CvHidHaarStageClassifier;
98
99
100 struct CvHidHaarClassifierCascade
101 {
102 int count;
103 int is_stump_based;
104 int has_tilted_features;
105 int is_tree;
106 double inv_window_area;
107 CvMat sum, sqsum, tilted;
108 CvHidHaarStageClassifier* stage_classifier;
109 sqsumtype *pq0, *pq1, *pq2, *pq3;
110 sumtype *p0, *p1, *p2, *p3;
111
112 void** ipp_stages;
113 };
114
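/* Note: p0..p3 and pq0..pq3 cache the four corner addresses (top-left, top-right,
   bottom-left, bottom-right) of the current detection window inside the `sum` and
   `sqsum` integral images.  They are filled in by cvSetImagesForHaarClassifierCascade()
   and let cvRunHaarClassifierCascade() obtain the window sum and squared sum with
   four reads each. */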
115
116 /* IPP functions for object detection */
117 icvHaarClassifierInitAlloc_32f_t icvHaarClassifierInitAlloc_32f_p = 0;
118 icvHaarClassifierFree_32f_t icvHaarClassifierFree_32f_p = 0;
119 icvApplyHaarClassifier_32f_C1R_t icvApplyHaarClassifier_32f_C1R_p = 0;
120 icvRectStdDev_32f_C1R_t icvRectStdDev_32f_C1R_p = 0;
121
122 const int icv_object_win_border = 1;
123 const float icv_stage_threshold_bias = 0.0001f;
124
125 static CvHaarClassifierCascade*
126 icvCreateHaarClassifierCascade( int stage_count )
127 {
128 CvHaarClassifierCascade* cascade = 0;
129
130 CV_FUNCNAME( "icvCreateHaarClassifierCascade" );
131
132 __BEGIN__;
133
134 int block_size = sizeof(*cascade) + stage_count*sizeof(*cascade->stage_classifier);
135
136 if( stage_count <= 0 )
137 CV_ERROR( CV_StsOutOfRange, "Number of stages should be positive" );
138
139 CV_CALL( cascade = (CvHaarClassifierCascade*)cvAlloc( block_size ));
140 memset( cascade, 0, block_size );
141
142 cascade->stage_classifier = (CvHaarStageClassifier*)(cascade + 1);
143 cascade->flags = CV_HAAR_MAGIC_VAL;
144 cascade->count = stage_count;
145
146 __END__;
147
148 return cascade;
149 }
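/* Layout note (illustrative): the cascade header and its stage array are carved from
   the single cvAlloc() block above, which is why stage_classifier simply points just
   past the header:

       [ CvHaarClassifierCascade | stage 0 | stage 1 | ... | stage N-1 ]
         ^cascade                  ^cascade->stage_classifier == (CvHaarStageClassifier*)(cascade + 1)

   A single cvFree() of the cascade pointer therefore releases the stage array as well;
   cvReleaseHaarClassifierCascade() below relies on this. */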
150
151 static void
152 icvReleaseHidHaarClassifierCascade( CvHidHaarClassifierCascade** _cascade )
153 {
154 if( _cascade && *_cascade )
155 {
156 CvHidHaarClassifierCascade* cascade = *_cascade;
157 if( cascade->ipp_stages && icvHaarClassifierFree_32f_p )
158 {
159 int i;
160 for( i = 0; i < cascade->count; i++ )
161 {
162 if( cascade->ipp_stages[i] )
163 icvHaarClassifierFree_32f_p( cascade->ipp_stages[i] );
164 }
165 }
166 cvFree( &cascade->ipp_stages );
167 cvFree( _cascade );
168 }
169 }
170
171 /* create more efficient internal representation of haar classifier cascade */
172 static CvHidHaarClassifierCascade*
173 icvCreateHidHaarClassifierCascade( CvHaarClassifierCascade* cascade )
174 {
175 CvRect* ipp_features = 0;
176 float *ipp_weights = 0, *ipp_thresholds = 0, *ipp_val1 = 0, *ipp_val2 = 0;
177 int* ipp_counts = 0;
178
179 CvHidHaarClassifierCascade* out = 0;
180
181 CV_FUNCNAME( "icvCreateHidHaarClassifierCascade" );
182
183 __BEGIN__;
184
185 int i, j, k, l;
186 int datasize;
187 int total_classifiers = 0;
188 int total_nodes = 0;
189 char errorstr[100];
190 CvHidHaarClassifier* haar_classifier_ptr;
191 CvHidHaarTreeNode* haar_node_ptr;
192 CvSize orig_window_size;
193 int has_tilted_features = 0;
194 int max_count = 0;
195
196 if( !CV_IS_HAAR_CLASSIFIER(cascade) )
197 CV_ERROR( !cascade ? CV_StsNullPtr : CV_StsBadArg, "Invalid classifier pointer" );
198
199 if( cascade->hid_cascade )
200 CV_ERROR( CV_StsError, "hid_cascade has been already created" );
201
202 if( !cascade->stage_classifier )
203 CV_ERROR( CV_StsNullPtr, "Invalid cascade: null stage_classifier pointer" );
204
205 if( cascade->count <= 0 )
206 CV_ERROR( CV_StsOutOfRange, "Number of cascade stages must be positive" );
207
208 orig_window_size = cascade->orig_window_size;
209
210 /* check input structure correctness and calculate total memory size needed for
211 internal representation of the classifier cascade */
212 for( i = 0; i < cascade->count; i++ )
213 {
214 CvHaarStageClassifier* stage_classifier = cascade->stage_classifier + i;
215
216 if( !stage_classifier->classifier ||
217 stage_classifier->count <= 0 )
218 {
219 sprintf( errorstr, "header of the stage classifier #%d is invalid "
220 "(has null pointers or non-positive classifier count)", i );
221 CV_ERROR( CV_StsError, errorstr );
222 }
223
224 max_count = MAX( max_count, stage_classifier->count );
225 total_classifiers += stage_classifier->count;
226
227 for( j = 0; j < stage_classifier->count; j++ )
228 {
229 CvHaarClassifier* classifier = stage_classifier->classifier + j;
230
231 total_nodes += classifier->count;
232 for( l = 0; l < classifier->count; l++ )
233 {
234 for( k = 0; k < CV_HAAR_FEATURE_MAX; k++ )
235 {
236 if( classifier->haar_feature[l].rect[k].r.width )
237 {
238 CvRect r = classifier->haar_feature[l].rect[k].r;
239 int tilted = classifier->haar_feature[l].tilted;
240 has_tilted_features |= tilted != 0;
241 if( r.width < 0 || r.height < 0 || r.y < 0 ||
242 r.x + r.width > orig_window_size.width
243 ||
244 (!tilted &&
245 (r.x < 0 || r.y + r.height > orig_window_size.height))
246 ||
247 (tilted && (r.x - r.height < 0 ||
248 r.y + r.width + r.height > orig_window_size.height)))
249 {
250 sprintf( errorstr, "rectangle #%d of the classifier #%d of "
251 "the stage classifier #%d is not inside "
252 "the reference (original) cascade window", k, j, i );
253 CV_ERROR( CV_StsNullPtr, errorstr );
254 }
255 }
256 }
257 }
258 }
259 }
260
261 // this is an upper boundary for the whole hidden cascade size
262 datasize = sizeof(CvHidHaarClassifierCascade) +
263 sizeof(CvHidHaarStageClassifier)*cascade->count +
264 sizeof(CvHidHaarClassifier) * total_classifiers +
265 sizeof(CvHidHaarTreeNode) * total_nodes +
266 sizeof(void*)*(total_nodes + total_classifiers);
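/* The single block allocated below is carved up in this order:
       [ header | hidden stage classifiers | hidden classifiers | tree nodes | alpha arrays ]
   The last term, sizeof(void*)*(total_nodes + total_classifiers), over-reserves space
   for the (node_count + 1) float alpha values of every classifier plus the cvAlignPtr()
   padding inserted after each alpha array. */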
267
268 CV_CALL( out = (CvHidHaarClassifierCascade*)cvAlloc( datasize ));
269 memset( out, 0, sizeof(*out) );
270
271 /* init header */
272 out->count = cascade->count;
273 out->stage_classifier = (CvHidHaarStageClassifier*)(out + 1);
274 haar_classifier_ptr = (CvHidHaarClassifier*)(out->stage_classifier + cascade->count);
275 haar_node_ptr = (CvHidHaarTreeNode*)(haar_classifier_ptr + total_classifiers);
276
277 out->is_stump_based = 1;
278 out->has_tilted_features = has_tilted_features;
279 out->is_tree = 0;
280
281 /* initialize internal representation */
282 for( i = 0; i < cascade->count; i++ )
283 {
284 CvHaarStageClassifier* stage_classifier = cascade->stage_classifier + i;
285 CvHidHaarStageClassifier* hid_stage_classifier = out->stage_classifier + i;
286
287 hid_stage_classifier->count = stage_classifier->count;
288 hid_stage_classifier->threshold = stage_classifier->threshold - icv_stage_threshold_bias;
289 hid_stage_classifier->classifier = haar_classifier_ptr;
290 hid_stage_classifier->two_rects = 1;
291 haar_classifier_ptr += stage_classifier->count;
292
293 hid_stage_classifier->parent = (stage_classifier->parent == -1)
294 ? NULL : out->stage_classifier + stage_classifier->parent;
295 hid_stage_classifier->next = (stage_classifier->next == -1)
296 ? NULL : out->stage_classifier + stage_classifier->next;
297 hid_stage_classifier->child = (stage_classifier->child == -1)
298 ? NULL : out->stage_classifier + stage_classifier->child;
299
300 out->is_tree |= hid_stage_classifier->next != NULL;
301
302 for( j = 0; j < stage_classifier->count; j++ )
303 {
304 CvHaarClassifier* classifier = stage_classifier->classifier + j;
305 CvHidHaarClassifier* hid_classifier = hid_stage_classifier->classifier + j;
306 int node_count = classifier->count;
307 float* alpha_ptr = (float*)(haar_node_ptr + node_count);
308
309 hid_classifier->count = node_count;
310 hid_classifier->node = haar_node_ptr;
311 hid_classifier->alpha = alpha_ptr;
312
313 for( l = 0; l < node_count; l++ )
314 {
315 CvHidHaarTreeNode* node = hid_classifier->node + l;
316 CvHaarFeature* feature = classifier->haar_feature + l;
317 memset( node, -1, sizeof(*node) );
318 node->threshold = classifier->threshold[l];
319 node->left = classifier->left[l];
320 node->right = classifier->right[l];
321
322 if( fabs(feature->rect[2].weight) < DBL_EPSILON ||
323 feature->rect[2].r.width == 0 ||
324 feature->rect[2].r.height == 0 )
325 memset( &(node->feature.rect[2]), 0, sizeof(node->feature.rect[2]) );
326 else
327 hid_stage_classifier->two_rects = 0;
328 }
329
330 memcpy( alpha_ptr, classifier->alpha, (node_count+1)*sizeof(alpha_ptr[0]));
331 haar_node_ptr =
332 (CvHidHaarTreeNode*)cvAlignPtr(alpha_ptr+node_count+1, sizeof(void*));
333
334 out->is_stump_based &= node_count == 1;
335 }
336 }
337
338 {
339 int can_use_ipp = icvHaarClassifierInitAlloc_32f_p != 0 &&
340 icvHaarClassifierFree_32f_p != 0 &&
341 icvApplyHaarClassifier_32f_C1R_p != 0 &&
342 icvRectStdDev_32f_C1R_p != 0 &&
343 !out->has_tilted_features && !out->is_tree && out->is_stump_based;
344
345 if( can_use_ipp )
346 {
347 int ipp_datasize = cascade->count*sizeof(out->ipp_stages[0]);
348 float ipp_weight_scale=(float)(1./((orig_window_size.width-icv_object_win_border*2)*
349 (orig_window_size.height-icv_object_win_border*2)));
350
351 CV_CALL( out->ipp_stages = (void**)cvAlloc( ipp_datasize ));
352 memset( out->ipp_stages, 0, ipp_datasize );
353
354 CV_CALL( ipp_features = (CvRect*)cvAlloc( max_count*3*sizeof(ipp_features[0]) ));
355 CV_CALL( ipp_weights = (float*)cvAlloc( max_count*3*sizeof(ipp_weights[0]) ));
356 CV_CALL( ipp_thresholds = (float*)cvAlloc( max_count*sizeof(ipp_thresholds[0]) ));
357 CV_CALL( ipp_val1 = (float*)cvAlloc( max_count*sizeof(ipp_val1[0]) ));
358 CV_CALL( ipp_val2 = (float*)cvAlloc( max_count*sizeof(ipp_val2[0]) ));
359 CV_CALL( ipp_counts = (int*)cvAlloc( max_count*sizeof(ipp_counts[0]) ));
360
361 for( i = 0; i < cascade->count; i++ )
362 {
363 CvHaarStageClassifier* stage_classifier = cascade->stage_classifier + i;
364 for( j = 0, k = 0; j < stage_classifier->count; j++ )
365 {
366 CvHaarClassifier* classifier = stage_classifier->classifier + j;
367 int rect_count = 2 + (classifier->haar_feature->rect[2].r.width != 0);
368
369 ipp_thresholds[j] = classifier->threshold[0];
370 ipp_val1[j] = classifier->alpha[0];
371 ipp_val2[j] = classifier->alpha[1];
372 ipp_counts[j] = rect_count;
373
374 for( l = 0; l < rect_count; l++, k++ )
375 {
376 ipp_features[k] = classifier->haar_feature->rect[l].r;
377 //ipp_features[k].y = orig_window_size.height - ipp_features[k].y - ipp_features[k].height;
378 ipp_weights[k] = classifier->haar_feature->rect[l].weight*ipp_weight_scale;
379 }
380 }
381
382 if( icvHaarClassifierInitAlloc_32f_p( &out->ipp_stages[i],
383 ipp_features, ipp_weights, ipp_thresholds,
384 ipp_val1, ipp_val2, ipp_counts, stage_classifier->count ) < 0 )
385 break;
386 }
387
388 if( i < cascade->count )
389 {
390 for( j = 0; j < i; j++ )
391 if( icvHaarClassifierFree_32f_p && out->ipp_stages[j] )
392 icvHaarClassifierFree_32f_p( out->ipp_stages[j] );
393 cvFree( &out->ipp_stages );
394 }
395 }
396 }
397
398 cascade->hid_cascade = out;
399 assert( (char*)haar_node_ptr - (char*)out <= datasize );
400
401 __END__;
402
403 if( cvGetErrStatus() < 0 )
404 icvReleaseHidHaarClassifierCascade( &out );
405
406 cvFree( &ipp_features );
407 cvFree( &ipp_weights );
408 cvFree( &ipp_thresholds );
409 cvFree( &ipp_val1 );
410 cvFree( &ipp_val2 );
411 cvFree( &ipp_counts );
412
413 return out;
414 }
415
416
417 #define sum_elem_ptr(sum,row,col) \
418 ((sumtype*)CV_MAT_ELEM_PTR_FAST((sum),(row),(col),sizeof(sumtype)))
419
420 #define sqsum_elem_ptr(sqsum,row,col) \
421 ((sqsumtype*)CV_MAT_ELEM_PTR_FAST((sqsum),(row),(col),sizeof(sqsumtype)))
422
423 #define calc_sum(rect,offset) \
424 ((rect).p0[offset] - (rect).p1[offset] - (rect).p2[offset] + (rect).p3[offset])
425
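/* calc_sum() is the standard four-corner summed-area-table lookup: if p0..p3 point at
   the top-left, top-right, bottom-left and bottom-right corners of a rectangle in the
   integral image, then sum(rect) = p0[off] - p1[off] - p2[off] + p3[off] for any window
   offset `off`.  A minimal stand-alone sketch of the same computation straight from a
   CvMat integral image (illustrative only; rect_sum is a hypothetical helper that is
   not used by the detector):

       static sumtype rect_sum( CvMat* sum, CvRect r )
       {
           // r must lie inside the image the integral was computed from
           return *sum_elem_ptr(*sum, r.y, r.x)
                - *sum_elem_ptr(*sum, r.y, r.x + r.width)
                - *sum_elem_ptr(*sum, r.y + r.height, r.x)
                + *sum_elem_ptr(*sum, r.y + r.height, r.x + r.width);
       }
*/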
426
427 CV_IMPL void
428 cvSetImagesForHaarClassifierCascade( CvHaarClassifierCascade* _cascade,
429 const CvArr* _sum,
430 const CvArr* _sqsum,
431 const CvArr* _tilted_sum,
432 double scale )
433 {
434 CV_FUNCNAME("cvSetImagesForHaarClassifierCascade");
435
436 __BEGIN__;
437
438 CvMat sum_stub, *sum = (CvMat*)_sum;
439 CvMat sqsum_stub, *sqsum = (CvMat*)_sqsum;
440 CvMat tilted_stub, *tilted = (CvMat*)_tilted_sum;
441 CvHidHaarClassifierCascade* cascade;
442 int coi0 = 0, coi1 = 0;
443 int i;
444 CvRect equ_rect;
445 double weight_scale;
446
447 if( !CV_IS_HAAR_CLASSIFIER(_cascade) )
448 CV_ERROR( !_cascade ? CV_StsNullPtr : CV_StsBadArg, "Invalid classifier pointer" );
449
450 if( scale <= 0 )
451 CV_ERROR( CV_StsOutOfRange, "Scale must be positive" );
452
453 CV_CALL( sum = cvGetMat( sum, &sum_stub, &coi0 ));
454 CV_CALL( sqsum = cvGetMat( sqsum, &sqsum_stub, &coi1 ));
455
456 if( coi0 || coi1 )
457 CV_ERROR( CV_BadCOI, "COI is not supported" );
458
459 if( !CV_ARE_SIZES_EQ( sum, sqsum ))
460 CV_ERROR( CV_StsUnmatchedSizes, "All integral images must have the same size" );
461
462 if( CV_MAT_TYPE(sqsum->type) != CV_64FC1 ||
463 CV_MAT_TYPE(sum->type) != CV_32SC1 )
464 CV_ERROR( CV_StsUnsupportedFormat,
465 "Only (32s, 64f, 32s) combination of (sum,sqsum,tilted_sum) formats is allowed" );
466
467 if( !_cascade->hid_cascade )
468 CV_CALL( icvCreateHidHaarClassifierCascade(_cascade) );
469
470 cascade = _cascade->hid_cascade;
471
472 if( cascade->has_tilted_features )
473 {
474 CV_CALL( tilted = cvGetMat( tilted, &tilted_stub, &coi1 ));
475
476 if( CV_MAT_TYPE(tilted->type) != CV_32SC1 )
477 CV_ERROR( CV_StsUnsupportedFormat,
478 "Only (32s, 64f, 32s) combination of (sum,sqsum,tilted_sum) formats is allowed" );
479
480 if( sum->step != tilted->step )
481 CV_ERROR( CV_StsUnmatchedSizes,
482 "Sum and tilted_sum must have the same stride (step, widthStep)" );
483
484 if( !CV_ARE_SIZES_EQ( sum, tilted ))
485 CV_ERROR( CV_StsUnmatchedSizes, "All integral images must have the same size" );
486 cascade->tilted = *tilted;
487 }
488
489 _cascade->scale = scale;
490 _cascade->real_window_size.width = cvRound( _cascade->orig_window_size.width * scale );
491 _cascade->real_window_size.height = cvRound( _cascade->orig_window_size.height * scale );
492
493 cascade->sum = *sum;
494 cascade->sqsum = *sqsum;
495
496 equ_rect.x = equ_rect.y = cvRound(scale);
497 equ_rect.width = cvRound((_cascade->orig_window_size.width-2)*scale);
498 equ_rect.height = cvRound((_cascade->orig_window_size.height-2)*scale);
499 weight_scale = 1./(equ_rect.width*equ_rect.height);
500 cascade->inv_window_area = weight_scale;
501
502 cascade->p0 = sum_elem_ptr(*sum, equ_rect.y, equ_rect.x);
503 cascade->p1 = sum_elem_ptr(*sum, equ_rect.y, equ_rect.x + equ_rect.width );
504 cascade->p2 = sum_elem_ptr(*sum, equ_rect.y + equ_rect.height, equ_rect.x );
505 cascade->p3 = sum_elem_ptr(*sum, equ_rect.y + equ_rect.height,
506 equ_rect.x + equ_rect.width );
507
508 cascade->pq0 = sqsum_elem_ptr(*sqsum, equ_rect.y, equ_rect.x);
509 cascade->pq1 = sqsum_elem_ptr(*sqsum, equ_rect.y, equ_rect.x + equ_rect.width );
510 cascade->pq2 = sqsum_elem_ptr(*sqsum, equ_rect.y + equ_rect.height, equ_rect.x );
511 cascade->pq3 = sqsum_elem_ptr(*sqsum, equ_rect.y + equ_rect.height,
512 equ_rect.x + equ_rect.width );
513
514 /* init pointers in haar features according to real window size and
515 given image pointers */
516 {
517 #ifdef _OPENMP
518 int max_threads = cvGetNumThreads();
519 #pragma omp parallel for num_threads(max_threads) schedule(dynamic)
520 #endif // _OPENMP
521 for( i = 0; i < _cascade->count; i++ )
522 {
523 int j, k, l;
524 for( j = 0; j < cascade->stage_classifier[i].count; j++ )
525 {
526 for( l = 0; l < cascade->stage_classifier[i].classifier[j].count; l++ )
527 {
528 CvHaarFeature* feature =
529 &_cascade->stage_classifier[i].classifier[j].haar_feature[l];
530 /* CvHidHaarClassifier* classifier =
531 cascade->stage_classifier[i].classifier + j; */
532 CvHidHaarFeature* hidfeature =
533 &cascade->stage_classifier[i].classifier[j].node[l].feature;
534 double sum0 = 0, area0 = 0;
535 CvRect r[3];
536 #if CV_ADJUST_FEATURES
537 int base_w = -1, base_h = -1;
538 int new_base_w = 0, new_base_h = 0;
539 int kx, ky;
540 int flagx = 0, flagy = 0;
541 int x0 = 0, y0 = 0;
542 #endif
543 int nr;
544
545 /* align blocks */
546 for( k = 0; k < CV_HAAR_FEATURE_MAX; k++ )
547 {
548 if( !hidfeature->rect[k].p0 )
549 break;
550 #if CV_ADJUST_FEATURES
551 r[k] = feature->rect[k].r;
552 base_w = (int)CV_IMIN( (unsigned)base_w, (unsigned)(r[k].width-1) );
553 base_w = (int)CV_IMIN( (unsigned)base_w, (unsigned)(r[k].x - r[0].x-1) );
554 base_h = (int)CV_IMIN( (unsigned)base_h, (unsigned)(r[k].height-1) );
555 base_h = (int)CV_IMIN( (unsigned)base_h, (unsigned)(r[k].y - r[0].y-1) );
556 #endif
557 }
558
559 nr = k;
560
561 #if CV_ADJUST_FEATURES
562 base_w += 1;
563 base_h += 1;
564 kx = r[0].width / base_w;
565 ky = r[0].height / base_h;
566
567 if( kx <= 0 )
568 {
569 flagx = 1;
570 new_base_w = cvRound( r[0].width * scale ) / kx;
571 x0 = cvRound( r[0].x * scale );
572 }
573
574 if( ky <= 0 )
575 {
576 flagy = 1;
577 new_base_h = cvRound( r[0].height * scale ) / ky;
578 y0 = cvRound( r[0].y * scale );
579 }
580 #endif
581
582 for( k = 0; k < nr; k++ )
583 {
584 CvRect tr;
585 double correction_ratio;
586
587 #if CV_ADJUST_FEATURES
588 if( flagx )
589 {
590 tr.x = (r[k].x - r[0].x) * new_base_w / base_w + x0;
591 tr.width = r[k].width * new_base_w / base_w;
592 }
593 else
594 #endif
595 {
596 tr.x = cvRound( r[k].x * scale );
597 tr.width = cvRound( r[k].width * scale );
598 }
599
600 #if CV_ADJUST_FEATURES
601 if( flagy )
602 {
603 tr.y = (r[k].y - r[0].y) * new_base_h / base_h + y0;
604 tr.height = r[k].height * new_base_h / base_h;
605 }
606 else
607 #endif
608 {
609 tr.y = cvRound( r[k].y * scale );
610 tr.height = cvRound( r[k].height * scale );
611 }
612
613 #if CV_ADJUST_WEIGHTS
614 {
615 // RAINER START
616 const float orig_feature_size = (float)(feature->rect[k].r.width)*feature->rect[k].r.height;
617 const float orig_norm_size = (float)(_cascade->orig_window_size.width)*(_cascade->orig_window_size.height);
618 const float feature_size = float(tr.width*tr.height);
619 //const float normSize = float(equ_rect.width*equ_rect.height);
620 float target_ratio = orig_feature_size / orig_norm_size;
621 //float isRatio = featureSize / normSize;
622 //correctionRatio = targetRatio / isRatio / normSize;
623 correction_ratio = target_ratio / feature_size;
624 // RAINER END
625 }
626 #else
627 correction_ratio = weight_scale * (!feature->tilted ? 1 : 0.5);
628 #endif
629
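/* For upright features the four pointers address the corners of the scaled rectangle
   in the ordinary integral image; for tilted features they address the corners of the
   45-degree rotated rectangle in the rotated integral image produced by cvIntegral(),
   so calc_sum() evaluates both kinds with the same four-corner expression. */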
630 if( !feature->tilted )
631 {
632 hidfeature->rect[k].p0 = sum_elem_ptr(*sum, tr.y, tr.x);
633 hidfeature->rect[k].p1 = sum_elem_ptr(*sum, tr.y, tr.x + tr.width);
634 hidfeature->rect[k].p2 = sum_elem_ptr(*sum, tr.y + tr.height, tr.x);
635 hidfeature->rect[k].p3 = sum_elem_ptr(*sum, tr.y + tr.height, tr.x + tr.width);
636 }
637 else
638 {
639 hidfeature->rect[k].p2 = sum_elem_ptr(*tilted, tr.y + tr.width, tr.x + tr.width);
640 hidfeature->rect[k].p3 = sum_elem_ptr(*tilted, tr.y + tr.width + tr.height,
641 tr.x + tr.width - tr.height);
642 hidfeature->rect[k].p0 = sum_elem_ptr(*tilted, tr.y, tr.x);
643 hidfeature->rect[k].p1 = sum_elem_ptr(*tilted, tr.y + tr.height, tr.x - tr.height);
644 }
645
646 hidfeature->rect[k].weight = (float)(feature->rect[k].weight * correction_ratio);
647
648 if( k == 0 )
649 area0 = tr.width * tr.height;
650 else
651 sum0 += hidfeature->rect[k].weight * tr.width * tr.height;
652 }
653
654 hidfeature->rect[0].weight = (float)(-sum0/area0);
655 } /* l */
656 } /* j */
657 }
658 }
659
660 __END__;
661 }
662
663
664 CV_INLINE
665 double icvEvalHidHaarClassifier( CvHidHaarClassifier* classifier,
666 double variance_norm_factor,
667 size_t p_offset )
668 {
669 int idx = 0;
670 do
671 {
672 CvHidHaarTreeNode* node = classifier->node + idx;
673 double t = node->threshold * variance_norm_factor;
674
675 double sum = calc_sum(node->feature.rect[0],p_offset) * node->feature.rect[0].weight;
676 sum += calc_sum(node->feature.rect[1],p_offset) * node->feature.rect[1].weight;
677
678 if( node->feature.rect[2].p0 )
679 sum += calc_sum(node->feature.rect[2],p_offset) * node->feature.rect[2].weight;
680
681 idx = sum < t ? node->left : node->right;
682 }
683 while( idx > 0 );
684 return classifier->alpha[-idx];
685 }
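/* Leaf encoding used above: while `idx` stays positive it indexes the next internal
   node of the CART; as soon as it becomes zero or negative the walk stops and -idx
   indexes the leaf value in classifier->alpha[].  The stump-only fast path in
   cvRunHaarClassifierCascade() below relies on the same layout and reads
   alpha[0] / alpha[1] directly. */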
686
687
688 CV_IMPL int
689 cvRunHaarClassifierCascade( CvHaarClassifierCascade* _cascade,
690 CvPoint pt, int start_stage )
691 {
692 int result = -1;
693 CV_FUNCNAME("cvRunHaarClassifierCascade");
694
695 __BEGIN__;
696
697 int p_offset, pq_offset;
698 int i, j;
699 double mean, variance_norm_factor;
700 CvHidHaarClassifierCascade* cascade;
701
702 if( !CV_IS_HAAR_CLASSIFIER(_cascade) )
703 CV_ERROR( !_cascade ? CV_StsNullPtr : CV_StsBadArg, "Invalid cascade pointer" );
704
705 cascade = _cascade->hid_cascade;
706 if( !cascade )
707 CV_ERROR( CV_StsNullPtr, "Hidden cascade has not been created.\n"
708 "Use cvSetImagesForHaarClassifierCascade" );
709
710 if( pt.x < 0 || pt.y < 0 ||
711 pt.x + _cascade->real_window_size.width >= cascade->sum.width-2 ||
712 pt.y + _cascade->real_window_size.height >= cascade->sum.height-2 )
713 EXIT;
714
715 p_offset = pt.y * (cascade->sum.step/sizeof(sumtype)) + pt.x;
716 pq_offset = pt.y * (cascade->sqsum.step/sizeof(sqsumtype)) + pt.x;
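/* Per-window lighting normalization: with N = window area, S = window sum and
   SQ = window squared sum,
       mean     = S / N
       variance = SQ / N - mean^2
   The standard deviation (sqrt of the variance, or 1 if the estimate is degenerate)
   is used below to scale every node threshold, making the cascade roughly invariant
   to image contrast. */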
717 mean = calc_sum(*cascade,p_offset)*cascade->inv_window_area;
718 variance_norm_factor = cascade->pq0[pq_offset] - cascade->pq1[pq_offset] -
719 cascade->pq2[pq_offset] + cascade->pq3[pq_offset];
720 variance_norm_factor = variance_norm_factor*cascade->inv_window_area - mean*mean;
721 if( variance_norm_factor >= 0. )
722 variance_norm_factor = sqrt(variance_norm_factor);
723 else
724 variance_norm_factor = 1.;
725
726 if( cascade->is_tree )
727 {
728 CvHidHaarStageClassifier* ptr;
729 assert( start_stage == 0 );
730
731 result = 1;
732 ptr = cascade->stage_classifier;
733
734 while( ptr )
735 {
736 double stage_sum = 0;
737
738 for( j = 0; j < ptr->count; j++ )
739 {
740 stage_sum += icvEvalHidHaarClassifier( ptr->classifier + j,
741 variance_norm_factor, p_offset );
742 }
743
744 if( stage_sum >= ptr->threshold )
745 {
746 ptr = ptr->child;
747 }
748 else
749 {
750 while( ptr && ptr->next == NULL ) ptr = ptr->parent;
751 if( ptr == NULL )
752 {
753 result = 0;
754 EXIT;
755 }
756 ptr = ptr->next;
757 }
758 }
759 }
760 else if( cascade->is_stump_based )
761 {
762 for( i = start_stage; i < cascade->count; i++ )
763 {
764 double stage_sum = 0;
765
766 if( cascade->stage_classifier[i].two_rects )
767 {
768 for( j = 0; j < cascade->stage_classifier[i].count; j++ )
769 {
770 CvHidHaarClassifier* classifier = cascade->stage_classifier[i].classifier + j;
771 CvHidHaarTreeNode* node = classifier->node;
772 double sum, t = node->threshold*variance_norm_factor, a, b;
773
774 sum = calc_sum(node->feature.rect[0],p_offset) * node->feature.rect[0].weight;
775 sum += calc_sum(node->feature.rect[1],p_offset) * node->feature.rect[1].weight;
776
777 a = classifier->alpha[0];
778 b = classifier->alpha[1];
779 stage_sum += sum < t ? a : b;
780 }
781 }
782 else
783 {
784 for( j = 0; j < cascade->stage_classifier[i].count; j++ )
785 {
786 CvHidHaarClassifier* classifier = cascade->stage_classifier[i].classifier + j;
787 CvHidHaarTreeNode* node = classifier->node;
788 double sum, t = node->threshold*variance_norm_factor, a, b;
789
790 sum = calc_sum(node->feature.rect[0],p_offset) * node->feature.rect[0].weight;
791 sum += calc_sum(node->feature.rect[1],p_offset) * node->feature.rect[1].weight;
792
793 if( node->feature.rect[2].p0 )
794 sum += calc_sum(node->feature.rect[2],p_offset) * node->feature.rect[2].weight;
795
796 a = classifier->alpha[0];
797 b = classifier->alpha[1];
798 stage_sum += sum < t ? a : b;
799 }
800 }
801
802 if( stage_sum < cascade->stage_classifier[i].threshold )
803 {
804 result = -i;
805 EXIT;
806 }
807 }
808 }
809 else
810 {
811 for( i = start_stage; i < cascade->count; i++ )
812 {
813 double stage_sum = 0;
814
815 for( j = 0; j < cascade->stage_classifier[i].count; j++ )
816 {
817 stage_sum += icvEvalHidHaarClassifier(
818 cascade->stage_classifier[i].classifier + j,
819 variance_norm_factor, p_offset );
820 }
821
822 if( stage_sum < cascade->stage_classifier[i].threshold )
823 {
824 result = -i;
825 EXIT;
826 }
827 }
828 }
829
830 result = 1;
831
832 __END__;
833
834 return result;
835 }
836
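/* Low-level usage sketch (illustrative; variable names and loop bounds are
   placeholders): cvSetImagesForHaarClassifierCascade() and
   cvRunHaarClassifierCascade() can be driven directly when the caller wants full
   control over the scan grid -- cvHaarDetectObjects() below automates exactly this
   loop.  Assuming `img` is a single-channel 8-bit image and the cascade has no
   tilted features (otherwise a tilted sum must be passed as well):

       CvMat* sum   = cvCreateMat( img->rows + 1, img->cols + 1, CV_32SC1 );
       CvMat* sqsum = cvCreateMat( img->rows + 1, img->cols + 1, CV_64FC1 );
       cvIntegral( img, sum, sqsum, 0 );
       cvSetImagesForHaarClassifierCascade( cascade, sum, sqsum, 0, scale );
       for( y = 0; y < stop_y; y += step )
           for( x = 0; x < stop_x; x += step )
               if( cvRunHaarClassifierCascade( cascade, cvPoint(x, y), 0 ) > 0 )
               {
                   // the window at (x,y) of size cascade->real_window_size
                   // passed all stages
               }
*/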
837
838 static int is_equal( const void* _r1, const void* _r2, void* )
839 {
840 const CvRect* r1 = (const CvRect*)_r1;
841 const CvRect* r2 = (const CvRect*)_r2;
842 int distance = cvRound(r1->width*0.2);
843
844 return r2->x <= r1->x + distance &&
845 r2->x >= r1->x - distance &&
846 r2->y <= r1->y + distance &&
847 r2->y >= r1->y - distance &&
848 r2->width <= cvRound( r1->width * 1.2 ) &&
849 cvRound( r2->width * 1.2 ) >= r1->width;
850 }
851
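/* is_equal() is the predicate handed to cvSeqPartition(): two candidate rectangles
   fall into the same cluster when their top-left corners differ by at most 20% of the
   first rectangle's width and their widths agree within a factor of 1.2 either way.
   For example, (100,100,50,50) and (105,95,55,55) are grouped together, while
   (100,100,50,50) and (100,100,80,80) are not. */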
852
853 #define VERY_ROUGH_SEARCH 0
854
855 CV_IMPL CvSeq*
856 cvHaarDetectObjects( const CvArr* _img,
857 CvHaarClassifierCascade* cascade,
858 CvMemStorage* storage, double scale_factor,
859 int min_neighbors, int flags, CvSize min_size )
860 {
861 int split_stage = 2;
862
863 CvMat stub, *img = (CvMat*)_img;
864 CvMat *temp = 0, *sum = 0, *tilted = 0, *sqsum = 0, *norm_img = 0, *sumcanny = 0, *img_small = 0;
865 CvSeq* result_seq = 0;
866 CvMemStorage* temp_storage = 0;
867 CvAvgComp* comps = 0;
868 CvSeq* seq_thread[CV_MAX_THREADS] = {0};
869 int i, max_threads = 0;
870
871 CV_FUNCNAME( "cvHaarDetectObjects" );
872
873 __BEGIN__;
874
875 CvSeq *seq = 0, *seq2 = 0, *idx_seq = 0, *big_seq = 0;
876 CvAvgComp result_comp = {{0,0,0,0},0};
877 double factor;
878 int npass = 2, coi;
879 bool do_canny_pruning = (flags & CV_HAAR_DO_CANNY_PRUNING) != 0;
880 bool find_biggest_object = (flags & CV_HAAR_FIND_BIGGEST_OBJECT) != 0;
881 bool rough_search = (flags & CV_HAAR_DO_ROUGH_SEARCH) != 0;
882
883 if( !CV_IS_HAAR_CLASSIFIER(cascade) )
884 CV_ERROR( !cascade ? CV_StsNullPtr : CV_StsBadArg, "Invalid classifier cascade" );
885
886 if( !storage )
887 CV_ERROR( CV_StsNullPtr, "Null storage pointer" );
888
889 CV_CALL( img = cvGetMat( img, &stub, &coi ));
890 if( coi )
891 CV_ERROR( CV_BadCOI, "COI is not supported" );
892
893 if( CV_MAT_DEPTH(img->type) != CV_8U )
894 CV_ERROR( CV_StsUnsupportedFormat, "Only 8-bit images are supported" );
895
896 if( find_biggest_object )
897 flags &= ~CV_HAAR_SCALE_IMAGE;
898
899 CV_CALL( temp = cvCreateMat( img->rows, img->cols, CV_8UC1 ));
900 CV_CALL( sum = cvCreateMat( img->rows + 1, img->cols + 1, CV_32SC1 ));
901 CV_CALL( sqsum = cvCreateMat( img->rows + 1, img->cols + 1, CV_64FC1 ));
902 CV_CALL( temp_storage = cvCreateChildMemStorage( storage ));
903
904 if( !cascade->hid_cascade )
905 CV_CALL( icvCreateHidHaarClassifierCascade(cascade) );
906
907 if( cascade->hid_cascade->has_tilted_features )
908 tilted = cvCreateMat( img->rows + 1, img->cols + 1, CV_32SC1 );
909
910 seq = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvRect), temp_storage );
911 seq2 = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvAvgComp), temp_storage );
912 result_seq = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvAvgComp), storage );
913
914 max_threads = cvGetNumThreads();
915 if( max_threads > 1 )
916 for( i = 0; i < max_threads; i++ )
917 {
918 CvMemStorage* temp_storage_thread;
919 CV_CALL( temp_storage_thread = cvCreateMemStorage(0));
920 CV_CALL( seq_thread[i] = cvCreateSeq( 0, sizeof(CvSeq),
921 sizeof(CvRect), temp_storage_thread ));
922 }
923 else
924 seq_thread[0] = seq;
925
926 if( CV_MAT_CN(img->type) > 1 )
927 {
928 cvCvtColor( img, temp, CV_BGR2GRAY );
929 img = temp;
930 }
931
932 if( flags & CV_HAAR_FIND_BIGGEST_OBJECT )
933 flags &= ~(CV_HAAR_SCALE_IMAGE|CV_HAAR_DO_CANNY_PRUNING);
934
935 if( flags & CV_HAAR_SCALE_IMAGE )
936 {
937 CvSize win_size0 = cascade->orig_window_size;
938 int use_ipp = cascade->hid_cascade->ipp_stages != 0 &&
939 icvApplyHaarClassifier_32f_C1R_p != 0;
940
941 if( use_ipp )
942 CV_CALL( norm_img = cvCreateMat( img->rows, img->cols, CV_32FC1 ));
943 CV_CALL( img_small = cvCreateMat( img->rows + 1, img->cols + 1, CV_8UC1 ));
944
945 for( factor = 1; ; factor *= scale_factor )
946 {
947 int strip_count, strip_size;
948 int ystep = factor > 2. ? 1 : 2;
949 CvSize win_size = { cvRound(win_size0.width*factor),
950 cvRound(win_size0.height*factor) };
951 CvSize sz = { cvRound( img->cols/factor ), cvRound( img->rows/factor ) };
952 CvSize sz1 = { sz.width - win_size0.width, sz.height - win_size0.height };
953 CvRect equ_rect = { icv_object_win_border, icv_object_win_border,
954 win_size0.width - icv_object_win_border*2,
955 win_size0.height - icv_object_win_border*2 };
956 CvMat img1, sum1, sqsum1, norm1, tilted1, mask1;
957 CvMat* _tilted = 0;
958
959 if( sz1.width <= 0 || sz1.height <= 0 )
960 break;
961 if( win_size.width < min_size.width || win_size.height < min_size.height )
962 continue;
963
964 img1 = cvMat( sz.height, sz.width, CV_8UC1, img_small->data.ptr );
965 sum1 = cvMat( sz.height+1, sz.width+1, CV_32SC1, sum->data.ptr );
966 sqsum1 = cvMat( sz.height+1, sz.width+1, CV_64FC1, sqsum->data.ptr );
967 if( tilted )
968 {
969 tilted1 = cvMat( sz.height+1, sz.width+1, CV_32SC1, tilted->data.ptr );
970 _tilted = &tilted1;
971 }
972 norm1 = cvMat( sz1.height, sz1.width, CV_32FC1, norm_img ? norm_img->data.ptr : 0 );
973 mask1 = cvMat( sz1.height, sz1.width, CV_8UC1, temp->data.ptr );
974
975 cvResize( img, &img1, CV_INTER_LINEAR );
976 cvIntegral( &img1, &sum1, &sqsum1, _tilted );
977
978 if( max_threads > 1 )
979 {
980 strip_count = MAX(MIN(sz1.height/ystep, max_threads*3), 1);
981 strip_size = (sz1.height + strip_count - 1)/strip_count;
982 strip_size = (strip_size / ystep)*ystep;
983 }
984 else
985 {
986 strip_count = 1;
987 strip_size = sz1.height;
988 }
989
990 if( !use_ipp )
991 cvSetImagesForHaarClassifierCascade( cascade, &sum1, &sqsum1, 0, 1. );
992 else
993 {
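/* The IPP path expects 32f data, so the 32s sums produced by cvIntegral() are
   reinterpreted as floats in place.  The -(1 << 24) offset re-centres the values
   around zero, which roughly doubles the range of sums a 32-bit float can still
   represent exactly; the constant offset cancels out in the four-corner rectangle
   differences used later. */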
994 for( i = 0; i <= sz.height; i++ )
995 {
996 const int* isum = (int*)(sum1.data.ptr + sum1.step*i);
997 float* fsum = (float*)isum;
998 const int FLT_DELTA = -(1 << 24);
999 int j;
1000 for( j = 0; j <= sz.width; j++ )
1001 fsum[j] = (float)(isum[j] + FLT_DELTA);
1002 }
1003 }
1004
1005 #ifdef _OPENMP
1006 #pragma omp parallel for num_threads(max_threads) schedule(dynamic)
1007 #endif
1008 for( i = 0; i < strip_count; i++ )
1009 {
1010 int thread_id = cvGetThreadNum();
1011 int positive = 0;
1012 int y1 = i*strip_size, y2 = (i+1)*strip_size/* - ystep + 1*/;
1013 CvSize ssz;
1014 int x, y, j;
1015 if( i == strip_count - 1 || y2 > sz1.height )
1016 y2 = sz1.height;
1017 ssz = cvSize(sz1.width, y2 - y1);
1018
1019 if( use_ipp )
1020 {
1021 icvRectStdDev_32f_C1R_p(
1022 (float*)(sum1.data.ptr + y1*sum1.step), sum1.step,
1023 (double*)(sqsum1.data.ptr + y1*sqsum1.step), sqsum1.step,
1024 (float*)(norm1.data.ptr + y1*norm1.step), norm1.step, ssz, equ_rect );
1025
1026 positive = (ssz.width/ystep)*((ssz.height + ystep-1)/ystep);
1027 memset( mask1.data.ptr + y1*mask1.step, ystep == 1, mask1.height*mask1.step);
1028
1029 if( ystep > 1 )
1030 {
1031 for( y = y1, positive = 0; y < y2; y += ystep )
1032 for( x = 0; x < ssz.width; x += ystep )
1033 mask1.data.ptr[mask1.step*y + x] = (uchar)1;
1034 }
1035
1036 for( j = 0; j < cascade->count; j++ )
1037 {
1038 if( icvApplyHaarClassifier_32f_C1R_p(
1039 (float*)(sum1.data.ptr + y1*sum1.step), sum1.step,
1040 (float*)(norm1.data.ptr + y1*norm1.step), norm1.step,
1041 mask1.data.ptr + y1*mask1.step, mask1.step, ssz, &positive,
1042 cascade->hid_cascade->stage_classifier[j].threshold,
1043 cascade->hid_cascade->ipp_stages[j]) < 0 )
1044 {
1045 positive = 0;
1046 break;
1047 }
1048 if( positive <= 0 )
1049 break;
1050 }
1051 }
1052 else
1053 {
1054 for( y = y1, positive = 0; y < y2; y += ystep )
1055 for( x = 0; x < ssz.width; x += ystep )
1056 {
1057 mask1.data.ptr[mask1.step*y + x] =
1058 cvRunHaarClassifierCascade( cascade, cvPoint(x,y), 0 ) > 0;
1059 positive += mask1.data.ptr[mask1.step*y + x];
1060 }
1061 }
1062
1063 if( positive > 0 )
1064 {
1065 for( y = y1; y < y2; y += ystep )
1066 for( x = 0; x < ssz.width; x += ystep )
1067 if( mask1.data.ptr[mask1.step*y + x] != 0 )
1068 {
1069 CvRect obj_rect = { cvRound(x*factor), cvRound(y*factor),
1070 win_size.width, win_size.height };
1071 cvSeqPush( seq_thread[thread_id], &obj_rect );
1072 }
1073 }
1074 }
1075
1076 // gather the results
1077 if( max_threads > 1 )
1078 for( i = 0; i < max_threads; i++ )
1079 {
1080 CvSeq* s = seq_thread[i];
1081 int j, total = s->total;
1082 CvSeqBlock* b = s->first;
1083 for( j = 0; j < total; j += b->count, b = b->next )
1084 cvSeqPushMulti( seq, b->data, b->count );
1085 }
1086 }
1087 }
1088 else
1089 {
1090 int n_factors = 0;
1091 CvRect scan_roi_rect = {0,0,0,0};
1092 bool is_found = false, scan_roi = false;
1093
1094 cvIntegral( img, sum, sqsum, tilted );
1095
1096 if( do_canny_pruning )
1097 {
1098 sumcanny = cvCreateMat( img->rows + 1, img->cols + 1, CV_32SC1 );
1099 cvCanny( img, temp, 0, 50, 3 );
1100 cvIntegral( temp, sumcanny );
1101 }
1102
1103 if( (unsigned)split_stage >= (unsigned)cascade->count ||
1104 cascade->hid_cascade->is_tree )
1105 {
1106 split_stage = cascade->count;
1107 npass = 1;
1108 }
1109
1110 for( n_factors = 0, factor = 1;
1111 factor*cascade->orig_window_size.width < img->cols - 10 &&
1112 factor*cascade->orig_window_size.height < img->rows - 10;
1113 n_factors++, factor *= scale_factor )
1114 ;
1115
1116 if( find_biggest_object )
1117 {
1118 scale_factor = 1./scale_factor;
1119 factor *= scale_factor;
1120 big_seq = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvRect), temp_storage );
1121 }
1122 else
1123 factor = 1;
1124
1125 for( ; n_factors-- > 0 && !is_found; factor *= scale_factor )
1126 {
1127 const double ystep = MAX( 2, factor );
1128 CvSize win_size = { cvRound( cascade->orig_window_size.width * factor ),
1129 cvRound( cascade->orig_window_size.height * factor )};
1130 CvRect equ_rect = { 0, 0, 0, 0 };
1131 int *p0 = 0, *p1 = 0, *p2 = 0, *p3 = 0;
1132 int *pq0 = 0, *pq1 = 0, *pq2 = 0, *pq3 = 0;
1133 int pass, stage_offset = 0;
1134 int start_x = 0, start_y = 0;
1135 int end_x = cvRound((img->cols - win_size.width) / ystep);
1136 int end_y = cvRound((img->rows - win_size.height) / ystep);
1137
1138 if( win_size.width < min_size.width || win_size.height < min_size.height )
1139 {
1140 if( find_biggest_object )
1141 break;
1142 continue;
1143 }
1144
1145 cvSetImagesForHaarClassifierCascade( cascade, sum, sqsum, tilted, factor );
1146 cvZero( temp );
1147
1148 if( do_canny_pruning )
1149 {
1150 equ_rect.x = cvRound(win_size.width*0.15);
1151 equ_rect.y = cvRound(win_size.height*0.15);
1152 equ_rect.width = cvRound(win_size.width*0.7);
1153 equ_rect.height = cvRound(win_size.height*0.7);
1154
1155 p0 = (int*)(sumcanny->data.ptr + equ_rect.y*sumcanny->step) + equ_rect.x;
1156 p1 = (int*)(sumcanny->data.ptr + equ_rect.y*sumcanny->step)
1157 + equ_rect.x + equ_rect.width;
1158 p2 = (int*)(sumcanny->data.ptr + (equ_rect.y + equ_rect.height)*sumcanny->step) + equ_rect.x;
1159 p3 = (int*)(sumcanny->data.ptr + (equ_rect.y + equ_rect.height)*sumcanny->step)
1160 + equ_rect.x + equ_rect.width;
1161
1162 pq0 = (int*)(sum->data.ptr + equ_rect.y*sum->step) + equ_rect.x;
1163 pq1 = (int*)(sum->data.ptr + equ_rect.y*sum->step)
1164 + equ_rect.x + equ_rect.width;
1165 pq2 = (int*)(sum->data.ptr + (equ_rect.y + equ_rect.height)*sum->step) + equ_rect.x;
1166 pq3 = (int*)(sum->data.ptr + (equ_rect.y + equ_rect.height)*sum->step)
1167 + equ_rect.x + equ_rect.width;
1168 }
1169
1170 if( scan_roi )
1171 {
1172 //adjust start_height and stop_height
1173 start_y = cvRound(scan_roi_rect.y / ystep);
1174 end_y = cvRound((scan_roi_rect.y + scan_roi_rect.height - win_size.height) / ystep);
1175
1176 start_x = cvRound(scan_roi_rect.x / ystep);
1177 end_x = cvRound((scan_roi_rect.x + scan_roi_rect.width - win_size.width) / ystep);
1178 }
1179
1180 cascade->hid_cascade->count = split_stage;
1181
1182 for( pass = 0; pass < npass; pass++ )
1183 {
1184 #ifdef _OPENMP
1185 #pragma omp parallel for num_threads(max_threads) schedule(dynamic)
1186 #endif
1187 for( int _iy = start_y; _iy < end_y; _iy++ )
1188 {
1189 int thread_id = cvGetThreadNum();
1190 int iy = cvRound(_iy*ystep);
1191 int _ix, _xstep = 1;
1192 uchar* mask_row = temp->data.ptr + temp->step * iy;
1193
1194 for( _ix = start_x; _ix < end_x; _ix += _xstep )
1195 {
1196 int ix = cvRound(_ix*ystep); // the horizontal step is intentionally ystep as well
1197
1198 if( pass == 0 )
1199 {
1200 int result;
1201 _xstep = 2;
1202
1203 if( do_canny_pruning )
1204 {
1205 int offset;
1206 int s, sq;
1207
1208 offset = iy*(sum->step/sizeof(p0[0])) + ix;
1209 s = p0[offset] - p1[offset] - p2[offset] + p3[offset];
1210 sq = pq0[offset] - pq1[offset] - pq2[offset] + pq3[offset];
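/* cheap rejection: s measures the amount of Canny edges and sq the raw intensity
   inside the central part of the window; windows that are nearly edge-free or
   nearly black are assumed not to contain the object and are skipped without
   running the cascade. */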
1211 if( s < 100 || sq < 20 )
1212 continue;
1213 }
1214
1215 result = cvRunHaarClassifierCascade( cascade, cvPoint(ix,iy), 0 );
1216 if( result > 0 )
1217 {
1218 if( pass < npass - 1 )
1219 mask_row[ix] = 1;
1220 else
1221 {
1222 CvRect rect = cvRect(ix,iy,win_size.width,win_size.height);
1223 cvSeqPush( seq_thread[thread_id], &rect );
1224 }
1225 }
1226 if( result < 0 )
1227 _xstep = 1;
1228 }
1229 else if( mask_row[ix] )
1230 {
1231 int result = cvRunHaarClassifierCascade( cascade, cvPoint(ix,iy),
1232 stage_offset );
1233 if( result > 0 )
1234 {
1235 if( pass == npass - 1 )
1236 {
1237 CvRect rect = cvRect(ix,iy,win_size.width,win_size.height);
1238 cvSeqPush( seq_thread[thread_id], &rect );
1239 }
1240 }
1241 else
1242 mask_row[ix] = 0;
1243 }
1244 }
1245 }
1246 stage_offset = cascade->hid_cascade->count;
1247 cascade->hid_cascade->count = cascade->count;
1248 }
1249
1250 // gather the results
1251 if( max_threads > 1 )
1252 for( i = 0; i < max_threads; i++ )
1253 {
1254 CvSeq* s = seq_thread[i];
1255 int j, total = s->total;
1256 CvSeqBlock* b = s->first;
1257 for( j = 0; j < total; j += b->count, b = b->next )
1258 cvSeqPushMulti( seq, b->data, b->count );
1259 }
1260
1261 if( find_biggest_object )
1262 {
1263 CvSeq* bseq = min_neighbors > 0 ? big_seq : seq;
1264
1265 if( min_neighbors > 0 && !scan_roi )
1266 {
1267 // group retrieved rectangles in order to filter out noise
1268 int ncomp = cvSeqPartition( seq, 0, &idx_seq, is_equal, 0 );
1269 CV_CALL( comps = (CvAvgComp*)cvAlloc( (ncomp+1)*sizeof(comps[0])));
1270 memset( comps, 0, (ncomp+1)*sizeof(comps[0]));
1271
1272 #if VERY_ROUGH_SEARCH
1273 if( rough_search )
1274 {
1275 for( i = 0; i < seq->total; i++ )
1276 {
1277 CvRect r1 = *(CvRect*)cvGetSeqElem( seq, i );
1278 int idx = *(int*)cvGetSeqElem( idx_seq, i );
1279 assert( (unsigned)idx < (unsigned)ncomp );
1280
1281 comps[idx].neighbors++;
1282 comps[idx].rect.x += r1.x;
1283 comps[idx].rect.y += r1.y;
1284 comps[idx].rect.width += r1.width;
1285 comps[idx].rect.height += r1.height;
1286 }
1287
1288 // calculate average bounding box
1289 for( i = 0; i < ncomp; i++ )
1290 {
1291 int n = comps[i].neighbors;
1292 if( n >= min_neighbors )
1293 {
1294 CvAvgComp comp;
1295 comp.rect.x = (comps[i].rect.x*2 + n)/(2*n);
1296 comp.rect.y = (comps[i].rect.y*2 + n)/(2*n);
1297 comp.rect.width = (comps[i].rect.width*2 + n)/(2*n);
1298 comp.rect.height = (comps[i].rect.height*2 + n)/(2*n);
1299 comp.neighbors = n;
1300 cvSeqPush( bseq, &comp );
1301 }
1302 }
1303 }
1304 else
1305 #endif
1306 {
1307 for( i = 0 ; i <= ncomp; i++ )
1308 comps[i].rect.x = comps[i].rect.y = INT_MAX;
1309
1310 // count number of neighbors
1311 for( i = 0; i < seq->total; i++ )
1312 {
1313 CvRect r1 = *(CvRect*)cvGetSeqElem( seq, i );
1314 int idx = *(int*)cvGetSeqElem( idx_seq, i );
1315 assert( (unsigned)idx < (unsigned)ncomp );
1316
1317 comps[idx].neighbors++;
1318
1319 // rect.width and rect.height will store coordinate of right-bottom corner
1320 comps[idx].rect.x = MIN(comps[idx].rect.x, r1.x);
1321 comps[idx].rect.y = MIN(comps[idx].rect.y, r1.y);
1322 comps[idx].rect.width = MAX(comps[idx].rect.width, r1.x+r1.width-1);
1323 comps[idx].rect.height = MAX(comps[idx].rect.height, r1.y+r1.height-1);
1324 }
1325
1326 // calculate enclosing box
1327 for( i = 0; i < ncomp; i++ )
1328 {
1329 int n = comps[i].neighbors;
1330 if( n >= min_neighbors )
1331 {
1332 CvAvgComp comp;
1333 int t;
1334 double min_scale = rough_search ? 0.6 : 0.4;
1335 comp.rect.x = comps[i].rect.x;
1336 comp.rect.y = comps[i].rect.y;
1337 comp.rect.width = comps[i].rect.width - comps[i].rect.x + 1;
1338 comp.rect.height = comps[i].rect.height - comps[i].rect.y + 1;
1339
1340 // update min_size
1341 t = cvRound( comp.rect.width*min_scale );
1342 min_size.width = MAX( min_size.width, t );
1343
1344 t = cvRound( comp.rect.height*min_scale );
1345 min_size.height = MAX( min_size.height, t );
1346
1347 //expand the box by 20% because we could miss some neighbours
1348 //see 'is_equal' function
1349 #if 1
1350 int offset = cvRound(comp.rect.width * 0.2);
1351 int right = MIN( img->cols-1, comp.rect.x+comp.rect.width-1 + offset );
1352 int bottom = MIN( img->rows-1, comp.rect.y+comp.rect.height-1 + offset);
1353 comp.rect.x = MAX( comp.rect.x - offset, 0 );
1354 comp.rect.y = MAX( comp.rect.y - offset, 0 );
1355 comp.rect.width = right - comp.rect.x + 1;
1356 comp.rect.height = bottom - comp.rect.y + 1;
1357 #endif
1358
1359 comp.neighbors = n;
1360 cvSeqPush( bseq, &comp );
1361 }
1362 }
1363 }
1364
1365 cvFree( &comps );
1366 }
1367
1368 // extract the biggest rect
1369 if( bseq->total > 0 )
1370 {
1371 int max_area = 0;
1372 for( i = 0; i < bseq->total; i++ )
1373 {
1374 CvAvgComp* comp = (CvAvgComp*)cvGetSeqElem( bseq, i );
1375 int area = comp->rect.width * comp->rect.height;
1376 if( max_area < area )
1377 {
1378 max_area = area;
1379 result_comp.rect = comp->rect;
1380 result_comp.neighbors = bseq == seq ? 1 : comp->neighbors;
1381 }
1382 }
1383
1384 //Prepare information for further scanning inside the biggest rectangle
1385
1386 #if VERY_ROUGH_SEARCH
1387 // change scan ranges to roi in case of required
1388 if( !rough_search && !scan_roi )
1389 {
1390 scan_roi = true;
1391 scan_roi_rect = result_comp.rect;
1392 cvClearSeq(bseq);
1393 }
1394 else if( rough_search )
1395 is_found = true;
1396 #else
1397 if( !scan_roi )
1398 {
1399 scan_roi = true;
1400 scan_roi_rect = result_comp.rect;
1401 cvClearSeq(bseq);
1402 }
1403 #endif
1404 }
1405 }
1406 }
1407 }
1408
1409 if( min_neighbors == 0 && !find_biggest_object )
1410 {
1411 for( i = 0; i < seq->total; i++ )
1412 {
1413 CvRect* rect = (CvRect*)cvGetSeqElem( seq, i );
1414 CvAvgComp comp;
1415 comp.rect = *rect;
1416 comp.neighbors = 1;
1417 cvSeqPush( result_seq, &comp );
1418 }
1419 }
1420
1421 if( min_neighbors != 0
1422 #if VERY_ROUGH_SEARCH
1423 && (!find_biggest_object || !rough_search)
1424 #endif
1425 )
1426 {
1427 // group retrieved rectangles in order to filter out noise
1428 int ncomp = cvSeqPartition( seq, 0, &idx_seq, is_equal, 0 );
1429 CV_CALL( comps = (CvAvgComp*)cvAlloc( (ncomp+1)*sizeof(comps[0])));
1430 memset( comps, 0, (ncomp+1)*sizeof(comps[0]));
1431
1432 // count number of neighbors
1433 for( i = 0; i < seq->total; i++ )
1434 {
1435 CvRect r1 = *(CvRect*)cvGetSeqElem( seq, i );
1436 int idx = *(int*)cvGetSeqElem( idx_seq, i );
1437 assert( (unsigned)idx < (unsigned)ncomp );
1438
1439 comps[idx].neighbors++;
1440
1441 comps[idx].rect.x += r1.x;
1442 comps[idx].rect.y += r1.y;
1443 comps[idx].rect.width += r1.width;
1444 comps[idx].rect.height += r1.height;
1445 }
1446
1447 // calculate average bounding box
1448 for( i = 0; i < ncomp; i++ )
1449 {
1450 int n = comps[i].neighbors;
1451 if( n >= min_neighbors )
1452 {
1453 CvAvgComp comp;
1454 comp.rect.x = (comps[i].rect.x*2 + n)/(2*n);
1455 comp.rect.y = (comps[i].rect.y*2 + n)/(2*n);
1456 comp.rect.width = (comps[i].rect.width*2 + n)/(2*n);
1457 comp.rect.height = (comps[i].rect.height*2 + n)/(2*n);
1458 comp.neighbors = comps[i].neighbors;
1459
1460 cvSeqPush( seq2, &comp );
1461 }
1462 }
1463
1464 if( !find_biggest_object )
1465 {
1466 // filter out small face rectangles inside large face rectangles
1467 for( i = 0; i < seq2->total; i++ )
1468 {
1469 CvAvgComp r1 = *(CvAvgComp*)cvGetSeqElem( seq2, i );
1470 int j, flag = 1;
1471
1472 for( j = 0; j < seq2->total; j++ )
1473 {
1474 CvAvgComp r2 = *(CvAvgComp*)cvGetSeqElem( seq2, j );
1475 int distance = cvRound( r2.rect.width * 0.2 );
1476
1477 if( i != j &&
1478 r1.rect.x >= r2.rect.x - distance &&
1479 r1.rect.y >= r2.rect.y - distance &&
1480 r1.rect.x + r1.rect.width <= r2.rect.x + r2.rect.width + distance &&
1481 r1.rect.y + r1.rect.height <= r2.rect.y + r2.rect.height + distance &&
1482 (r2.neighbors > MAX( 3, r1.neighbors ) || r1.neighbors < 3) )
1483 {
1484 flag = 0;
1485 break;
1486 }
1487 }
1488
1489 if( flag )
1490 cvSeqPush( result_seq, &r1 );
1491 }
1492 }
1493 else
1494 {
1495 int max_area = 0;
1496 for( i = 0; i < seq2->total; i++ )
1497 {
1498 CvAvgComp* comp = (CvAvgComp*)cvGetSeqElem( seq2, i );
1499 int area = comp->rect.width * comp->rect.height;
1500 if( max_area < area )
1501 {
1502 max_area = area;
1503 result_comp = *comp;
1504 }
1505 }
1506 }
1507 }
1508
1509 if( find_biggest_object && result_comp.rect.width > 0 )
1510 cvSeqPush( result_seq, &result_comp );
1511
1512 __END__;
1513
1514 if( max_threads > 1 )
1515 for( i = 0; i < max_threads; i++ )
1516 {
1517 if( seq_thread[i] )
1518 cvReleaseMemStorage( &seq_thread[i]->storage );
1519 }
1520
1521 cvReleaseMemStorage( &temp_storage );
1522 cvReleaseMat( &sum );
1523 cvReleaseMat( &sqsum );
1524 cvReleaseMat( &tilted );
1525 cvReleaseMat( &temp );
1526 cvReleaseMat( &sumcanny );
1527 cvReleaseMat( &norm_img );
1528 cvReleaseMat( &img_small );
1529 cvFree( &comps );
1530
1531 return result_seq;
1532 }
1533
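/* Typical high-level usage (illustrative sketch; the cascade file name and parameter
   values are placeholders, not recommendations):

       CvHaarClassifierCascade* cascade =
           (CvHaarClassifierCascade*)cvLoad( "haarcascade_frontalface_alt.xml", 0, 0, 0 );
       CvMemStorage* storage = cvCreateMemStorage(0);
       CvSeq* objects = cvHaarDetectObjects( img, cascade, storage, 1.1, 3,
                                             CV_HAAR_DO_CANNY_PRUNING, cvSize(30,30) );
       for( int i = 0; i < (objects ? objects->total : 0); i++ )
       {
           CvRect r = ((CvAvgComp*)cvGetSeqElem( objects, i ))->rect;
           // ... use r ...
       }
       cvReleaseMemStorage( &storage );
       cvReleaseHaarClassifierCascade( &cascade );
*/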
1534
1535 static CvHaarClassifierCascade*
1536 icvLoadCascadeCART( const char** input_cascade, int n, CvSize orig_window_size )
1537 {
1538 int i;
1539 CvHaarClassifierCascade* cascade = icvCreateHaarClassifierCascade(n);
1540 cascade->orig_window_size = orig_window_size;
1541
1542 for( i = 0; i < n; i++ )
1543 {
1544 int j, count, l;
1545 float threshold = 0;
1546 const char* stage = input_cascade[i];
1547 int dl = 0;
1548
1549 /* tree links */
1550 int parent = -1;
1551 int next = -1;
1552
1553 sscanf( stage, "%d%n", &count, &dl );
1554 stage += dl;
1555
1556 assert( count > 0 );
1557 cascade->stage_classifier[i].count = count;
1558 cascade->stage_classifier[i].classifier =
1559 (CvHaarClassifier*)cvAlloc( count*sizeof(cascade->stage_classifier[i].classifier[0]));
1560
1561 for( j = 0; j < count; j++ )
1562 {
1563 CvHaarClassifier* classifier = cascade->stage_classifier[i].classifier + j;
1564 int k, rects = 0;
1565 char str[100];
1566
1567 sscanf( stage, "%d%n", &classifier->count, &dl );
1568 stage += dl;
1569
1570 classifier->haar_feature = (CvHaarFeature*) cvAlloc(
1571 classifier->count * ( sizeof( *classifier->haar_feature ) +
1572 sizeof( *classifier->threshold ) +
1573 sizeof( *classifier->left ) +
1574 sizeof( *classifier->right ) ) +
1575 (classifier->count + 1) * sizeof( *classifier->alpha ) );
1576 classifier->threshold = (float*) (classifier->haar_feature+classifier->count);
1577 classifier->left = (int*) (classifier->threshold + classifier->count);
1578 classifier->right = (int*) (classifier->left + classifier->count);
1579 classifier->alpha = (float*) (classifier->right + classifier->count);
1580
1581 for( l = 0; l < classifier->count; l++ )
1582 {
1583 sscanf( stage, "%d%n", &rects, &dl );
1584 stage += dl;
1585
1586 assert( rects >= 2 && rects <= CV_HAAR_FEATURE_MAX );
1587
1588 for( k = 0; k < rects; k++ )
1589 {
1590 CvRect r;
1591 int band = 0;
1592 sscanf( stage, "%d%d%d%d%d%f%n",
1593 &r.x, &r.y, &r.width, &r.height, &band,
1594 &(classifier->haar_feature[l].rect[k].weight), &dl );
1595 stage += dl;
1596 classifier->haar_feature[l].rect[k].r = r;
1597 }
1598 sscanf( stage, "%s%n", str, &dl );
1599 stage += dl;
1600
1601 classifier->haar_feature[l].tilted = strncmp( str, "tilted", 6 ) == 0;
1602
1603 for( k = rects; k < CV_HAAR_FEATURE_MAX; k++ )
1604 {
1605 memset( classifier->haar_feature[l].rect + k, 0,
1606 sizeof(classifier->haar_feature[l].rect[k]) );
1607 }
1608
1609 sscanf( stage, "%f%d%d%n", &(classifier->threshold[l]),
1610 &(classifier->left[l]),
1611 &(classifier->right[l]), &dl );
1612 stage += dl;
1613 }
1614 for( l = 0; l <= classifier->count; l++ )
1615 {
1616 sscanf( stage, "%f%n", &(classifier->alpha[l]), &dl );
1617 stage += dl;
1618 }
1619 }
1620
1621 sscanf( stage, "%f%n", &threshold, &dl );
1622 stage += dl;
1623
1624 cascade->stage_classifier[i].threshold = threshold;
1625
1626 /* load tree links */
1627 if( sscanf( stage, "%d%d%n", &parent, &next, &dl ) != 2 )
1628 {
1629 parent = i - 1;
1630 next = -1;
1631 }
1632 stage += dl;
1633
1634 cascade->stage_classifier[i].parent = parent;
1635 cascade->stage_classifier[i].next = next;
1636 cascade->stage_classifier[i].child = -1;
1637
1638 if( parent != -1 && cascade->stage_classifier[parent].child == -1 )
1639 {
1640 cascade->stage_classifier[parent].child = i;
1641 }
1642 }
1643
1644 return cascade;
1645 }
1646
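/* The legacy text format parsed above is a whitespace-separated stream, per stage:
       <num classifiers>
       for each classifier:
           <num nodes>
           for each node:  <num rects>, then <x y w h band weight> per rect
                           (band is read but ignored), a tag string ("tilted" or
                           anything else for upright), then <threshold> <left> <right>
           <alpha[0]> ... <alpha[num nodes]>                // num nodes + 1 values
       <stage threshold>
       [<parent> <next>]   // optional tree links; default is parent = previous stage,
                           // next = -1
*/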
1647 #ifndef _MAX_PATH
1648 #define _MAX_PATH 1024
1649 #endif
1650
1651 CV_IMPL CvHaarClassifierCascade*
1652 cvLoadHaarClassifierCascade( const char* directory, CvSize orig_window_size )
1653 {
1654 const char** input_cascade = 0;
1655 CvHaarClassifierCascade *cascade = 0;
1656
1657 CV_FUNCNAME( "cvLoadHaarClassifierCascade" );
1658
1659 __BEGIN__;
1660
1661 int i, n;
1662 const char* slash;
1663 char name[_MAX_PATH];
1664 int size = 0;
1665 char* ptr = 0;
1666
1667 if( !directory )
1668 CV_ERROR( CV_StsNullPtr, "Null path is passed" );
1669
1670 n = (int)strlen(directory)-1;
1671 slash = directory[n] == '\\' || directory[n] == '/' ? "" : "/";
1672
1673 /* try to read the classifier from directory */
1674 for( n = 0; ; n++ )
1675 {
1676 sprintf( name, "%s%s%d/AdaBoostCARTHaarClassifier.txt", directory, slash, n );
1677 FILE* f = fopen( name, "rb" );
1678 if( !f )
1679 break;
1680 fseek( f, 0, SEEK_END );
1681 size += ftell( f ) + 1;
1682 fclose(f);
1683 }
1684
1685 if( n == 0 && slash[0] )
1686 {
1687 CV_CALL( cascade = (CvHaarClassifierCascade*)cvLoad( directory ));
1688 EXIT;
1689 }
1690 else if( n == 0 )
1691 CV_ERROR( CV_StsBadArg, "Invalid path" );
1692
1693 size += (n+1)*sizeof(char*);
1694 CV_CALL( input_cascade = (const char**)cvAlloc( size ));
1695 ptr = (char*)(input_cascade + n + 1);
1696
1697 for( i = 0; i < n; i++ )
1698 {
1699 sprintf( name, "%s/%d/AdaBoostCARTHaarClassifier.txt", directory, i );
1700 FILE* f = fopen( name, "rb" );
1701 if( !f )
1702 CV_ERROR( CV_StsError, "Could not open a cascade stage file" );
1703 fseek( f, 0, SEEK_END );
1704 size = ftell( f );
1705 fseek( f, 0, SEEK_SET );
1706 fread( ptr, 1, size, f );
1707 fclose(f);
1708 input_cascade[i] = ptr;
1709 ptr += size;
1710 *ptr++ = '\0';
1711 }
1712
1713 input_cascade[n] = 0;
1714 cascade = icvLoadCascadeCART( input_cascade, n, orig_window_size );
1715
1716 __END__;
1717
1718 if( input_cascade )
1719 cvFree( &input_cascade );
1720
1721 if( cvGetErrStatus() < 0 )
1722 cvReleaseHaarClassifierCascade( &cascade );
1723
1724 return cascade;
1725 }
1726
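/* cvLoadHaarClassifierCascade() accepts either a directory written by the old
   haartraining tool (containing <dir>/0/AdaBoostCARTHaarClassifier.txt,
   <dir>/1/..., one text file per stage) or, when no such stage files are found and
   the path does not end with a slash, a file-storage (XML/YAML) cascade path that is
   simply forwarded to cvLoad().  Example (illustrative; the path and window size are
   placeholders):

       CvHaarClassifierCascade* c =
           cvLoadHaarClassifierCascade( "./my_cascade_dir/", cvSize(24, 24) );
*/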
1727
1728 CV_IMPL void
1729 cvReleaseHaarClassifierCascade( CvHaarClassifierCascade** _cascade )
1730 {
1731 if( _cascade && *_cascade )
1732 {
1733 int i, j;
1734 CvHaarClassifierCascade* cascade = *_cascade;
1735
1736 for( i = 0; i < cascade->count; i++ )
1737 {
1738 for( j = 0; j < cascade->stage_classifier[i].count; j++ )
1739 cvFree( &cascade->stage_classifier[i].classifier[j].haar_feature );
1740 cvFree( &cascade->stage_classifier[i].classifier );
1741 }
1742 icvReleaseHidHaarClassifierCascade( &cascade->hid_cascade );
1743 cvFree( _cascade );
1744 }
1745 }
1746
1747
1748 /****************************************************************************************\
1749 * Persistence functions *
1750 \****************************************************************************************/
1751
1752 /* field names */
1753
1754 #define ICV_HAAR_SIZE_NAME "size"
1755 #define ICV_HAAR_STAGES_NAME "stages"
1756 #define ICV_HAAR_TREES_NAME "trees"
1757 #define ICV_HAAR_FEATURE_NAME "feature"
1758 #define ICV_HAAR_RECTS_NAME "rects"
1759 #define ICV_HAAR_TILTED_NAME "tilted"
1760 #define ICV_HAAR_THRESHOLD_NAME "threshold"
1761 #define ICV_HAAR_LEFT_NODE_NAME "left_node"
1762 #define ICV_HAAR_LEFT_VAL_NAME "left_val"
1763 #define ICV_HAAR_RIGHT_NODE_NAME "right_node"
1764 #define ICV_HAAR_RIGHT_VAL_NAME "right_val"
1765 #define ICV_HAAR_STAGE_THRESHOLD_NAME "stage_threshold"
1766 #define ICV_HAAR_PARENT_NAME "parent"
1767 #define ICV_HAAR_NEXT_NAME "next"
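/* These are the node names of the file-storage (XML/YAML) cascade representation read
   by icvReadHaarClassifier() below: the cascade map holds a two-element "size"
   sequence (window width, height) and a "stages" sequence; each stage map holds a
   "trees" sequence plus "stage_threshold", "parent" and "next"; each tree is a
   sequence of node maps, each with a "feature" map ("rects" + "tilted"), a
   "threshold" and left/right values or node indices.  A heavily abbreviated sketch
   of one stump (all values are placeholders):

       <size>24 24</size>
       <stages>
         <_>
           <trees>
             <_>
               <_>
                 <feature>
                   <rects> <_>0 0 8 8 -1.</_> <_>0 4 8 4 2.</_> </rects>
                   <tilted>0</tilted></feature>
                 <threshold>1.e-02</threshold>
                 <left_val>0.5</left_val>
                 <right_val>-0.5</right_val></_></_></trees>
           <stage_threshold>0.5</stage_threshold>
           <parent>-1</parent>
           <next>-1</next></_></stages>
*/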
1768
1769 static int
1770 icvIsHaarClassifier( const void* struct_ptr )
1771 {
1772 return CV_IS_HAAR_CLASSIFIER( struct_ptr );
1773 }
1774
1775 static void*
1776 icvReadHaarClassifier( CvFileStorage* fs, CvFileNode* node )
1777 {
1778 CvHaarClassifierCascade* cascade = NULL;
1779
1780 CV_FUNCNAME( "cvReadHaarClassifier" );
1781
1782 __BEGIN__;
1783
1784 char buf[256];
1785 CvFileNode* seq_fn = NULL; /* sequence */
1786 CvFileNode* fn = NULL;
1787 CvFileNode* stages_fn = NULL;
1788 CvSeqReader stages_reader;
1789 int n;
1790 int i, j, k, l;
1791 int parent, next;
1792
1793 CV_CALL( stages_fn = cvGetFileNodeByName( fs, node, ICV_HAAR_STAGES_NAME ) );
1794 if( !stages_fn || !CV_NODE_IS_SEQ( stages_fn->tag) )
1795 CV_ERROR( CV_StsError, "Invalid stages node" );
1796
1797 n = stages_fn->data.seq->total;
1798 CV_CALL( cascade = icvCreateHaarClassifierCascade(n) );
1799
1800 /* read size */
1801 CV_CALL( seq_fn = cvGetFileNodeByName( fs, node, ICV_HAAR_SIZE_NAME ) );
1802 if( !seq_fn || !CV_NODE_IS_SEQ( seq_fn->tag ) || seq_fn->data.seq->total != 2 )
1803 CV_ERROR( CV_StsError, "Size node is not a valid sequence." );
1804 CV_CALL( fn = (CvFileNode*) cvGetSeqElem( seq_fn->data.seq, 0 ) );
1805 if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i <= 0 )
1806 CV_ERROR( CV_StsError, "Invalid size node: width must be a positive integer" );
1807 cascade->orig_window_size.width = fn->data.i;
1808 CV_CALL( fn = (CvFileNode*) cvGetSeqElem( seq_fn->data.seq, 1 ) );
1809 if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i <= 0 )
1810 CV_ERROR( CV_StsError, "Invalid size node: height must be a positive integer" );
1811 cascade->orig_window_size.height = fn->data.i;
1812
1813 CV_CALL( cvStartReadSeq( stages_fn->data.seq, &stages_reader ) );
1814 for( i = 0; i < n; ++i )
1815 {
1816 CvFileNode* stage_fn;
1817 CvFileNode* trees_fn;
1818 CvSeqReader trees_reader;
1819
1820 stage_fn = (CvFileNode*) stages_reader.ptr;
1821 if( !CV_NODE_IS_MAP( stage_fn->tag ) )
1822 {
1823 sprintf( buf, "Invalid stage %d", i );
1824 CV_ERROR( CV_StsError, buf );
1825 }
1826
1827 CV_CALL( trees_fn = cvGetFileNodeByName( fs, stage_fn, ICV_HAAR_TREES_NAME ) );
1828 if( !trees_fn || !CV_NODE_IS_SEQ( trees_fn->tag )
1829 || trees_fn->data.seq->total <= 0 )
1830 {
1831 sprintf( buf, "Trees node is not a valid sequence. (stage %d)", i );
1832 CV_ERROR( CV_StsError, buf );
1833 }
1834
1835 CV_CALL( cascade->stage_classifier[i].classifier =
1836 (CvHaarClassifier*) cvAlloc( trees_fn->data.seq->total
1837 * sizeof( cascade->stage_classifier[i].classifier[0] ) ) );
1838 for( j = 0; j < trees_fn->data.seq->total; ++j )
1839 {
1840 cascade->stage_classifier[i].classifier[j].haar_feature = NULL;
1841 }
1842 cascade->stage_classifier[i].count = trees_fn->data.seq->total;
1843
1844 CV_CALL( cvStartReadSeq( trees_fn->data.seq, &trees_reader ) );
1845 for( j = 0; j < trees_fn->data.seq->total; ++j )
1846 {
1847 CvFileNode* tree_fn;
1848 CvSeqReader tree_reader;
1849 CvHaarClassifier* classifier;
1850 int last_idx;
1851
1852 classifier = &cascade->stage_classifier[i].classifier[j];
1853 tree_fn = (CvFileNode*) trees_reader.ptr;
1854 if( !CV_NODE_IS_SEQ( tree_fn->tag ) || tree_fn->data.seq->total <= 0 )
1855 {
1856 sprintf( buf, "Tree node is not a valid sequence."
1857 " (stage %d, tree %d)", i, j );
1858 CV_ERROR( CV_StsError, buf );
1859 }
1860
1861 classifier->count = tree_fn->data.seq->total;
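/* one allocation holds, back to back: haar_feature[count], threshold[count],
   left[count], right[count] and alpha[count+1] (the leaf values) */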
1862 CV_CALL( classifier->haar_feature = (CvHaarFeature*) cvAlloc(
1863 classifier->count * ( sizeof( *classifier->haar_feature ) +
1864 sizeof( *classifier->threshold ) +
1865 sizeof( *classifier->left ) +
1866 sizeof( *classifier->right ) ) +
1867 (classifier->count + 1) * sizeof( *classifier->alpha ) ) );
1868 classifier->threshold = (float*) (classifier->haar_feature+classifier->count);
1869 classifier->left = (int*) (classifier->threshold + classifier->count);
1870 classifier->right = (int*) (classifier->left + classifier->count);
1871 classifier->alpha = (float*) (classifier->right + classifier->count);
1872
1873 CV_CALL( cvStartReadSeq( tree_fn->data.seq, &tree_reader ) );
1874 for( k = 0, last_idx = 0; k < tree_fn->data.seq->total; ++k )
1875 {
1876 CvFileNode* node_fn;
1877 CvFileNode* feature_fn;
1878 CvFileNode* rects_fn;
1879 CvSeqReader rects_reader;
1880
1881 node_fn = (CvFileNode*) tree_reader.ptr;
1882 if( !CV_NODE_IS_MAP( node_fn->tag ) )
1883 {
1884 sprintf( buf, "Tree node %d is not a valid map. (stage %d, tree %d)",
1885 k, i, j );
1886 CV_ERROR( CV_StsError, buf );
1887 }
1888 CV_CALL( feature_fn = cvGetFileNodeByName( fs, node_fn,
1889 ICV_HAAR_FEATURE_NAME ) );
1890 if( !feature_fn || !CV_NODE_IS_MAP( feature_fn->tag ) )
1891 {
1892 sprintf( buf, "Feature node is not a valid map. "
1893 "(stage %d, tree %d, node %d)", i, j, k );
1894 CV_ERROR( CV_StsError, buf );
1895 }
1896 CV_CALL( rects_fn = cvGetFileNodeByName( fs, feature_fn,
1897 ICV_HAAR_RECTS_NAME ) );
1898 if( !rects_fn || !CV_NODE_IS_SEQ( rects_fn->tag )
1899 || rects_fn->data.seq->total < 1
1900 || rects_fn->data.seq->total > CV_HAAR_FEATURE_MAX )
1901 {
1902 sprintf( buf, "Rects node is not a valid sequence. "
1903 "(stage %d, tree %d, node %d)", i, j, k );
1904 CV_ERROR( CV_StsError, buf );
1905 }
1906 CV_CALL( cvStartReadSeq( rects_fn->data.seq, &rects_reader ) );
1907 for( l = 0; l < rects_fn->data.seq->total; ++l )
1908 {
1909 CvFileNode* rect_fn;
1910 CvRect r;
1911
1912 rect_fn = (CvFileNode*) rects_reader.ptr;
1913 if( !CV_NODE_IS_SEQ( rect_fn->tag ) || rect_fn->data.seq->total != 5 )
1914 {
1915 sprintf( buf, "Rect %d is not a valid sequence. "
1916 "(stage %d, tree %d, node %d)", l, i, j, k );
1917 CV_ERROR( CV_StsError, buf );
1918 }
1919
1920 fn = CV_SEQ_ELEM( rect_fn->data.seq, CvFileNode, 0 );
1921 if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i < 0 )
1922 {
1923 sprintf( buf, "x coordinate must be non-negative integer. "
1924 "(stage %d, tree %d, node %d, rect %d)", i, j, k, l );
1925 CV_ERROR( CV_StsError, buf );
1926 }
1927 r.x = fn->data.i;
1928 fn = CV_SEQ_ELEM( rect_fn->data.seq, CvFileNode, 1 );
1929 if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i < 0 )
1930 {
1931 sprintf( buf, "y coordinate must be non-negative integer. "
1932 "(stage %d, tree %d, node %d, rect %d)", i, j, k, l );
1933 CV_ERROR( CV_StsError, buf );
1934 }
1935 r.y = fn->data.i;
1936 fn = CV_SEQ_ELEM( rect_fn->data.seq, CvFileNode, 2 );
1937 if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i <= 0
1938 || r.x + fn->data.i > cascade->orig_window_size.width )
1939 {
1940 sprintf( buf, "width must be positive integer and "
1941 "(x + width) must not exceed window width. "
1942 "(stage %d, tree %d, node %d, rect %d)", i, j, k, l );
1943 CV_ERROR( CV_StsError, buf );
1944 }
1945 r.width = fn->data.i;
1946 fn = CV_SEQ_ELEM( rect_fn->data.seq, CvFileNode, 3 );
1947 if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i <= 0
1948 || r.y + fn->data.i > cascade->orig_window_size.height )
1949 {
1950 sprintf( buf, "height must be positive integer and "
1951 "(y + height) must not exceed window height. "
1952 "(stage %d, tree %d, node %d, rect %d)", i, j, k, l );
1953 CV_ERROR( CV_StsError, buf );
1954 }
1955 r.height = fn->data.i;
1956 fn = CV_SEQ_ELEM( rect_fn->data.seq, CvFileNode, 4 );
1957 if( !CV_NODE_IS_REAL( fn->tag ) )
1958 {
1959 sprintf( buf, "weight must be real number. "
1960 "(stage %d, tree %d, node %d, rect %d)", i, j, k, l );
1961 CV_ERROR( CV_StsError, buf );
1962 }
1963
1964 classifier->haar_feature[k].rect[l].weight = (float) fn->data.f;
1965 classifier->haar_feature[k].rect[l].r = r;
1966
1967 CV_NEXT_SEQ_ELEM( sizeof( *rect_fn ), rects_reader );
1968 } /* for each rect */
1969 for( l = rects_fn->data.seq->total; l < CV_HAAR_FEATURE_MAX; ++l )
1970 {
1971 classifier->haar_feature[k].rect[l].weight = 0;
1972 classifier->haar_feature[k].rect[l].r = cvRect( 0, 0, 0, 0 );
1973 }
1974
1975 CV_CALL( fn = cvGetFileNodeByName( fs, feature_fn, ICV_HAAR_TILTED_NAME));
1976 if( !fn || !CV_NODE_IS_INT( fn->tag ) )
1977 {
1978 sprintf( buf, "tilted must be 0 or 1. "
1979 "(stage %d, tree %d, node %d)", i, j, k );
1980 CV_ERROR( CV_StsError, buf );
1981 }
1982 classifier->haar_feature[k].tilted = ( fn->data.i != 0 );
1983 CV_CALL( fn = cvGetFileNodeByName( fs, node_fn, ICV_HAAR_THRESHOLD_NAME));
1984 if( !fn || !CV_NODE_IS_REAL( fn->tag ) )
1985 {
1986 sprintf( buf, "threshold must be real number. "
1987 "(stage %d, tree %d, node %d)", i, j, k );
1988 CV_ERROR( CV_StsError, buf );
1989 }
1990 classifier->threshold[k] = (float) fn->data.f;
1991 CV_CALL( fn = cvGetFileNodeByName( fs, node_fn, ICV_HAAR_LEFT_NODE_NAME));
1992 if( fn )
1993 {
1994 if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i <= k
1995 || fn->data.i >= tree_fn->data.seq->total )
1996 {
1997 sprintf( buf, "left node must be valid node number. "
1998 "(stage %d, tree %d, node %d)", i, j, k );
1999 CV_ERROR( CV_StsError, buf );
2000 }
2001 /* left node */
2002 classifier->left[k] = fn->data.i;
2003 }
2004 else
2005 {
2006 CV_CALL( fn = cvGetFileNodeByName( fs, node_fn,
2007 ICV_HAAR_LEFT_VAL_NAME ) );
2008 if( !fn )
2009 {
2010 sprintf( buf, "left node or left value must be specified. "
2011 "(stage %d, tree %d, node %d)", i, j, k );
2012 CV_ERROR( CV_StsError, buf );
2013 }
2014 if( !CV_NODE_IS_REAL( fn->tag ) )
2015 {
2016 sprintf( buf, "left value must be real number. "
2017 "(stage %d, tree %d, node %d)", i, j, k );
2018 CV_ERROR( CV_StsError, buf );
2019 }
2020 /* left value */
2021 if( last_idx >= classifier->count + 1 )
2022 {
2023 sprintf( buf, "Tree structure is broken: too many values. "
2024 "(stage %d, tree %d, node %d)", i, j, k );
2025 CV_ERROR( CV_StsError, buf );
2026 }
2027 classifier->left[k] = -last_idx;
2028 classifier->alpha[last_idx++] = (float) fn->data.f;
2029 }
2030 CV_CALL( fn = cvGetFileNodeByName( fs, node_fn,ICV_HAAR_RIGHT_NODE_NAME));
2031 if( fn )
2032 {
2033 if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i <= k
2034 || fn->data.i >= tree_fn->data.seq->total )
2035 {
2036 sprintf( buf, "right node must be valid node number. "
2037 "(stage %d, tree %d, node %d)", i, j, k );
2038 CV_ERROR( CV_StsError, buf );
2039 }
2040 /* right node */
2041 classifier->right[k] = fn->data.i;
2042 }
2043 else
2044 {
2045 CV_CALL( fn = cvGetFileNodeByName( fs, node_fn,
2046 ICV_HAAR_RIGHT_VAL_NAME ) );
2047 if( !fn )
2048 {
2049 sprintf( buf, "right node or right value must be specified. "
2050 "(stage %d, tree %d, node %d)", i, j, k );
2051 CV_ERROR( CV_StsError, buf );
2052 }
2053 if( !CV_NODE_IS_REAL( fn->tag ) )
2054 {
2055 sprintf( buf, "right value must be real number. "
2056 "(stage %d, tree %d, node %d)", i, j, k );
2057 CV_ERROR( CV_StsError, buf );
2058 }
2059 /* right value */
2060 if( last_idx >= classifier->count + 1 )
2061 {
2062 sprintf( buf, "Tree structure is broken: too many values. "
2063 "(stage %d, tree %d, node %d)", i, j, k );
2064 CV_ERROR( CV_StsError, buf );
2065 }
2066 classifier->right[k] = -last_idx;
2067 classifier->alpha[last_idx++] = (float) fn->data.f;
2068 }
2069
2070 CV_NEXT_SEQ_ELEM( sizeof( *node_fn ), tree_reader );
2071 } /* for each node */
2072 if( last_idx != classifier->count + 1 )
2073 {
2074 sprintf( buf, "Tree structure is broken: too few values. "
2075 "(stage %d, tree %d)", i, j );
2076 CV_ERROR( CV_StsError, buf );
2077 }
2078
2079 CV_NEXT_SEQ_ELEM( sizeof( *tree_fn ), trees_reader );
2080 } /* for each tree */
2081
2082 CV_CALL( fn = cvGetFileNodeByName( fs, stage_fn, ICV_HAAR_STAGE_THRESHOLD_NAME));
2083 if( !fn || !CV_NODE_IS_REAL( fn->tag ) )
2084 {
2085 sprintf( buf, "stage threshold must be real number. (stage %d)", i );
2086 CV_ERROR( CV_StsError, buf );
2087 }
2088 cascade->stage_classifier[i].threshold = (float) fn->data.f;
2089
2090 parent = i - 1;
2091 next = -1;
2092
2093 CV_CALL( fn = cvGetFileNodeByName( fs, stage_fn, ICV_HAAR_PARENT_NAME ) );
2094 if( !fn || !CV_NODE_IS_INT( fn->tag )
2095 || fn->data.i < -1 || fn->data.i >= cascade->count )
2096 {
2097 sprintf( buf, "parent must be integer number. (stage %d)", i );
2098 CV_ERROR( CV_StsError, buf );
2099 }
2100 parent = fn->data.i;
2101 CV_CALL( fn = cvGetFileNodeByName( fs, stage_fn, ICV_HAAR_NEXT_NAME ) );
2102 if( !fn || !CV_NODE_IS_INT( fn->tag )
2103 || fn->data.i < -1 || fn->data.i >= cascade->count )
2104 {
2105 sprintf( buf, "next must be integer number. (stage %d)", i );
2106 CV_ERROR( CV_StsError, buf );
2107 }
2108 next = fn->data.i;
2109
2110 cascade->stage_classifier[i].parent = parent;
2111 cascade->stage_classifier[i].next = next;
2112 cascade->stage_classifier[i].child = -1;
2113
2114 if( parent != -1 && cascade->stage_classifier[parent].child == -1 )
2115 {
2116 cascade->stage_classifier[parent].child = i;
2117 }
2118
2119 CV_NEXT_SEQ_ELEM( sizeof( *stage_fn ), stages_reader );
2120 } /* for each stage */
2121
2122 __END__;
2123
2124 if( cvGetErrStatus() < 0 )
2125 {
2126 cvReleaseHaarClassifierCascade( &cascade );
2127 cascade = NULL;
2128 }
2129
2130 return cascade;
2131 }
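/* icvReadHaarClassifier() is not called directly by user code; the generic
   persistence layer dispatches to it through the CvType registration at the end of
   this file whenever cvLoad() meets a node of type CV_TYPE_NAME_HAAR. Sketch
   (file name hypothetical):

       CvHaarClassifierCascade* cascade = (CvHaarClassifierCascade*)
           cvLoad( "haarcascade_frontalface_alt.xml" );
       if( cascade )
           cvReleaseHaarClassifierCascade( &cascade );
*/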
2132
2133 static void
2134 icvWriteHaarClassifier( CvFileStorage* fs, const char* name, const void* struct_ptr,
2135 CvAttrList attributes )
2136 {
2137 CV_FUNCNAME( "cvWriteHaarClassifier" );
2138
2139 __BEGIN__;
2140
2141 int i, j, k, l;
2142 char buf[256];
2143 const CvHaarClassifierCascade* cascade = (const CvHaarClassifierCascade*) struct_ptr;
2144
2145 /* TODO: parameters check */
2146
2147 CV_CALL( cvStartWriteStruct( fs, name, CV_NODE_MAP, CV_TYPE_NAME_HAAR, attributes ) );
2148
2149 CV_CALL( cvStartWriteStruct( fs, ICV_HAAR_SIZE_NAME, CV_NODE_SEQ | CV_NODE_FLOW ) );
2150 CV_CALL( cvWriteInt( fs, NULL, cascade->orig_window_size.width ) );
2151 CV_CALL( cvWriteInt( fs, NULL, cascade->orig_window_size.height ) );
2152 CV_CALL( cvEndWriteStruct( fs ) ); /* size */
2153
2154 CV_CALL( cvStartWriteStruct( fs, ICV_HAAR_STAGES_NAME, CV_NODE_SEQ ) );
2155 for( i = 0; i < cascade->count; ++i )
2156 {
2157 CV_CALL( cvStartWriteStruct( fs, NULL, CV_NODE_MAP ) );
2158 sprintf( buf, "stage %d", i );
2159 CV_CALL( cvWriteComment( fs, buf, 1 ) );
2160
2161 CV_CALL( cvStartWriteStruct( fs, ICV_HAAR_TREES_NAME, CV_NODE_SEQ ) );
2162
2163 for( j = 0; j < cascade->stage_classifier[i].count; ++j )
2164 {
2165 CvHaarClassifier* tree = &cascade->stage_classifier[i].classifier[j];
2166
2167 CV_CALL( cvStartWriteStruct( fs, NULL, CV_NODE_SEQ ) );
2168 sprintf( buf, "tree %d", j );
2169 CV_CALL( cvWriteComment( fs, buf, 1 ) );
2170
2171 for( k = 0; k < tree->count; ++k )
2172 {
2173 CvHaarFeature* feature = &tree->haar_feature[k];
2174
2175 CV_CALL( cvStartWriteStruct( fs, NULL, CV_NODE_MAP ) );
2176 if( k )
2177 {
2178 sprintf( buf, "node %d", k );
2179 }
2180 else
2181 {
2182 sprintf( buf, "root node" );
2183 }
2184 CV_CALL( cvWriteComment( fs, buf, 1 ) );
2185
2186 CV_CALL( cvStartWriteStruct( fs, ICV_HAAR_FEATURE_NAME, CV_NODE_MAP ) );
2187
2188 CV_CALL( cvStartWriteStruct( fs, ICV_HAAR_RECTS_NAME, CV_NODE_SEQ ) );
2189 for( l = 0; l < CV_HAAR_FEATURE_MAX && feature->rect[l].r.width != 0; ++l )
2190 {
2191 CV_CALL( cvStartWriteStruct( fs, NULL, CV_NODE_SEQ | CV_NODE_FLOW ) );
2192 CV_CALL( cvWriteInt( fs, NULL, feature->rect[l].r.x ) );
2193 CV_CALL( cvWriteInt( fs, NULL, feature->rect[l].r.y ) );
2194 CV_CALL( cvWriteInt( fs, NULL, feature->rect[l].r.width ) );
2195 CV_CALL( cvWriteInt( fs, NULL, feature->rect[l].r.height ) );
2196 CV_CALL( cvWriteReal( fs, NULL, feature->rect[l].weight ) );
2197 CV_CALL( cvEndWriteStruct( fs ) ); /* rect */
2198 }
2199 CV_CALL( cvEndWriteStruct( fs ) ); /* rects */
2200 CV_CALL( cvWriteInt( fs, ICV_HAAR_TILTED_NAME, feature->tilted ) );
2201 CV_CALL( cvEndWriteStruct( fs ) ); /* feature */
2202
2203 CV_CALL( cvWriteReal( fs, ICV_HAAR_THRESHOLD_NAME, tree->threshold[k]) );
2204
2205 if( tree->left[k] > 0 )
2206 {
2207 CV_CALL( cvWriteInt( fs, ICV_HAAR_LEFT_NODE_NAME, tree->left[k] ) );
2208 }
2209 else
2210 {
2211 CV_CALL( cvWriteReal( fs, ICV_HAAR_LEFT_VAL_NAME,
2212 tree->alpha[-tree->left[k]] ) );
2213 }
2214
2215 if( tree->right[k] > 0 )
2216 {
2217 CV_CALL( cvWriteInt( fs, ICV_HAAR_RIGHT_NODE_NAME, tree->right[k] ) );
2218 }
2219 else
2220 {
2221 CV_CALL( cvWriteReal( fs, ICV_HAAR_RIGHT_VAL_NAME,
2222 tree->alpha[-tree->right[k]] ) );
2223 }
2224
2225 CV_CALL( cvEndWriteStruct( fs ) ); /* split */
2226 }
2227
2228 CV_CALL( cvEndWriteStruct( fs ) ); /* tree */
2229 }
2230
2231 CV_CALL( cvEndWriteStruct( fs ) ); /* trees */
2232
2233 CV_CALL( cvWriteReal( fs, ICV_HAAR_STAGE_THRESHOLD_NAME,
2234 cascade->stage_classifier[i].threshold) );
2235
2236 CV_CALL( cvWriteInt( fs, ICV_HAAR_PARENT_NAME,
2237 cascade->stage_classifier[i].parent ) );
2238 CV_CALL( cvWriteInt( fs, ICV_HAAR_NEXT_NAME,
2239 cascade->stage_classifier[i].next ) );
2240
2241 CV_CALL( cvEndWriteStruct( fs ) ); /* stage */
2242 } /* for each stage */
2243
2244 CV_CALL( cvEndWriteStruct( fs ) ); /* stages */
2245 CV_CALL( cvEndWriteStruct( fs ) ); /* root */
2246
2247 __END__;
2248 }
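/* The writer is reached the same way: cvSave() on a cascade pointer dispatches here
   through the registered type handlers. Sketch (file name hypothetical):

       cvSave( "my_cascade.xml", cascade );
*/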
2249
2250 static void*
2251 icvCloneHaarClassifier( const void* struct_ptr )
2252 {
2253 CvHaarClassifierCascade* cascade = NULL;
2254
2255 CV_FUNCNAME( "cvCloneHaarClassifier" );
2256
2257 __BEGIN__;
2258
2259 int i, j, k, n;
2260 const CvHaarClassifierCascade* cascade_src =
2261 (const CvHaarClassifierCascade*) struct_ptr;
2262
2263 n = cascade_src->count;
2264 CV_CALL( cascade = icvCreateHaarClassifierCascade(n) );
2265 cascade->orig_window_size = cascade_src->orig_window_size;
2266
2267 for( i = 0; i < n; ++i )
2268 {
2269 cascade->stage_classifier[i].parent = cascade_src->stage_classifier[i].parent;
2270 cascade->stage_classifier[i].next = cascade_src->stage_classifier[i].next;
2271 cascade->stage_classifier[i].child = cascade_src->stage_classifier[i].child;
2272 cascade->stage_classifier[i].threshold = cascade_src->stage_classifier[i].threshold;
2273
2274 cascade->stage_classifier[i].count = 0;
2275 CV_CALL( cascade->stage_classifier[i].classifier =
2276 (CvHaarClassifier*) cvAlloc( cascade_src->stage_classifier[i].count
2277 * sizeof( cascade->stage_classifier[i].classifier[0] ) ) );
2278
2279 cascade->stage_classifier[i].count = cascade_src->stage_classifier[i].count;
2280
2281 for( j = 0; j < cascade->stage_classifier[i].count; ++j )
2282 {
2283 cascade->stage_classifier[i].classifier[j].haar_feature = NULL;
2284 }
2285
2286 for( j = 0; j < cascade->stage_classifier[i].count; ++j )
2287 {
2288 const CvHaarClassifier* classifier_src =
2289 &cascade_src->stage_classifier[i].classifier[j];
2290 CvHaarClassifier* classifier =
2291 &cascade->stage_classifier[i].classifier[j];
2292
2293 classifier->count = classifier_src->count;
2294 CV_CALL( classifier->haar_feature = (CvHaarFeature*) cvAlloc(
2295 classifier->count * ( sizeof( *classifier->haar_feature ) +
2296 sizeof( *classifier->threshold ) +
2297 sizeof( *classifier->left ) +
2298 sizeof( *classifier->right ) ) +
2299 (classifier->count + 1) * sizeof( *classifier->alpha ) ) );
2300 classifier->threshold = (float*) (classifier->haar_feature+classifier->count);
2301 classifier->left = (int*) (classifier->threshold + classifier->count);
2302 classifier->right = (int*) (classifier->left + classifier->count);
2303 classifier->alpha = (float*) (classifier->right + classifier->count);
2304 for( k = 0; k < classifier->count; ++k )
2305 {
2306 classifier->haar_feature[k] = classifier_src->haar_feature[k];
2307 classifier->threshold[k] = classifier_src->threshold[k];
2308 classifier->left[k] = classifier_src->left[k];
2309 classifier->right[k] = classifier_src->right[k];
2310 classifier->alpha[k] = classifier_src->alpha[k];
2311 }
2312 classifier->alpha[classifier->count] =
2313 classifier_src->alpha[classifier->count];
2314 }
2315 }
2316
2317 __END__;
2318
2319 return cascade;
2320 }
2321
2322
2323 CvType haar_type( CV_TYPE_NAME_HAAR, icvIsHaarClassifier,
2324 (CvReleaseFunc)cvReleaseHaarClassifierCascade,
2325 icvReadHaarClassifier, icvWriteHaarClassifier,
2326 icvCloneHaarClassifier );
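/* Registering haar_type binds CV_TYPE_NAME_HAAR to the is/release/read/write/clone
   handlers above, so the generic persistence API (cvLoad, cvSave, cvClone, cvRelease)
   works transparently for cascades. Clone sketch (illustrative):

       CvHaarClassifierCascade* copy =
           (CvHaarClassifierCascade*) cvClone( cascade );   // deep copy via icvCloneHaarClassifier()
       cvReleaseHaarClassifierCascade( &copy );
*/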
2327
2328 /* End of file. */
2329