1 /*M///////////////////////////////////////////////////////////////////////////////////////
2 //
3 //  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
4 //
5 //  By downloading, copying, installing or using the software you agree to this license.
6 //  If you do not agree to this license, do not download, install,
7 //  copy or use the software.
8 //
9 //
10 //                        Intel License Agreement
11 //                For Open Source Computer Vision Library
12 //
13 // Copyright (C) 2000, Intel Corporation, all rights reserved.
14 // Third party copyrights are property of their respective owners.
15 //
16 // Redistribution and use in source and binary forms, with or without modification,
17 // are permitted provided that the following conditions are met:
18 //
19 //   * Redistribution's of source code must retain the above copyright notice,
20 //     this list of conditions and the following disclaimer.
21 //
22 //   * Redistribution's in binary form must reproduce the above copyright notice,
23 //     this list of conditions and the following disclaimer in the documentation
24 //     and/or other materials provided with the distribution.
25 //
26 //   * The name of Intel Corporation may not be used to endorse or promote products
27 //     derived from this software without specific prior written permission.
28 //
29 // This software is provided by the copyright holders and contributors "as is" and
30 // any express or implied warranties, including, but not limited to, the implied
31 // warranties of merchantability and fitness for a particular purpose are disclaimed.
32 // In no event shall the Intel Corporation or contributors be liable for any direct,
33 // indirect, incidental, special, exemplary, or consequential damages
34 // (including, but not limited to, procurement of substitute goods or services;
35 // loss of use, data, or profits; or business interruption) however caused
36 // and on any theory of liability, whether in contract, strict liability,
37 // or tort (including negligence or otherwise) arising in any way out of
38 // the use of this software, even if advised of the possibility of such damage.
39 //
40 //M*/
41
42 /* Haar features calculation */
43
44 #include "precomp.hpp"
45 #include <stdio.h>
46
47 /*#if CV_SSE2
48 #   if CV_SSE4 || defined __SSE4__
49 #       include <smmintrin.h>
50 #   else
51 #       define _mm_blendv_pd(a, b, m) _mm_xor_pd(a, _mm_and_pd(_mm_xor_pd(b, a), m))
52 #       define _mm_blendv_ps(a, b, m) _mm_xor_ps(a, _mm_and_ps(_mm_xor_ps(b, a), m))
53 #   endif
54 #if defined CV_ICC
55 #   define CV_HAAR_USE_SSE 1
56 #endif
57 #endif*/
58
59 /* these settings affect the quality of detection: change with care */
60 #define CV_ADJUST_FEATURES 1
61 #define CV_ADJUST_WEIGHTS  0
62
63 typedef int sumtype;
64 typedef double sqsumtype;
65
66 typedef struct CvHidHaarFeature
67 {
68     struct
69     {
70         sumtype *p0, *p1, *p2, *p3;
71         float weight;
72     }
73     rect[CV_HAAR_FEATURE_MAX];
74 }
75 CvHidHaarFeature;
76
77
78 typedef struct CvHidHaarTreeNode
79 {
80     CvHidHaarFeature feature;
81     float threshold;
82     int left;
83     int right;
84 }
85 CvHidHaarTreeNode;
86
87
88 typedef struct CvHidHaarClassifier
89 {
90     int count;
91     //CvHaarFeature* orig_feature;
92     CvHidHaarTreeNode* node;
93     float* alpha;
94 }
95 CvHidHaarClassifier;
96
97
98 typedef struct CvHidHaarStageClassifier
99 {
100     int  count;
101     float threshold;
102     CvHidHaarClassifier* classifier;
103     int two_rects;
104
105     struct CvHidHaarStageClassifier* next;
106     struct CvHidHaarStageClassifier* child;
107     struct CvHidHaarStageClassifier* parent;
108 }
109 CvHidHaarStageClassifier;
110
111
112 struct CvHidHaarClassifierCascade
113 {
114     int  count;
115     int  isStumpBased;
116     int  has_tilted_features;
117     int  is_tree;
118     double inv_window_area;
119     CvMat sum, sqsum, tilted;
120     CvHidHaarStageClassifier* stage_classifier;
121     sqsumtype *pq0, *pq1, *pq2, *pq3;
122     sumtype *p0, *p1, *p2, *p3;
123
124     void** ipp_stages;
125 };
126
127
128 const int icv_object_win_border = 1;
129 const float icv_stage_threshold_bias = 0.0001f;
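/* Note: icv_stage_threshold_bias is subtracted from every stage threshold when
   the hidden cascade is built (icvCreateHidHaarClassifierCascade below), which
   in effect gives each stage a little numerical slack so that borderline
   windows are not rejected purely because of floating-point rounding. */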
130
131 static CvHaarClassifierCascade*
132 icvCreateHaarClassifierCascade( int stage_count )
133 {
134     CvHaarClassifierCascade* cascade = 0;
135
136     int block_size = sizeof(*cascade) + stage_count*sizeof(*cascade->stage_classifier);
137
138     if( stage_count <= 0 )
139         CV_Error( CV_StsOutOfRange, "Number of stages should be positive" );
140
141     cascade = (CvHaarClassifierCascade*)cvAlloc( block_size );
142     memset( cascade, 0, block_size );
143
144     cascade->stage_classifier = (CvHaarStageClassifier*)(cascade + 1);
145     cascade->flags = CV_HAAR_MAGIC_VAL;
146     cascade->count = stage_count;
147
148     return cascade;
149 }
150
151 static void
152 icvReleaseHidHaarClassifierCascade( CvHidHaarClassifierCascade** _cascade )
153 {
154     if( _cascade && *_cascade )
155     {
156 #ifdef HAVE_IPP
157         CvHidHaarClassifierCascade* cascade = *_cascade;
158         if( cascade->ipp_stages )
159         {
160             int i;
161             for( i = 0; i < cascade->count; i++ )
162             {
163                 if( cascade->ipp_stages[i] )
164                     ippiHaarClassifierFree_32f( (IppiHaarClassifier_32f*)cascade->ipp_stages[i] );
165             }
166         }
167         cvFree( &cascade->ipp_stages );
168 #endif
169         cvFree( _cascade );
170     }
171 }
172
173 /* create more efficient internal representation of haar classifier cascade */
174 static CvHidHaarClassifierCascade*
175 icvCreateHidHaarClassifierCascade( CvHaarClassifierCascade* cascade )
176 {
177     CvRect* ipp_features = 0;
178     float *ipp_weights = 0, *ipp_thresholds = 0, *ipp_val1 = 0, *ipp_val2 = 0;
179     int* ipp_counts = 0;
180
181     CvHidHaarClassifierCascade* out = 0;
182
183     int i, j, k, l;
184     int datasize;
185     int total_classifiers = 0;
186     int total_nodes = 0;
187     char errorstr[1000];
188     CvHidHaarClassifier* haar_classifier_ptr;
189     CvHidHaarTreeNode* haar_node_ptr;
190     CvSize orig_window_size;
191     int has_tilted_features = 0;
192     int max_count = 0;
193
194     if( !CV_IS_HAAR_CLASSIFIER(cascade) )
195         CV_Error( !cascade ? CV_StsNullPtr : CV_StsBadArg, "Invalid classifier pointer" );
196
197     if( cascade->hid_cascade )
198         CV_Error( CV_StsError, "hid_cascade has been already created" );
199
200     if( !cascade->stage_classifier )
201         CV_Error( CV_StsNullPtr, "Null stage classifier pointer" );
202
203     if( cascade->count <= 0 )
204         CV_Error( CV_StsOutOfRange, "Number of cascade stages should be positive" );
205
206     orig_window_size = cascade->orig_window_size;
207
208     /* check input structure correctness and calculate total memory size needed for
209        internal representation of the classifier cascade */
210     for( i = 0; i < cascade->count; i++ )
211     {
212         CvHaarStageClassifier* stage_classifier = cascade->stage_classifier + i;
213
214         if( !stage_classifier->classifier ||
215             stage_classifier->count <= 0 )
216         {
217             sprintf( errorstr, "header of the stage classifier #%d is invalid "
218                      "(has null pointers or non-positive classifier count)", i );
219             CV_Error( CV_StsError, errorstr );
220         }
221
222         max_count = MAX( max_count, stage_classifier->count );
223         total_classifiers += stage_classifier->count;
224
225         for( j = 0; j < stage_classifier->count; j++ )
226         {
227             CvHaarClassifier* classifier = stage_classifier->classifier + j;
228
229             total_nodes += classifier->count;
230             for( l = 0; l < classifier->count; l++ )
231             {
232                 for( k = 0; k < CV_HAAR_FEATURE_MAX; k++ )
233                 {
234                     if( classifier->haar_feature[l].rect[k].r.width )
235                     {
236                         CvRect r = classifier->haar_feature[l].rect[k].r;
237                         int tilted = classifier->haar_feature[l].tilted;
238                         has_tilted_features |= tilted != 0;
239                         if( r.width < 0 || r.height < 0 || r.y < 0 ||
240                             r.x + r.width > orig_window_size.width
241                             ||
242                             (!tilted &&
243                             (r.x < 0 || r.y + r.height > orig_window_size.height))
244                             ||
245                             (tilted && (r.x - r.height < 0 ||
246                             r.y + r.width + r.height > orig_window_size.height)))
247                         {
248                             sprintf( errorstr, "rectangle #%d of the classifier #%d of "
249                                      "the stage classifier #%d is not inside "
250                                      "the reference (original) cascade window", k, j, i );
251                             CV_Error( CV_StsError, errorstr );
252                         }
253                     }
254                 }
255             }
256         }
257     }
258
259     // this is an upper boundary for the whole hidden cascade size
260     datasize = sizeof(CvHidHaarClassifierCascade) +
261                sizeof(CvHidHaarStageClassifier)*cascade->count +
262                sizeof(CvHidHaarClassifier) * total_classifiers +
263                sizeof(CvHidHaarTreeNode) * total_nodes +
264                sizeof(void*)*(total_nodes + total_classifiers);
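    /* The last term (one pointer-sized slot per node and per classifier) is
       deliberate over-allocation: each classifier stores node_count + 1 alpha
       values and the node array of the next classifier is re-aligned with
       cvAlignPtr below, so this slack absorbs both the extra alphas and any
       alignment padding. */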
265
266     out = (CvHidHaarClassifierCascade*)cvAlloc( datasize );
267     memset( out, 0, sizeof(*out) );
268
269     /* init header */
270     out->count = cascade->count;
271     out->stage_classifier = (CvHidHaarStageClassifier*)(out + 1);
272     haar_classifier_ptr = (CvHidHaarClassifier*)(out->stage_classifier + cascade->count);
273     haar_node_ptr = (CvHidHaarTreeNode*)(haar_classifier_ptr + total_classifiers);
274
275     out->isStumpBased = 1;
276     out->has_tilted_features = has_tilted_features;
277     out->is_tree = 0;
278
279     /* initialize internal representation */
280     for( i = 0; i < cascade->count; i++ )
281     {
282         CvHaarStageClassifier* stage_classifier = cascade->stage_classifier + i;
283         CvHidHaarStageClassifier* hid_stage_classifier = out->stage_classifier + i;
284
285         hid_stage_classifier->count = stage_classifier->count;
286         hid_stage_classifier->threshold = stage_classifier->threshold - icv_stage_threshold_bias;
287         hid_stage_classifier->classifier = haar_classifier_ptr;
288         hid_stage_classifier->two_rects = 1;
289         haar_classifier_ptr += stage_classifier->count;
290
291         hid_stage_classifier->parent = (stage_classifier->parent == -1)
292             ? NULL : out->stage_classifier + stage_classifier->parent;
293         hid_stage_classifier->next = (stage_classifier->next == -1)
294             ? NULL : out->stage_classifier + stage_classifier->next;
295         hid_stage_classifier->child = (stage_classifier->child == -1)
296             ? NULL : out->stage_classifier + stage_classifier->child;
297
298         out->is_tree |= hid_stage_classifier->next != NULL;
299
300         for( j = 0; j < stage_classifier->count; j++ )
301         {
302             CvHaarClassifier* classifier = stage_classifier->classifier + j;
303             CvHidHaarClassifier* hid_classifier = hid_stage_classifier->classifier + j;
304             int node_count = classifier->count;
305             float* alpha_ptr = (float*)(haar_node_ptr + node_count);
306
307             hid_classifier->count = node_count;
308             hid_classifier->node = haar_node_ptr;
309             hid_classifier->alpha = alpha_ptr;
310
311             for( l = 0; l < node_count; l++ )
312             {
313                 CvHidHaarTreeNode* node = hid_classifier->node + l;
314                 CvHaarFeature* feature = classifier->haar_feature + l;
315                 memset( node, -1, sizeof(*node) );
316                 node->threshold = classifier->threshold[l];
317                 node->left = classifier->left[l];
318                 node->right = classifier->right[l];
319
320                 if( fabs(feature->rect[2].weight) < DBL_EPSILON ||
321                     feature->rect[2].r.width == 0 ||
322                     feature->rect[2].r.height == 0 )
323                     memset( &(node->feature.rect[2]), 0, sizeof(node->feature.rect[2]) );
324                 else
325                     hid_stage_classifier->two_rects = 0;
326             }
327
328             memcpy( alpha_ptr, classifier->alpha, (node_count+1)*sizeof(alpha_ptr[0]));
329             haar_node_ptr =
330                 (CvHidHaarTreeNode*)cvAlignPtr(alpha_ptr+node_count+1, sizeof(void*));
331
332             out->isStumpBased &= node_count == 1;
333         }
334     }
335
336 #ifdef HAVE_IPP
337     int can_use_ipp = !out->has_tilted_features && !out->is_tree && out->isStumpBased;
338
339     if( can_use_ipp )
340     {
341         int ipp_datasize = cascade->count*sizeof(out->ipp_stages[0]);
342         float ipp_weight_scale=(float)(1./((orig_window_size.width-icv_object_win_border*2)*
343             (orig_window_size.height-icv_object_win_border*2)));
344
345         out->ipp_stages = (void**)cvAlloc( ipp_datasize );
346         memset( out->ipp_stages, 0, ipp_datasize );
347
348         ipp_features = (CvRect*)cvAlloc( max_count*3*sizeof(ipp_features[0]) );
349         ipp_weights = (float*)cvAlloc( max_count*3*sizeof(ipp_weights[0]) );
350         ipp_thresholds = (float*)cvAlloc( max_count*sizeof(ipp_thresholds[0]) );
351         ipp_val1 = (float*)cvAlloc( max_count*sizeof(ipp_val1[0]) );
352         ipp_val2 = (float*)cvAlloc( max_count*sizeof(ipp_val2[0]) );
353         ipp_counts = (int*)cvAlloc( max_count*sizeof(ipp_counts[0]) );
354
355         for( i = 0; i < cascade->count; i++ )
356         {
357             CvHaarStageClassifier* stage_classifier = cascade->stage_classifier + i;
358             for( j = 0, k = 0; j < stage_classifier->count; j++ )
359             {
360                 CvHaarClassifier* classifier = stage_classifier->classifier + j;
361                 int rect_count = 2 + (classifier->haar_feature->rect[2].r.width != 0);
362
363                 ipp_thresholds[j] = classifier->threshold[0];
364                 ipp_val1[j] = classifier->alpha[0];
365                 ipp_val2[j] = classifier->alpha[1];
366                 ipp_counts[j] = rect_count;
367
368                 for( l = 0; l < rect_count; l++, k++ )
369                 {
370                     ipp_features[k] = classifier->haar_feature->rect[l].r;
371                     //ipp_features[k].y = orig_window_size.height - ipp_features[k].y - ipp_features[k].height;
372                     ipp_weights[k] = classifier->haar_feature->rect[l].weight*ipp_weight_scale;
373                 }
374             }
375
376             if( ippiHaarClassifierInitAlloc_32f( (IppiHaarClassifier_32f**)&out->ipp_stages[i],
377                 (const IppiRect*)ipp_features, ipp_weights, ipp_thresholds,
378                 ipp_val1, ipp_val2, ipp_counts, stage_classifier->count ) < 0 )
379                 break;
380         }
381
382         if( i < cascade->count )
383         {
384             for( j = 0; j < i; j++ )
385                 if( out->ipp_stages[j] )
386                     ippiHaarClassifierFree_32f( (IppiHaarClassifier_32f*)out->ipp_stages[j] );
387             cvFree( &out->ipp_stages );
388         }
389     }
390 #endif
391
392     cascade->hid_cascade = out;
393     assert( (char*)haar_node_ptr - (char*)out <= datasize );
394
395     cvFree( &ipp_features );
396     cvFree( &ipp_weights );
397     cvFree( &ipp_thresholds );
398     cvFree( &ipp_val1 );
399     cvFree( &ipp_val2 );
400     cvFree( &ipp_counts );
401
402     return out;
403 }
404
405
406 #define sum_elem_ptr(sum,row,col)  \
407     ((sumtype*)CV_MAT_ELEM_PTR_FAST((sum),(row),(col),sizeof(sumtype)))
408
409 #define sqsum_elem_ptr(sqsum,row,col)  \
410     ((sqsumtype*)CV_MAT_ELEM_PTR_FAST((sqsum),(row),(col),sizeof(sqsumtype)))
411
412 #define calc_sum(rect,offset) \
413     ((rect).p0[offset] - (rect).p1[offset] - (rect).p2[offset] + (rect).p3[offset])
414
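// The macros above implement the classic integral-image (summed-area-table)
// lookup: for a rectangle whose four corners have been pre-resolved to the
// pointers p0 (top-left), p1 (top-right), p2 (bottom-left) and p3
// (bottom-right), the pixel sum inside the rectangle at a given window offset
// is
//
//     S = p0[offset] - p1[offset] - p2[offset] + p3[offset]
//
// Illustrative sketch only (not part of the library): the same quantity for a
// hypothetical rectangle (x, y, w, h) computed directly from a CvMat integral
// image `sum` would be
//
//     sumtype s = *sum_elem_ptr(sum, y,     x)
//               - *sum_elem_ptr(sum, y,     x + w)
//               - *sum_elem_ptr(sum, y + h, x)
//               + *sum_elem_ptr(sum, y + h, x + w);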
415
416 CV_IMPL void
417 cvSetImagesForHaarClassifierCascade( CvHaarClassifierCascade* _cascade,
418                                      const CvArr* _sum,
419                                      const CvArr* _sqsum,
420                                      const CvArr* _tilted_sum,
421                                      double scale )
422 {
423     CvMat sum_stub, *sum = (CvMat*)_sum;
424     CvMat sqsum_stub, *sqsum = (CvMat*)_sqsum;
425     CvMat tilted_stub, *tilted = (CvMat*)_tilted_sum;
426     CvHidHaarClassifierCascade* cascade;
427     int coi0 = 0, coi1 = 0;
428     int i;
429     CvRect equRect;
430     double weight_scale;
431
432     if( !CV_IS_HAAR_CLASSIFIER(_cascade) )
433         CV_Error( !_cascade ? CV_StsNullPtr : CV_StsBadArg, "Invalid classifier pointer" );
434
435     if( scale <= 0 )
436         CV_Error( CV_StsOutOfRange, "Scale must be positive" );
437
438     sum = cvGetMat( sum, &sum_stub, &coi0 );
439     sqsum = cvGetMat( sqsum, &sqsum_stub, &coi1 );
440
441     if( coi0 || coi1 )
442         CV_Error( CV_BadCOI, "COI is not supported" );
443
444     if( !CV_ARE_SIZES_EQ( sum, sqsum ))
445         CV_Error( CV_StsUnmatchedSizes, "All integral images must have the same size" );
446
447     if( CV_MAT_TYPE(sqsum->type) != CV_64FC1 ||
448         CV_MAT_TYPE(sum->type) != CV_32SC1 )
449         CV_Error( CV_StsUnsupportedFormat,
450         "Only (32s, 64f, 32s) combination of (sum,sqsum,tilted_sum) formats is allowed" );
451
452     if( !_cascade->hid_cascade )
453         icvCreateHidHaarClassifierCascade(_cascade);
454
455     cascade = _cascade->hid_cascade;
456
457     if( cascade->has_tilted_features )
458     {
459         tilted = cvGetMat( tilted, &tilted_stub, &coi1 );
460
461         if( CV_MAT_TYPE(tilted->type) != CV_32SC1 )
462             CV_Error( CV_StsUnsupportedFormat,
463             "Only (32s, 64f, 32s) combination of (sum,sqsum,tilted_sum) formats is allowed" );
464
465         if( sum->step != tilted->step )
466             CV_Error( CV_StsUnmatchedSizes,
467             "Sum and tilted_sum must have the same stride (step, widthStep)" );
468
469         if( !CV_ARE_SIZES_EQ( sum, tilted ))
470             CV_Error( CV_StsUnmatchedSizes, "All integral images must have the same size" );
471         cascade->tilted = *tilted;
472     }
473
474     _cascade->scale = scale;
475     _cascade->real_window_size.width = cvRound( _cascade->orig_window_size.width * scale );
476     _cascade->real_window_size.height = cvRound( _cascade->orig_window_size.height * scale );
477
478     cascade->sum = *sum;
479     cascade->sqsum = *sqsum;
480
481     equRect.x = equRect.y = cvRound(scale);
482     equRect.width = cvRound((_cascade->orig_window_size.width-2)*scale);
483     equRect.height = cvRound((_cascade->orig_window_size.height-2)*scale);
484     weight_scale = 1./(equRect.width*equRect.height);
485     cascade->inv_window_area = weight_scale;
486
487     cascade->p0 = sum_elem_ptr(*sum, equRect.y, equRect.x);
488     cascade->p1 = sum_elem_ptr(*sum, equRect.y, equRect.x + equRect.width );
489     cascade->p2 = sum_elem_ptr(*sum, equRect.y + equRect.height, equRect.x );
490     cascade->p3 = sum_elem_ptr(*sum, equRect.y + equRect.height,
491                                      equRect.x + equRect.width );
492
493     cascade->pq0 = sqsum_elem_ptr(*sqsum, equRect.y, equRect.x);
494     cascade->pq1 = sqsum_elem_ptr(*sqsum, equRect.y, equRect.x + equRect.width );
495     cascade->pq2 = sqsum_elem_ptr(*sqsum, equRect.y + equRect.height, equRect.x );
496     cascade->pq3 = sqsum_elem_ptr(*sqsum, equRect.y + equRect.height,
497                                           equRect.x + equRect.width );
498
499     /* init pointers in haar features according to real window size and
500        given image pointers */
501     for( i = 0; i < _cascade->count; i++ )
502     {
503         int j, k, l;
504         for( j = 0; j < cascade->stage_classifier[i].count; j++ )
505         {
506             for( l = 0; l < cascade->stage_classifier[i].classifier[j].count; l++ )
507             {
508                 CvHaarFeature* feature =
509                     &_cascade->stage_classifier[i].classifier[j].haar_feature[l];
510                 /* CvHidHaarClassifier* classifier =
511                     cascade->stage_classifier[i].classifier + j; */
512                 CvHidHaarFeature* hidfeature =
513                     &cascade->stage_classifier[i].classifier[j].node[l].feature;
514                 double sum0 = 0, area0 = 0;
515                 CvRect r[3];
516
517                 int base_w = -1, base_h = -1;
518                 int new_base_w = 0, new_base_h = 0;
519                 int kx, ky;
520                 int flagx = 0, flagy = 0;
521                 int x0 = 0, y0 = 0;
522                 int nr;
523
524                 /* align blocks */
525                 for( k = 0; k < CV_HAAR_FEATURE_MAX; k++ )
526                 {
527                     if( !hidfeature->rect[k].p0 )
528                         break;
529                     r[k] = feature->rect[k].r;
530                     base_w = (int)CV_IMIN( (unsigned)base_w, (unsigned)(r[k].width-1) );
531                     base_w = (int)CV_IMIN( (unsigned)base_w, (unsigned)(r[k].x - r[0].x-1) );
532                     base_h = (int)CV_IMIN( (unsigned)base_h, (unsigned)(r[k].height-1) );
533                     base_h = (int)CV_IMIN( (unsigned)base_h, (unsigned)(r[k].y - r[0].y-1) );
534                 }
535
536                 nr = k;
537
538                 base_w += 1;
539                 base_h += 1;
540                 kx = r[0].width / base_w;
541                 ky = r[0].height / base_h;
542
543                 if( kx <= 0 )
544                 {
545                     flagx = 1;
546                     new_base_w = cvRound( r[0].width * scale ) / kx;
547                     x0 = cvRound( r[0].x * scale );
548                 }
549
550                 if( ky <= 0 )
551                 {
552                     flagy = 1;
553                     new_base_h = cvRound( r[0].height * scale ) / ky;
554                     y0 = cvRound( r[0].y * scale );
555                 }
556
557                 for( k = 0; k < nr; k++ )
558                 {
559                     CvRect tr;
560                     double correction_ratio;
561
562                     if( flagx )
563                     {
564                         tr.x = (r[k].x - r[0].x) * new_base_w / base_w + x0;
565                         tr.width = r[k].width * new_base_w / base_w;
566                     }
567                     else
568                     {
569                         tr.x = cvRound( r[k].x * scale );
570                         tr.width = cvRound( r[k].width * scale );
571                     }
572
573                     if( flagy )
574                     {
575                         tr.y = (r[k].y - r[0].y) * new_base_h / base_h + y0;
576                         tr.height = r[k].height * new_base_h / base_h;
577                     }
578                     else
579                     {
580                         tr.y = cvRound( r[k].y * scale );
581                         tr.height = cvRound( r[k].height * scale );
582                     }
583
584 #if CV_ADJUST_WEIGHTS
585                     {
586                     // RAINER START
587                     const float orig_feature_size =  (float)(feature->rect[k].r.width)*feature->rect[k].r.height;
588                     const float orig_norm_size = (float)(_cascade->orig_window_size.width)*(_cascade->orig_window_size.height);
589                     const float feature_size = float(tr.width*tr.height);
590                     //const float normSize    = float(equRect.width*equRect.height);
591                     float target_ratio = orig_feature_size / orig_norm_size;
592                     //float isRatio = featureSize / normSize;
593                     //correctionRatio = targetRatio / isRatio / normSize;
594                     correction_ratio = target_ratio / feature_size;
595                     // RAINER END
596                     }
597 #else
598                     correction_ratio = weight_scale * (!feature->tilted ? 1 : 0.5);
599 #endif
600
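                    /* For an upright feature the four corner pointers are taken
                       from the plain integral image `sum`; for a tilted feature
                       they come from the rotated integral image `tilted`
                       (Lienhart's 45-degree extension), which is why the corner
                       arithmetic in the else-branch mixes tr.width and tr.height. */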
601                     if( !feature->tilted )
602                     {
603                         hidfeature->rect[k].p0 = sum_elem_ptr(*sum, tr.y, tr.x);
604                         hidfeature->rect[k].p1 = sum_elem_ptr(*sum, tr.y, tr.x + tr.width);
605                         hidfeature->rect[k].p2 = sum_elem_ptr(*sum, tr.y + tr.height, tr.x);
606                         hidfeature->rect[k].p3 = sum_elem_ptr(*sum, tr.y + tr.height, tr.x + tr.width);
607                     }
608                     else
609                     {
610                         hidfeature->rect[k].p2 = sum_elem_ptr(*tilted, tr.y + tr.width, tr.x + tr.width);
611                         hidfeature->rect[k].p3 = sum_elem_ptr(*tilted, tr.y + tr.width + tr.height,
612                                                               tr.x + tr.width - tr.height);
613                         hidfeature->rect[k].p0 = sum_elem_ptr(*tilted, tr.y, tr.x);
614                         hidfeature->rect[k].p1 = sum_elem_ptr(*tilted, tr.y + tr.height, tr.x - tr.height);
615                     }
616
617                     hidfeature->rect[k].weight = (float)(feature->rect[k].weight * correction_ratio);
618
619                     if( k == 0 )
620                         area0 = tr.width * tr.height;
621                     else
622                         sum0 += hidfeature->rect[k].weight * tr.width * tr.height;
623                 }
624
625                 hidfeature->rect[0].weight = (float)(-sum0/area0);
626             } /* l */
627         } /* j */
628     }
629 }
630
631
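/* Evaluate one (possibly tree-structured) weak classifier at the current
   window. node->left / node->right hold positive indices for internal
   children; a non-positive index -i encodes a leaf, whose value is stored in
   classifier->alpha[i]. The node threshold is scaled by the window's variance
   normalization factor before the comparison, as the loop below shows. */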
632 CV_INLINE
633 double icvEvalHidHaarClassifier( CvHidHaarClassifier* classifier,
634                                  double variance_norm_factor,
635                                  size_t p_offset )
636 {
637     int idx = 0;
638     do
639     {
640         CvHidHaarTreeNode* node = classifier->node + idx;
641         double t = node->threshold * variance_norm_factor;
642
643         double sum = calc_sum(node->feature.rect[0],p_offset) * node->feature.rect[0].weight;
644         sum += calc_sum(node->feature.rect[1],p_offset) * node->feature.rect[1].weight;
645
646         if( node->feature.rect[2].p0 )
647             sum += calc_sum(node->feature.rect[2],p_offset) * node->feature.rect[2].weight;
648
649         idx = sum < t ? node->left : node->right;
650     }
651     while( idx > 0 );
652     return classifier->alpha[-idx];
653 }
654
655
656 CV_IMPL int
657 cvRunHaarClassifierCascadeSum( const CvHaarClassifierCascade* _cascade,
658                                CvPoint pt, double& stage_sum, int start_stage )
659 {
660     int p_offset, pq_offset;
661     int i, j;
662     double mean, variance_norm_factor;
663     CvHidHaarClassifierCascade* cascade;
664
665     if( !CV_IS_HAAR_CLASSIFIER(_cascade) )
666         CV_Error( !_cascade ? CV_StsNullPtr : CV_StsBadArg, "Invalid cascade pointer" );
667
668     cascade = _cascade->hid_cascade;
669     if( !cascade )
670         CV_Error( CV_StsNullPtr, "Hidden cascade has not been created.\n"
671             "Use cvSetImagesForHaarClassifierCascade" );
672
673     if( pt.x < 0 || pt.y < 0 ||
674         pt.x + _cascade->real_window_size.width >= cascade->sum.width ||
675         pt.y + _cascade->real_window_size.height >= cascade->sum.height )
676         return -1;
677
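    /* Window normalization: with N = effective window area (inv_window_area = 1/N),
       mean = S/N and variance = SQ/N - mean^2, where S and SQ are the plain and
       squared pixel sums taken from the two integral images. The standard
       deviation sqrt(variance) is used to scale the node thresholds; if rounding
       drives the variance negative, the factor falls back to 1. */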
678     p_offset = pt.y * (cascade->sum.step/sizeof(sumtype)) + pt.x;
679     pq_offset = pt.y * (cascade->sqsum.step/sizeof(sqsumtype)) + pt.x;
680     mean = calc_sum(*cascade,p_offset)*cascade->inv_window_area;
681     variance_norm_factor = cascade->pq0[pq_offset] - cascade->pq1[pq_offset] -
682                            cascade->pq2[pq_offset] + cascade->pq3[pq_offset];
683     variance_norm_factor = variance_norm_factor*cascade->inv_window_area - mean*mean;
684     if( variance_norm_factor >= 0. )
685         variance_norm_factor = sqrt(variance_norm_factor);
686     else
687         variance_norm_factor = 1.;
688
689     if( cascade->is_tree )
690     {
691         CvHidHaarStageClassifier* ptr = cascade->stage_classifier;
692         assert( start_stage == 0 );
693
694         while( ptr )
695         {
696             stage_sum = 0.0;
697
698             for( j = 0; j < ptr->count; j++ )
699             {
700                 stage_sum += icvEvalHidHaarClassifier( ptr->classifier + j,
701                     variance_norm_factor, p_offset );
702             }
703
704             if( stage_sum >= ptr->threshold )
705             {
706                 ptr = ptr->child;
707             }
708             else
709             {
710                 while( ptr && ptr->next == NULL ) ptr = ptr->parent;
711                 if( ptr == NULL )
712                     return 0;
713                 ptr = ptr->next;
714             }
715         }
716     }
717     else if( cascade->isStumpBased )
718     {
719         for( i = start_stage; i < cascade->count; i++ )
720         {
721 #ifndef CV_HAAR_USE_SSE
722             stage_sum = 0.0;
723 #else
724             __m128d stage_sum = _mm_setzero_pd();
725 #endif
726
727             if( cascade->stage_classifier[i].two_rects )
728             {
729                 for( j = 0; j < cascade->stage_classifier[i].count; j++ )
730                 {
731                     CvHidHaarClassifier* classifier = cascade->stage_classifier[i].classifier + j;
732                     CvHidHaarTreeNode* node = classifier->node;
733 #ifndef CV_HAAR_USE_SSE
734                     double t = node->threshold*variance_norm_factor;
735                     double sum = calc_sum(node->feature.rect[0],p_offset) * node->feature.rect[0].weight;
736                     sum += calc_sum(node->feature.rect[1],p_offset) * node->feature.rect[1].weight;
737                     stage_sum += classifier->alpha[sum >= t];
738 #else
739                     // ayasin - NHM perf optim. Avoid use of costly flaky jcc
740                     __m128d t = _mm_set_sd(node->threshold*variance_norm_factor);
741                     __m128d a = _mm_set_sd(classifier->alpha[0]);
742                     __m128d b = _mm_set_sd(classifier->alpha[1]);
743                     __m128d sum = _mm_set_sd(calc_sum(node->feature.rect[0],p_offset) * node->feature.rect[0].weight +
744                                              calc_sum(node->feature.rect[1],p_offset) * node->feature.rect[1].weight);
745                     t = _mm_cmpgt_sd(t, sum);
746                     stage_sum = _mm_add_sd(stage_sum, _mm_blendv_pd(b, a, t));
747 #endif
748                 }
749             }
750             else
751             {
752                 for( j = 0; j < cascade->stage_classifier[i].count; j++ )
753                 {
754                     CvHidHaarClassifier* classifier = cascade->stage_classifier[i].classifier + j;
755                     CvHidHaarTreeNode* node = classifier->node;
756 #ifndef CV_HAAR_USE_SSE
757                     double t = node->threshold*variance_norm_factor;
758                     double sum = calc_sum(node->feature.rect[0],p_offset) * node->feature.rect[0].weight;
759                     sum += calc_sum(node->feature.rect[1],p_offset) * node->feature.rect[1].weight;
760                     if( node->feature.rect[2].p0 )
761                         sum += calc_sum(node->feature.rect[2],p_offset) * node->feature.rect[2].weight;
762                     
763                     stage_sum += classifier->alpha[sum >= t];
764 #else
765                     // ayasin - NHM perf optim. Avoid use of costly flaky jcc
766                     __m128d t = _mm_set_sd(node->threshold*variance_norm_factor);
767                     __m128d a = _mm_set_sd(classifier->alpha[0]);
768                     __m128d b = _mm_set_sd(classifier->alpha[1]);
769                     double _sum = calc_sum(node->feature.rect[0],p_offset) * node->feature.rect[0].weight;
770                     _sum += calc_sum(node->feature.rect[1],p_offset) * node->feature.rect[1].weight;
771                     if( node->feature.rect[2].p0 )
772                         _sum += calc_sum(node->feature.rect[2],p_offset) * node->feature.rect[2].weight;
773                     __m128d sum = _mm_set_sd(_sum);
774                     
775                     t = _mm_cmpgt_sd(t, sum);
776                     stage_sum = _mm_add_sd(stage_sum, _mm_blendv_pd(b, a, t));
777 #endif
778                 }
779             }
780
781 #ifndef CV_HAAR_USE_SSE
782             if( stage_sum < cascade->stage_classifier[i].threshold )
783 #else
784             __m128d i_threshold = _mm_set_sd(cascade->stage_classifier[i].threshold);
785             if( _mm_comilt_sd(stage_sum, i_threshold) )
786 #endif
787                 return -i;
788         }
789     }
790     else
791     {
792         for( i = start_stage; i < cascade->count; i++ )
793         {
794             stage_sum = 0.0;
795
796             for( j = 0; j < cascade->stage_classifier[i].count; j++ )
797             {
798                 stage_sum += icvEvalHidHaarClassifier(
799                     cascade->stage_classifier[i].classifier + j,
800                     variance_norm_factor, p_offset );
801             }
802
803             if( stage_sum < cascade->stage_classifier[i].threshold )
804                 return -i;
805         }
806     }
807     return 1;
808 }
809
810 CV_IMPL int
811 cvRunHaarClassifierCascade( const CvHaarClassifierCascade* _cascade,
812                             CvPoint pt, int start_stage )
813 {
814     double stage_sum;
815     return cvRunHaarClassifierCascadeSum(_cascade, pt, stage_sum, start_stage);
816 }
817
818 namespace cv
819 {
820
821 struct HaarDetectObjects_ScaleImage_Invoker
822 {
823     HaarDetectObjects_ScaleImage_Invoker( const CvHaarClassifierCascade* _cascade,
824                                           int _stripSize, double _factor,
825                                           const Mat& _sum1, const Mat& _sqsum1, Mat* _norm1,
826                                           Mat* _mask1, Rect _equRect, ConcurrentRectVector& _vec, 
827                                           std::vector<int>& _levels, std::vector<double>& _weights,
828                                           bool _outputLevels  )
829     {
830         cascade = _cascade;
831         stripSize = _stripSize;
832         factor = _factor;
833         sum1 = _sum1;
834         sqsum1 = _sqsum1;
835         norm1 = _norm1;
836         mask1 = _mask1;
837         equRect = _equRect;
838         vec = &_vec;
839         rejectLevels = _outputLevels ? &_levels : 0;
840         levelWeights = _outputLevels ? &_weights : 0;
841     }
842     
843     void operator()( const BlockedRange& range ) const
844     {
845         Size winSize0 = cascade->orig_window_size;
846         Size winSize(cvRound(winSize0.width*factor), cvRound(winSize0.height*factor));
847         int y1 = range.begin()*stripSize, y2 = min(range.end()*stripSize, sum1.rows - 1 - winSize0.height);
848         
849         if (y2 <= y1 || sum1.cols <= 1 + winSize0.width)
850             return;
851         
852         Size ssz(sum1.cols - 1 - winSize0.width, y2 - y1);
853         int x, y, ystep = factor > 2 ? 1 : 2;
854         
855     #ifdef HAVE_IPP
856         if( cascade->hid_cascade->ipp_stages )
857         {
858             IppiRect iequRect = {equRect.x, equRect.y, equRect.width, equRect.height};
859             ippiRectStdDev_32f_C1R(sum1.ptr<float>(y1), sum1.step,
860                                    sqsum1.ptr<double>(y1), sqsum1.step,
861                                    norm1->ptr<float>(y1), norm1->step,
862                                    ippiSize(ssz.width, ssz.height), iequRect );
863             
864             int positive = (ssz.width/ystep)*((ssz.height + ystep-1)/ystep);
865
866             if( ystep == 1 )
867                 (*mask1) = Scalar::all(1);
868             else
869                 for( y = y1; y < y2; y++ )
870                 {
871                     uchar* mask1row = mask1->ptr(y);
872                     memset( mask1row, 0, ssz.width );
873                     
874                     if( y % ystep == 0 )
875                         for( x = 0; x < ssz.width; x += ystep )
876                             mask1row[x] = (uchar)1;
877                 }
878             
879             for( int j = 0; j < cascade->count; j++ )
880             {
881                 if( ippiApplyHaarClassifier_32f_C1R(
882                             sum1.ptr<float>(y1), sum1.step,
883                             norm1->ptr<float>(y1), norm1->step,
884                             mask1->ptr<uchar>(y1), mask1->step,
885                             ippiSize(ssz.width, ssz.height), &positive,
886                             cascade->hid_cascade->stage_classifier[j].threshold,
887                             (IppiHaarClassifier_32f*)cascade->hid_cascade->ipp_stages[j]) < 0 )
888                     positive = 0;
889                 if( positive <= 0 )
890                     break;
891             }
892             
893             if( positive > 0 )
894                 for( y = y1; y < y2; y += ystep )
895                 {
896                     uchar* mask1row = mask1->ptr(y);
897                     for( x = 0; x < ssz.width; x += ystep )
898                         if( mask1row[x] != 0 )
899                         {
900                             vec->push_back(Rect(cvRound(x*factor), cvRound(y*factor),
901                                                 winSize.width, winSize.height));
902                             if( --positive == 0 )
903                                 break;
904                         }
905                     if( positive == 0 )
906                         break;
907                 }
908         }
909         else
910 #endif
911             for( y = y1; y < y2; y += ystep )
912                 for( x = 0; x < ssz.width; x += ystep )
913                 {
914                     double gypWeight;
915                     int result = cvRunHaarClassifierCascadeSum( cascade, cvPoint(x,y), gypWeight, 0 );
916                     if( rejectLevels )
917                     {
918                         if( result == 1 )
919                             result = -1*cascade->count;
920                         if( cascade->count + result < 4 )
921                         {
922                             vec->push_back(Rect(cvRound(x*factor), cvRound(y*factor),
923                                            winSize.width, winSize.height));
924                             rejectLevels->push_back(-result);
925                             levelWeights->push_back(gypWeight);
926                         }
927                     }
928                     else
929                     {
930                         if( result > 0 )
931                             vec->push_back(Rect(cvRound(x*factor), cvRound(y*factor),
932                                            winSize.width, winSize.height)); 
933                     }
934                 }
935     }
936     
937     const CvHaarClassifierCascade* cascade;
938     int stripSize;
939     double factor;
940     Mat sum1, sqsum1, *norm1, *mask1;
941     Rect equRect;
942     ConcurrentRectVector* vec;
943     std::vector<int>* rejectLevels;
944     std::vector<double>* levelWeights;
945 };
946     
947
948 struct HaarDetectObjects_ScaleCascade_Invoker
949 {
950     HaarDetectObjects_ScaleCascade_Invoker( const CvHaarClassifierCascade* _cascade,
951                                             Size _winsize, const Range& _xrange, double _ystep,
952                                             size_t _sumstep, const int** _p, const int** _pq,
953                                             ConcurrentRectVector& _vec )
954     {
955         cascade = _cascade;
956         winsize = _winsize;
957         xrange = _xrange;
958         ystep = _ystep;
959         sumstep = _sumstep;
960         p = _p; pq = _pq;
961         vec = &_vec;
962     }
963     
964     void operator()( const BlockedRange& range ) const
965     {
966         int iy, startY = range.begin(), endY = range.end();
967         const int *p0 = p[0], *p1 = p[1], *p2 = p[2], *p3 = p[3];
968         const int *pq0 = pq[0], *pq1 = pq[1], *pq2 = pq[2], *pq3 = pq[3];
969         bool doCannyPruning = p0 != 0;
970         int sstep = (int)(sumstep/sizeof(p0[0]));
971         
972         for( iy = startY; iy < endY; iy++ )
973         {
974             int ix, y = cvRound(iy*ystep), ixstep = 1;
975             for( ix = xrange.start; ix < xrange.end; ix += ixstep )
976             {
977                 int x = cvRound(ix*ystep); // it should really be ystep, not ixstep
978                 
979                 if( doCannyPruning )
980                 {
981                     int offset = y*sstep + x;
982                     int s = p0[offset] - p1[offset] - p2[offset] + p3[offset];
983                     int sq = pq0[offset] - pq1[offset] - pq2[offset] + pq3[offset];
984                     if( s < 100 || sq < 20 )
985                     {
986                         ixstep = 2;
987                         continue;
988                     }
989                 }
990                 
991                 int result = cvRunHaarClassifierCascade( cascade, cvPoint(x, y), 0 );
992                 if( result > 0 )
993                     vec->push_back(Rect(x, y, winsize.width, winsize.height));
994                 ixstep = result != 0 ? 1 : 2;
995             }
996         }
997     }
998     
999     const CvHaarClassifierCascade* cascade;
1000     double ystep;
1001     size_t sumstep;
1002     Size winsize;
1003     Range xrange;
1004     const int** p;
1005     const int** pq;
1006     ConcurrentRectVector* vec;
1007 };
1008     
1009     
1010 }
1011     
1012
1013 CvSeq*
1014 cvHaarDetectObjectsForROC( const CvArr* _img, 
1015                      CvHaarClassifierCascade* cascade, CvMemStorage* storage,
1016                      std::vector<int>& rejectLevels, std::vector<double>& levelWeights,
1017                      double scaleFactor, int minNeighbors, int flags, 
1018                      CvSize minSize, CvSize maxSize, bool outputRejectLevels )
1019 {
1020     const double GROUP_EPS = 0.2;
1021     CvMat stub, *img = (CvMat*)_img;
1022     cv::Ptr<CvMat> temp, sum, tilted, sqsum, normImg, sumcanny, imgSmall;
1023     CvSeq* result_seq = 0;
1024     cv::Ptr<CvMemStorage> temp_storage;
1025
1026     cv::ConcurrentRectVector allCandidates;
1027     std::vector<cv::Rect> rectList;
1028     std::vector<int> rweights;
1029     double factor;
1030     int coi;
1031     bool doCannyPruning = (flags & CV_HAAR_DO_CANNY_PRUNING) != 0;
1032     bool findBiggestObject = (flags & CV_HAAR_FIND_BIGGEST_OBJECT) != 0;
1033     bool roughSearch = (flags & CV_HAAR_DO_ROUGH_SEARCH) != 0;
1034
1035     if( !CV_IS_HAAR_CLASSIFIER(cascade) )
1036         CV_Error( !cascade ? CV_StsNullPtr : CV_StsBadArg, "Invalid classifier cascade" );
1037
1038     if( !storage )
1039         CV_Error( CV_StsNullPtr, "Null storage pointer" );
1040
1041     img = cvGetMat( img, &stub, &coi );
1042     if( coi )
1043         CV_Error( CV_BadCOI, "COI is not supported" );
1044
1045     if( CV_MAT_DEPTH(img->type) != CV_8U )
1046         CV_Error( CV_StsUnsupportedFormat, "Only 8-bit images are supported" );
1047     
1048     if( scaleFactor <= 1 )
1049         CV_Error( CV_StsOutOfRange, "scale factor must be > 1" );
1050
1051     if( findBiggestObject )
1052         flags &= ~CV_HAAR_SCALE_IMAGE;
1053     
1054     if( maxSize.height == 0 || maxSize.width == 0 )
1055     {
1056         maxSize.height = img->rows;
1057         maxSize.width = img->cols;
1058     }
1059
1060     temp = cvCreateMat( img->rows, img->cols, CV_8UC1 );
1061     sum = cvCreateMat( img->rows + 1, img->cols + 1, CV_32SC1 );
1062     sqsum = cvCreateMat( img->rows + 1, img->cols + 1, CV_64FC1 );
1063
1064     if( !cascade->hid_cascade )
1065         icvCreateHidHaarClassifierCascade(cascade);
1066
1067     if( cascade->hid_cascade->has_tilted_features )
1068         tilted = cvCreateMat( img->rows + 1, img->cols + 1, CV_32SC1 );
1069
1070     result_seq = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvAvgComp), storage );
1071
1072     if( CV_MAT_CN(img->type) > 1 )
1073     {
1074         cvCvtColor( img, temp, CV_BGR2GRAY );
1075         img = temp;
1076     }
1077
1078     if( findBiggestObject )
1079         flags &= ~(CV_HAAR_SCALE_IMAGE|CV_HAAR_DO_CANNY_PRUNING);
1080
1081     if( flags & CV_HAAR_SCALE_IMAGE )
1082     {
1083         CvSize winSize0 = cascade->orig_window_size;
1084 #ifdef HAVE_IPP
1085         int use_ipp = cascade->hid_cascade->ipp_stages != 0;
1086
1087         if( use_ipp )
1088             normImg = cvCreateMat( img->rows, img->cols, CV_32FC1 );
1089 #endif
1090         imgSmall = cvCreateMat( img->rows + 1, img->cols + 1, CV_8UC1 );
1091
1092         for( factor = 1; ; factor *= scaleFactor )
1093         {
1094             CvSize winSize = { cvRound(winSize0.width*factor),
1095                                 cvRound(winSize0.height*factor) };
1096             CvSize sz = { cvRound( img->cols/factor ), cvRound( img->rows/factor ) };
1097             CvSize sz1 = { sz.width - winSize0.width + 1, sz.height - winSize0.height + 1 };
1098
1099             CvRect equRect = { icv_object_win_border, icv_object_win_border,
1100                 winSize0.width - icv_object_win_border*2,
1101                 winSize0.height - icv_object_win_border*2 };
1102
1103             CvMat img1, sum1, sqsum1, norm1, tilted1, mask1;
1104             CvMat* _tilted = 0;
1105
1106             if( sz1.width <= 0 || sz1.height <= 0 )
1107                 break;
1108             if( winSize.width > maxSize.width || winSize.height > maxSize.height )
1109                 break;
1110             if( winSize.width < minSize.width || winSize.height < minSize.height )
1111                 continue;
1112
1113             img1 = cvMat( sz.height, sz.width, CV_8UC1, imgSmall->data.ptr );
1114             sum1 = cvMat( sz.height+1, sz.width+1, CV_32SC1, sum->data.ptr );
1115             sqsum1 = cvMat( sz.height+1, sz.width+1, CV_64FC1, sqsum->data.ptr );
1116             if( tilted )
1117             {
1118                 tilted1 = cvMat( sz.height+1, sz.width+1, CV_32SC1, tilted->data.ptr );
1119                 _tilted = &tilted1;
1120             }
1121             norm1 = cvMat( sz1.height, sz1.width, CV_32FC1, normImg ? normImg->data.ptr : 0 );
1122             mask1 = cvMat( sz1.height, sz1.width, CV_8UC1, temp->data.ptr );
1123
1124             cvResize( img, &img1, CV_INTER_LINEAR );
1125             cvIntegral( &img1, &sum1, &sqsum1, _tilted );
1126
1127             int ystep = factor > 2 ? 1 : 2;
1128         #ifdef HAVE_TBB
1129             const int LOCS_PER_THREAD = 1000;
1130             int stripCount = ((sz1.width/ystep)*(sz1.height + ystep-1)/ystep + LOCS_PER_THREAD/2)/LOCS_PER_THREAD;
1131             stripCount = std::min(std::max(stripCount, 1), 100);
1132         #else
1133             const int stripCount = 1;
1134         #endif
1135             
1136 #ifdef HAVE_IPP
1137             if( use_ipp )
1138             {
1139                 cv::Mat fsum(sum1.rows, sum1.cols, CV_32F, sum1.data.ptr, sum1.step);
1140                 cv::Mat(&sum1).convertTo(fsum, CV_32F, 1, -(1<<24));
1141             }
1142             else
1143 #endif
1144                 cvSetImagesForHaarClassifierCascade( cascade, &sum1, &sqsum1, _tilted, 1. );            
1145             
1146             cv::Mat _norm1(&norm1), _mask1(&mask1);
1147             cv::parallel_for(cv::BlockedRange(0, stripCount),
1148                          cv::HaarDetectObjects_ScaleImage_Invoker(cascade,
1149                                 (((sz1.height + stripCount - 1)/stripCount + ystep-1)/ystep)*ystep,
1150                                 factor, cv::Mat(&sum1), cv::Mat(&sqsum1), &_norm1, &_mask1,
1151                                 cv::Rect(equRect), allCandidates, rejectLevels, levelWeights, outputRejectLevels));
1152         }
1153     }
1154     else
1155     {
1156         int n_factors = 0;
1157         cv::Rect scanROI;
1158
1159         cvIntegral( img, sum, sqsum, tilted );
1160
1161         if( doCannyPruning )
1162         {
1163             sumcanny = cvCreateMat( img->rows + 1, img->cols + 1, CV_32SC1 );
1164             cvCanny( img, temp, 0, 50, 3 );
1165             cvIntegral( temp, sumcanny );
1166         }
1167
1168         for( n_factors = 0, factor = 1;
1169              factor*cascade->orig_window_size.width < img->cols - 10 &&
1170              factor*cascade->orig_window_size.height < img->rows - 10;
1171              n_factors++, factor *= scaleFactor )
1172             ;
1173
1174         if( findBiggestObject )
1175         {
1176             scaleFactor = 1./scaleFactor;
1177             factor *= scaleFactor;
1178         }
1179         else
1180             factor = 1;
1181
1182         for( ; n_factors-- > 0; factor *= scaleFactor )
1183         {
1184             const double ystep = std::max( 2., factor );
1185             CvSize winSize = { cvRound( cascade->orig_window_size.width * factor ),
1186                                 cvRound( cascade->orig_window_size.height * factor )};
1187             CvRect equRect = { 0, 0, 0, 0 };
1188             int *p[4] = {0,0,0,0};
1189             int *pq[4] = {0,0,0,0};
1190             int startX = 0, startY = 0;
1191             int endX = cvRound((img->cols - winSize.width) / ystep);
1192             int endY = cvRound((img->rows - winSize.height) / ystep);
1193
1194             if( winSize.width < minSize.width || winSize.height < minSize.height )
1195             {
1196                 if( findBiggestObject )
1197                     break;
1198                 continue;
1199             }
1200
1201             cvSetImagesForHaarClassifierCascade( cascade, sum, sqsum, tilted, factor );
1202             cvZero( temp );
1203
1204             if( doCannyPruning )
1205             {
1206                 equRect.x = cvRound(winSize.width*0.15);
1207                 equRect.y = cvRound(winSize.height*0.15);
1208                 equRect.width = cvRound(winSize.width*0.7);
1209                 equRect.height = cvRound(winSize.height*0.7);
1210
1211                 p[0] = (int*)(sumcanny->data.ptr + equRect.y*sumcanny->step) + equRect.x;
1212                 p[1] = (int*)(sumcanny->data.ptr + equRect.y*sumcanny->step)
1213                             + equRect.x + equRect.width;
1214                 p[2] = (int*)(sumcanny->data.ptr + (equRect.y + equRect.height)*sumcanny->step) + equRect.x;
1215                 p[3] = (int*)(sumcanny->data.ptr + (equRect.y + equRect.height)*sumcanny->step)
1216                             + equRect.x + equRect.width;
1217
1218                 pq[0] = (int*)(sum->data.ptr + equRect.y*sum->step) + equRect.x;
1219                 pq[1] = (int*)(sum->data.ptr + equRect.y*sum->step)
1220                             + equRect.x + equRect.width;
1221                 pq[2] = (int*)(sum->data.ptr + (equRect.y + equRect.height)*sum->step) + equRect.x;
1222                 pq[3] = (int*)(sum->data.ptr + (equRect.y + equRect.height)*sum->step)
1223                             + equRect.x + equRect.width;
1224             }
1225
1226             if( scanROI.area() > 0 )
1227             {
1228                 //adjust start_height and stop_height
1229                 startY = cvRound(scanROI.y / ystep);
1230                 endY = cvRound((scanROI.y + scanROI.height - winSize.height) / ystep);
1231
1232                 startX = cvRound(scanROI.x / ystep);
1233                 endX = cvRound((scanROI.x + scanROI.width - winSize.width) / ystep);
1234             }
1235
1236             cv::parallel_for(cv::BlockedRange(startY, endY),
1237                 cv::HaarDetectObjects_ScaleCascade_Invoker(cascade, winSize, cv::Range(startX, endX),
1238                                                            ystep, sum->step, (const int**)p,
1239                                                            (const int**)pq, allCandidates ));
1240
1241             if( findBiggestObject && !allCandidates.empty() && scanROI.area() == 0 )
1242             {
1243                 rectList.resize(allCandidates.size());
1244                 std::copy(allCandidates.begin(), allCandidates.end(), rectList.begin());
1245                 
1246                 groupRectangles(rectList, std::max(minNeighbors, 1), GROUP_EPS);
1247                 
1248                 if( !rectList.empty() )
1249                 {
1250                     size_t i, sz = rectList.size();
1251                     cv::Rect maxRect;
1252                     
1253                     for( i = 0; i < sz; i++ )
1254                     {
1255                         if( rectList[i].area() > maxRect.area() )
1256                             maxRect = rectList[i];
1257                     }
1258                     
1259                     allCandidates.push_back(maxRect);
1260                     
1261                     scanROI = maxRect;
1262                     int dx = cvRound(maxRect.width*GROUP_EPS);
1263                     int dy = cvRound(maxRect.height*GROUP_EPS);
1264                     scanROI.x = std::max(scanROI.x - dx, 0);
1265                     scanROI.y = std::max(scanROI.y - dy, 0);
1266                     scanROI.width = std::min(scanROI.width + dx*2, img->cols-1-scanROI.x);
1267                     scanROI.height = std::min(scanROI.height + dy*2, img->rows-1-scanROI.y);
1268                 
1269                     double minScale = roughSearch ? 0.6 : 0.4;
1270                     minSize.width = cvRound(maxRect.width*minScale);
1271                     minSize.height = cvRound(maxRect.height*minScale);
1272                 }
1273             }
1274         }
1275     }
1276
1277     rectList.resize(allCandidates.size());
1278     if(!allCandidates.empty())
1279         std::copy(allCandidates.begin(), allCandidates.end(), rectList.begin());
1280     
1281     if( minNeighbors != 0 || findBiggestObject )
1282     {
1283         if( outputRejectLevels )
1284         {
1285             groupRectangles(rectList, rejectLevels, levelWeights, minNeighbors, GROUP_EPS );
1286         }
1287         else
1288         {
1289             groupRectangles(rectList, rweights, std::max(minNeighbors, 1), GROUP_EPS);
1290         }
1291     }
1292     else
1293         rweights.resize(rectList.size(),0);
1294         
1295     if( findBiggestObject && rectList.size() )
1296     {
1297         CvAvgComp result_comp = {{0,0,0,0},0};
1298         
1299         for( size_t i = 0; i < rectList.size(); i++ )
1300         {
1301             cv::Rect r = rectList[i];
1302             if( r.area() > cv::Rect(result_comp.rect).area() )
1303             {
1304                 result_comp.rect = r;
1305                 result_comp.neighbors = rweights[i];
1306             }
1307         }
1308         cvSeqPush( result_seq, &result_comp );
1309     }
1310     else
1311     {
1312         for( size_t i = 0; i < rectList.size(); i++ )
1313         {
1314             CvAvgComp c;
1315             c.rect = rectList[i];
1316             c.neighbors = !rweights.empty() ? rweights[i] : 0;
1317             cvSeqPush( result_seq, &c );
1318         }
1319     }
1320
1321     return result_seq;
1322 }
1323
1324 CV_IMPL CvSeq*
1325 cvHaarDetectObjects( const CvArr* _img, 
1326                      CvHaarClassifierCascade* cascade, CvMemStorage* storage,
1327                      double scaleFactor,
1328                      int minNeighbors, int flags, CvSize minSize, CvSize maxSize )
1329 {
1330     std::vector<int> fakeLevels;
1331     std::vector<double> fakeWeights;
1332     return cvHaarDetectObjectsForROC( _img, cascade, storage, fakeLevels, fakeWeights, 
1333                                 scaleFactor, minNeighbors, flags, minSize, maxSize, false );
1334
1335 }
1336
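/* Illustrative usage sketch (not part of the library): detecting objects with a
   trained cascade loaded from XML. The file name and parameter values below are
   only examples.

       CvHaarClassifierCascade* cascade = (CvHaarClassifierCascade*)
           cvLoad( "haarcascade_frontalface_alt.xml", 0, 0, 0 );
       CvMemStorage* storage = cvCreateMemStorage(0);
       CvSeq* objects = cvHaarDetectObjects( img, cascade, storage, 1.1, 3,
                                             CV_HAAR_DO_CANNY_PRUNING,
                                             cvSize(30, 30), cvSize(0, 0) );
       for( int i = 0; i < (objects ? objects->total : 0); i++ )
       {
           CvAvgComp comp = *(CvAvgComp*)cvGetSeqElem( objects, i );
           // comp.rect is the detected rectangle, comp.neighbors its support
       }
*/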
1337
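/* icvLoadCascadeCART parses the old plain-text cascade format. As the sscanf
   calls below show, each stage string contains: the number of weak classifiers;
   then, for every classifier, its node count and, for every node, the number of
   rectangles, each rectangle as "x y w h band weight", a feature-type token
   (e.g. "tilted"), and the node threshold with left/right child indices;
   followed by the node_count + 1 alpha values of the classifier. */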
1338 static CvHaarClassifierCascade*
1339 icvLoadCascadeCART( const char** input_cascade, int n, CvSize orig_window_size )
1340 {
1341     int i;
1342     CvHaarClassifierCascade* cascade = icvCreateHaarClassifierCascade(n);
1343     cascade->orig_window_size = orig_window_size;
1344
1345     for( i = 0; i < n; i++ )
1346     {
1347         int j, count, l;
1348         float threshold = 0;
1349         const char* stage = input_cascade[i];
1350         int dl = 0;
1351
1352         /* tree links */
1353         int parent = -1;
1354         int next = -1;
1355
1356         sscanf( stage, "%d%n", &count, &dl );
1357         stage += dl;
1358
1359         assert( count > 0 );
1360         cascade->stage_classifier[i].count = count;
1361         cascade->stage_classifier[i].classifier =
1362             (CvHaarClassifier*)cvAlloc( count*sizeof(cascade->stage_classifier[i].classifier[0]));
1363
1364         for( j = 0; j < count; j++ )
1365         {
1366             CvHaarClassifier* classifier = cascade->stage_classifier[i].classifier + j;
1367             int k, rects = 0;
1368             char str[100];
1369
1370             sscanf( stage, "%d%n", &classifier->count, &dl );
1371             stage += dl;
1372
1373             classifier->haar_feature = (CvHaarFeature*) cvAlloc(
1374                 classifier->count * ( sizeof( *classifier->haar_feature ) +
1375                                       sizeof( *classifier->threshold ) +
1376                                       sizeof( *classifier->left ) +
1377                                       sizeof( *classifier->right ) ) +
1378                 (classifier->count + 1) * sizeof( *classifier->alpha ) );
1379             classifier->threshold = (float*) (classifier->haar_feature+classifier->count);
1380             classifier->left = (int*) (classifier->threshold + classifier->count);
1381             classifier->right = (int*) (classifier->left + classifier->count);
1382             classifier->alpha = (float*) (classifier->right + classifier->count);
1383
1384             for( l = 0; l < classifier->count; l++ )
1385             {
1386                 sscanf( stage, "%d%n", &rects, &dl );
1387                 stage += dl;
1388
1389                 assert( rects >= 2 && rects <= CV_HAAR_FEATURE_MAX );
1390
1391                 for( k = 0; k < rects; k++ )
1392                 {
1393                     CvRect r;
1394                     int band = 0;
1395                     sscanf( stage, "%d%d%d%d%d%f%n",
1396                             &r.x, &r.y, &r.width, &r.height, &band,
1397                             &(classifier->haar_feature[l].rect[k].weight), &dl );
1398                     stage += dl;
1399                     classifier->haar_feature[l].rect[k].r = r;
1400                 }
1401                 sscanf( stage, "%s%n", str, &dl );
1402                 stage += dl;
1403
1404                 classifier->haar_feature[l].tilted = strncmp( str, "tilted", 6 ) == 0;
1405
1406                 for( k = rects; k < CV_HAAR_FEATURE_MAX; k++ )
1407                 {
1408                     memset( classifier->haar_feature[l].rect + k, 0,
1409                             sizeof(classifier->haar_feature[l].rect[k]) );
1410                 }
1411
1412                 sscanf( stage, "%f%d%d%n", &(classifier->threshold[l]),
1413                                        &(classifier->left[l]),
1414                                        &(classifier->right[l]), &dl );
1415                 stage += dl;
1416             }
1417             for( l = 0; l <= classifier->count; l++ )
1418             {
1419                 sscanf( stage, "%f%n", &(classifier->alpha[l]), &dl );
1420                 stage += dl;
1421             }
1422         }
1423
1424         sscanf( stage, "%f%n", &threshold, &dl );
1425         stage += dl;
1426
1427         cascade->stage_classifier[i].threshold = threshold;
1428
1429         /* load tree links */
1430         if( sscanf( stage, "%d%d%n", &parent, &next, &dl ) != 2 )
1431         {
1432             parent = i - 1;
1433             next = -1;
1434         }
1435         stage += dl;
1436
1437         cascade->stage_classifier[i].parent = parent;
1438         cascade->stage_classifier[i].next = next;
1439         cascade->stage_classifier[i].child = -1;
1440
1441         if( parent != -1 && cascade->stage_classifier[parent].child == -1 )
1442         {
1443             cascade->stage_classifier[parent].child = i;
1444         }
1445     }
1446
1447     return cascade;
1448 }
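/* The parser above expects each stage string of the legacy text format to look
   roughly as follows (reconstructed from the sscanf calls; tokens are separated
   by arbitrary whitespace):

       <num_trees>
         <num_nodes>                              once per tree
           <num_rects>                            once per node
             <x> <y> <w> <h> <band> <weight>      num_rects times
           <tag>                                  "tilted" marks a tilted feature
           <node_threshold> <left_idx> <right_idx>
         <alpha_0> ... <alpha_num_nodes>          num_nodes + 1 leaf values
       <stage_threshold> [<parent> <next>]        tree links are optional
*/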
1449
1450 #ifndef _MAX_PATH
1451 #define _MAX_PATH 1024
1452 #endif
1453
1454 CV_IMPL CvHaarClassifierCascade*
1455 cvLoadHaarClassifierCascade( const char* directory, CvSize orig_window_size )
1456 {
1457     const char** input_cascade = 0;
1458     CvHaarClassifierCascade *cascade = 0;
1459
1460     int i, n;
1461     const char* slash;
1462     char name[_MAX_PATH];
1463     int size = 0;
1464     char* ptr = 0;
1465
1466     if( !directory )
1467         CV_Error( CV_StsNullPtr, "Null path is passed" );
1468
1469     n = (int)strlen(directory)-1;
1470     slash = directory[n] == '\\' || directory[n] == '/' ? "" : "/";
1471
1472     /* try to read the classifier from directory */
1473     for( n = 0; ; n++ )
1474     {
1475         sprintf( name, "%s%s%d/AdaBoostCARTHaarClassifier.txt", directory, slash, n );
1476         FILE* f = fopen( name, "rb" );
1477         if( !f )
1478             break;
1479         fseek( f, 0, SEEK_END );
1480         size += ftell( f ) + 1;
1481         fclose(f);
1482     }
1483
1484     if( n == 0 && slash[0] )
1485         return (CvHaarClassifierCascade*)cvLoad( directory );
1486
1487     if( n == 0 )
1488         CV_Error( CV_StsBadArg, "Invalid path" );
1489
1490     size += (n+1)*sizeof(char*);
1491     input_cascade = (const char**)cvAlloc( size );
1492     ptr = (char*)(input_cascade + n + 1);
1493
1494     for( i = 0; i < n; i++ )
1495     {
1496         sprintf( name, "%s/%d/AdaBoostCARTHaarClassifier.txt", directory, i );
1497         FILE* f = fopen( name, "rb" );
1498         if( !f )
1499             CV_Error( CV_StsError, name );  /* report which stage file could not be opened */
1500         fseek( f, 0, SEEK_END );
1501         size = ftell( f );
1502         fseek( f, 0, SEEK_SET );
1503         CV_Assert( (int)fread( ptr, 1, size, f ) == size );  /* check bytes read (also silences GCC's unused-result warning) */
1504         fclose(f);
1505         input_cascade[i] = ptr;
1506         ptr += size;
1507         *ptr++ = '\0';
1508     }
1509
1510     input_cascade[n] = 0;
1511     cascade = icvLoadCascadeCART( input_cascade, n, orig_window_size );
1512
1513     if( input_cascade )
1514         cvFree( &input_cascade );
1515
1516     return cascade;
1517 }
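/* Usage sketch (the directory name and window size are example assumptions).
   If "directory" contains numbered subdirectories 0/, 1/, ... each holding an
   AdaBoostCARTHaarClassifier.txt stage file, the stages are concatenated and
   parsed by icvLoadCascadeCART(); if no such files are found and the path does
   not already end with a slash, the whole path is handed to cvLoad() and read
   as a regular XML/YAML cascade.

       CvHaarClassifierCascade* cascade =
           cvLoadHaarClassifierCascade( "my_cascade_dir/", cvSize(24,24) );
*/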
1518
1519
1520 CV_IMPL void
1521 cvReleaseHaarClassifierCascade( CvHaarClassifierCascade** _cascade )
1522 {
1523     if( _cascade && *_cascade )
1524     {
1525         int i, j;
1526         CvHaarClassifierCascade* cascade = *_cascade;
1527
1528         for( i = 0; i < cascade->count; i++ )
1529         {
1530             for( j = 0; j < cascade->stage_classifier[i].count; j++ )
1531                 cvFree( &cascade->stage_classifier[i].classifier[j].haar_feature );
1532             cvFree( &cascade->stage_classifier[i].classifier );
1533         }
1534         icvReleaseHidHaarClassifierCascade( &cascade->hid_cascade );
1535         cvFree( _cascade );
1536     }
1537 }
1538
1539
1540 /****************************************************************************************\
1541 *                                  Persistence functions                                 *
1542 \****************************************************************************************/
1543
1544 /* field names */
1545
1546 #define ICV_HAAR_SIZE_NAME            "size"
1547 #define ICV_HAAR_STAGES_NAME          "stages"
1548 #define ICV_HAAR_TREES_NAME           "trees"
1549 #define ICV_HAAR_FEATURE_NAME         "feature"
1550 #define ICV_HAAR_RECTS_NAME           "rects"
1551 #define ICV_HAAR_TILTED_NAME          "tilted"
1552 #define ICV_HAAR_THRESHOLD_NAME       "threshold"
1553 #define ICV_HAAR_LEFT_NODE_NAME       "left_node"
1554 #define ICV_HAAR_LEFT_VAL_NAME        "left_val"
1555 #define ICV_HAAR_RIGHT_NODE_NAME      "right_node"
1556 #define ICV_HAAR_RIGHT_VAL_NAME       "right_val"
1557 #define ICV_HAAR_STAGE_THRESHOLD_NAME "stage_threshold"
1558 #define ICV_HAAR_PARENT_NAME          "parent"
1559 #define ICV_HAAR_NEXT_NAME            "next"
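/* For reference, the reader/writer below assume a file-storage layout along
   these lines (a YAML sketch with made-up numbers; each rect is stored as a
   flow sequence [x, y, w, h, weight]):

     my_cascade: !!opencv-haar-classifier
       size: [ 24, 24 ]
       stages:
         - trees:
             - - feature:
                   rects: [ [ 0, 0, 24, 12, -1. ], [ 0, 12, 24, 12, 2. ] ]
                   tilted: 0
                 threshold: 1.5000000e-02
                 left_val: 6.0000000e-01
                 right_node: 1
           stage_threshold: 8.2000000e-01
           parent: -1
           next: -1
*/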
1560
1561 static int
1562 icvIsHaarClassifier( const void* struct_ptr )
1563 {
1564     return CV_IS_HAAR_CLASSIFIER( struct_ptr );
1565 }
1566
1567 static void*
1568 icvReadHaarClassifier( CvFileStorage* fs, CvFileNode* node )
1569 {
1570     CvHaarClassifierCascade* cascade = NULL;
1571
1572     char buf[256];
1573     CvFileNode* seq_fn = NULL; /* sequence */
1574     CvFileNode* fn = NULL;
1575     CvFileNode* stages_fn = NULL;
1576     CvSeqReader stages_reader;
1577     int n;
1578     int i, j, k, l;
1579     int parent, next;
1580
1581     stages_fn = cvGetFileNodeByName( fs, node, ICV_HAAR_STAGES_NAME );
1582     if( !stages_fn || !CV_NODE_IS_SEQ( stages_fn->tag) )
1583         CV_Error( CV_StsError, "Invalid stages node" );
1584
1585     n = stages_fn->data.seq->total;
1586     cascade = icvCreateHaarClassifierCascade(n);
1587
1588     /* read size */
1589     seq_fn = cvGetFileNodeByName( fs, node, ICV_HAAR_SIZE_NAME );
1590     if( !seq_fn || !CV_NODE_IS_SEQ( seq_fn->tag ) || seq_fn->data.seq->total != 2 )
1591         CV_Error( CV_StsError, "size node is not a valid sequence." );
1592     fn = (CvFileNode*) cvGetSeqElem( seq_fn->data.seq, 0 );
1593     if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i <= 0 )
1594         CV_Error( CV_StsError, "Invalid size node: width must be positive integer" );
1595     cascade->orig_window_size.width = fn->data.i;
1596     fn = (CvFileNode*) cvGetSeqElem( seq_fn->data.seq, 1 );
1597     if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i <= 0 )
1598         CV_Error( CV_StsError, "Invalid size node: height must be positive integer" );
1599     cascade->orig_window_size.height = fn->data.i;
1600
1601     cvStartReadSeq( stages_fn->data.seq, &stages_reader );
1602     for( i = 0; i < n; ++i )
1603     {
1604         CvFileNode* stage_fn;
1605         CvFileNode* trees_fn;
1606         CvSeqReader trees_reader;
1607
1608         stage_fn = (CvFileNode*) stages_reader.ptr;
1609         if( !CV_NODE_IS_MAP( stage_fn->tag ) )
1610         {
1611             sprintf( buf, "Invalid stage %d", i );
1612             CV_Error( CV_StsError, buf );
1613         }
1614
1615         trees_fn = cvGetFileNodeByName( fs, stage_fn, ICV_HAAR_TREES_NAME );
1616         if( !trees_fn || !CV_NODE_IS_SEQ( trees_fn->tag )
1617             || trees_fn->data.seq->total <= 0 )
1618         {
1619             sprintf( buf, "Trees node is not a valid sequence. (stage %d)", i );
1620             CV_Error( CV_StsError, buf );
1621         }
1622
1623         cascade->stage_classifier[i].classifier =
1624             (CvHaarClassifier*) cvAlloc( trees_fn->data.seq->total
1625                 * sizeof( cascade->stage_classifier[i].classifier[0] ) );
1626         for( j = 0; j < trees_fn->data.seq->total; ++j )
1627         {
1628             cascade->stage_classifier[i].classifier[j].haar_feature = NULL;
1629         }
1630         cascade->stage_classifier[i].count = trees_fn->data.seq->total;
1631
1632         cvStartReadSeq( trees_fn->data.seq, &trees_reader );
1633         for( j = 0; j < trees_fn->data.seq->total; ++j )
1634         {
1635             CvFileNode* tree_fn;
1636             CvSeqReader tree_reader;
1637             CvHaarClassifier* classifier;
1638             int last_idx;
1639
1640             classifier = &cascade->stage_classifier[i].classifier[j];
1641             tree_fn = (CvFileNode*) trees_reader.ptr;
1642             if( !CV_NODE_IS_SEQ( tree_fn->tag ) || tree_fn->data.seq->total <= 0 )
1643             {
1644                 sprintf( buf, "Tree node is not a valid sequence."
1645                          " (stage %d, tree %d)", i, j );
1646                 CV_Error( CV_StsError, buf );
1647             }
1648
1649             classifier->count = tree_fn->data.seq->total;
1650             classifier->haar_feature = (CvHaarFeature*) cvAlloc(
1651                 classifier->count * ( sizeof( *classifier->haar_feature ) +
1652                                       sizeof( *classifier->threshold ) +
1653                                       sizeof( *classifier->left ) +
1654                                       sizeof( *classifier->right ) ) +
1655                 (classifier->count + 1) * sizeof( *classifier->alpha ) );
1656             classifier->threshold = (float*) (classifier->haar_feature+classifier->count);
1657             classifier->left = (int*) (classifier->threshold + classifier->count);
1658             classifier->right = (int*) (classifier->left + classifier->count);
1659             classifier->alpha = (float*) (classifier->right + classifier->count);
1660
1661             cvStartReadSeq( tree_fn->data.seq, &tree_reader );
1662             for( k = 0, last_idx = 0; k < tree_fn->data.seq->total; ++k )
1663             {
1664                 CvFileNode* node_fn;
1665                 CvFileNode* feature_fn;
1666                 CvFileNode* rects_fn;
1667                 CvSeqReader rects_reader;
1668
1669                 node_fn = (CvFileNode*) tree_reader.ptr;
1670                 if( !CV_NODE_IS_MAP( node_fn->tag ) )
1671                 {
1672                     sprintf( buf, "Tree node %d is not a valid map. (stage %d, tree %d)",
1673                              k, i, j );
1674                     CV_Error( CV_StsError, buf );
1675                 }
1676                 feature_fn = cvGetFileNodeByName( fs, node_fn, ICV_HAAR_FEATURE_NAME );
1677                 if( !feature_fn || !CV_NODE_IS_MAP( feature_fn->tag ) )
1678                 {
1679                     sprintf( buf, "Feature node is not a valid map. "
1680                              "(stage %d, tree %d, node %d)", i, j, k );
1681                     CV_Error( CV_StsError, buf );
1682                 }
1683                 rects_fn = cvGetFileNodeByName( fs, feature_fn, ICV_HAAR_RECTS_NAME );
1684                 if( !rects_fn || !CV_NODE_IS_SEQ( rects_fn->tag )
1685                     || rects_fn->data.seq->total < 1
1686                     || rects_fn->data.seq->total > CV_HAAR_FEATURE_MAX )
1687                 {
1688                     sprintf( buf, "Rects node is not a valid sequence. "
1689                              "(stage %d, tree %d, node %d)", i, j, k );
1690                     CV_Error( CV_StsError, buf );
1691                 }
1692                 cvStartReadSeq( rects_fn->data.seq, &rects_reader );
1693                 for( l = 0; l < rects_fn->data.seq->total; ++l )
1694                 {
1695                     CvFileNode* rect_fn;
1696                     CvRect r;
1697
1698                     rect_fn = (CvFileNode*) rects_reader.ptr;
1699                     if( !CV_NODE_IS_SEQ( rect_fn->tag ) || rect_fn->data.seq->total != 5 )
1700                     {
1701                         sprintf( buf, "Rect %d is not a valid sequence. "
1702                                  "(stage %d, tree %d, node %d)", l, i, j, k );
1703                         CV_Error( CV_StsError, buf );
1704                     }
1705
1706                     fn = CV_SEQ_ELEM( rect_fn->data.seq, CvFileNode, 0 );
1707                     if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i < 0 )
1708                     {
1709                         sprintf( buf, "x coordinate must be non-negative integer. "
1710                                  "(stage %d, tree %d, node %d, rect %d)", i, j, k, l );
1711                         CV_Error( CV_StsError, buf );
1712                     }
1713                     r.x = fn->data.i;
1714                     fn = CV_SEQ_ELEM( rect_fn->data.seq, CvFileNode, 1 );
1715                     if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i < 0 )
1716                     {
1717                         sprintf( buf, "y coordinate must be non-negative integer. "
1718                                  "(stage %d, tree %d, node %d, rect %d)", i, j, k, l );
1719                         CV_Error( CV_StsError, buf );
1720                     }
1721                     r.y = fn->data.i;
1722                     fn = CV_SEQ_ELEM( rect_fn->data.seq, CvFileNode, 2 );
1723                     if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i <= 0
1724                         || r.x + fn->data.i > cascade->orig_window_size.width )
1725                     {
1726                         sprintf( buf, "width must be positive integer and "
1727                                  "(x + width) must not exceed window width. "
1728                                  "(stage %d, tree %d, node %d, rect %d)", i, j, k, l );
1729                         CV_Error( CV_StsError, buf );
1730                     }
1731                     r.width = fn->data.i;
1732                     fn = CV_SEQ_ELEM( rect_fn->data.seq, CvFileNode, 3 );
1733                     if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i <= 0
1734                         || r.y + fn->data.i > cascade->orig_window_size.height )
1735                     {
1736                         sprintf( buf, "height must be positive integer and "
1737                                  "(y + height) must not exceed window height. "
1738                                  "(stage %d, tree %d, node %d, rect %d)", i, j, k, l );
1739                         CV_Error( CV_StsError, buf );
1740                     }
1741                     r.height = fn->data.i;
1742                     fn = CV_SEQ_ELEM( rect_fn->data.seq, CvFileNode, 4 );
1743                     if( !CV_NODE_IS_REAL( fn->tag ) )
1744                     {
1745                         sprintf( buf, "weight must be real number. "
1746                                  "(stage %d, tree %d, node %d, rect %d)", i, j, k, l );
1747                         CV_Error( CV_StsError, buf );
1748                     }
1749
1750                     classifier->haar_feature[k].rect[l].weight = (float) fn->data.f;
1751                     classifier->haar_feature[k].rect[l].r = r;
1752
1753                     CV_NEXT_SEQ_ELEM( sizeof( *rect_fn ), rects_reader );
1754                 } /* for each rect */
1755                 for( l = rects_fn->data.seq->total; l < CV_HAAR_FEATURE_MAX; ++l )
1756                 {
1757                     classifier->haar_feature[k].rect[l].weight = 0;
1758                     classifier->haar_feature[k].rect[l].r = cvRect( 0, 0, 0, 0 );
1759                 }
1760
1761                 fn = cvGetFileNodeByName( fs, feature_fn, ICV_HAAR_TILTED_NAME);
1762                 if( !fn || !CV_NODE_IS_INT( fn->tag ) )
1763                 {
1764                     sprintf( buf, "tilted must be 0 or 1. "
1765                              "(stage %d, tree %d, node %d)", i, j, k );
1766                     CV_Error( CV_StsError, buf );
1767                 }
1768                 classifier->haar_feature[k].tilted = ( fn->data.i != 0 );
1769                 fn = cvGetFileNodeByName( fs, node_fn, ICV_HAAR_THRESHOLD_NAME);
1770                 if( !fn || !CV_NODE_IS_REAL( fn->tag ) )
1771                 {
1772                     sprintf( buf, "threshold must be real number. "
1773                              "(stage %d, tree %d, node %d)", i, j, k );
1774                     CV_Error( CV_StsError, buf );
1775                 }
1776                 classifier->threshold[k] = (float) fn->data.f;
1777                 fn = cvGetFileNodeByName( fs, node_fn, ICV_HAAR_LEFT_NODE_NAME);
1778                 if( fn )
1779                 {
1780                     if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i <= k
1781                         || fn->data.i >= tree_fn->data.seq->total )
1782                     {
1783                         sprintf( buf, "left node must be valid node number. "
1784                                  "(stage %d, tree %d, node %d)", i, j, k );
1785                         CV_Error( CV_StsError, buf );
1786                     }
1787                     /* left node */
1788                     classifier->left[k] = fn->data.i;
1789                 }
1790                 else
1791                 {
1792                     fn = cvGetFileNodeByName( fs, node_fn, ICV_HAAR_LEFT_VAL_NAME );
1793                     if( !fn )
1794                     {
1795                         sprintf( buf, "left node or left value must be specified. "
1796                                  "(stage %d, tree %d, node %d)", i, j, k );
1797                         CV_Error( CV_StsError, buf );
1798                     }
1799                     if( !CV_NODE_IS_REAL( fn->tag ) )
1800                     {
1801                         sprintf( buf, "left value must be real number. "
1802                                  "(stage %d, tree %d, node %d)", i, j, k );
1803                         CV_Error( CV_StsError, buf );
1804                     }
1805                     /* left value */
1806                     if( last_idx >= classifier->count + 1 )
1807                     {
1808                         sprintf( buf, "Tree structure is broken: too many values. "
1809                                  "(stage %d, tree %d, node %d)", i, j, k );
1810                         CV_Error( CV_StsError, buf );
1811                     }
1812                     classifier->left[k] = -last_idx;
1813                     classifier->alpha[last_idx++] = (float) fn->data.f;
1814                 }
1815                 fn = cvGetFileNodeByName( fs, node_fn,ICV_HAAR_RIGHT_NODE_NAME);
1816                 if( fn )
1817                 {
1818                     if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i <= k
1819                         || fn->data.i >= tree_fn->data.seq->total )
1820                     {
1821                         sprintf( buf, "right node must be valid node number. "
1822                                  "(stage %d, tree %d, node %d)", i, j, k );
1823                         CV_Error( CV_StsError, buf );
1824                     }
1825                     /* right node */
1826                     classifier->right[k] = fn->data.i;
1827                 }
1828                 else
1829                 {
1830                     fn = cvGetFileNodeByName( fs, node_fn, ICV_HAAR_RIGHT_VAL_NAME );
1831                     if( !fn )
1832                     {
1833                         sprintf( buf, "right node or right value must be specified. "
1834                                  "(stage %d, tree %d, node %d)", i, j, k );
1835                         CV_Error( CV_StsError, buf );
1836                     }
1837                     if( !CV_NODE_IS_REAL( fn->tag ) )
1838                     {
1839                         sprintf( buf, "right value must be real number. "
1840                                  "(stage %d, tree %d, node %d)", i, j, k );
1841                         CV_Error( CV_StsError, buf );
1842                     }
1843                     /* right value */
1844                     if( last_idx >= classifier->count + 1 )
1845                     {
1846                         sprintf( buf, "Tree structure is broken: too many values. "
1847                                  "(stage %d, tree %d, node %d)", i, j, k );
1848                         CV_Error( CV_StsError, buf );
1849                     }
1850                     classifier->right[k] = -last_idx;
1851                     classifier->alpha[last_idx++] = (float) fn->data.f;
1852                 }
1853
1854                 CV_NEXT_SEQ_ELEM( sizeof( *node_fn ), tree_reader );
1855             } /* for each node */
1856             if( last_idx != classifier->count + 1 )
1857             {
1858                 sprintf( buf, "Tree structure is broken: too few values. "
1859                          "(stage %d, tree %d)", i, j );
1860                 CV_Error( CV_StsError, buf );
1861             }
1862
1863             CV_NEXT_SEQ_ELEM( sizeof( *tree_fn ), trees_reader );
1864         } /* for each tree */
1865
1866         fn = cvGetFileNodeByName( fs, stage_fn, ICV_HAAR_STAGE_THRESHOLD_NAME);
1867         if( !fn || !CV_NODE_IS_REAL( fn->tag ) )
1868         {
1869             sprintf( buf, "stage threshold must be real number. (stage %d)", i );
1870             CV_Error( CV_StsError, buf );
1871         }
1872         cascade->stage_classifier[i].threshold = (float) fn->data.f;
1873
1874         parent = i - 1;
1875         next = -1;
1876
1877         fn = cvGetFileNodeByName( fs, stage_fn, ICV_HAAR_PARENT_NAME );
1878         if( !fn || !CV_NODE_IS_INT( fn->tag )
1879             || fn->data.i < -1 || fn->data.i >= cascade->count )
1880         {
1881             sprintf( buf, "parent must be integer number. (stage %d)", i );
1882             CV_Error( CV_StsError, buf );
1883         }
1884         parent = fn->data.i;
1885         fn = cvGetFileNodeByName( fs, stage_fn, ICV_HAAR_NEXT_NAME );
1886         if( !fn || !CV_NODE_IS_INT( fn->tag )
1887             || fn->data.i < -1 || fn->data.i >= cascade->count )
1888         {
1889             sprintf( buf, "next must be integer number. (stage %d)", i );
1890             CV_Error( CV_StsError, buf );
1891         }
1892         next = fn->data.i;
1893
1894         cascade->stage_classifier[i].parent = parent;
1895         cascade->stage_classifier[i].next = next;
1896         cascade->stage_classifier[i].child = -1;
1897
1898         if( parent != -1 && cascade->stage_classifier[parent].child == -1 )
1899         {
1900             cascade->stage_classifier[parent].child = i;
1901         }
1902
1903         CV_NEXT_SEQ_ELEM( sizeof( *stage_fn ), stages_reader );
1904     } /* for each stage */
1905
1906     return cascade;
1907 }
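/* Node encoding shared by the reader above and the writer below: for node k of
   a tree, a positive left[k]/right[k] is the index of another node in the same
   tree, while a value <= 0 denotes a leaf whose response is alpha[-left[k]]
   (resp. alpha[-right[k]]).  A single-node stump therefore ends up with
   left[0] == 0, right[0] == -1 and its two responses in alpha[0] and alpha[1].
   Illustrative walk over that encoding (feature_response() is a hypothetical
   placeholder; the real evaluation is done on the hidden cascade earlier in
   this file):

       int idx = 0;
       float leaf;
       for(;;)
       {
           int nxt = feature_response( classifier, idx ) < classifier->threshold[idx]
                     ? classifier->left[idx] : classifier->right[idx];
           if( nxt <= 0 ) { leaf = classifier->alpha[-nxt]; break; }
           idx = nxt;
       }
*/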
1908
1909 static void
1910 icvWriteHaarClassifier( CvFileStorage* fs, const char* name, const void* struct_ptr,
1911                         CvAttrList attributes )
1912 {
1913     int i, j, k, l;
1914     char buf[256];
1915     const CvHaarClassifierCascade* cascade = (const CvHaarClassifierCascade*) struct_ptr;
1916
1917     /* TODO: parameters check */
1918
1919     cvStartWriteStruct( fs, name, CV_NODE_MAP, CV_TYPE_NAME_HAAR, attributes );
1920
1921     cvStartWriteStruct( fs, ICV_HAAR_SIZE_NAME, CV_NODE_SEQ | CV_NODE_FLOW );
1922     cvWriteInt( fs, NULL, cascade->orig_window_size.width );
1923     cvWriteInt( fs, NULL, cascade->orig_window_size.height );
1924     cvEndWriteStruct( fs ); /* size */
1925
1926     cvStartWriteStruct( fs, ICV_HAAR_STAGES_NAME, CV_NODE_SEQ );
1927     for( i = 0; i < cascade->count; ++i )
1928     {
1929         cvStartWriteStruct( fs, NULL, CV_NODE_MAP );
1930         sprintf( buf, "stage %d", i );
1931         cvWriteComment( fs, buf, 1 );
1932
1933         cvStartWriteStruct( fs, ICV_HAAR_TREES_NAME, CV_NODE_SEQ );
1934
1935         for( j = 0; j < cascade->stage_classifier[i].count; ++j )
1936         {
1937             CvHaarClassifier* tree = &cascade->stage_classifier[i].classifier[j];
1938
1939             cvStartWriteStruct( fs, NULL, CV_NODE_SEQ );
1940             sprintf( buf, "tree %d", j );
1941             cvWriteComment( fs, buf, 1 );
1942
1943             for( k = 0; k < tree->count; ++k )
1944             {
1945                 CvHaarFeature* feature = &tree->haar_feature[k];
1946
1947                 cvStartWriteStruct( fs, NULL, CV_NODE_MAP );
1948                 if( k )
1949                 {
1950                     sprintf( buf, "node %d", k );
1951                 }
1952                 else
1953                 {
1954                     sprintf( buf, "root node" );
1955                 }
1956                 cvWriteComment( fs, buf, 1 );
1957
1958                 cvStartWriteStruct( fs, ICV_HAAR_FEATURE_NAME, CV_NODE_MAP );
1959
1960                 cvStartWriteStruct( fs, ICV_HAAR_RECTS_NAME, CV_NODE_SEQ );
1961                 for( l = 0; l < CV_HAAR_FEATURE_MAX && feature->rect[l].r.width != 0; ++l )
1962                 {
1963                     cvStartWriteStruct( fs, NULL, CV_NODE_SEQ | CV_NODE_FLOW );
1964                     cvWriteInt(  fs, NULL, feature->rect[l].r.x );
1965                     cvWriteInt(  fs, NULL, feature->rect[l].r.y );
1966                     cvWriteInt(  fs, NULL, feature->rect[l].r.width );
1967                     cvWriteInt(  fs, NULL, feature->rect[l].r.height );
1968                     cvWriteReal( fs, NULL, feature->rect[l].weight );
1969                     cvEndWriteStruct( fs ); /* rect */
1970                 }
1971                 cvEndWriteStruct( fs ); /* rects */
1972                 cvWriteInt( fs, ICV_HAAR_TILTED_NAME, feature->tilted );
1973                 cvEndWriteStruct( fs ); /* feature */
1974
1975                 cvWriteReal( fs, ICV_HAAR_THRESHOLD_NAME, tree->threshold[k]);
1976
1977                 if( tree->left[k] > 0 )
1978                 {
1979                     cvWriteInt( fs, ICV_HAAR_LEFT_NODE_NAME, tree->left[k] );
1980                 }
1981                 else
1982                 {
1983                     cvWriteReal( fs, ICV_HAAR_LEFT_VAL_NAME,
1984                         tree->alpha[-tree->left[k]] );
1985                 }
1986
1987                 if( tree->right[k] > 0 )
1988                 {
1989                     cvWriteInt( fs, ICV_HAAR_RIGHT_NODE_NAME, tree->right[k] );
1990                 }
1991                 else
1992                 {
1993                     cvWriteReal( fs, ICV_HAAR_RIGHT_VAL_NAME,
1994                         tree->alpha[-tree->right[k]] );
1995                 }
1996
1997                 cvEndWriteStruct( fs ); /* split */
1998             }
1999
2000             cvEndWriteStruct( fs ); /* tree */
2001         }
2002
2003         cvEndWriteStruct( fs ); /* trees */
2004
2005         cvWriteReal( fs, ICV_HAAR_STAGE_THRESHOLD_NAME, cascade->stage_classifier[i].threshold);
2006         cvWriteInt( fs, ICV_HAAR_PARENT_NAME, cascade->stage_classifier[i].parent );
2007         cvWriteInt( fs, ICV_HAAR_NEXT_NAME, cascade->stage_classifier[i].next );
2008
2009         cvEndWriteStruct( fs ); /* stage */
2010     } /* for each stage */
2011
2012     cvEndWriteStruct( fs ); /* stages */
2013     cvEndWriteStruct( fs ); /* root */
2014 }
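/* Because icvWriteHaarClassifier() is registered in haar_type below, a cascade
   can be written through the generic persistence API; the file name here is an
   example assumption:

       cvSave( "my_cascade.xml", cascade );   // dispatches to icvWriteHaarClassifier()
*/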
2015
2016 static void*
2017 icvCloneHaarClassifier( const void* struct_ptr )
2018 {
2019     CvHaarClassifierCascade* cascade = NULL;
2020
2021     int i, j, k, n;
2022     const CvHaarClassifierCascade* cascade_src =
2023         (const CvHaarClassifierCascade*) struct_ptr;
2024
2025     n = cascade_src->count;
2026     cascade = icvCreateHaarClassifierCascade(n);
2027     cascade->orig_window_size = cascade_src->orig_window_size;
2028
2029     for( i = 0; i < n; ++i )
2030     {
2031         cascade->stage_classifier[i].parent = cascade_src->stage_classifier[i].parent;
2032         cascade->stage_classifier[i].next = cascade_src->stage_classifier[i].next;
2033         cascade->stage_classifier[i].child = cascade_src->stage_classifier[i].child;
2034         cascade->stage_classifier[i].threshold = cascade_src->stage_classifier[i].threshold;
2035
2036         cascade->stage_classifier[i].count = 0;
2037         cascade->stage_classifier[i].classifier =
2038             (CvHaarClassifier*) cvAlloc( cascade_src->stage_classifier[i].count
2039                 * sizeof( cascade->stage_classifier[i].classifier[0] ) );
2040
2041         cascade->stage_classifier[i].count = cascade_src->stage_classifier[i].count;
2042
2043         for( j = 0; j < cascade->stage_classifier[i].count; ++j )
2044             cascade->stage_classifier[i].classifier[j].haar_feature = NULL;
2045
2046         for( j = 0; j < cascade->stage_classifier[i].count; ++j )
2047         {
2048             const CvHaarClassifier* classifier_src =
2049                 &cascade_src->stage_classifier[i].classifier[j];
2050             CvHaarClassifier* classifier =
2051                 &cascade->stage_classifier[i].classifier[j];
2052
2053             classifier->count = classifier_src->count;
2054             classifier->haar_feature = (CvHaarFeature*) cvAlloc(
2055                 classifier->count * ( sizeof( *classifier->haar_feature ) +
2056                                       sizeof( *classifier->threshold ) +
2057                                       sizeof( *classifier->left ) +
2058                                       sizeof( *classifier->right ) ) +
2059                 (classifier->count + 1) * sizeof( *classifier->alpha ) );
2060             classifier->threshold = (float*) (classifier->haar_feature+classifier->count);
2061             classifier->left = (int*) (classifier->threshold + classifier->count);
2062             classifier->right = (int*) (classifier->left + classifier->count);
2063             classifier->alpha = (float*) (classifier->right + classifier->count);
2064             for( k = 0; k < classifier->count; ++k )
2065             {
2066                 classifier->haar_feature[k] = classifier_src->haar_feature[k];
2067                 classifier->threshold[k] = classifier_src->threshold[k];
2068                 classifier->left[k] = classifier_src->left[k];
2069                 classifier->right[k] = classifier_src->right[k];
2070                 classifier->alpha[k] = classifier_src->alpha[k];
2071             }
2072             classifier->alpha[classifier->count] =
2073                 classifier_src->alpha[classifier->count];
2074         }
2075     }
2076
2077     return cascade;
2078 }
2079
2080
2081 CvType haar_type( CV_TYPE_NAME_HAAR, icvIsHaarClassifier,
2082                   (CvReleaseFunc)cvReleaseHaarClassifierCascade,
2083                   icvReadHaarClassifier, icvWriteHaarClassifier,
2084                   icvCloneHaarClassifier );
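/* Registering the CvType above lets the generic C persistence/management calls
   dispatch on the CV_TYPE_NAME_HAAR tag, so cvLoad()/cvSave() use the reader and
   writer defined in this file, and cvClone()/cvRelease() end up in
   icvCloneHaarClassifier()/cvReleaseHaarClassifierCascade().  Sketch (assuming
   "cascade" was obtained from cvLoad() or cvLoadHaarClassifierCascade()):

       CvHaarClassifierCascade* copy = (CvHaarClassifierCascade*)cvClone( cascade );
       cvRelease( (void**)&copy );
*/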
2085
2086 #if 0
2087 namespace cv
2088 {
2089
2090 HaarClassifierCascade::HaarClassifierCascade() {}
2091 HaarClassifierCascade::HaarClassifierCascade(const String& filename)
2092 { load(filename); }
2093     
2094 bool HaarClassifierCascade::load(const String& filename)
2095 {
2096     cascade = Ptr<CvHaarClassifierCascade>((CvHaarClassifierCascade*)cvLoad(filename.c_str(), 0, 0, 0));
2097     return (CvHaarClassifierCascade*)cascade != 0;
2098 }
2099
2100 void HaarClassifierCascade::detectMultiScale( const Mat& image,
2101                        Vector<Rect>& objects, double scaleFactor,
2102                        int minNeighbors, int flags,
2103                        Size minSize )
2104 {
2105     MemStorage storage(cvCreateMemStorage(0));
2106     CvMat _image = image;
2107     CvSeq* _objects = cvHaarDetectObjects( &_image, cascade, storage, scaleFactor,
2108                                            minNeighbors, flags, minSize );
2109     Seq<Rect>(_objects).copyTo(objects);
2110 }
2111
2112 int HaarClassifierCascade::runAt(Point pt, int startStage, int) const
2113 {
2114     return cvRunHaarClassifierCascade(cascade, pt, startStage);
2115 }
2116
2117 void HaarClassifierCascade::setImages( const Mat& sum, const Mat& sqsum,
2118                                        const Mat& tilted, double scale )
2119 {
2120     CvMat _sum = sum, _sqsum = sqsum, _tilted = tilted;
2121     cvSetImagesForHaarClassifierCascade( cascade, &_sum, &_sqsum, &_tilted, scale );
2122 }
2123
2124 }
2125 #endif
2126
2127 /* End of file. */