1 /*M///////////////////////////////////////////////////////////////////////////////////////
2 //
3 //  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
4 //
5 //  By downloading, copying, installing or using the software you agree to this license.
6 //  If you do not agree to this license, do not download, install,
7 //  copy or use the software.
8 //
9 //
10 //                        Intel License Agreement
11 //                For Open Source Computer Vision Library
12 //
13 // Copyright (C) 2000, Intel Corporation, all rights reserved.
14 // Third party copyrights are property of their respective owners.
15 //
16 // Redistribution and use in source and binary forms, with or without modification,
17 // are permitted provided that the following conditions are met:
18 //
19 //   * Redistribution's of source code must retain the above copyright notice,
20 //     this list of conditions and the following disclaimer.
21 //
22 //   * Redistribution's in binary form must reproduce the above copyright notice,
23 //     this list of conditions and the following disclaimer in the documentation
24 //     and/or other materials provided with the distribution.
25 //
26 //   * The name of Intel Corporation may not be used to endorse or promote products
27 //     derived from this software without specific prior written permission.
28 //
29 // This software is provided by the copyright holders and contributors "as is" and
30 // any express or implied warranties, including, but not limited to, the implied
31 // warranties of merchantability and fitness for a particular purpose are disclaimed.
32 // In no event shall the Intel Corporation or contributors be liable for any direct,
33 // indirect, incidental, special, exemplary, or consequential damages
34 // (including, but not limited to, procurement of substitute goods or services;
35 // loss of use, data, or profits; or business interruption) however caused
36 // and on any theory of liability, whether in contract, strict liability,
37 // or tort (including negligence or otherwise) arising in any way out of
38 // the use of this software, even if advised of the possibility of such damage.
39 //
40 //M*/
41
42 /* Haar features calculation */
43
44 #include "precomp.hpp"
45 #include <stdio.h>
46
47 /*#if CV_SSE2
48 #   if CV_SSE4 || defined __SSE4__
49 #       include <smmintrin.h>
50 #   else
51 #       define _mm_blendv_pd(a, b, m) _mm_xor_pd(a, _mm_and_pd(_mm_xor_pd(b, a), m))
52 #       define _mm_blendv_ps(a, b, m) _mm_xor_ps(a, _mm_and_ps(_mm_xor_ps(b, a), m))
53 #   endif
54 #if defined CV_ICC
55 #   define CV_HAAR_USE_SSE 1
56 #endif
57 #endif*/
58
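/* Note (illustrative, not used by the detection code): the disabled fallback
   macros above emulate SSE4.1 blendv via the bitwise select identity
   a ^ ((b ^ a) & m), which yields b where the mask bits are set and a where
   they are clear -- exactly what is needed for lane-wide compare masks.
   A minimal sketch of the same identity on plain integers; the helper name
   is hypothetical: */
static inline unsigned long long icv_example_bit_select( unsigned long long a,
                                                         unsigned long long b,
                                                         unsigned long long mask )
{
    /* equivalent to (a & ~mask) | (b & mask) */
    return a ^ ((b ^ a) & mask);
}
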
59 /* these settings affect the quality of detection: change with care */
60 #define CV_ADJUST_FEATURES 1
61 #define CV_ADJUST_WEIGHTS  0
62
63 typedef int sumtype;
64 typedef double sqsumtype;
65
66 typedef struct CvHidHaarFeature
67 {
68     struct
69     {
70         sumtype *p0, *p1, *p2, *p3;
71         float weight;
72     }
73     rect[CV_HAAR_FEATURE_MAX];
74 }
75 CvHidHaarFeature;
76
77
78 typedef struct CvHidHaarTreeNode
79 {
80     CvHidHaarFeature feature;
81     float threshold;
82     int left;
83     int right;
84 }
85 CvHidHaarTreeNode;
86
87
88 typedef struct CvHidHaarClassifier
89 {
90     int count;
91     //CvHaarFeature* orig_feature;
92     CvHidHaarTreeNode* node;
93     float* alpha;
94 }
95 CvHidHaarClassifier;
96
97
98 typedef struct CvHidHaarStageClassifier
99 {
100     int  count;
101     float threshold;
102     CvHidHaarClassifier* classifier;
103     int two_rects;
104
105     struct CvHidHaarStageClassifier* next;
106     struct CvHidHaarStageClassifier* child;
107     struct CvHidHaarStageClassifier* parent;
108 }
109 CvHidHaarStageClassifier;
110
111
112 struct CvHidHaarClassifierCascade
113 {
114     int  count;
115     int  isStumpBased;
116     int  has_tilted_features;
117     int  is_tree;
118     double inv_window_area;
119     CvMat sum, sqsum, tilted;
120     CvHidHaarStageClassifier* stage_classifier;
121     sqsumtype *pq0, *pq1, *pq2, *pq3;
122     sumtype *p0, *p1, *p2, *p3;
123
124     void** ipp_stages;
125 };
126
127
128 const int icv_object_win_border = 1;
129 const float icv_stage_threshold_bias = 0.0001f;
130
131 static CvHaarClassifierCascade*
132 icvCreateHaarClassifierCascade( int stage_count )
133 {
134     CvHaarClassifierCascade* cascade = 0;
135
136     int block_size = sizeof(*cascade) + stage_count*sizeof(*cascade->stage_classifier);
137
138     if( stage_count <= 0 )
139         CV_Error( CV_StsOutOfRange, "Number of stages should be positive" );
140
141     cascade = (CvHaarClassifierCascade*)cvAlloc( block_size );
142     memset( cascade, 0, block_size );
143
144     cascade->stage_classifier = (CvHaarStageClassifier*)(cascade + 1);
145     cascade->flags = CV_HAAR_MAGIC_VAL;
146     cascade->count = stage_count;
147
148     return cascade;
149 }
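
/* Illustrative sketch (not called by the library): icvCreateHaarClassifierCascade
   returns a single allocation holding the header followed immediately by the
   stage array, so one cvFree releases both. The helper name below is
   hypothetical: */
static void icv_example_cascade_layout( void )
{
    CvHaarClassifierCascade* cascade = icvCreateHaarClassifierCascade( 4 );

    /* the stage array starts right past the header within the same block */
    assert( (char*)cascade->stage_classifier == (char*)(cascade + 1) );
    assert( cascade->count == 4 && cascade->flags == CV_HAAR_MAGIC_VAL );

    cvFree( &cascade );   /* frees header and stages together */
}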
150
151 static void
152 icvReleaseHidHaarClassifierCascade( CvHidHaarClassifierCascade** _cascade )
153 {
154     if( _cascade && *_cascade )
155     {
156 #ifdef HAVE_IPP
157         CvHidHaarClassifierCascade* cascade = *_cascade;
158         if( cascade->ipp_stages )
159         {
160             int i;
161             for( i = 0; i < cascade->count; i++ )
162             {
163                 if( cascade->ipp_stages[i] )
164                     ippiHaarClassifierFree_32f( (IppiHaarClassifier_32f*)cascade->ipp_stages[i] );
165             }
166         }
167         cvFree( &cascade->ipp_stages );
168 #endif
169         cvFree( _cascade );
170     }
171 }
172
173 /* create more efficient internal representation of haar classifier cascade */
174 static CvHidHaarClassifierCascade*
175 icvCreateHidHaarClassifierCascade( CvHaarClassifierCascade* cascade )
176 {
177     CvRect* ipp_features = 0;
178     float *ipp_weights = 0, *ipp_thresholds = 0, *ipp_val1 = 0, *ipp_val2 = 0;
179     int* ipp_counts = 0;
180
181     CvHidHaarClassifierCascade* out = 0;
182
183     int i, j, k, l;
184     int datasize;
185     int total_classifiers = 0;
186     int total_nodes = 0;
187     char errorstr[1000];
188     CvHidHaarClassifier* haar_classifier_ptr;
189     CvHidHaarTreeNode* haar_node_ptr;
190     CvSize orig_window_size;
191     int has_tilted_features = 0;
192     int max_count = 0;
193
194     if( !CV_IS_HAAR_CLASSIFIER(cascade) )
195         CV_Error( !cascade ? CV_StsNullPtr : CV_StsBadArg, "Invalid classifier pointer" );
196
197     if( cascade->hid_cascade )
198         CV_Error( CV_StsError, "hid_cascade has been already created" );
199
200     if( !cascade->stage_classifier )
201         CV_Error( CV_StsNullPtr, "Null stage_classifier pointer" );
202
203     if( cascade->count <= 0 )
204         CV_Error( CV_StsOutOfRange, "Non-positive number of cascade stages" );
205
206     orig_window_size = cascade->orig_window_size;
207
208     /* check input structure correctness and calculate total memory size needed for
209        internal representation of the classifier cascade */
210     for( i = 0; i < cascade->count; i++ )
211     {
212         CvHaarStageClassifier* stage_classifier = cascade->stage_classifier + i;
213
214         if( !stage_classifier->classifier ||
215             stage_classifier->count <= 0 )
216         {
217             sprintf( errorstr, "header of the stage classifier #%d is invalid "
218                      "(has null pointers or non-positive classifier count)", i );
219             CV_Error( CV_StsError, errorstr );
220         }
221
222         max_count = MAX( max_count, stage_classifier->count );
223         total_classifiers += stage_classifier->count;
224
225         for( j = 0; j < stage_classifier->count; j++ )
226         {
227             CvHaarClassifier* classifier = stage_classifier->classifier + j;
228
229             total_nodes += classifier->count;
230             for( l = 0; l < classifier->count; l++ )
231             {
232                 for( k = 0; k < CV_HAAR_FEATURE_MAX; k++ )
233                 {
234                     if( classifier->haar_feature[l].rect[k].r.width )
235                     {
236                         CvRect r = classifier->haar_feature[l].rect[k].r;
237                         int tilted = classifier->haar_feature[l].tilted;
238                         has_tilted_features |= tilted != 0;
239                         if( r.width < 0 || r.height < 0 || r.y < 0 ||
240                             r.x + r.width > orig_window_size.width
241                             ||
242                             (!tilted &&
243                             (r.x < 0 || r.y + r.height > orig_window_size.height))
244                             ||
245                             (tilted && (r.x - r.height < 0 ||
246                             r.y + r.width + r.height > orig_window_size.height)))
247                         {
248                             sprintf( errorstr, "rectangle #%d of the classifier #%d of "
249                                      "the stage classifier #%d is not inside "
250                                      "the reference (original) cascade window", k, j, i );
251                             CV_Error( CV_StsError, errorstr );
252                         }
253                     }
254                 }
255             }
256         }
257     }
258
259     // this is an upper boundary for the whole hidden cascade size
260     datasize = sizeof(CvHidHaarClassifierCascade) +
261                sizeof(CvHidHaarStageClassifier)*cascade->count +
262                sizeof(CvHidHaarClassifier) * total_classifiers +
263                sizeof(CvHidHaarTreeNode) * total_nodes +
264                sizeof(void*)*(total_nodes + total_classifiers);
265
266     out = (CvHidHaarClassifierCascade*)cvAlloc( datasize );
267     memset( out, 0, sizeof(*out) );
268
269     /* init header */
270     out->count = cascade->count;
271     out->stage_classifier = (CvHidHaarStageClassifier*)(out + 1);
272     haar_classifier_ptr = (CvHidHaarClassifier*)(out->stage_classifier + cascade->count);
273     haar_node_ptr = (CvHidHaarTreeNode*)(haar_classifier_ptr + total_classifiers);
274
275     out->isStumpBased = 1;
276     out->has_tilted_features = has_tilted_features;
277     out->is_tree = 0;
278
279     /* initialize internal representation */
280     for( i = 0; i < cascade->count; i++ )
281     {
282         CvHaarStageClassifier* stage_classifier = cascade->stage_classifier + i;
283         CvHidHaarStageClassifier* hid_stage_classifier = out->stage_classifier + i;
284
285         hid_stage_classifier->count = stage_classifier->count;
286         hid_stage_classifier->threshold = stage_classifier->threshold - icv_stage_threshold_bias;
287         hid_stage_classifier->classifier = haar_classifier_ptr;
288         hid_stage_classifier->two_rects = 1;
289         haar_classifier_ptr += stage_classifier->count;
290
291         hid_stage_classifier->parent = (stage_classifier->parent == -1)
292             ? NULL : out->stage_classifier + stage_classifier->parent;
293         hid_stage_classifier->next = (stage_classifier->next == -1)
294             ? NULL : out->stage_classifier + stage_classifier->next;
295         hid_stage_classifier->child = (stage_classifier->child == -1)
296             ? NULL : out->stage_classifier + stage_classifier->child;
297
298         out->is_tree |= hid_stage_classifier->next != NULL;
299
300         for( j = 0; j < stage_classifier->count; j++ )
301         {
302             CvHaarClassifier* classifier = stage_classifier->classifier + j;
303             CvHidHaarClassifier* hid_classifier = hid_stage_classifier->classifier + j;
304             int node_count = classifier->count;
305             float* alpha_ptr = (float*)(haar_node_ptr + node_count);
306
307             hid_classifier->count = node_count;
308             hid_classifier->node = haar_node_ptr;
309             hid_classifier->alpha = alpha_ptr;
310
311             for( l = 0; l < node_count; l++ )
312             {
313                 CvHidHaarTreeNode* node = hid_classifier->node + l;
314                 CvHaarFeature* feature = classifier->haar_feature + l;
315                 memset( node, -1, sizeof(*node) );
316                 node->threshold = classifier->threshold[l];
317                 node->left = classifier->left[l];
318                 node->right = classifier->right[l];
319
320                 if( fabs(feature->rect[2].weight) < DBL_EPSILON ||
321                     feature->rect[2].r.width == 0 ||
322                     feature->rect[2].r.height == 0 )
323                     memset( &(node->feature.rect[2]), 0, sizeof(node->feature.rect[2]) );
324                 else
325                     hid_stage_classifier->two_rects = 0;
326             }
327
328             memcpy( alpha_ptr, classifier->alpha, (node_count+1)*sizeof(alpha_ptr[0]));
329             haar_node_ptr =
330                 (CvHidHaarTreeNode*)cvAlignPtr(alpha_ptr+node_count+1, sizeof(void*));
331
332             out->isStumpBased &= node_count == 1;
333         }
334     }
335
336 #ifdef HAVE_IPP
337     int can_use_ipp = !out->has_tilted_features && !out->is_tree && out->isStumpBased;
338
339     if( can_use_ipp )
340     {
341         int ipp_datasize = cascade->count*sizeof(out->ipp_stages[0]);
342         float ipp_weight_scale=(float)(1./((orig_window_size.width-icv_object_win_border*2)*
343             (orig_window_size.height-icv_object_win_border*2)));
344
345         out->ipp_stages = (void**)cvAlloc( ipp_datasize );
346         memset( out->ipp_stages, 0, ipp_datasize );
347
348         ipp_features = (CvRect*)cvAlloc( max_count*3*sizeof(ipp_features[0]) );
349         ipp_weights = (float*)cvAlloc( max_count*3*sizeof(ipp_weights[0]) );
350         ipp_thresholds = (float*)cvAlloc( max_count*sizeof(ipp_thresholds[0]) );
351         ipp_val1 = (float*)cvAlloc( max_count*sizeof(ipp_val1[0]) );
352         ipp_val2 = (float*)cvAlloc( max_count*sizeof(ipp_val2[0]) );
353         ipp_counts = (int*)cvAlloc( max_count*sizeof(ipp_counts[0]) );
354
355         for( i = 0; i < cascade->count; i++ )
356         {
357             CvHaarStageClassifier* stage_classifier = cascade->stage_classifier + i;
358             for( j = 0, k = 0; j < stage_classifier->count; j++ )
359             {
360                 CvHaarClassifier* classifier = stage_classifier->classifier + j;
361                 int rect_count = 2 + (classifier->haar_feature->rect[2].r.width != 0);
362
363                 ipp_thresholds[j] = classifier->threshold[0];
364                 ipp_val1[j] = classifier->alpha[0];
365                 ipp_val2[j] = classifier->alpha[1];
366                 ipp_counts[j] = rect_count;
367
368                 for( l = 0; l < rect_count; l++, k++ )
369                 {
370                     ipp_features[k] = classifier->haar_feature->rect[l].r;
371                     //ipp_features[k].y = orig_window_size.height - ipp_features[k].y - ipp_features[k].height;
372                     ipp_weights[k] = classifier->haar_feature->rect[l].weight*ipp_weight_scale;
373                 }
374             }
375
376             if( ippiHaarClassifierInitAlloc_32f( (IppiHaarClassifier_32f**)&out->ipp_stages[i],
377                 (const IppiRect*)ipp_features, ipp_weights, ipp_thresholds,
378                 ipp_val1, ipp_val2, ipp_counts, stage_classifier->count ) < 0 )
379                 break;
380         }
381
382         if( i < cascade->count )
383         {
384             for( j = 0; j < i; j++ )
385                 if( out->ipp_stages[j] )
386                     ippiHaarClassifierFree_32f( (IppiHaarClassifier_32f*)out->ipp_stages[j] );
387             cvFree( &out->ipp_stages );
388         }
389     }
390 #endif
391
392     cascade->hid_cascade = out;
393     assert( (char*)haar_node_ptr - (char*)out <= datasize );
394
395     cvFree( &ipp_features );
396     cvFree( &ipp_weights );
397     cvFree( &ipp_thresholds );
398     cvFree( &ipp_val1 );
399     cvFree( &ipp_val2 );
400     cvFree( &ipp_counts );
401
402     return out;
403 }
404
405
406 #define sum_elem_ptr(sum,row,col)  \
407     ((sumtype*)CV_MAT_ELEM_PTR_FAST((sum),(row),(col),sizeof(sumtype)))
408
409 #define sqsum_elem_ptr(sqsum,row,col)  \
410     ((sqsumtype*)CV_MAT_ELEM_PTR_FAST((sqsum),(row),(col),sizeof(sqsumtype)))
411
412 #define calc_sum(rect,offset) \
413     ((rect).p0[offset] - (rect).p1[offset] - (rect).p2[offset] + (rect).p3[offset])
414
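/* Illustrative helper (hypothetical, not used below): calc_sum relies on the
   summed-area-table identity -- for corner pointers p0 (top-left), p1
   (top-right), p2 (bottom-left), p3 (bottom-right) of a rectangle in the
   integral image, p0 - p1 - p2 + p3 is the sum of the pixels inside it.
   The same arithmetic written out against an explicit CV_32SC1 sum matrix: */
static inline sumtype icv_example_rect_sum( const CvMat* sum, CvRect r )
{
    const sumtype* p0 = sum_elem_ptr( *sum, r.y, r.x );
    const sumtype* p1 = sum_elem_ptr( *sum, r.y, r.x + r.width );
    const sumtype* p2 = sum_elem_ptr( *sum, r.y + r.height, r.x );
    const sumtype* p3 = sum_elem_ptr( *sum, r.y + r.height, r.x + r.width );
    return *p0 - *p1 - *p2 + *p3;   /* same expression as calc_sum with offset 0 */
}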
415
416 CV_IMPL void
417 cvSetImagesForHaarClassifierCascade( CvHaarClassifierCascade* _cascade,
418                                      const CvArr* _sum,
419                                      const CvArr* _sqsum,
420                                      const CvArr* _tilted_sum,
421                                      double scale )
422 {
423     CvMat sum_stub, *sum = (CvMat*)_sum;
424     CvMat sqsum_stub, *sqsum = (CvMat*)_sqsum;
425     CvMat tilted_stub, *tilted = (CvMat*)_tilted_sum;
426     CvHidHaarClassifierCascade* cascade;
427     int coi0 = 0, coi1 = 0;
428     int i;
429     CvRect equRect;
430     double weight_scale;
431
432     if( !CV_IS_HAAR_CLASSIFIER(_cascade) )
433         CV_Error( !_cascade ? CV_StsNullPtr : CV_StsBadArg, "Invalid classifier pointer" );
434
435     if( scale <= 0 )
436         CV_Error( CV_StsOutOfRange, "Scale must be positive" );
437
438     sum = cvGetMat( sum, &sum_stub, &coi0 );
439     sqsum = cvGetMat( sqsum, &sqsum_stub, &coi1 );
440
441     if( coi0 || coi1 )
442         CV_Error( CV_BadCOI, "COI is not supported" );
443
444     if( !CV_ARE_SIZES_EQ( sum, sqsum ))
445         CV_Error( CV_StsUnmatchedSizes, "All integral images must have the same size" );
446
447     if( CV_MAT_TYPE(sqsum->type) != CV_64FC1 ||
448         CV_MAT_TYPE(sum->type) != CV_32SC1 )
449         CV_Error( CV_StsUnsupportedFormat,
450         "Only (32s, 64f, 32s) combination of (sum,sqsum,tilted_sum) formats is allowed" );
451
452     if( !_cascade->hid_cascade )
453         icvCreateHidHaarClassifierCascade(_cascade);
454
455     cascade = _cascade->hid_cascade;
456
457     if( cascade->has_tilted_features )
458     {
459         tilted = cvGetMat( tilted, &tilted_stub, &coi1 );
460
461         if( CV_MAT_TYPE(tilted->type) != CV_32SC1 )
462             CV_Error( CV_StsUnsupportedFormat,
463             "Only (32s, 64f, 32s) combination of (sum,sqsum,tilted_sum) formats is allowed" );
464
465         if( sum->step != tilted->step )
466             CV_Error( CV_StsUnmatchedSizes,
467             "Sum and tilted_sum must have the same stride (step, widthStep)" );
468
469         if( !CV_ARE_SIZES_EQ( sum, tilted ))
470             CV_Error( CV_StsUnmatchedSizes, "All integral images must have the same size" );
471         cascade->tilted = *tilted;
472     }
473
474     _cascade->scale = scale;
475     _cascade->real_window_size.width = cvRound( _cascade->orig_window_size.width * scale );
476     _cascade->real_window_size.height = cvRound( _cascade->orig_window_size.height * scale );
477
478     cascade->sum = *sum;
479     cascade->sqsum = *sqsum;
480
481     equRect.x = equRect.y = cvRound(scale);
482     equRect.width = cvRound((_cascade->orig_window_size.width-2)*scale);
483     equRect.height = cvRound((_cascade->orig_window_size.height-2)*scale);
484     weight_scale = 1./(equRect.width*equRect.height);
485     cascade->inv_window_area = weight_scale;
486
487     cascade->p0 = sum_elem_ptr(*sum, equRect.y, equRect.x);
488     cascade->p1 = sum_elem_ptr(*sum, equRect.y, equRect.x + equRect.width );
489     cascade->p2 = sum_elem_ptr(*sum, equRect.y + equRect.height, equRect.x );
490     cascade->p3 = sum_elem_ptr(*sum, equRect.y + equRect.height,
491                                      equRect.x + equRect.width );
492
493     cascade->pq0 = sqsum_elem_ptr(*sqsum, equRect.y, equRect.x);
494     cascade->pq1 = sqsum_elem_ptr(*sqsum, equRect.y, equRect.x + equRect.width );
495     cascade->pq2 = sqsum_elem_ptr(*sqsum, equRect.y + equRect.height, equRect.x );
496     cascade->pq3 = sqsum_elem_ptr(*sqsum, equRect.y + equRect.height,
497                                           equRect.x + equRect.width );
498
499     /* init pointers in haar features according to real window size and
500        given image pointers */
501     for( i = 0; i < _cascade->count; i++ )
502     {
503         int j, k, l;
504         for( j = 0; j < cascade->stage_classifier[i].count; j++ )
505         {
506             for( l = 0; l < cascade->stage_classifier[i].classifier[j].count; l++ )
507             {
508                 CvHaarFeature* feature =
509                     &_cascade->stage_classifier[i].classifier[j].haar_feature[l];
510                 /* CvHidHaarClassifier* classifier =
511                     cascade->stage_classifier[i].classifier + j; */
512                 CvHidHaarFeature* hidfeature =
513                     &cascade->stage_classifier[i].classifier[j].node[l].feature;
514                 double sum0 = 0, area0 = 0;
515                 CvRect r[3];
516
517                 int base_w = -1, base_h = -1;
518                 int new_base_w = 0, new_base_h = 0;
519                 int kx, ky;
520                 int flagx = 0, flagy = 0;
521                 int x0 = 0, y0 = 0;
522                 int nr;
523
524                 /* align blocks */
525                 for( k = 0; k < CV_HAAR_FEATURE_MAX; k++ )
526                 {
527                     if( !hidfeature->rect[k].p0 )
528                         break;
529                     r[k] = feature->rect[k].r;
530                     base_w = (int)CV_IMIN( (unsigned)base_w, (unsigned)(r[k].width-1) );
531                     base_w = (int)CV_IMIN( (unsigned)base_w, (unsigned)(r[k].x - r[0].x-1) );
532                     base_h = (int)CV_IMIN( (unsigned)base_h, (unsigned)(r[k].height-1) );
533                     base_h = (int)CV_IMIN( (unsigned)base_h, (unsigned)(r[k].y - r[0].y-1) );
534                 }
535
536                 nr = k;
537
538                 base_w += 1;
539                 base_h += 1;
540                 kx = r[0].width / base_w;
541                 ky = r[0].height / base_h;
542
543                 if( kx <= 0 )
544                 {
545                     flagx = 1;
546                     new_base_w = cvRound( r[0].width * scale ) / kx;
547                     x0 = cvRound( r[0].x * scale );
548                 }
549
550                 if( ky <= 0 )
551                 {
552                     flagy = 1;
553                     new_base_h = cvRound( r[0].height * scale ) / ky;
554                     y0 = cvRound( r[0].y * scale );
555                 }
556
557                 for( k = 0; k < nr; k++ )
558                 {
559                     CvRect tr;
560                     double correction_ratio;
561
562                     if( flagx )
563                     {
564                         tr.x = (r[k].x - r[0].x) * new_base_w / base_w + x0;
565                         tr.width = r[k].width * new_base_w / base_w;
566                     }
567                     else
568                     {
569                         tr.x = cvRound( r[k].x * scale );
570                         tr.width = cvRound( r[k].width * scale );
571                     }
572
573                     if( flagy )
574                     {
575                         tr.y = (r[k].y - r[0].y) * new_base_h / base_h + y0;
576                         tr.height = r[k].height * new_base_h / base_h;
577                     }
578                     else
579                     {
580                         tr.y = cvRound( r[k].y * scale );
581                         tr.height = cvRound( r[k].height * scale );
582                     }
583
584 #if CV_ADJUST_WEIGHTS
585                     {
586                     // RAINER START
587                     const float orig_feature_size =  (float)(feature->rect[k].r.width)*feature->rect[k].r.height;
588                     const float orig_norm_size = (float)(_cascade->orig_window_size.width)*(_cascade->orig_window_size.height);
589                     const float feature_size = float(tr.width*tr.height);
590                     //const float normSize    = float(equRect.width*equRect.height);
591                     float target_ratio = orig_feature_size / orig_norm_size;
592                     //float isRatio = featureSize / normSize;
593                     //correctionRatio = targetRatio / isRatio / normSize;
594                     correction_ratio = target_ratio / feature_size;
595                     // RAINER END
596                     }
597 #else
598                     correction_ratio = weight_scale * (!feature->tilted ? 1 : 0.5);
599 #endif
600
601                     if( !feature->tilted )
602                     {
603                         hidfeature->rect[k].p0 = sum_elem_ptr(*sum, tr.y, tr.x);
604                         hidfeature->rect[k].p1 = sum_elem_ptr(*sum, tr.y, tr.x + tr.width);
605                         hidfeature->rect[k].p2 = sum_elem_ptr(*sum, tr.y + tr.height, tr.x);
606                         hidfeature->rect[k].p3 = sum_elem_ptr(*sum, tr.y + tr.height, tr.x + tr.width);
607                     }
608                     else
609                     {
610                         hidfeature->rect[k].p2 = sum_elem_ptr(*tilted, tr.y + tr.width, tr.x + tr.width);
611                         hidfeature->rect[k].p3 = sum_elem_ptr(*tilted, tr.y + tr.width + tr.height,
612                                                               tr.x + tr.width - tr.height);
613                         hidfeature->rect[k].p0 = sum_elem_ptr(*tilted, tr.y, tr.x);
614                         hidfeature->rect[k].p1 = sum_elem_ptr(*tilted, tr.y + tr.height, tr.x - tr.height);
615                     }
616
617                     hidfeature->rect[k].weight = (float)(feature->rect[k].weight * correction_ratio);
618
619                     if( k == 0 )
620                         area0 = tr.width * tr.height;
621                     else
622                         sum0 += hidfeature->rect[k].weight * tr.width * tr.height;
623                 }
624
625                 hidfeature->rect[0].weight = (float)(-sum0/area0);
626             } /* l */
627         } /* j */
628     }
629 }
630
631
632 CV_INLINE
633 double icvEvalHidHaarClassifier( CvHidHaarClassifier* classifier,
634                                  double variance_norm_factor,
635                                  size_t p_offset )
636 {
637     int idx = 0;
638     do
639     {
640         CvHidHaarTreeNode* node = classifier->node + idx;
641         double t = node->threshold * variance_norm_factor;
642
643         double sum = calc_sum(node->feature.rect[0],p_offset) * node->feature.rect[0].weight;
644         sum += calc_sum(node->feature.rect[1],p_offset) * node->feature.rect[1].weight;
645
646         if( node->feature.rect[2].p0 )
647             sum += calc_sum(node->feature.rect[2],p_offset) * node->feature.rect[2].weight;
648
649         idx = sum < t ? node->left : node->right;
650     }
651     while( idx > 0 );
652     return classifier->alpha[-idx];
653 }
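
/* For reference (illustrative only): node->left and node->right hold either the
   positive index of the next tree node or a non-positive value whose negation
   indexes the leaf array, which is why the loop above exits on idx <= 0 and
   returns classifier->alpha[-idx]. A stump (count == 1) therefore reduces to a
   single comparison; the hypothetical helper below spells out that special
   case, assuming the usual stump layout where the two leaves are alpha[0] and
   alpha[1] (the isStumpBased fast path further down makes the same assumption): */
static inline double icv_example_eval_stump( const CvHidHaarClassifier* classifier,
                                             double variance_norm_factor,
                                             size_t p_offset )
{
    const CvHidHaarTreeNode* node = classifier->node;   /* single node */
    double t = node->threshold * variance_norm_factor;

    double sum = calc_sum(node->feature.rect[0],p_offset) * node->feature.rect[0].weight;
    sum += calc_sum(node->feature.rect[1],p_offset) * node->feature.rect[1].weight;
    if( node->feature.rect[2].p0 )
        sum += calc_sum(node->feature.rect[2],p_offset) * node->feature.rect[2].weight;

    return classifier->alpha[sum >= t];   /* alpha[0] below threshold, alpha[1] otherwise */
}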
654
655
656 CV_IMPL int
657 cvRunHaarClassifierCascadeSum( const CvHaarClassifierCascade* _cascade,
658                                CvPoint pt, double& stage_sum, int start_stage )
659 {
660     int result = -1;
661
662     int p_offset, pq_offset;
663     int i, j;
664     double mean, variance_norm_factor;
665     CvHidHaarClassifierCascade* cascade;
666
667     if( !CV_IS_HAAR_CLASSIFIER(_cascade) )
668         CV_Error( !_cascade ? CV_StsNullPtr : CV_StsBadArg, "Invalid cascade pointer" );
669
670     cascade = _cascade->hid_cascade;
671     if( !cascade )
672         CV_Error( CV_StsNullPtr, "Hidden cascade has not been created.\n"
673             "Use cvSetImagesForHaarClassifierCascade" );
674
675     if( pt.x < 0 || pt.y < 0 ||
676         pt.x + _cascade->real_window_size.width >= cascade->sum.width ||
677         pt.y + _cascade->real_window_size.height >= cascade->sum.height )
678         return -1;
679
680     p_offset = pt.y * (cascade->sum.step/sizeof(sumtype)) + pt.x;
681     pq_offset = pt.y * (cascade->sqsum.step/sizeof(sqsumtype)) + pt.x;
682     mean = calc_sum(*cascade,p_offset)*cascade->inv_window_area;
683     variance_norm_factor = cascade->pq0[pq_offset] - cascade->pq1[pq_offset] -
684                            cascade->pq2[pq_offset] + cascade->pq3[pq_offset];
685     variance_norm_factor = variance_norm_factor*cascade->inv_window_area - mean*mean;
686     if( variance_norm_factor >= 0. )
687         variance_norm_factor = sqrt(variance_norm_factor);
688     else
689         variance_norm_factor = 1.;
690
691     if( cascade->is_tree )
692     {
693         CvHidHaarStageClassifier* ptr;
694         assert( start_stage == 0 );
695
696         result = 1;
697         ptr = cascade->stage_classifier;
698
699         while( ptr )
700         {
701             stage_sum = 0.0;
702
703             for( j = 0; j < ptr->count; j++ )
704             {
705                 stage_sum += icvEvalHidHaarClassifier( ptr->classifier + j,
706                     variance_norm_factor, p_offset );
707             }
708
709             if( stage_sum >= ptr->threshold )
710             {
711                 ptr = ptr->child;
712             }
713             else
714             {
715                 while( ptr && ptr->next == NULL ) ptr = ptr->parent;
716                 if( ptr == NULL )
717                     return 0;
718                 ptr = ptr->next;
719             }
720         }
721     }
722     else if( cascade->isStumpBased )
723     {
724         for( i = start_stage; i < cascade->count; i++ )
725         {
726 #ifndef CV_HAAR_USE_SSE
727             stage_sum = 0.0;
728 #else
729             __m128d stage_sum = _mm_setzero_pd();
730 #endif
731
732             if( cascade->stage_classifier[i].two_rects )
733             {
734                 for( j = 0; j < cascade->stage_classifier[i].count; j++ )
735                 {
736                     CvHidHaarClassifier* classifier = cascade->stage_classifier[i].classifier + j;
737                     CvHidHaarTreeNode* node = classifier->node;
738 #ifndef CV_HAAR_USE_SSE
739                     double t = node->threshold*variance_norm_factor;
740                     double sum = calc_sum(node->feature.rect[0],p_offset) * node->feature.rect[0].weight;
741                     sum += calc_sum(node->feature.rect[1],p_offset) * node->feature.rect[1].weight;
742                     stage_sum += classifier->alpha[sum >= t];
743 #else
744                     // ayasin - NHM perf optim. Avoid use of costly flaky jcc
745                     __m128d t = _mm_set_sd(node->threshold*variance_norm_factor);
746                     __m128d a = _mm_set_sd(classifier->alpha[0]);
747                     __m128d b = _mm_set_sd(classifier->alpha[1]);
748                     __m128d sum = _mm_set_sd(calc_sum(node->feature.rect[0],p_offset) * node->feature.rect[0].weight +
749                                              calc_sum(node->feature.rect[1],p_offset) * node->feature.rect[1].weight);
750                     t = _mm_cmpgt_sd(t, sum);
751                     stage_sum = _mm_add_sd(stage_sum, _mm_blendv_pd(b, a, t));
752 #endif
753                 }
754             }
755             else
756             {
757                 for( j = 0; j < cascade->stage_classifier[i].count; j++ )
758                 {
759                     CvHidHaarClassifier* classifier = cascade->stage_classifier[i].classifier + j;
760                     CvHidHaarTreeNode* node = classifier->node;
761 #ifndef CV_HAAR_USE_SSE
762                     double t = node->threshold*variance_norm_factor;
763                     double sum = calc_sum(node->feature.rect[0],p_offset) * node->feature.rect[0].weight;
764                     sum += calc_sum(node->feature.rect[1],p_offset) * node->feature.rect[1].weight;
765                     if( node->feature.rect[2].p0 )
766                         sum += calc_sum(node->feature.rect[2],p_offset) * node->feature.rect[2].weight;
767                     
768                     stage_sum += classifier->alpha[sum >= t];
769 #else
770                     // ayasin - NHM perf optim. Avoid use of costly flaky jcc
771                     __m128d t = _mm_set_sd(node->threshold*variance_norm_factor);
772                     __m128d a = _mm_set_sd(classifier->alpha[0]);
773                     __m128d b = _mm_set_sd(classifier->alpha[1]);
774                     double _sum = calc_sum(node->feature.rect[0],p_offset) * node->feature.rect[0].weight;
775                     _sum += calc_sum(node->feature.rect[1],p_offset) * node->feature.rect[1].weight;
776                     if( node->feature.rect[2].p0 )
777                         _sum += calc_sum(node->feature.rect[2],p_offset) * node->feature.rect[2].weight;
778                     __m128d sum = _mm_set_sd(_sum);
779                     
780                     t = _mm_cmpgt_sd(t, sum);
781                     stage_sum = _mm_add_sd(stage_sum, _mm_blendv_pd(b, a, t));
782 #endif
783                 }
784             }
785
786 #ifndef CV_HAAR_USE_SSE
787             if( stage_sum < cascade->stage_classifier[i].threshold )
788 #else
789             __m128d i_threshold = _mm_set_sd(cascade->stage_classifier[i].threshold);
790             if( _mm_comilt_sd(stage_sum, i_threshold) )
791 #endif
792                 return -i;
793         }
794     }
795     else
796     {
797         for( i = start_stage; i < cascade->count; i++ )
798         {
799             stage_sum = 0.0;
800
801             for( j = 0; j < cascade->stage_classifier[i].count; j++ )
802             {
803                 stage_sum += icvEvalHidHaarClassifier(
804                     cascade->stage_classifier[i].classifier + j,
805                     variance_norm_factor, p_offset );
806             }
807
808             if( stage_sum < cascade->stage_classifier[i].threshold )
809                 return -i;
810         }
811     }
812     return 1;
813 }
814
815 CV_IMPL int
816 cvRunHaarClassifierCascade( const CvHaarClassifierCascade* _cascade,
817                             CvPoint pt, int start_stage )
818 {
819     double stage_sum;
820     return cvRunHaarClassifierCascadeSum(_cascade, pt, stage_sum, start_stage);
821 }
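
/* Hedged usage sketch (illustrative, not called in this file): driving the
   cascade by hand at a single scale. The real scanning loops, with image
   scaling, Canny pruning and rectangle grouping, live in
   cvHaarDetectObjectsForROC below. Variable names here are placeholders. */
static void icv_example_scan_single_scale( CvHaarClassifierCascade* cascade,
                                           const CvMat* gray /* 8u, single channel */ )
{
    CvMat* sum    = cvCreateMat( gray->rows + 1, gray->cols + 1, CV_32SC1 );
    CvMat* sqsum  = cvCreateMat( gray->rows + 1, gray->cols + 1, CV_64FC1 );
    CvMat* tilted = cvCreateMat( gray->rows + 1, gray->cols + 1, CV_32SC1 );

    cvIntegral( gray, sum, sqsum, tilted );
    cvSetImagesForHaarClassifierCascade( cascade, sum, sqsum, tilted, 1. );

    for( int y = 0; y + cascade->real_window_size.height < gray->rows; y += 2 )
        for( int x = 0; x + cascade->real_window_size.width < gray->cols; x += 2 )
        {
            if( cvRunHaarClassifierCascade( cascade, cvPoint(x, y), 0 ) > 0 )
            {
                /* the window at (x, y) passed every stage */
            }
        }

    cvReleaseMat( &tilted );
    cvReleaseMat( &sqsum );
    cvReleaseMat( &sum );
}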
822
823 namespace cv
824 {
825
826 struct HaarDetectObjects_ScaleImage_Invoker
827 {
828     HaarDetectObjects_ScaleImage_Invoker( const CvHaarClassifierCascade* _cascade,
829                                           int _stripSize, double _factor,
830                                           const Mat& _sum1, const Mat& _sqsum1, Mat* _norm1,
831                                           Mat* _mask1, Rect _equRect, ConcurrentRectVector& _vec, 
832                                           std::vector<int>& _levels, std::vector<double>& _weights,
833                                           bool _outputLevels  )
834     {
835         cascade = _cascade;
836         stripSize = _stripSize;
837         factor = _factor;
838         sum1 = _sum1;
839         sqsum1 = _sqsum1;
840         norm1 = _norm1;
841         mask1 = _mask1;
842         equRect = _equRect;
843         vec = &_vec;
844         rejectLevels = _outputLevels ? &_levels : 0;
845         levelWeights = _outputLevels ? &_weights : 0;
846     }
847     
848     void operator()( const BlockedRange& range ) const
849     {
850         Size winSize0 = cascade->orig_window_size;
851         Size winSize(cvRound(winSize0.width*factor), cvRound(winSize0.height*factor));
852         int y1 = range.begin()*stripSize, y2 = min(range.end()*stripSize, sum1.rows - 1 - winSize0.height);
853         
854         if (y2 <= y1 || sum1.cols <= 1 + winSize0.width)
855             return;
856         
857         Size ssz(sum1.cols - 1 - winSize0.width, y2 - y1);
858         int x, y, ystep = factor > 2 ? 1 : 2;
859         
860     #ifdef HAVE_IPP
861         if( cascade->hid_cascade->ipp_stages )
862         {
863             IppiRect iequRect = {equRect.x, equRect.y, equRect.width, equRect.height};
864             ippiRectStdDev_32f_C1R(sum1.ptr<float>(y1), sum1.step,
865                                    sqsum1.ptr<double>(y1), sqsum1.step,
866                                    norm1->ptr<float>(y1), norm1->step,
867                                    ippiSize(ssz.width, ssz.height), iequRect );
868             
869             int positive = (ssz.width/ystep)*((ssz.height + ystep-1)/ystep);
870
871             if( ystep == 1 )
872                 (*mask1) = Scalar::all(1);
873             else
874                 for( y = y1; y < y2; y++ )
875                 {
876                     uchar* mask1row = mask1->ptr(y);
877                     memset( mask1row, 0, ssz.width );
878                     
879                     if( y % ystep == 0 )
880                         for( x = 0; x < ssz.width; x += ystep )
881                             mask1row[x] = (uchar)1;
882                 }
883             
884             for( int j = 0; j < cascade->count; j++ )
885             {
886                 if( ippiApplyHaarClassifier_32f_C1R(
887                             sum1.ptr<float>(y1), sum1.step,
888                             norm1->ptr<float>(y1), norm1->step,
889                             mask1->ptr<uchar>(y1), mask1->step,
890                             ippiSize(ssz.width, ssz.height), &positive,
891                             cascade->hid_cascade->stage_classifier[j].threshold,
892                             (IppiHaarClassifier_32f*)cascade->hid_cascade->ipp_stages[j]) < 0 )
893                     positive = 0;
894                 if( positive <= 0 )
895                     break;
896             }
897             
898             if( positive > 0 )
899                 for( y = y1; y < y2; y += ystep )
900                 {
901                     uchar* mask1row = mask1->ptr(y);
902                     for( x = 0; x < ssz.width; x += ystep )
903                         if( mask1row[x] != 0 )
904                         {
905                             vec->push_back(Rect(cvRound(x*factor), cvRound(y*factor),
906                                                 winSize.width, winSize.height));
907                             if( --positive == 0 )
908                                 break;
909                         }
910                     if( positive == 0 )
911                         break;
912                 }
913         }
914         else
915 #endif
916             for( y = y1; y < y2; y += ystep )
917                 for( x = 0; x < ssz.width; x += ystep )
918                 {
919                     double gypWeight;
920                     int result = cvRunHaarClassifierCascadeSum( cascade, cvPoint(x,y), gypWeight, 0 );
921                     if( rejectLevels )
922                     {
923                         if( result == 1 )
924                             result = -1*cascade->count;
925                         if( cascade->count + result < 4 )
926                         {
927                             vec->push_back(Rect(cvRound(x*factor), cvRound(y*factor),
928                                            winSize.width, winSize.height));
929                             rejectLevels->push_back(-result);
930                             levelWeights->push_back(gypWeight);
931                         }
932                     }
933                     else
934                     {
935                         if( result > 0 )
936                             vec->push_back(Rect(cvRound(x*factor), cvRound(y*factor),
937                                            winSize.width, winSize.height)); 
938                     }
939                 }
940     }
941     
942     const CvHaarClassifierCascade* cascade;
943     int stripSize;
944     double factor;
945     Mat sum1, sqsum1, *norm1, *mask1;
946     Rect equRect;
947     ConcurrentRectVector* vec;
948     std::vector<int>* rejectLevels;
949     std::vector<double>* levelWeights;
950 };
951     
952
953 struct HaarDetectObjects_ScaleCascade_Invoker
954 {
955     HaarDetectObjects_ScaleCascade_Invoker( const CvHaarClassifierCascade* _cascade,
956                                             Size _winsize, const Range& _xrange, double _ystep,
957                                             size_t _sumstep, const int** _p, const int** _pq,
958                                             ConcurrentRectVector& _vec )
959     {
960         cascade = _cascade;
961         winsize = _winsize;
962         xrange = _xrange;
963         ystep = _ystep;
964         sumstep = _sumstep;
965         p = _p; pq = _pq;
966         vec = &_vec;
967     }
968     
969     void operator()( const BlockedRange& range ) const
970     {
971         int iy, startY = range.begin(), endY = range.end();
972         const int *p0 = p[0], *p1 = p[1], *p2 = p[2], *p3 = p[3];
973         const int *pq0 = pq[0], *pq1 = pq[1], *pq2 = pq[2], *pq3 = pq[3];
974         bool doCannyPruning = p0 != 0;
975         int sstep = (int)(sumstep/sizeof(p0[0]));
976         
977         for( iy = startY; iy < endY; iy++ )
978         {
979             int ix, y = cvRound(iy*ystep), ixstep = 1;
980             for( ix = xrange.start; ix < xrange.end; ix += ixstep )
981             {
982                 int x = cvRound(ix*ystep); // it should really be ystep, not ixstep
983                 
984                 if( doCannyPruning )
985                 {
986                     int offset = y*sstep + x;
987                     int s = p0[offset] - p1[offset] - p2[offset] + p3[offset];
988                     int sq = pq0[offset] - pq1[offset] - pq2[offset] + pq3[offset];
989                     if( s < 100 || sq < 20 )
990                     {
991                         ixstep = 2;
992                         continue;
993                     }
994                 }
995                 
996                 int result = cvRunHaarClassifierCascade( cascade, cvPoint(x, y), 0 );
997                 if( result > 0 )
998                     vec->push_back(Rect(x, y, winsize.width, winsize.height));
999                 ixstep = result != 0 ? 1 : 2;
1000             }
1001         }
1002     }
1003     
1004     const CvHaarClassifierCascade* cascade;
1005     double ystep;
1006     size_t sumstep;
1007     Size winsize;
1008     Range xrange;
1009     const int** p;
1010     const int** pq;
1011     ConcurrentRectVector* vec;
1012 };
1013     
1014     
1015 }
1016     
1017
1018 CvSeq*
1019 cvHaarDetectObjectsForROC( const CvArr* _img, 
1020                      CvHaarClassifierCascade* cascade, CvMemStorage* storage,
1021                      std::vector<int>& rejectLevels, std::vector<double>& levelWeights,
1022                      double scaleFactor, int minNeighbors, int flags, 
1023                      CvSize minSize, CvSize maxSize, bool outputRejectLevels )
1024 {
1025     const double GROUP_EPS = 0.2;
1026     CvMat stub, *img = (CvMat*)_img;
1027     cv::Ptr<CvMat> temp, sum, tilted, sqsum, normImg, sumcanny, imgSmall;
1028     CvSeq* result_seq = 0;
1029     cv::Ptr<CvMemStorage> temp_storage;
1030
1031     cv::ConcurrentRectVector allCandidates;
1032     std::vector<cv::Rect> rectList;
1033     std::vector<int> rweights;
1034     double factor;
1035     int coi;
1036     bool doCannyPruning = (flags & CV_HAAR_DO_CANNY_PRUNING) != 0;
1037     bool findBiggestObject = (flags & CV_HAAR_FIND_BIGGEST_OBJECT) != 0;
1038     bool roughSearch = (flags & CV_HAAR_DO_ROUGH_SEARCH) != 0;
1039
1040     if( !CV_IS_HAAR_CLASSIFIER(cascade) )
1041         CV_Error( !cascade ? CV_StsNullPtr : CV_StsBadArg, "Invalid classifier cascade" );
1042
1043     if( !storage )
1044         CV_Error( CV_StsNullPtr, "Null storage pointer" );
1045
1046     img = cvGetMat( img, &stub, &coi );
1047     if( coi )
1048         CV_Error( CV_BadCOI, "COI is not supported" );
1049
1050     if( CV_MAT_DEPTH(img->type) != CV_8U )
1051         CV_Error( CV_StsUnsupportedFormat, "Only 8-bit images are supported" );
1052     
1053     if( scaleFactor <= 1 )
1054         CV_Error( CV_StsOutOfRange, "scale factor must be > 1" );
1055
1056     if( findBiggestObject )
1057         flags &= ~CV_HAAR_SCALE_IMAGE;
1058     
1059     if( maxSize.height == 0 || maxSize.width == 0 )
1060     {
1061         maxSize.height = img->rows;
1062         maxSize.width = img->cols;
1063     }
1064
1065     temp = cvCreateMat( img->rows, img->cols, CV_8UC1 );
1066     sum = cvCreateMat( img->rows + 1, img->cols + 1, CV_32SC1 );
1067     sqsum = cvCreateMat( img->rows + 1, img->cols + 1, CV_64FC1 );
1068
1069     if( !cascade->hid_cascade )
1070         icvCreateHidHaarClassifierCascade(cascade);
1071
1072     if( cascade->hid_cascade->has_tilted_features )
1073         tilted = cvCreateMat( img->rows + 1, img->cols + 1, CV_32SC1 );
1074
1075     result_seq = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvAvgComp), storage );
1076
1077     if( CV_MAT_CN(img->type) > 1 )
1078     {
1079         cvCvtColor( img, temp, CV_BGR2GRAY );
1080         img = temp;
1081     }
1082
1083     if( findBiggestObject )
1084         flags &= ~(CV_HAAR_SCALE_IMAGE|CV_HAAR_DO_CANNY_PRUNING);
1085
1086     if( flags & CV_HAAR_SCALE_IMAGE )
1087     {
1088         CvSize winSize0 = cascade->orig_window_size;
1089 #ifdef HAVE_IPP
1090         int use_ipp = cascade->hid_cascade->ipp_stages != 0;
1091
1092         if( use_ipp )
1093             normImg = cvCreateMat( img->rows, img->cols, CV_32FC1 );
1094 #endif
1095         imgSmall = cvCreateMat( img->rows + 1, img->cols + 1, CV_8UC1 );
1096
1097         for( factor = 1; ; factor *= scaleFactor )
1098         {
1099             CvSize winSize = { cvRound(winSize0.width*factor),
1100                                 cvRound(winSize0.height*factor) };
1101             CvSize sz = { cvRound( img->cols/factor ), cvRound( img->rows/factor ) };
1102             CvSize sz1 = { sz.width - winSize0.width + 1, sz.height - winSize0.height + 1 };
1103
1104             CvRect equRect = { icv_object_win_border, icv_object_win_border,
1105                 winSize0.width - icv_object_win_border*2,
1106                 winSize0.height - icv_object_win_border*2 };
1107
1108             CvMat img1, sum1, sqsum1, norm1, tilted1, mask1;
1109             CvMat* _tilted = 0;
1110
1111             if( sz1.width <= 0 || sz1.height <= 0 )
1112                 break;
1113             if( winSize.width > maxSize.width || winSize.height > maxSize.height )
1114                 break;
1115             if( winSize.width < minSize.width || winSize.height < minSize.height )
1116                 continue;
1117
1118             img1 = cvMat( sz.height, sz.width, CV_8UC1, imgSmall->data.ptr );
1119             sum1 = cvMat( sz.height+1, sz.width+1, CV_32SC1, sum->data.ptr );
1120             sqsum1 = cvMat( sz.height+1, sz.width+1, CV_64FC1, sqsum->data.ptr );
1121             if( tilted )
1122             {
1123                 tilted1 = cvMat( sz.height+1, sz.width+1, CV_32SC1, tilted->data.ptr );
1124                 _tilted = &tilted1;
1125             }
1126             norm1 = cvMat( sz1.height, sz1.width, CV_32FC1, normImg ? normImg->data.ptr : 0 );
1127             mask1 = cvMat( sz1.height, sz1.width, CV_8UC1, temp->data.ptr );
1128
1129             cvResize( img, &img1, CV_INTER_LINEAR );
1130             cvIntegral( &img1, &sum1, &sqsum1, _tilted );
1131
1132             int ystep = factor > 2 ? 1 : 2;
1133         #ifdef HAVE_TBB
1134             const int LOCS_PER_THREAD = 1000;
1135             int stripCount = ((sz1.width/ystep)*(sz1.height + ystep-1)/ystep + LOCS_PER_THREAD/2)/LOCS_PER_THREAD;
1136             stripCount = std::min(std::max(stripCount, 1), 100);
1137         #else
1138             const int stripCount = 1;
1139         #endif
1140             
1141 #ifdef HAVE_IPP
1142             if( use_ipp )
1143             {
1144                 cv::Mat fsum(sum1.rows, sum1.cols, CV_32F, sum1.data.ptr, sum1.step);
1145                 cv::Mat(&sum1).convertTo(fsum, CV_32F, 1, -(1<<24));
1146             }
1147             else
1148 #endif
1149                 cvSetImagesForHaarClassifierCascade( cascade, &sum1, &sqsum1, _tilted, 1. );            
1150             
1151             cv::Mat _norm1(&norm1), _mask1(&mask1);
1152             cv::parallel_for(cv::BlockedRange(0, stripCount),
1153                          cv::HaarDetectObjects_ScaleImage_Invoker(cascade,
1154                                 (((sz1.height + stripCount - 1)/stripCount + ystep-1)/ystep)*ystep,
1155                                 factor, cv::Mat(&sum1), cv::Mat(&sqsum1), &_norm1, &_mask1,
1156                                 cv::Rect(equRect), allCandidates, rejectLevels, levelWeights, outputRejectLevels));
1157         }
1158     }
1159     else
1160     {
1161         int n_factors = 0;
1162         cv::Rect scanROI;
1163
1164         cvIntegral( img, sum, sqsum, tilted );
1165
1166         if( doCannyPruning )
1167         {
1168             sumcanny = cvCreateMat( img->rows + 1, img->cols + 1, CV_32SC1 );
1169             cvCanny( img, temp, 0, 50, 3 );
1170             cvIntegral( temp, sumcanny );
1171         }
1172
1173         for( n_factors = 0, factor = 1;
1174              factor*cascade->orig_window_size.width < img->cols - 10 &&
1175              factor*cascade->orig_window_size.height < img->rows - 10;
1176              n_factors++, factor *= scaleFactor )
1177             ;
1178
1179         if( findBiggestObject )
1180         {
1181             scaleFactor = 1./scaleFactor;
1182             factor *= scaleFactor;
1183         }
1184         else
1185             factor = 1;
1186
1187         for( ; n_factors-- > 0; factor *= scaleFactor )
1188         {
1189             const double ystep = std::max( 2., factor );
1190             CvSize winSize = { cvRound( cascade->orig_window_size.width * factor ),
1191                                 cvRound( cascade->orig_window_size.height * factor )};
1192             CvRect equRect = { 0, 0, 0, 0 };
1193             int *p[4] = {0,0,0,0};
1194             int *pq[4] = {0,0,0,0};
1195             int startX = 0, startY = 0;
1196             int endX = cvRound((img->cols - winSize.width) / ystep);
1197             int endY = cvRound((img->rows - winSize.height) / ystep);
1198
1199             if( winSize.width < minSize.width || winSize.height < minSize.height )
1200             {
1201                 if( findBiggestObject )
1202                     break;
1203                 continue;
1204             }
1205
1206             cvSetImagesForHaarClassifierCascade( cascade, sum, sqsum, tilted, factor );
1207             cvZero( temp );
1208
1209             if( doCannyPruning )
1210             {
1211                 equRect.x = cvRound(winSize.width*0.15);
1212                 equRect.y = cvRound(winSize.height*0.15);
1213                 equRect.width = cvRound(winSize.width*0.7);
1214                 equRect.height = cvRound(winSize.height*0.7);
1215
1216                 p[0] = (int*)(sumcanny->data.ptr + equRect.y*sumcanny->step) + equRect.x;
1217                 p[1] = (int*)(sumcanny->data.ptr + equRect.y*sumcanny->step)
1218                             + equRect.x + equRect.width;
1219                 p[2] = (int*)(sumcanny->data.ptr + (equRect.y + equRect.height)*sumcanny->step) + equRect.x;
1220                 p[3] = (int*)(sumcanny->data.ptr + (equRect.y + equRect.height)*sumcanny->step)
1221                             + equRect.x + equRect.width;
1222
1223                 pq[0] = (int*)(sum->data.ptr + equRect.y*sum->step) + equRect.x;
1224                 pq[1] = (int*)(sum->data.ptr + equRect.y*sum->step)
1225                             + equRect.x + equRect.width;
1226                 pq[2] = (int*)(sum->data.ptr + (equRect.y + equRect.height)*sum->step) + equRect.x;
1227                 pq[3] = (int*)(sum->data.ptr + (equRect.y + equRect.height)*sum->step)
1228                             + equRect.x + equRect.width;
1229             }
1230
1231             if( scanROI.area() > 0 )
1232             {
1233                 //adjust start_height and stop_height
1234                 startY = cvRound(scanROI.y / ystep);
1235                 endY = cvRound((scanROI.y + scanROI.height - winSize.height) / ystep);
1236
1237                 startX = cvRound(scanROI.x / ystep);
1238                 endX = cvRound((scanROI.x + scanROI.width - winSize.width) / ystep);
1239             }
1240
1241             cv::parallel_for(cv::BlockedRange(startY, endY),
1242                 cv::HaarDetectObjects_ScaleCascade_Invoker(cascade, winSize, cv::Range(startX, endX),
1243                                                            ystep, sum->step, (const int**)p,
1244                                                            (const int**)pq, allCandidates ));
1245
1246             if( findBiggestObject && !allCandidates.empty() && scanROI.area() == 0 )
1247             {
1248                 rectList.resize(allCandidates.size());
1249                 std::copy(allCandidates.begin(), allCandidates.end(), rectList.begin());
1250                 
1251                 groupRectangles(rectList, std::max(minNeighbors, 1), GROUP_EPS);
1252                 
1253                 if( !rectList.empty() )
1254                 {
1255                     size_t i, sz = rectList.size();
1256                     cv::Rect maxRect;
1257                     
1258                     for( i = 0; i < sz; i++ )
1259                     {
1260                         if( rectList[i].area() > maxRect.area() )
1261                             maxRect = rectList[i];
1262                     }
1263                     
1264                     allCandidates.push_back(maxRect);
1265                     
1266                     scanROI = maxRect;
1267                     int dx = cvRound(maxRect.width*GROUP_EPS);
1268                     int dy = cvRound(maxRect.height*GROUP_EPS);
1269                     scanROI.x = std::max(scanROI.x - dx, 0);
1270                     scanROI.y = std::max(scanROI.y - dy, 0);
1271                     scanROI.width = std::min(scanROI.width + dx*2, img->cols-1-scanROI.x);
1272                     scanROI.height = std::min(scanROI.height + dy*2, img->rows-1-scanROI.y);
1273                 
1274                     double minScale = roughSearch ? 0.6 : 0.4;
1275                     minSize.width = cvRound(maxRect.width*minScale);
1276                     minSize.height = cvRound(maxRect.height*minScale);
1277                 }
1278             }
1279         }
1280     }
1281
1282     rectList.resize(allCandidates.size());
1283     if(!allCandidates.empty())
1284         std::copy(allCandidates.begin(), allCandidates.end(), rectList.begin());
1285     
1286     if( minNeighbors != 0 || findBiggestObject )
1287     {
1288         if( outputRejectLevels )
1289         {
1290             groupRectangles(rectList, rejectLevels, levelWeights, minNeighbors, GROUP_EPS );
1291         }
1292         else
1293         {
1294             groupRectangles(rectList, rweights, std::max(minNeighbors, 1), GROUP_EPS);
1295         }
1296     }
1297     else
1298         rweights.resize(rectList.size(),0);
1299         
1300     if( findBiggestObject && rectList.size() )
1301     {
1302         CvAvgComp result_comp = {{0,0,0,0},0};
1303         
1304         for( size_t i = 0; i < rectList.size(); i++ )
1305         {
1306             cv::Rect r = rectList[i];
1307             if( r.area() > cv::Rect(result_comp.rect).area() )
1308             {
1309                 result_comp.rect = r;
1310                 result_comp.neighbors = rweights[i];
1311             }
1312         }
1313         cvSeqPush( result_seq, &result_comp );
1314     }
1315     else
1316     {
1317         for( size_t i = 0; i < rectList.size(); i++ )
1318         {
1319             CvAvgComp c;
1320             c.rect = rectList[i];
1321             c.neighbors = !rweights.empty() ? rweights[i] : 0;
1322             cvSeqPush( result_seq, &c );
1323         }
1324     }
1325
1326     return result_seq;
1327 }
1328
1329 CV_IMPL CvSeq*
1330 cvHaarDetectObjects( const CvArr* _img, 
1331                      CvHaarClassifierCascade* cascade, CvMemStorage* storage,
1332                      double scaleFactor,
1333                      int minNeighbors, int flags, CvSize minSize, CvSize maxSize )
1334 {
1335     std::vector<int> fakeLevels;
1336     std::vector<double> fakeWeights;
1337     return cvHaarDetectObjectsForROC( _img, cascade, storage, fakeLevels, fakeWeights, 
1338                                 scaleFactor, minNeighbors, flags, minSize, maxSize, false );
1339
1340 }
1341
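/* Hedged end-to-end usage sketch (illustrative only): load a trained cascade
   with cvLoad and run cvHaarDetectObjects on a grayscale image. The helper
   name and parameter values (1.1 scale step, 3 neighbours, 30x30 minimum
   window) are placeholders, not recommendations from this file. */
static void icv_example_detect_objects( const char* cascade_path, const CvArr* gray_img )
{
    CvHaarClassifierCascade* cascade =
        (CvHaarClassifierCascade*)cvLoad( cascade_path, 0, 0, 0 );
    CvMemStorage* storage = cvCreateMemStorage(0);

    if( cascade )
    {
        CvSeq* objects = cvHaarDetectObjects( gray_img, cascade, storage, 1.1, 3,
                                              CV_HAAR_SCALE_IMAGE,
                                              cvSize(30, 30), cvSize(0, 0) );
        for( int i = 0; i < (objects ? objects->total : 0); i++ )
        {
            CvAvgComp* comp = (CvAvgComp*)cvGetSeqElem( objects, i );
            (void)comp;   /* comp->rect is the detection, comp->neighbors its support */
        }
        cvReleaseHaarClassifierCascade( &cascade );
    }

    cvReleaseMemStorage( &storage );
}
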
1342
1343 static CvHaarClassifierCascade*
1344 icvLoadCascadeCART( const char** input_cascade, int n, CvSize orig_window_size )
1345 {
1346     int i;
1347     CvHaarClassifierCascade* cascade = icvCreateHaarClassifierCascade(n);
1348     cascade->orig_window_size = orig_window_size;
1349
1350     for( i = 0; i < n; i++ )
1351     {
1352         int j, count, l;
1353         float threshold = 0;
1354         const char* stage = input_cascade[i];
1355         int dl = 0;
1356
1357         /* tree links */
1358         int parent = -1;
1359         int next = -1;
1360
1361         sscanf( stage, "%d%n", &count, &dl );
1362         stage += dl;
1363
1364         assert( count > 0 );
1365         cascade->stage_classifier[i].count = count;
1366         cascade->stage_classifier[i].classifier =
1367             (CvHaarClassifier*)cvAlloc( count*sizeof(cascade->stage_classifier[i].classifier[0]));
1368
1369         for( j = 0; j < count; j++ )
1370         {
1371             CvHaarClassifier* classifier = cascade->stage_classifier[i].classifier + j;
1372             int k, rects = 0;
1373             char str[100];
1374
1375             sscanf( stage, "%d%n", &classifier->count, &dl );
1376             stage += dl;
1377
1378             classifier->haar_feature = (CvHaarFeature*) cvAlloc(
1379                 classifier->count * ( sizeof( *classifier->haar_feature ) +
1380                                       sizeof( *classifier->threshold ) +
1381                                       sizeof( *classifier->left ) +
1382                                       sizeof( *classifier->right ) ) +
1383                 (classifier->count + 1) * sizeof( *classifier->alpha ) );
1384             classifier->threshold = (float*) (classifier->haar_feature+classifier->count);
1385             classifier->left = (int*) (classifier->threshold + classifier->count);
1386             classifier->right = (int*) (classifier->left + classifier->count);
1387             classifier->alpha = (float*) (classifier->right + classifier->count);
1388
1389             for( l = 0; l < classifier->count; l++ )
1390             {
1391                 sscanf( stage, "%d%n", &rects, &dl );
1392                 stage += dl;
1393
1394                 assert( rects >= 2 && rects <= CV_HAAR_FEATURE_MAX );
1395
1396                 for( k = 0; k < rects; k++ )
1397                 {
1398                     CvRect r;
1399                     int band = 0;
1400                     sscanf( stage, "%d%d%d%d%d%f%n",
1401                             &r.x, &r.y, &r.width, &r.height, &band,
1402                             &(classifier->haar_feature[l].rect[k].weight), &dl );
1403                     stage += dl;
1404                     classifier->haar_feature[l].rect[k].r = r;
1405                 }
1406                 sscanf( stage, "%99s%n", str, &dl );
1407                 stage += dl;
1408
1409                 classifier->haar_feature[l].tilted = strncmp( str, "tilted", 6 ) == 0;
1410
1411                 for( k = rects; k < CV_HAAR_FEATURE_MAX; k++ )
1412                 {
1413                     memset( classifier->haar_feature[l].rect + k, 0,
1414                             sizeof(classifier->haar_feature[l].rect[k]) );
1415                 }
1416
1417                 sscanf( stage, "%f%d%d%n", &(classifier->threshold[l]),
1418                                        &(classifier->left[l]),
1419                                        &(classifier->right[l]), &dl );
1420                 stage += dl;
1421             }
1422             for( l = 0; l <= classifier->count; l++ )
1423             {
1424                 sscanf( stage, "%f%n", &(classifier->alpha[l]), &dl );
1425                 stage += dl;
1426             }
1427         }
1428
1429         sscanf( stage, "%f%n", &threshold, &dl );
1430         stage += dl;
1431
1432         cascade->stage_classifier[i].threshold = threshold;
1433
1434         /* load tree links */
1435         if( sscanf( stage, "%d%d%n", &parent, &next, &dl ) != 2 )
1436         {
1437             parent = i - 1;
1438             next = -1;
            dl = 0; /* no links were parsed: do not advance the read position */
1439         }
1440         stage += dl;
1441
1442         cascade->stage_classifier[i].parent = parent;
1443         cascade->stage_classifier[i].next = next;
1444         cascade->stage_classifier[i].child = -1;
1445
1446         if( parent != -1 && cascade->stage_classifier[parent].child == -1 )
1447         {
1448             cascade->stage_classifier[parent].child = i;
1449         }
1450     }
1451
1452     return cascade;
1453 }
1454
1455 #ifndef _MAX_PATH
1456 #define _MAX_PATH 1024
1457 #endif
1458
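/* Loads a cascade either from a directory that contains the numbered stage files
   "<stage>/AdaBoostCARTHaarClassifier.txt" (the layout produced by the original
   haartraining tool) or, when no such files are found and the path has no trailing
   slash, from a single XML/YAML file via cvLoad(). */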
1459 CV_IMPL CvHaarClassifierCascade*
1460 cvLoadHaarClassifierCascade( const char* directory, CvSize orig_window_size )
1461 {
1462     const char** input_cascade = 0;
1463     CvHaarClassifierCascade *cascade = 0;
1464
1465     int i, n;
1466     const char* slash;
1467     char name[_MAX_PATH];
1468     int size = 0;
1469     char* ptr = 0;
1470
1471     if( !directory )
1472         CV_Error( CV_StsNullPtr, "Null path is passed" );
1473
1474     n = (int)strlen(directory)-1;
1475     slash = directory[n] == '\\' || directory[n] == '/' ? "" : "/";
1476
1477     /* try to read the classifier from directory */
1478     for( n = 0; ; n++ )
1479     {
1480         sprintf( name, "%s%s%d/AdaBoostCARTHaarClassifier.txt", directory, slash, n );
1481         FILE* f = fopen( name, "rb" );
1482         if( !f )
1483             break;
1484         fseek( f, 0, SEEK_END );
1485         size += ftell( f ) + 1;
1486         fclose(f);
1487     }
1488
1489     if( n == 0 && slash[0] )
1490         return (CvHaarClassifierCascade*)cvLoad( directory );
1491
1492     if( n == 0 )
1493         CV_Error( CV_StsBadArg, "Invalid path" );
1494
1495     size += (n+1)*sizeof(char*);
1496     input_cascade = (const char**)cvAlloc( size );
1497     ptr = (char*)(input_cascade + n + 1);
1498
1499     for( i = 0; i < n; i++ )
1500     {
1501         sprintf( name, "%s%s%d/AdaBoostCARTHaarClassifier.txt", directory, slash, i );
1502         FILE* f = fopen( name, "rb" );
1503         if( !f )
1504             CV_Error( CV_StsError, "Cannot open one of the cascade stage files" );
1505         fseek( f, 0, SEEK_END );
1506         size = ftell( f );
1507         fseek( f, 0, SEEK_SET );
1508         size_t elements_read = fread( ptr, 1, size, f );
1509         fclose(f);
        CV_Assert( elements_read == (size_t)size );
1510         input_cascade[i] = ptr;
1511         ptr += size;
1512         *ptr++ = '\0';
1513     }
1514
1515     input_cascade[n] = 0;
1516     cascade = icvLoadCascadeCART( input_cascade, n, orig_window_size );
1517
1518     if( input_cascade )
1519         cvFree( &input_cascade );
1520
1521     return cascade;
1522 }
1523
1524
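/* Releases a cascade created by the loaders above: frees every classifier's
   feature/threshold/index/alpha block, the per-stage classifier arrays, the
   hidden (precomputed) cascade, and finally the cascade structure itself. */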
1525 CV_IMPL void
1526 cvReleaseHaarClassifierCascade( CvHaarClassifierCascade** _cascade )
1527 {
1528     if( _cascade && *_cascade )
1529     {
1530         int i, j;
1531         CvHaarClassifierCascade* cascade = *_cascade;
1532
1533         for( i = 0; i < cascade->count; i++ )
1534         {
1535             for( j = 0; j < cascade->stage_classifier[i].count; j++ )
1536                 cvFree( &cascade->stage_classifier[i].classifier[j].haar_feature );
1537             cvFree( &cascade->stage_classifier[i].classifier );
1538         }
1539         icvReleaseHidHaarClassifierCascade( &cascade->hid_cascade );
1540         cvFree( _cascade );
1541     }
1542 }
1543
1544
1545 /****************************************************************************************\
1546 *                                  Persistence functions                                 *
1547 \****************************************************************************************/
1548
1549 /* field names */
1550
1551 #define ICV_HAAR_SIZE_NAME              "size"
1552 #define ICV_HAAR_STAGES_NAME            "stages"
1553 #define ICV_HAAR_TREES_NAME             "trees"
1554 #define ICV_HAAR_FEATURE_NAME           "feature"
1555 #define ICV_HAAR_RECTS_NAME             "rects"
1556 #define ICV_HAAR_TILTED_NAME            "tilted"
1557 #define ICV_HAAR_THRESHOLD_NAME         "threshold"
1558 #define ICV_HAAR_LEFT_NODE_NAME         "left_node"
1559 #define ICV_HAAR_LEFT_VAL_NAME          "left_val"
1560 #define ICV_HAAR_RIGHT_NODE_NAME        "right_node"
1561 #define ICV_HAAR_RIGHT_VAL_NAME         "right_val"
1562 #define ICV_HAAR_STAGE_THRESHOLD_NAME   "stage_threshold"
1563 #define ICV_HAAR_PARENT_NAME            "parent"
1564 #define ICV_HAAR_NEXT_NAME              "next"
1565
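/* Type recognition callback: non-zero if struct_ptr points to a CvHaarClassifierCascade. */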
1566 static int
1567 icvIsHaarClassifier( const void* struct_ptr )
1568 {
1569     return CV_IS_HAAR_CLASSIFIER( struct_ptr );
1570 }
1571
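/* Reads a cascade from file storage (the stages/trees/features layout used by the
   bundled haarcascade_*.xml files), validating every node and reporting the stage,
   tree and node indices in the error message when something is malformed. */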
1572 static void*
1573 icvReadHaarClassifier( CvFileStorage* fs, CvFileNode* node )
1574 {
1575     CvHaarClassifierCascade* cascade = NULL;
1576
1577     char buf[256];
1578     CvFileNode* seq_fn = NULL; /* sequence */
1579     CvFileNode* fn = NULL;
1580     CvFileNode* stages_fn = NULL;
1581     CvSeqReader stages_reader;
1582     int n;
1583     int i, j, k, l;
1584     int parent, next;
1585
1586     stages_fn = cvGetFileNodeByName( fs, node, ICV_HAAR_STAGES_NAME );
1587     if( !stages_fn || !CV_NODE_IS_SEQ( stages_fn->tag) )
1588         CV_Error( CV_StsError, "Invalid stages node" );
1589
1590     n = stages_fn->data.seq->total;
1591     cascade = icvCreateHaarClassifierCascade(n);
1592
1593     /* read size */
1594     seq_fn = cvGetFileNodeByName( fs, node, ICV_HAAR_SIZE_NAME );
1595     if( !seq_fn || !CV_NODE_IS_SEQ( seq_fn->tag ) || seq_fn->data.seq->total != 2 )
1596         CV_Error( CV_StsError, "size node is not a valid sequence." );
1597     fn = (CvFileNode*) cvGetSeqElem( seq_fn->data.seq, 0 );
1598     if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i <= 0 )
1599         CV_Error( CV_StsError, "Invalid size node: width must be positive integer" );
1600     cascade->orig_window_size.width = fn->data.i;
1601     fn = (CvFileNode*) cvGetSeqElem( seq_fn->data.seq, 1 );
1602     if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i <= 0 )
1603         CV_Error( CV_StsError, "Invalid size node: height must be positive integer" );
1604     cascade->orig_window_size.height = fn->data.i;
1605
1606     cvStartReadSeq( stages_fn->data.seq, &stages_reader );
1607     for( i = 0; i < n; ++i )
1608     {
1609         CvFileNode* stage_fn;
1610         CvFileNode* trees_fn;
1611         CvSeqReader trees_reader;
1612
1613         stage_fn = (CvFileNode*) stages_reader.ptr;
1614         if( !CV_NODE_IS_MAP( stage_fn->tag ) )
1615         {
1616             sprintf( buf, "Invalid stage %d", i );
1617             CV_Error( CV_StsError, buf );
1618         }
1619
1620         trees_fn = cvGetFileNodeByName( fs, stage_fn, ICV_HAAR_TREES_NAME );
1621         if( !trees_fn || !CV_NODE_IS_SEQ( trees_fn->tag )
1622             || trees_fn->data.seq->total <= 0 )
1623         {
1624             sprintf( buf, "Trees node is not a valid sequence. (stage %d)", i );
1625             CV_Error( CV_StsError, buf );
1626         }
1627
1628         cascade->stage_classifier[i].classifier =
1629             (CvHaarClassifier*) cvAlloc( trees_fn->data.seq->total
1630                 * sizeof( cascade->stage_classifier[i].classifier[0] ) );
1631         for( j = 0; j < trees_fn->data.seq->total; ++j )
1632         {
1633             cascade->stage_classifier[i].classifier[j].haar_feature = NULL;
1634         }
1635         cascade->stage_classifier[i].count = trees_fn->data.seq->total;
1636
1637         cvStartReadSeq( trees_fn->data.seq, &trees_reader );
1638         for( j = 0; j < trees_fn->data.seq->total; ++j )
1639         {
1640             CvFileNode* tree_fn;
1641             CvSeqReader tree_reader;
1642             CvHaarClassifier* classifier;
1643             int last_idx;
1644
1645             classifier = &cascade->stage_classifier[i].classifier[j];
1646             tree_fn = (CvFileNode*) trees_reader.ptr;
1647             if( !CV_NODE_IS_SEQ( tree_fn->tag ) || tree_fn->data.seq->total <= 0 )
1648             {
1649                 sprintf( buf, "Tree node is not a valid sequence."
1650                          " (stage %d, tree %d)", i, j );
1651                 CV_Error( CV_StsError, buf );
1652             }
1653
1654             classifier->count = tree_fn->data.seq->total;
1655             classifier->haar_feature = (CvHaarFeature*) cvAlloc(
1656                 classifier->count * ( sizeof( *classifier->haar_feature ) +
1657                                       sizeof( *classifier->threshold ) +
1658                                       sizeof( *classifier->left ) +
1659                                       sizeof( *classifier->right ) ) +
1660                 (classifier->count + 1) * sizeof( *classifier->alpha ) );
1661             classifier->threshold = (float*) (classifier->haar_feature+classifier->count);
1662             classifier->left = (int*) (classifier->threshold + classifier->count);
1663             classifier->right = (int*) (classifier->left + classifier->count);
1664             classifier->alpha = (float*) (classifier->right + classifier->count);
1665
1666             cvStartReadSeq( tree_fn->data.seq, &tree_reader );
1667             for( k = 0, last_idx = 0; k < tree_fn->data.seq->total; ++k )
1668             {
1669                 CvFileNode* node_fn;
1670                 CvFileNode* feature_fn;
1671                 CvFileNode* rects_fn;
1672                 CvSeqReader rects_reader;
1673
1674                 node_fn = (CvFileNode*) tree_reader.ptr;
1675                 if( !CV_NODE_IS_MAP( node_fn->tag ) )
1676                 {
1677                     sprintf( buf, "Tree node %d is not a valid map. (stage %d, tree %d)",
1678                              k, i, j );
1679                     CV_Error( CV_StsError, buf );
1680                 }
1681                 feature_fn = cvGetFileNodeByName( fs, node_fn, ICV_HAAR_FEATURE_NAME );
1682                 if( !feature_fn || !CV_NODE_IS_MAP( feature_fn->tag ) )
1683                 {
1684                     sprintf( buf, "Feature node is not a valid map. "
1685                              "(stage %d, tree %d, node %d)", i, j, k );
1686                     CV_Error( CV_StsError, buf );
1687                 }
1688                 rects_fn = cvGetFileNodeByName( fs, feature_fn, ICV_HAAR_RECTS_NAME );
1689                 if( !rects_fn || !CV_NODE_IS_SEQ( rects_fn->tag )
1690                     || rects_fn->data.seq->total < 1
1691                     || rects_fn->data.seq->total > CV_HAAR_FEATURE_MAX )
1692                 {
1693                     sprintf( buf, "Rects node is not a valid sequence. "
1694                              "(stage %d, tree %d, node %d)", i, j, k );
1695                     CV_Error( CV_StsError, buf );
1696                 }
1697                 cvStartReadSeq( rects_fn->data.seq, &rects_reader );
1698                 for( l = 0; l < rects_fn->data.seq->total; ++l )
1699                 {
1700                     CvFileNode* rect_fn;
1701                     CvRect r;
1702
1703                     rect_fn = (CvFileNode*) rects_reader.ptr;
1704                     if( !CV_NODE_IS_SEQ( rect_fn->tag ) || rect_fn->data.seq->total != 5 )
1705                     {
1706                         sprintf( buf, "Rect %d is not a valid sequence. "
1707                                  "(stage %d, tree %d, node %d)", l, i, j, k );
1708                         CV_Error( CV_StsError, buf );
1709                     }
1710
1711                     fn = CV_SEQ_ELEM( rect_fn->data.seq, CvFileNode, 0 );
1712                     if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i < 0 )
1713                     {
1714                         sprintf( buf, "x coordinate must be non-negative integer. "
1715                                  "(stage %d, tree %d, node %d, rect %d)", i, j, k, l );
1716                         CV_Error( CV_StsError, buf );
1717                     }
1718                     r.x = fn->data.i;
1719                     fn = CV_SEQ_ELEM( rect_fn->data.seq, CvFileNode, 1 );
1720                     if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i < 0 )
1721                     {
1722                         sprintf( buf, "y coordinate must be non-negative integer. "
1723                                  "(stage %d, tree %d, node %d, rect %d)", i, j, k, l );
1724                         CV_Error( CV_StsError, buf );
1725                     }
1726                     r.y = fn->data.i;
1727                     fn = CV_SEQ_ELEM( rect_fn->data.seq, CvFileNode, 2 );
1728                     if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i <= 0
1729                         || r.x + fn->data.i > cascade->orig_window_size.width )
1730                     {
1731                         sprintf( buf, "width must be positive integer and "
1732                                  "(x + width) must not exceed window width. "
1733                                  "(stage %d, tree %d, node %d, rect %d)", i, j, k, l );
1734                         CV_Error( CV_StsError, buf );
1735                     }
1736                     r.width = fn->data.i;
1737                     fn = CV_SEQ_ELEM( rect_fn->data.seq, CvFileNode, 3 );
1738                     if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i <= 0
1739                         || r.y + fn->data.i > cascade->orig_window_size.height )
1740                     {
1741                         sprintf( buf, "height must be positive integer and "
1742                                  "(y + height) must not exceed window height. "
1743                                  "(stage %d, tree %d, node %d, rect %d)", i, j, k, l );
1744                         CV_Error( CV_StsError, buf );
1745                     }
1746                     r.height = fn->data.i;
1747                     fn = CV_SEQ_ELEM( rect_fn->data.seq, CvFileNode, 4 );
1748                     if( !CV_NODE_IS_REAL( fn->tag ) )
1749                     {
1750                         sprintf( buf, "weight must be real number. "
1751                                  "(stage %d, tree %d, node %d, rect %d)", i, j, k, l );
1752                         CV_Error( CV_StsError, buf );
1753                     }
1754
1755                     classifier->haar_feature[k].rect[l].weight = (float) fn->data.f;
1756                     classifier->haar_feature[k].rect[l].r = r;
1757
1758                     CV_NEXT_SEQ_ELEM( sizeof( *rect_fn ), rects_reader );
1759                 } /* for each rect */
1760                 for( l = rects_fn->data.seq->total; l < CV_HAAR_FEATURE_MAX; ++l )
1761                 {
1762                     classifier->haar_feature[k].rect[l].weight = 0;
1763                     classifier->haar_feature[k].rect[l].r = cvRect( 0, 0, 0, 0 );
1764                 }
1765
1766                 fn = cvGetFileNodeByName( fs, feature_fn, ICV_HAAR_TILTED_NAME);
1767                 if( !fn || !CV_NODE_IS_INT( fn->tag ) )
1768                 {
1769                     sprintf( buf, "tilted must be 0 or 1. "
1770                              "(stage %d, tree %d, node %d)", i, j, k );
1771                     CV_Error( CV_StsError, buf );
1772                 }
1773                 classifier->haar_feature[k].tilted = ( fn->data.i != 0 );
1774                 fn = cvGetFileNodeByName( fs, node_fn, ICV_HAAR_THRESHOLD_NAME);
1775                 if( !fn || !CV_NODE_IS_REAL( fn->tag ) )
1776                 {
1777                     sprintf( buf, "threshold must be real number. "
1778                              "(stage %d, tree %d, node %d)", i, j, k );
1779                     CV_Error( CV_StsError, buf );
1780                 }
1781                 classifier->threshold[k] = (float) fn->data.f;
1782                 fn = cvGetFileNodeByName( fs, node_fn, ICV_HAAR_LEFT_NODE_NAME);
1783                 if( fn )
1784                 {
1785                     if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i <= k
1786                         || fn->data.i >= tree_fn->data.seq->total )
1787                     {
1788                         sprintf( buf, "left node must be valid node number. "
1789                                  "(stage %d, tree %d, node %d)", i, j, k );
1790                         CV_Error( CV_StsError, buf );
1791                     }
1792                     /* left node */
1793                     classifier->left[k] = fn->data.i;
1794                 }
1795                 else
1796                 {
1797                     fn = cvGetFileNodeByName( fs, node_fn, ICV_HAAR_LEFT_VAL_NAME );
1798                     if( !fn )
1799                     {
1800                         sprintf( buf, "left node or left value must be specified. "
1801                                  "(stage %d, tree %d, node %d)", i, j, k );
1802                         CV_Error( CV_StsError, buf );
1803                     }
1804                     if( !CV_NODE_IS_REAL( fn->tag ) )
1805                     {
1806                         sprintf( buf, "left value must be real number. "
1807                                  "(stage %d, tree %d, node %d)", i, j, k );
1808                         CV_Error( CV_StsError, buf );
1809                     }
1810                     /* left value */
1811                     if( last_idx >= classifier->count + 1 )
1812                     {
1813                         sprintf( buf, "Tree structure is broken: too many values. "
1814                                  "(stage %d, tree %d, node %d)", i, j, k );
1815                         CV_Error( CV_StsError, buf );
1816                     }
1817                     classifier->left[k] = -last_idx;
1818                     classifier->alpha[last_idx++] = (float) fn->data.f;
1819                 }
1820                 fn = cvGetFileNodeByName( fs, node_fn,ICV_HAAR_RIGHT_NODE_NAME);
1821                 if( fn )
1822                 {
1823                     if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i <= k
1824                         || fn->data.i >= tree_fn->data.seq->total )
1825                     {
1826                         sprintf( buf, "right node must be valid node number. "
1827                                  "(stage %d, tree %d, node %d)", i, j, k );
1828                         CV_Error( CV_StsError, buf );
1829                     }
1830                     /* right node */
1831                     classifier->right[k] = fn->data.i;
1832                 }
1833                 else
1834                 {
1835                     fn = cvGetFileNodeByName( fs, node_fn, ICV_HAAR_RIGHT_VAL_NAME );
1836                     if( !fn )
1837                     {
1838                         sprintf( buf, "right node or right value must be specified. "
1839                                  "(stage %d, tree %d, node %d)", i, j, k );
1840                         CV_Error( CV_StsError, buf );
1841                     }
1842                     if( !CV_NODE_IS_REAL( fn->tag ) )
1843                     {
1844                         sprintf( buf, "right value must be real number. "
1845                                  "(stage %d, tree %d, node %d)", i, j, k );
1846                         CV_Error( CV_StsError, buf );
1847                     }
1848                     /* right value */
1849                     if( last_idx >= classifier->count + 1 )
1850                     {
1851                         sprintf( buf, "Tree structure is broken: too many values. "
1852                                  "(stage %d, tree %d, node %d)", i, j, k );
1853                         CV_Error( CV_StsError, buf );
1854                     }
1855                     classifier->right[k] = -last_idx;
1856                     classifier->alpha[last_idx++] = (float) fn->data.f;
1857                 }
1858
1859                 CV_NEXT_SEQ_ELEM( sizeof( *node_fn ), tree_reader );
1860             } /* for each node */
1861             if( last_idx != classifier->count + 1 )
1862             {
1863                 sprintf( buf, "Tree structure is broken: too few values. "
1864                          "(stage %d, tree %d)", i, j );
1865                 CV_Error( CV_StsError, buf );
1866             }
1867
1868             CV_NEXT_SEQ_ELEM( sizeof( *tree_fn ), trees_reader );
1869         } /* for each tree */
1870
1871         fn = cvGetFileNodeByName( fs, stage_fn, ICV_HAAR_STAGE_THRESHOLD_NAME);
1872         if( !fn || !CV_NODE_IS_REAL( fn->tag ) )
1873         {
1874             sprintf( buf, "stage threshold must be real number. (stage %d)", i );
1875             CV_Error( CV_StsError, buf );
1876         }
1877         cascade->stage_classifier[i].threshold = (float) fn->data.f;
1878
1879         parent = i - 1;
1880         next = -1;
1881
1882         fn = cvGetFileNodeByName( fs, stage_fn, ICV_HAAR_PARENT_NAME );
1883         if( !fn || !CV_NODE_IS_INT( fn->tag )
1884             || fn->data.i < -1 || fn->data.i >= cascade->count )
1885         {
1886             sprintf( buf, "parent must be integer number. (stage %d)", i );
1887             CV_Error( CV_StsError, buf );
1888         }
1889         parent = fn->data.i;
1890         fn = cvGetFileNodeByName( fs, stage_fn, ICV_HAAR_NEXT_NAME );
1891         if( !fn || !CV_NODE_IS_INT( fn->tag )
1892             || fn->data.i < -1 || fn->data.i >= cascade->count )
1893         {
1894             sprintf( buf, "next must be integer number. (stage %d)", i );
1895             CV_Error( CV_StsError, buf );
1896         }
1897         next = fn->data.i;
1898
1899         cascade->stage_classifier[i].parent = parent;
1900         cascade->stage_classifier[i].next = next;
1901         cascade->stage_classifier[i].child = -1;
1902
1903         if( parent != -1 && cascade->stage_classifier[parent].child == -1 )
1904         {
1905             cascade->stage_classifier[parent].child = i;
1906         }
1907
1908         CV_NEXT_SEQ_ELEM( sizeof( *stage_fn ), stages_reader );
1909     } /* for each stage */
1910
1911     return cascade;
1912 }
1913
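/* Writes a cascade back to file storage in the same stages/trees/features layout
   that icvReadHaarClassifier() expects; non-positive left/right indices are written
   as leaf values (left_val/right_val). */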
1914 static void
1915 icvWriteHaarClassifier( CvFileStorage* fs, const char* name, const void* struct_ptr,
1916                         CvAttrList attributes )
1917 {
1918     int i, j, k, l;
1919     char buf[256];
1920     const CvHaarClassifierCascade* cascade = (const CvHaarClassifierCascade*) struct_ptr;
1921
1922     /* TODO: parameters check */
1923
1924     cvStartWriteStruct( fs, name, CV_NODE_MAP, CV_TYPE_NAME_HAAR, attributes );
1925
1926     cvStartWriteStruct( fs, ICV_HAAR_SIZE_NAME, CV_NODE_SEQ | CV_NODE_FLOW );
1927     cvWriteInt( fs, NULL, cascade->orig_window_size.width );
1928     cvWriteInt( fs, NULL, cascade->orig_window_size.height );
1929     cvEndWriteStruct( fs ); /* size */
1930
1931     cvStartWriteStruct( fs, ICV_HAAR_STAGES_NAME, CV_NODE_SEQ );
1932     for( i = 0; i < cascade->count; ++i )
1933     {
1934         cvStartWriteStruct( fs, NULL, CV_NODE_MAP );
1935         sprintf( buf, "stage %d", i );
1936         cvWriteComment( fs, buf, 1 );
1937
1938         cvStartWriteStruct( fs, ICV_HAAR_TREES_NAME, CV_NODE_SEQ );
1939
1940         for( j = 0; j < cascade->stage_classifier[i].count; ++j )
1941         {
1942             CvHaarClassifier* tree = &cascade->stage_classifier[i].classifier[j];
1943
1944             cvStartWriteStruct( fs, NULL, CV_NODE_SEQ );
1945             sprintf( buf, "tree %d", j );
1946             cvWriteComment( fs, buf, 1 );
1947
1948             for( k = 0; k < tree->count; ++k )
1949             {
1950                 CvHaarFeature* feature = &tree->haar_feature[k];
1951
1952                 cvStartWriteStruct( fs, NULL, CV_NODE_MAP );
1953                 if( k )
1954                 {
1955                     sprintf( buf, "node %d", k );
1956                 }
1957                 else
1958                 {
1959                     sprintf( buf, "root node" );
1960                 }
1961                 cvWriteComment( fs, buf, 1 );
1962
1963                 cvStartWriteStruct( fs, ICV_HAAR_FEATURE_NAME, CV_NODE_MAP );
1964
1965                 cvStartWriteStruct( fs, ICV_HAAR_RECTS_NAME, CV_NODE_SEQ );
1966                 for( l = 0; l < CV_HAAR_FEATURE_MAX && feature->rect[l].r.width != 0; ++l )
1967                 {
1968                     cvStartWriteStruct( fs, NULL, CV_NODE_SEQ | CV_NODE_FLOW );
1969                     cvWriteInt(  fs, NULL, feature->rect[l].r.x );
1970                     cvWriteInt(  fs, NULL, feature->rect[l].r.y );
1971                     cvWriteInt(  fs, NULL, feature->rect[l].r.width );
1972                     cvWriteInt(  fs, NULL, feature->rect[l].r.height );
1973                     cvWriteReal( fs, NULL, feature->rect[l].weight );
1974                     cvEndWriteStruct( fs ); /* rect */
1975                 }
1976                 cvEndWriteStruct( fs ); /* rects */
1977                 cvWriteInt( fs, ICV_HAAR_TILTED_NAME, feature->tilted );
1978                 cvEndWriteStruct( fs ); /* feature */
1979
1980                 cvWriteReal( fs, ICV_HAAR_THRESHOLD_NAME, tree->threshold[k]);
1981
1982                 if( tree->left[k] > 0 )
1983                 {
1984                     cvWriteInt( fs, ICV_HAAR_LEFT_NODE_NAME, tree->left[k] );
1985                 }
1986                 else
1987                 {
1988                     cvWriteReal( fs, ICV_HAAR_LEFT_VAL_NAME,
1989                         tree->alpha[-tree->left[k]] );
1990                 }
1991
1992                 if( tree->right[k] > 0 )
1993                 {
1994                     cvWriteInt( fs, ICV_HAAR_RIGHT_NODE_NAME, tree->right[k] );
1995                 }
1996                 else
1997                 {
1998                     cvWriteReal( fs, ICV_HAAR_RIGHT_VAL_NAME,
1999                         tree->alpha[-tree->right[k]] );
2000                 }
2001
2002                 cvEndWriteStruct( fs ); /* split */
2003             }
2004
2005             cvEndWriteStruct( fs ); /* tree */
2006         }
2007
2008         cvEndWriteStruct( fs ); /* trees */
2009
2010         cvWriteReal( fs, ICV_HAAR_STAGE_THRESHOLD_NAME, cascade->stage_classifier[i].threshold);
2011         cvWriteInt( fs, ICV_HAAR_PARENT_NAME, cascade->stage_classifier[i].parent );
2012         cvWriteInt( fs, ICV_HAAR_NEXT_NAME, cascade->stage_classifier[i].next );
2013
2014         cvEndWriteStruct( fs ); /* stage */
2015     } /* for each stage */
2016
2017     cvEndWriteStruct( fs ); /* stages */
2018     cvEndWriteStruct( fs ); /* root */
2019 }
2020
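/* Deep-copies a cascade: stage parameters and every classifier's features,
   thresholds, child indices and leaf values. The hidden (precomputed) cascade is
   not copied; it is rebuilt when the clone is next prepared for detection. */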
2021 static void*
2022 icvCloneHaarClassifier( const void* struct_ptr )
2023 {
2024     CvHaarClassifierCascade* cascade = NULL;
2025
2026     int i, j, k, n;
2027     const CvHaarClassifierCascade* cascade_src =
2028         (const CvHaarClassifierCascade*) struct_ptr;
2029
2030     n = cascade_src->count;
2031     cascade = icvCreateHaarClassifierCascade(n);
2032     cascade->orig_window_size = cascade_src->orig_window_size;
2033
2034     for( i = 0; i < n; ++i )
2035     {
2036         cascade->stage_classifier[i].parent = cascade_src->stage_classifier[i].parent;
2037         cascade->stage_classifier[i].next = cascade_src->stage_classifier[i].next;
2038         cascade->stage_classifier[i].child = cascade_src->stage_classifier[i].child;
2039         cascade->stage_classifier[i].threshold = cascade_src->stage_classifier[i].threshold;
2040
2041         cascade->stage_classifier[i].count = 0;
2042         cascade->stage_classifier[i].classifier =
2043             (CvHaarClassifier*) cvAlloc( cascade_src->stage_classifier[i].count
2044                 * sizeof( cascade->stage_classifier[i].classifier[0] ) );
2045
2046         cascade->stage_classifier[i].count = cascade_src->stage_classifier[i].count;
2047
2048         for( j = 0; j < cascade->stage_classifier[i].count; ++j )
2049             cascade->stage_classifier[i].classifier[j].haar_feature = NULL;
2050
2051         for( j = 0; j < cascade->stage_classifier[i].count; ++j )
2052         {
2053             const CvHaarClassifier* classifier_src =
2054                 &cascade_src->stage_classifier[i].classifier[j];
2055             CvHaarClassifier* classifier =
2056                 &cascade->stage_classifier[i].classifier[j];
2057
2058             classifier->count = classifier_src->count;
2059             classifier->haar_feature = (CvHaarFeature*) cvAlloc(
2060                 classifier->count * ( sizeof( *classifier->haar_feature ) +
2061                                       sizeof( *classifier->threshold ) +
2062                                       sizeof( *classifier->left ) +
2063                                       sizeof( *classifier->right ) ) +
2064                 (classifier->count + 1) * sizeof( *classifier->alpha ) );
2065             classifier->threshold = (float*) (classifier->haar_feature+classifier->count);
2066             classifier->left = (int*) (classifier->threshold + classifier->count);
2067             classifier->right = (int*) (classifier->left + classifier->count);
2068             classifier->alpha = (float*) (classifier->right + classifier->count);
2069             for( k = 0; k < classifier->count; ++k )
2070             {
2071                 classifier->haar_feature[k] = classifier_src->haar_feature[k];
2072                 classifier->threshold[k] = classifier_src->threshold[k];
2073                 classifier->left[k] = classifier_src->left[k];
2074                 classifier->right[k] = classifier_src->right[k];
2075                 classifier->alpha[k] = classifier_src->alpha[k];
2076             }
2077             classifier->alpha[classifier->count] =
2078                 classifier_src->alpha[classifier->count];
2079         }
2080     }
2081
2082     return cascade;
2083 }
2084
2085
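/* Register the cascade type with the persistence layer so that cvLoad(), cvSave()
   and cvClone() dispatch to the callbacks above. */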
2086 CvType haar_type( CV_TYPE_NAME_HAAR, icvIsHaarClassifier,
2087                   (CvReleaseFunc)cvReleaseHaarClassifierCascade,
2088                   icvReadHaarClassifier, icvWriteHaarClassifier,
2089                   icvCloneHaarClassifier );
2090
2091 #if 0
2092 namespace cv
2093 {
2094
2095 HaarClassifierCascade::HaarClassifierCascade() {}
2096 HaarClassifierCascade::HaarClassifierCascade(const String& filename)
2097 { load(filename); }
2098     
2099 bool HaarClassifierCascade::load(const String& filename)
2100 {
2101     cascade = Ptr<CvHaarClassifierCascade>((CvHaarClassifierCascade*)cvLoad(filename.c_str(), 0, 0, 0));
2102     return (CvHaarClassifierCascade*)cascade != 0;
2103 }
2104
2105 void HaarClassifierCascade::detectMultiScale( const Mat& image,
2106                        Vector<Rect>& objects, double scaleFactor,
2107                        int minNeighbors, int flags,
2108                        Size minSize )
2109 {
2110     MemStorage storage(cvCreateMemStorage(0));
2111     CvMat _image = image;
2112     CvSeq* _objects = cvHaarDetectObjects( &_image, cascade, storage, scaleFactor,
2113                                            minNeighbors, flags, minSize );
2114     Seq<Rect>(_objects).copyTo(objects);
2115 }
2116
2117 int HaarClassifierCascade::runAt(Point pt, int startStage, int) const
2118 {
2119     return cvRunHaarClassifierCascade(cascade, pt, startStage);
2120 }
2121
2122 void HaarClassifierCascade::setImages( const Mat& sum, const Mat& sqsum,
2123                                        const Mat& tilted, double scale )
2124 {
2125     CvMat _sum = sum, _sqsum = sqsum, _tilted = tilted;
2126     cvSetImagesForHaarClassifierCascade( cascade, &_sum, &_sqsum, &_tilted, scale );
2127 }
2128
2129 }
2130 #endif
2131
2132 /* End of file. */