Added ROC-curve calculation to the cascade detection algorithm
author Alexey Kazakov <no@email>
Fri, 22 Apr 2011 10:03:05 +0000 (10:03 +0000)
committer Alexey Kazakov <no@email>
Fri, 22 Apr 2011 10:03:05 +0000 (10:03 +0000)
modules/highgui/src/grfmt_tiff.cpp
modules/highgui/src/grfmt_tiff.hpp
modules/highgui/src/utils.cpp
modules/highgui/src/utils.hpp
modules/objdetect/include/opencv2/objdetect/objdetect.hpp
modules/objdetect/src/cascadedetect.cpp
modules/objdetect/src/datamatrix.cpp [deleted file]
modules/objdetect/src/followblk.h [deleted file]
modules/objdetect/src/haar.cpp
modules/python/src/cv.cpp
samples/c/JCB.png [deleted file]

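For orientation, here is a minimal usage sketch (not part of this commit) of the new CascadeClassifier::detectMultiScale overload introduced in modules/objdetect below; the cascade file name and image path are placeholders.

    #include <opencv2/objdetect/objdetect.hpp>
    #include <opencv2/highgui/highgui.hpp>
    #include <cstdio>
    #include <vector>

    int main()
    {
        cv::CascadeClassifier cascade;
        if( !cascade.load("haarcascade_frontalface_alt.xml") )   // placeholder cascade file
            return 1;

        cv::Mat gray = cv::imread("scene.png", 0);               // placeholder image, loaded as grayscale
        if( gray.empty() )
            return 1;

        std::vector<cv::Rect> objects;
        std::vector<int> rejectLevels;
        std::vector<double> levelWeights;

        // With outputRejectLevels=true the detector also reports, for each returned window,
        // the stage at which the window was rejected and the stage sum at that stage.
        cascade.detectMultiScale( gray, objects, rejectLevels, levelWeights,
                                  1.1, 3, 0, cv::Size(), cv::Size(), true );

        for( size_t i = 0; i < objects.size(); i++ )
            printf("window %d: rejectLevel=%d levelWeight=%f\n",
                   (int)i, rejectLevels[i], levelWeights[i]);
        return 0;
    }

Built against a tree containing this commit, the per-window rejection level and stage sum are the raw data from which an ROC curve can be computed.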
index b96090d..dfb4db2 100644 (file)
@@ -301,11 +301,6 @@ TiffEncoder::~TiffEncoder()
 {
 }
 
-bool TiffEncoder::isFormatSupported( int depth ) const
-{
-    return depth == CV_8U || depth == CV_16U;
-}
-
 ImageEncoder TiffEncoder::newEncoder() const
 {
     return new TiffEncoder;
@@ -326,13 +321,7 @@ bool  TiffEncoder::write( const Mat& img, const vector<int>& )
 {
     int channels = img.channels();
     int width = img.cols, height = img.rows;
-    int depth = img.depth();
-
-    if (depth != CV_8U && depth != CV_16U)
-        return false;
-
-    int bytesPerChannel = depth == CV_8U ? 1 : 2;
-    int fileStep = width * channels * bytesPerChannel;
+    int fileStep = width*channels;
     WLByteStream strm;
 
     if( m_buf )
@@ -367,7 +356,7 @@ bool  TiffEncoder::write( const Mat& img, const vector<int>& )
     uchar* buffer = _buffer;
     int  stripOffsetsOffset = 0;
     int  stripCountsOffset = 0;
-    int  bitsPerSample = 8 * bytesPerChannel;
+    int  bitsPerSample = 8; // TODO support 16 bit
     int  y = 0;
 
     strm.putBytes( fmtSignTiffII, 4 );
@@ -387,15 +376,9 @@ bool  TiffEncoder::write( const Mat& img, const vector<int>& )
         for( ; y < limit; y++ )
         {
             if( channels == 3 )
-                if (depth == CV_8U)
-                    icvCvt_BGR2RGB_8u_C3R( img.data + img.step*y, 0, buffer, 0, cvSize(width,1) );
-                else
-                    icvCvt_BGR2RGB_16u_C3R( (const ushort*)(img.data + img.step*y), 0, (ushort*)buffer, 0, cvSize(width,1) );
+                icvCvt_BGR2RGB_8u_C3R( img.data + img.step*y, 0, buffer, 0, cvSize(width,1) );
             else if( channels == 4 )
-                if (depth == CV_8U)
-                    icvCvt_BGRA2RGBA_8u_C4R( img.data + img.step*y, 0, buffer, 0, cvSize(width,1) );
-                else
-                    icvCvt_BGRA2RGBA_16u_C4R( (const ushort*)(img.data + img.step*y), 0, (ushort*)buffer, 0, cvSize(width,1) );
+                icvCvt_BGRA2RGBA_8u_C4R( img.data + img.step*y, 0, buffer, 0, cvSize(width,1) );
 
             strm.putBytes( channels > 1 ? buffer : img.data + img.step*y, fileStep );
         }
@@ -433,13 +416,12 @@ bool  TiffEncoder::write( const Mat& img, const vector<int>& )
 
     if( channels > 1 )
     {
-        int bitsPerSamplePos = strm.getPos();
-        strm.putWord(bitsPerSample);
-        strm.putWord(bitsPerSample);
-        strm.putWord(bitsPerSample);
+        bitsPerSample = strm.getPos();
+        strm.putWord(8);
+        strm.putWord(8);
+        strm.putWord(8);
         if( channels == 4 )
-            strm.putWord(bitsPerSample);
-        bitsPerSample = bitsPerSamplePos;
+            strm.putWord(8);
     }
 
     directoryOffset = strm.getPos();
index 037739a..877c229 100644 (file)
@@ -118,8 +118,6 @@ public:
     TiffEncoder();
     virtual ~TiffEncoder();
 
-    bool isFormatSupported( int depth ) const;
-
     bool  write( const Mat& img, const vector<int>& params );
     ImageEncoder newEncoder() const;
 
index e4b2a22..4197b90 100644 (file)
@@ -192,25 +192,6 @@ void icvCvt_BGRA2RGBA_8u_C4R( const uchar* bgra, int bgra_step,
     }
 }
 
-void icvCvt_BGRA2RGBA_16u_C4R( const ushort* bgra, int bgra_step,
-                              ushort* rgba, int rgba_step, CvSize size )
-{
-    int i;
-    for( ; size.height--; )
-    {
-        for( i = 0; i < size.width; i++, bgra += 4, rgba += 4 )
-        {
-            ushort t0 = bgra[0], t1 = bgra[1];
-            ushort t2 = bgra[2], t3 = bgra[3];
-
-            rgba[0] = t2; rgba[1] = t1;
-            rgba[2] = t0; rgba[3] = t3;
-        }
-        bgra += bgra_step/sizeof(bgra[0]) - size.width*4;
-        rgba += rgba_step/sizeof(rgba[0]) - size.width*4;
-    }
-}
-
 
 void icvCvt_BGR2RGB_8u_C3R( const uchar* bgr, int bgr_step,
                             uchar* rgb, int rgb_step, CvSize size )
index 0c942c4..5eba19a 100644 (file)
@@ -88,10 +88,6 @@ void icvCvt_BGRA2RGBA_8u_C4R( const uchar* bgra, int bgra_step,
                               uchar* rgba, int rgba_step, CvSize size );
 #define icvCvt_RGBA2BGRA_8u_C4R icvCvt_BGRA2RGBA_8u_C4R
 
-void icvCvt_BGRA2RGBA_16u_C4R( const ushort* bgra, int bgra_step,
-                              ushort* rgba, int rgba_step, CvSize size );
-#define icvCvt_RGBA2BGRA_16u_C4R icvCvt_BGRA2RGBA_16u_C4R
-
 void icvCvt_BGR5552Gray_8u_C2C1R( const uchar* bgr555, int bgr555_step,
                                   uchar* gray, int gray_step, CvSize size );
 void icvCvt_BGR5652Gray_8u_C2C1R( const uchar* bgr565, int bgr565_step,
index 6f18042..ed60a1b 100644 (file)
@@ -125,9 +125,17 @@ CVAPI(void) cvReleaseHaarClassifierCascade( CvHaarClassifierCascade** cascade );
 #define CV_HAAR_FIND_BIGGEST_OBJECT 4
 #define CV_HAAR_DO_ROUGH_SEARCH     8
 
+CVAPI(CvSeq*) cvHaarDetectObjectsForROC( const CvArr* image,
+                     CvHaarClassifierCascade* cascade, CvMemStorage* storage,
+                     std::vector<int>& rejectLevels, std::vector<double>& levelWeights,
+                     double scale_factor CV_DEFAULT(1.1),
+                     int min_neighbors CV_DEFAULT(3), int flags CV_DEFAULT(0),
+                     CvSize min_size CV_DEFAULT(cvSize(0,0)), CvSize max_size CV_DEFAULT(cvSize(0,0)),
+                     bool outputRejectLevels = false );
+
 CVAPI(CvSeq*) cvHaarDetectObjects( const CvArr* image,
-                     CvHaarClassifierCascade* cascade,
-                     CvMemStorage* storage, double scale_factor CV_DEFAULT(1.1),
+                     CvHaarClassifierCascade* cascade, CvMemStorage* storage, 
+                     double scale_factor CV_DEFAULT(1.1),
                      int min_neighbors CV_DEFAULT(3), int flags CV_DEFAULT(0),
                      CvSize min_size CV_DEFAULT(cvSize(0,0)), CvSize max_size CV_DEFAULT(cvSize(0,0)));
 
@@ -275,7 +283,8 @@ namespace cv
 
 CV_EXPORTS_W void groupRectangles(vector<Rect>& rectList, int groupThreshold, double eps=0.2);
 CV_EXPORTS_W void groupRectangles(vector<Rect>& rectList, CV_OUT vector<int>& weights, int groupThreshold, double eps=0.2);
-CV_EXPORTS void groupRectangles(vector<Rect>& rectList, vector<double>& resultWeights, int groupThreshold = 2, double eps=0.2);
+CV_EXPORTS void groupRectangles(vector<Rect>& rectList, vector<int>& rejectLevels, 
+                                vector<double>& levelWeights, int groupThreshold, double eps=0.2);
 CV_EXPORTS void groupRectangles_meanshift(vector<Rect>& rectList, vector<double>& foundWeights, vector<double>& foundScales, 
                                                                                  double detectThreshold = 0.0, Size winDetSize = Size(64, 128));
 
@@ -352,11 +361,12 @@ public:
     CV_WRAP virtual void detectMultiScale( const Mat& image,
                                    CV_OUT vector<Rect>& objects,
                                    vector<int>& rejectLevels,
+                                   vector<double>& levelWeights,
                                    double scaleFactor=1.1,
                                    int minNeighbors=3, int flags=0,
                                    Size minSize=Size(),
                                    Size maxSize=Size(),
-                                   bool outputRejectLevels = false );
+                                   bool outputRejectLevels=false );
 
 
     bool isOldFormatCascade() const;
@@ -370,7 +380,7 @@ protected:
 
     virtual bool detectSingleScale( const Mat& image, int stripCount, Size processingRectSize,
                                     int stripSize, int yStep, double factor, vector<Rect>& candidates,
-                                    vector<int>& rejectLevels, bool outputRejectLevels = false);
+                                    vector<int>& rejectLevels, vector<double>& levelWeights, bool outputRejectLevels=false);
 
 protected:
     enum { BOOST = 0 };
@@ -380,19 +390,19 @@ protected:
     friend struct CascadeClassifierInvoker;
 
     template<class FEval>
-    friend int predictOrdered( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &featureEvaluator);
+    friend int predictOrdered( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &featureEvaluator, double& weight);
 
     template<class FEval>
-    friend int predictCategorical( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &featureEvaluator);
+    friend int predictCategorical( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &featureEvaluator, double& weight);
 
     template<class FEval>
-    friend int predictOrderedStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &featureEvaluator);
+    friend int predictOrderedStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &featureEvaluator, double& weight);
 
     template<class FEval>
-    friend int predictCategoricalStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &featureEvaluator);
+    friend int predictCategoricalStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &featureEvaluator, double& weight);
 
     bool setImage( Ptr<FeatureEvaluator>&, const Mat& );
-    virtual int runAt( Ptr<FeatureEvaluator>&, Point );
+    virtual int runAt( Ptr<FeatureEvaluator>&, Point, double& weight );
 
     class Data
     {
@@ -436,35 +446,6 @@ protected:
     Data data;
     Ptr<FeatureEvaluator> featureEvaluator;
     Ptr<CvHaarClassifierCascade> oldCascade;
-
-//!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-public:
-    int getNumStages()
-    {
-        int numStages;
-        if( !isOldFormatCascade() )
-        {
-            numStages = data.stages.size();
-        }
-        else
-        {
-            numStages = this->oldCascade->count;
-        }
-        return numStages;
-    }
-    void setNumStages(int stageCount)
-    {
-        if( !isOldFormatCascade() )
-        {
-            if( stageCount )
-                data.stages.resize(stageCount);
-        }
-        else
-            if( stageCount )
-                this->oldCascade->count = stageCount;
-    }
-//!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
 };
 
 //////////////// HOG (Histogram-of-Oriented-Gradients) Descriptor and Object Detector //////////////
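The header above also declares a groupRectangles overload that carries reject levels and level weights through the grouping step. A small self-contained sketch (the rectangle coordinates and the level/weight values are made up for illustration):

    #include <opencv2/objdetect/objdetect.hpp>
    #include <cstdio>
    #include <vector>

    int main()
    {
        // Three overlapping candidate windows plus one isolated window (values are made up).
        std::vector<cv::Rect> rects;
        rects.push_back(cv::Rect(10, 10, 50, 50));
        rects.push_back(cv::Rect(12, 11, 50, 50));
        rects.push_back(cv::Rect(11, 12, 50, 50));
        rects.push_back(cv::Rect(200, 200, 40, 40));

        std::vector<int> rejectLevels;            // e.g. last stage reached per window
        rejectLevels.push_back(20); rejectLevels.push_back(22);
        rejectLevels.push_back(21); rejectLevels.push_back(5);

        std::vector<double> levelWeights;         // stage sum at that level
        levelWeights.push_back(1.5); levelWeights.push_back(2.3);
        levelWeights.push_back(0.7); levelWeights.push_back(-0.4);

        // Groups similar rectangles; per surviving cluster the maximum reject level and the
        // largest weight seen at that level are kept, so the three vectors stay parallel.
        cv::groupRectangles(rects, rejectLevels, levelWeights, 2, 0.2);

        for( size_t i = 0; i < rects.size(); i++ )
            printf("cluster %d: level=%d weight=%f\n",
                   (int)i, rejectLevels[i], levelWeights[i]);
        return 0;
    }

This matches the implementation added to cascadedetect.cpp below, where each rectangle cluster keeps the deepest rejection level among its members and the largest level weight recorded at that depth.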
index 59060dd..e36d670 100644 (file)
@@ -63,7 +63,7 @@ public:
 };    
     
 
-static void groupRectangles(vector<Rect>& rectList, int groupThreshold, double eps, vector<int>* weights, vector<double>* foundWeights)
+static void groupRectangles(vector<Rect>& rectList, int groupThreshold, double eps, vector<int>* weights, vector<double>* levelWeights)
 {
     if( groupThreshold <= 0 || rectList.empty() )
     {
@@ -82,7 +82,8 @@ static void groupRectangles(vector<Rect>& rectList, int groupThreshold, double e
     
     vector<Rect> rrects(nclasses);
     vector<int> rweights(nclasses, 0);
-       vector<double> outWeights(nclasses, 0.0);
+       vector<int> rejectLevels(nclasses, 0);
+    vector<double> rejectWeights(nclasses, DBL_MIN);
     int i, j, nlabels = (int)labels.size();
     for( i = 0; i < nlabels; i++ )
     {
@@ -93,12 +94,18 @@ static void groupRectangles(vector<Rect>& rectList, int groupThreshold, double e
         rrects[cls].height += rectList[i].height;
         rweights[cls]++;
     }
-       if ( foundWeights && !foundWeights->empty() )
+    if ( levelWeights && weights && !weights->empty() && !levelWeights->empty() )
        {
                for( i = 0; i < nlabels; i++ )
                {
                        int cls = labels[i];
-                       outWeights[cls] = outWeights[cls] + (*foundWeights)[i];
+            if( (*weights)[i] > rejectLevels[cls] )
+            {
+                rejectLevels[cls] = (*weights)[i];
+                rejectWeights[cls] = (*levelWeights)[i];
+            }
+            else if( ( (*weights)[i] == rejectLevels[cls] ) && ( (*levelWeights)[i] > rejectWeights[cls] ) )
+                rejectWeights[cls] = (*levelWeights)[i];
                }
        }
     
@@ -115,14 +122,14 @@ static void groupRectangles(vector<Rect>& rectList, int groupThreshold, double e
     rectList.clear();
     if( weights )
         weights->clear();
-       if( foundWeights )
-               foundWeights->clear();
+       if( levelWeights )
+               levelWeights->clear();
     
     for( i = 0; i < nclasses; i++ )
     {
         Rect r1 = rrects[i];
-        int n1 = rweights[i];
-               double w1 = outWeights[i];
+        int n1 = levelWeights ? rejectLevels[i] : rweights[i];
+               double w1 = rejectWeights[i];
         if( n1 <= groupThreshold )
             continue;
         // filter out small face rectangles inside large rectangles
@@ -151,8 +158,8 @@ static void groupRectangles(vector<Rect>& rectList, int groupThreshold, double e
             rectList.push_back(r1);
             if( weights )
                 weights->push_back(n1);
-                       if( foundWeights )
-                               foundWeights->push_back(w1);
+                       if( levelWeights )
+                               levelWeights->push_back(w1);
         }
     }
 }
@@ -211,12 +218,12 @@ void groupRectangles(vector<Rect>& rectList, vector<int>& weights, int groupThre
 {
     groupRectangles(rectList, groupThreshold, eps, &weights, 0);
 }
-
-void groupRectangles(vector<Rect>& rectList, vector<double>& foundWeights, int groupThreshold, double eps)
+// used by the cascade detection algorithm for ROC-curve calculation
+void groupRectangles(vector<Rect>& rectList, vector<int>& rejectLevels, vector<double>& levelWeights, int groupThreshold, double eps)
 {
-    groupRectangles(rectList, groupThreshold, eps, 0, &foundWeights);
+    groupRectangles(rectList, groupThreshold, eps, &rejectLevels, &levelWeights);
 }
-
+// can be used with the HOG detection algorithm only
 void groupRectangles_meanshift(vector<Rect>& rectList, vector<double>& foundWeights, 
                                                           vector<double>& foundScales, double detectThreshold, Size winDetSize)
 {
@@ -706,7 +713,7 @@ bool CascadeClassifier::load(const string& filename)
 }
     
 template<class FEval>
-inline int predictOrdered( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_featureEvaluator )
+inline int predictOrdered( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_featureEvaluator, double& sum )
 {
     int nstages = (int)cascade.data.stages.size();
     int nodeOfs = 0, leafOfs = 0;
@@ -720,7 +727,7 @@ inline int predictOrdered( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_f
     {
         CascadeClassifier::Data::Stage& stage = cascadeStages[si];
         int wi, ntrees = stage.ntrees;
-        double sum = 0;
+        sum = 0;
         
         for( wi = 0; wi < ntrees; wi++ )
         {
@@ -745,7 +752,7 @@ inline int predictOrdered( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_f
 }
 
 template<class FEval>
-inline int predictCategorical( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_featureEvaluator )
+inline int predictCategorical( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_featureEvaluator, double& sum )
 {
     int nstages = (int)cascade.data.stages.size();
     int nodeOfs = 0, leafOfs = 0;
@@ -761,7 +768,7 @@ inline int predictCategorical( CascadeClassifier& cascade, Ptr<FeatureEvaluator>
     {
         CascadeClassifier::Data::Stage& stage = cascadeStages[si];
         int wi, ntrees = stage.ntrees;
-        double sum = 0;
+        sum = 0;
         
         for( wi = 0; wi < ntrees; wi++ )
         {
@@ -786,7 +793,7 @@ inline int predictCategorical( CascadeClassifier& cascade, Ptr<FeatureEvaluator>
 }
 
 template<class FEval>
-inline int predictOrderedStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_featureEvaluator )
+inline int predictOrderedStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_featureEvaluator, double& sum )
 {
     int nodeOfs = 0, leafOfs = 0;
     FEval& featureEvaluator = (FEval&)*_featureEvaluator;
@@ -798,7 +805,7 @@ inline int predictOrderedStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator
     for( int stageIdx = 0; stageIdx < nstages; stageIdx++ )
     {
         CascadeClassifier::Data::Stage& stage = cascadeStages[stageIdx];
-        double sum = 0.0;
+        sum = 0.0;
 
         int ntrees = stage.ntrees;
         for( int i = 0; i < ntrees; i++, nodeOfs++, leafOfs+= 2 )
@@ -816,7 +823,7 @@ inline int predictOrderedStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator
 }
 
 template<class FEval>
-inline int predictCategoricalStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_featureEvaluator )
+inline int predictCategoricalStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_featureEvaluator, double& sum )
 {
     int nstages = (int)cascade.data.stages.size();
     int nodeOfs = 0, leafOfs = 0;
@@ -831,7 +838,7 @@ inline int predictCategoricalStump( CascadeClassifier& cascade, Ptr<FeatureEvalu
     {
         CascadeClassifier::Data::Stage& stage = cascadeStages[si];
         int wi, ntrees = stage.ntrees;
-        double sum = 0;
+        sum = 0;
 
         for( wi = 0; wi < ntrees; wi++ )
         {
@@ -848,7 +855,7 @@ inline int predictCategoricalStump( CascadeClassifier& cascade, Ptr<FeatureEvalu
     return 1;
 }
 
-int CascadeClassifier::runAt( Ptr<FeatureEvaluator>& featureEvaluator, Point pt )
+int CascadeClassifier::runAt( Ptr<FeatureEvaluator>& featureEvaluator, Point pt, double& weight )
 {
     CV_Assert( oldCascade.empty() );
         
@@ -857,11 +864,11 @@ int CascadeClassifier::runAt( Ptr<FeatureEvaluator>& featureEvaluator, Point pt
 
     return !featureEvaluator->setWindow(pt) ? -1 :
                 data.isStumpBased ? ( data.featureType == FeatureEvaluator::HAAR ?
-                    predictOrderedStump<HaarEvaluator>( *this, featureEvaluator ) :
-                    predictCategoricalStump<LBPEvaluator>( *this, featureEvaluator ) ) :
+                    predictOrderedStump<HaarEvaluator>( *this, featureEvaluator, weight ) :
+                    predictCategoricalStump<LBPEvaluator>( *this, featureEvaluator, weight ) ) :
                                  ( data.featureType == FeatureEvaluator::HAAR ?
-                    predictOrdered<HaarEvaluator>( *this, featureEvaluator ) :
-                    predictCategorical<LBPEvaluator>( *this, featureEvaluator ) );
+                    predictOrdered<HaarEvaluator>( *this, featureEvaluator, weight ) :
+                    predictCategorical<LBPEvaluator>( *this, featureEvaluator, weight ) );
 }
     
 bool CascadeClassifier::setImage( Ptr<FeatureEvaluator>& featureEvaluator, const Mat& image )
@@ -872,7 +879,7 @@ bool CascadeClassifier::setImage( Ptr<FeatureEvaluator>& featureEvaluator, const
 struct CascadeClassifierInvoker
 {
     CascadeClassifierInvoker( CascadeClassifier& _cc, Size _sz1, int _stripSize, int _yStep, double _factor, 
-        ConcurrentRectVector& _vec, vector<int>& _levels, bool outputLevels = false  )
+        ConcurrentRectVector& _vec, vector<int>& _levels, vector<double>& _weights, bool outputLevels = false  )
     {
         classifier = &_cc;
         processingRectSize = _sz1;
@@ -881,6 +888,7 @@ struct CascadeClassifierInvoker
         scalingFactor = _factor;
         rectangles = &_vec;
         rejectLevels  = outputLevels ? &_levels : 0;
+        levelWeights  = outputLevels ? &_weights : 0;
     }
     
     void operator()(const BlockedRange& range) const
@@ -894,15 +902,17 @@ struct CascadeClassifierInvoker
         {
             for( int x = 0; x < processingRectSize.width; x += yStep )
             {
-                int result = classifier->runAt(evaluator, Point(x, y));
+                double gypWeight;
+                int result = classifier->runAt(evaluator, Point(x, y), gypWeight);
                 if( rejectLevels )
                 {
                     if( result == 1 )
                         result =  -1*classifier->data.stages.size();
-                    if( classifier->data.stages.size() + result < 6 )
+                    if( classifier->data.stages.size() + result < 4 )
                     {
                         rectangles->push_back(Rect(cvRound(x*scalingFactor), cvRound(y*scalingFactor), winSize.width, winSize.height)); 
                         rejectLevels->push_back(-result);
+                        levelWeights->push_back(gypWeight);
                     }
                 }                    
                 else if( result > 0 )
@@ -920,48 +930,44 @@ struct CascadeClassifierInvoker
     int stripSize, yStep;
     double scalingFactor;
     vector<int> *rejectLevels;
+    vector<double> *levelWeights;
 };
     
 struct getRect { Rect operator ()(const CvAvgComp& e) const { return e.rect; } };
 
 bool CascadeClassifier::detectSingleScale( const Mat& image, int stripCount, Size processingRectSize,
                                            int stripSize, int yStep, double factor, vector<Rect>& candidates,
-                                           vector<int>& levels, bool outputRejectLevels )
+                                           vector<int>& levels, vector<double>& weights, bool outputRejectLevels )
 {
     if( !featureEvaluator->setImage( image, data.origWinSize ) )
         return false;
 
     ConcurrentRectVector concurrentCandidates;
     vector<int> rejectLevels;
+    vector<double> levelWeights;
     if( outputRejectLevels )
     {
         parallel_for(BlockedRange(0, stripCount), CascadeClassifierInvoker( *this, processingRectSize, stripSize, yStep, factor,
-            concurrentCandidates, rejectLevels, true));
+            concurrentCandidates, rejectLevels, levelWeights, true));
         levels.insert( levels.end(), rejectLevels.begin(), rejectLevels.end() );
+        weights.insert( weights.end(), levelWeights.begin(), levelWeights.end() );
     }
     else
     {
          parallel_for(BlockedRange(0, stripCount), CascadeClassifierInvoker( *this, processingRectSize, stripSize, yStep, factor,
-            concurrentCandidates, rejectLevels, false));
+            concurrentCandidates, rejectLevels, levelWeights, false));
     }
     candidates.insert( candidates.end(), concurrentCandidates.begin(), concurrentCandidates.end() );
 
     return true;
 }
 
-//bool CascadeClassifier::detectSingleScale( const Mat& image, int stripCount, Size processingRectSize,
-//                                           int stripSize, int yStep, double factor, vector<Rect>& candidates )
-//{
-//    vector<int> fakeLevels;
-//    return detectSingleScale( image, stripCount, processingRectSize, 
-//        stripSize, yStep, factor, candidates, fakeLevels, false );
-//}
-
 bool CascadeClassifier::isOldFormatCascade() const
 {
     return !oldCascade.empty();
 }
 
+
 int CascadeClassifier::getFeatureType() const
 {
     return featureEvaluator->getFeatureType();
@@ -979,6 +985,7 @@ bool CascadeClassifier::setImage(const Mat& image)
 
 void CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& objects, 
                                           vector<int>& rejectLevels,
+                                          vector<double>& levelWeights,
                                           double scaleFactor, int minNeighbors,
                                           int flags, Size minObjectSize, Size maxObjectSize, 
                                           bool outputRejectLevels )
@@ -994,8 +1001,8 @@ void CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& object
     {
         MemStorage storage(cvCreateMemStorage(0));
         CvMat _image = image;
-        CvSeq* _objects = cvHaarDetectObjects( &_image, oldCascade, storage, scaleFactor,
-                                              minNeighbors, flags, minObjectSize );
+        CvSeq* _objects = cvHaarDetectObjectsForROC( &_image, oldCascade, storage, rejectLevels, levelWeights, scaleFactor,
+                                              minNeighbors, flags, minObjectSize, maxObjectSize, outputRejectLevels );
         vector<CvAvgComp> vecAvgComp;
         Seq<CvAvgComp>(_objects).copyTo(vecAvgComp);
         objects.resize(vecAvgComp.size());
@@ -1051,15 +1058,22 @@ void CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& object
     #endif
 
         if( !detectSingleScale( scaledImage, stripCount, processingRectSize, stripSize, yStep, factor, candidates, 
-            rejectLevels, outputRejectLevels ) )
+            rejectLevels, levelWeights, outputRejectLevels ) )
             break;
     }
 
+    
     objects.resize(candidates.size());
     std::copy(candidates.begin(), candidates.end(), objects.begin());
 
-
-    groupRectangles( objects, rejectLevels, minNeighbors, GROUP_EPS );
+    if( outputRejectLevels )
+    {
+        groupRectangles( objects, rejectLevels, levelWeights, minNeighbors, GROUP_EPS );
+    }
+    else
+    {
+        groupRectangles( objects, minNeighbors, GROUP_EPS );
+    }
 }
 
 void CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& objects,
@@ -1067,7 +1081,8 @@ void CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& object
                                           int flags, Size minObjectSize, Size maxObjectSize)
 {
     vector<int> fakeLevels;
-    detectMultiScale( image, objects, fakeLevels, scaleFactor, 
+    vector<double> fakeWeights;
+    detectMultiScale( image, objects, fakeLevels, fakeWeights, scaleFactor, 
         minNeighbors, flags, minObjectSize, maxObjectSize, false );
 }    
 
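The (rejectLevel, levelWeight) pairs produced above are the raw material for an ROC curve. A minimal sketch follows, under the assumption that a deeper rejection level and a larger stage sum mean higher confidence; the scoring rule and the threshold values are illustrative, not defined by this commit. Counting true and false positives against ground truth at each threshold would then give one ROC point per threshold.

    #include <cstdio>
    #include <vector>

    struct Candidate { int rejectLevel; double levelWeight; };

    // Assumed scoring rule (not part of the commit): rejection depth dominates,
    // the stage sum at that depth breaks ties.
    static double confidence(const Candidate& c)
    {
        return c.rejectLevel * 1000.0 + c.levelWeight;
    }

    int main()
    {
        std::vector<Candidate> cands;                  // would be filled from the detector output
        Candidate a = { 22,  2.3 }; cands.push_back(a);
        Candidate b = { 20,  1.1 }; cands.push_back(b);
        Candidate d = {  5, -0.4 }; cands.push_back(d);

        const double thresholds[] = { 4000.0, 19000.0, 21000.0 };   // arbitrary operating points
        for( int t = 0; t < 3; t++ )
        {
            int accepted = 0;
            for( size_t i = 0; i < cands.size(); i++ )
                if( confidence(cands[i]) >= thresholds[t] )
                    accepted++;
            printf("threshold %.0f: accepted %d of %d candidates\n",
                   thresholds[t], accepted, (int)cands.size());
        }
        return 0;
    }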
diff --git a/modules/objdetect/src/datamatrix.cpp b/modules/objdetect/src/datamatrix.cpp
deleted file mode 100644 (file)
index 9bd803b..0000000
+++ /dev/null
@@ -1,434 +0,0 @@
-#if CV_SSE2
-#include <xmmintrin.h>
-#endif
-
-#include "precomp.hpp"
-#include <deque>
-using namespace std;
-
-#undef NDEBUG
-#include <assert.h>
-
-class Sampler {
-public:
-  CvMat *im;
-  CvPoint o;
-  CvPoint c, cc;
-  CvMat *perim;
-  CvPoint fcoord(float fx, float fy);
-  CvPoint coord(int ix, int iy);
-  Sampler() {}
-  Sampler(CvMat *_im, CvPoint _o, CvPoint _c, CvPoint _cc);
-  uint8 getpixel(int ix, int iy);
-  int isinside(int x, int y);
-  int overlap(Sampler &other);
-  int hasbars();
-  void timing();
-  CvMat *extract();
-};
-
-class code {    // used in this file only
-public:
-  char msg[4];
-  CvMat *original;
-  Sampler sa;
-};
-
-#include "followblk.h"
-
-#define dethresh 0.92f
-#define eincO    (2 * dethresh)         // e increment orthogonal
-#define eincD    (1.414f * dethresh)     // e increment diagonal
-
-static const float eincs[] = {
-  eincO, eincD,
-  eincO, eincD,
-  eincO, eincD,
-  eincO, eincD,
-  999 };
-
-#define Ki(x) _mm_set_epi32((x),(x),(x),(x))
-#define Kf(x) _mm_set_ps((x),(x),(x),(x))
-
-static const int CV_DECL_ALIGNED(16) absmask[] = {0x7fffffff, 0x7fffffff, 0x7fffffff, 0x7fffffff};
-#define _mm_abs_ps(x) _mm_and_ps((x), *(const __m128*)absmask)
-
-static void writexy(CvMat *m, int r, CvPoint p)
-{
-  int *pdst = (int*)cvPtr2D(m, r, 0);
-  pdst[0] = p.x;
-  pdst[1] = p.y;
-}
-
-Sampler::Sampler(CvMat *_im, CvPoint _o, CvPoint _c, CvPoint _cc)
-{
-  im = _im;
-  o = _o;
-  c = _c;
-  cc = _cc;
-  perim = cvCreateMat(4, 1, CV_32SC2);
-  writexy(perim, 0, fcoord(-.2f,-.2f));
-  writexy(perim, 1, fcoord(-.2f,1.2f));
-  writexy(perim, 2, fcoord(1.2f,1.2f));
-  writexy(perim, 3, fcoord(1.2f,-.2f));
-  // printf("Sampler %d,%d %d,%d %d,%d\n", o.x, o.y, c.x, c.y, cc.x, cc.y);
-}
-
-CvPoint Sampler::fcoord(float fx, float fy)
-{
-  CvPoint r;
-  r.x = (int)(o.x + fx * (cc.x - o.x) + fy * (c.x - o.x));
-  r.y = (int)(o.y + fx * (cc.y - o.y) + fy * (c.y - o.y));
-  return r;
-}
-
-CvPoint Sampler::coord(int ix, int iy)
-{
-  return fcoord(0.05f + 0.1f * ix, 0.05f + 0.1f * iy);
-}
-
-uint8 Sampler::getpixel(int ix, int iy)
-{
-  CvPoint pt = coord(ix, iy);
-  // printf("%d,%d\n", pt.x, pt.y);
-  return *cvPtr2D(im, pt.y, pt.x);
-}
-
-int Sampler::isinside(int x, int y)
-{
-  CvPoint2D32f fp;
-  fp.x = (float)x;
-  fp.y = (float)y;
-  return cvPointPolygonTest(perim, fp, 0) < 0;
-}
-
-int Sampler::overlap(Sampler &other)
-{
-  for (int i = 0; i < 4; i++) {
-    CvScalar p;
-    p = cvGet2D(other.perim, i, 0);
-    if (isinside((int)p.val[0], (int)p.val[1]))
-      return 1;
-    p = cvGet2D(perim, i, 0);
-    if (other.isinside((int)p.val[0], (int)p.val[1]))
-      return 1;
-  }
-  return 0;
-}
-
-int Sampler::hasbars()
-{
-  return getpixel(9, 1) > getpixel(9, 0);
-}
-
-void Sampler::timing()
-{
-  uint8 dark = getpixel(9, 0);
-  for (int i = 1; i < 3; i += 2) {
-    uint8 light = getpixel(9, i);
-    // if (light <= dark)
-    //  goto endo;
-    dark = getpixel(9, i + 1);
-    // if (up <= down)
-    //  goto endo;
-  }
-}
-
-CvMat *Sampler::extract()
-{
-  // return a 10x10 CvMat for the current contents, 0 is black, 255 is white
-  // Sampler has (0,0) at bottom left, so invert Y
-  CvMat *r = cvCreateMat(10, 10, CV_8UC1);
-  for (int x = 0; x < 10; x++)
-    for (int y = 0; y < 10; y++)
-      *cvPtr2D(r, 9 - y, x) = (getpixel(x, y) < 128) ? 0 : 255;
-  return r;
-}
-
-static void apron(CvMat *v)
-{
-  int r = v->rows;
-  int c = v->cols;
-  memset(cvPtr2D(v, 0, 0), 0x22, c);
-  memset(cvPtr2D(v, 1, 0), 0x22, c);
-  memset(cvPtr2D(v, r - 2, 0), 0x22, c);
-  memset(cvPtr2D(v, r - 1, 0), 0x22, c);
-  int y;
-  for (y = 2; y < r - 2; y++) {
-    uchar *lp = cvPtr2D(v, y, 0);
-    lp[0] = 0x22;
-    lp[1] = 0x22;
-    lp[c-2] = 0x22;
-    lp[c-1] = 0x22;
-  }
-}
-
-static void cfollow(CvMat *src, CvMat *dst)
-{
-  int sx, sy;
-  uint8 *vpd = cvPtr2D(src, 0, 0);
-  for (sy = 0; sy < src->rows; sy++) {
-    short *wr = (short*)cvPtr2D(dst, sy, 0);
-    for (sx = 0; sx < src->cols; sx++) {
-      int x = sx;
-      int y = sy;
-      float e = 0;
-      int ontrack = true;
-      int dir;
-
-      while (ontrack) {
-        dir = vpd[y * src->step + x];
-        int xd = ((dir & 0xf) - 2);
-        int yd = ((dir >> 4) - 2);
-        e += (dir == 0x22) ? 999 : ((dir & 1) ? eincD : eincO);
-        x += xd;
-        y += yd;
-        if (e > 10.) {
-          float d = (float)(((x - sx) * (x - sx)) + ((y - sy) * (y - sy)));
-          ontrack = d > (e * e);
-        }
-      }
-      if ((24 <= e) && (e < 999)) {
-        // printf("sx=%d, sy=%d, x=%d, y=%d\n", sx, sy, x, y);
-        *wr++ = (short)(x - sx);
-        *wr++ = (short)(y - sy);
-      } else {
-        *wr++ = 0;
-        *wr++ = 0;
-      }
-    }
-  }
-}
-
-static uint8 gf256mul(uint8 a, uint8 b)
-{
-    return Alog[(Log[a] + Log[b]) % 255];
-}
-
-static int decode(Sampler &sa, code &cc)
-{
-  uint8 binary[8] = {0,0,0,0,0,0,0,0};
-  uint8 b = 0;
-
-  for (int i = 0; i < 64; i++) {
-    b = (b << 1) + (sa.getpixel(pickup[i].x, pickup[i].y) <= 128);
-    if ((i & 7) == 7) {
-      binary[i >> 3] = b;
-      b = 0;
-    }
-  }
-
-  // Compute the 5 RS codewords for the 3 datawords
-
-  uint8 c[5] = {0,0,0,0,0};
-  {
-    int i, j;
-    uint8 a[5] = {228, 48, 15, 111, 62};
-    int k = 5;
-    for (i = 0; i < 3; i++) {
-      uint8 t = binary[i] ^ c[4];
-      for (j = k - 1; j != -1; j--) {
-        if (t == 0)
-            c[j] = 0;
-        else
-            c[j] = gf256mul(t, a[j]);
-        if (j > 0)
-            c[j] = c[j - 1] ^ c[j];
-      }
-    }
-  }
-
-  if ((c[4] == binary[3]) &&
-      (c[3] == binary[4]) &&
-      (c[2] == binary[5]) &&
-      (c[1] == binary[6]) &&
-      (c[0] == binary[7])) {
-    uint8 x = 0xff & (binary[0] - 1);
-    uint8 y = 0xff & (binary[1] - 1);
-    uint8 z = 0xff & (binary[2] - 1);
-    cc.msg[0] = x;
-    cc.msg[1] = y;
-    cc.msg[2] = z;
-    cc.msg[3] = 0;
-    cc.sa = sa;
-    cc.original = sa.extract();
-    return 1;
-  } else {
-    return 0;
-  }
-}
-
-static deque<CvPoint> trailto(CvMat *v, int x, int y, CvMat *terminal)
-{
-  CvPoint np;
-  /* Return the last 10th of the trail of points following v from (x,y)
-   * to terminal
-   */
-
-  int ex = x + ((short*)cvPtr2D(terminal, y, x))[0];
-  int ey = y + ((short*)cvPtr2D(terminal, y, x))[1];
-  deque<CvPoint> r;
-  while ((x != ex) || (y != ey)) {
-    np.x = x;
-    np.y = y;
-    r.push_back(np);
-    int dir = *cvPtr2D(v, y, x);
-    int xd = ((dir & 0xf) - 2);
-    int yd = ((dir >> 4) - 2);
-    x += xd;
-    y += yd;
-  }
-
-  int l = r.size() * 9 / 10;
-  while (l--)
-    r.pop_front();
-  return r;
-}
-
-deque <DataMatrixCode> cvFindDataMatrix(CvMat *im)
-{
-#if CV_SSE2
-  int r = im->rows;
-  int c = im->cols;
-
-#define SAMESIZE(nm, ty) CvMat *nm = cvCreateMat(r, c, ty);
-
-  SAMESIZE(thresh, CV_8UC1)
-  SAMESIZE(vecpic, CV_8UC1)
-  SAMESIZE(vc, CV_8UC1)
-  SAMESIZE(vcc, CV_8UC1)
-  SAMESIZE(cxy, CV_16SC2)
-  SAMESIZE(ccxy, CV_16SC2)
-
-  cvAdaptiveThreshold(im, thresh, 255.0, CV_ADAPTIVE_THRESH_MEAN_C, CV_THRESH_BINARY, 13);
-  {
-    int x, y;
-    int sstride = thresh->step;
-    int sw = thresh->cols; // source width
-    for (y = 2; y < thresh->rows - 2; y++) {
-      uint8 *ps = cvPtr2D(thresh, y, 0);
-      uint8 *pd = cvPtr2D(vecpic, y, 0);
-      uint8 *pvc = cvPtr2D(vc, y, 0);
-      uint8 *pvcc = cvPtr2D(vcc, y, 0);
-      for (x = 0; x < sw; x++) {
-        uint8 v =
-            (0x01 & ps[-2 * sstride]) |
-            (0x02 & ps[-sstride + 1]) |
-            (0x04 & ps[2]) |
-            (0x08 & ps[sstride + 1]) |
-            (0x10 & ps[2 * sstride]) |
-            (0x20 & ps[sstride - 1]) |
-            (0x40 & ps[-2]) |
-            (0x80 & ps[-sstride -1]);
-        *pd++ = v;
-        *pvc++ = cblk[v];
-        *pvcc++ = ccblk[v];
-        ps++;
-      }
-    }
-    apron(vc);
-    apron(vcc);
-  }
-
-  cfollow(vc, cxy);
-  cfollow(vcc, ccxy);
-
-  deque <CvPoint> candidates;
-  {
-    int x, y;
-    int r = cxy->rows;
-    int c = cxy->cols;
-    for (y = 0; y < r; y++) {
-      const short *cd = (const short*)cvPtr2D(cxy, y, 0);
-      const short *ccd = (const short*)cvPtr2D(ccxy, y, 0);
-      for (x = 0; x < c; x += 4, cd += 8, ccd += 8) {
-        __m128i v = _mm_loadu_si128((const __m128i*)cd);
-        __m128 cyxyxA = _mm_cvtepi32_ps(_mm_srai_epi32(_mm_unpacklo_epi16(v, v), 16));
-        __m128 cyxyxB = _mm_cvtepi32_ps(_mm_srai_epi32(_mm_unpackhi_epi16(v, v), 16));
-        __m128 cx = _mm_shuffle_ps(cyxyxA, cyxyxB, _MM_SHUFFLE(0, 2, 0, 2));
-        __m128 cy = _mm_shuffle_ps(cyxyxA, cyxyxB, _MM_SHUFFLE(1, 3, 1, 3));
-        __m128 cmag = _mm_sqrt_ps(_mm_add_ps(_mm_mul_ps(cx, cx), _mm_mul_ps(cy, cy)));
-        __m128 crmag = _mm_rcp_ps(cmag);
-        __m128 ncx = _mm_mul_ps(cx, crmag);
-        __m128 ncy = _mm_mul_ps(cy, crmag);
-
-        v = _mm_loadu_si128((const __m128i*)ccd);
-        __m128 ccyxyxA = _mm_cvtepi32_ps(_mm_srai_epi32(_mm_unpacklo_epi16(v, v), 16));
-        __m128 ccyxyxB = _mm_cvtepi32_ps(_mm_srai_epi32(_mm_unpackhi_epi16(v, v), 16));
-        __m128 ccx = _mm_shuffle_ps(ccyxyxA, ccyxyxB, _MM_SHUFFLE(0, 2, 0, 2));
-        __m128 ccy = _mm_shuffle_ps(ccyxyxA, ccyxyxB, _MM_SHUFFLE(1, 3, 1, 3));
-        __m128 ccmag = _mm_sqrt_ps(_mm_add_ps(_mm_mul_ps(ccx, ccx), _mm_mul_ps(ccy, ccy)));
-        __m128 ccrmag = _mm_rcp_ps(ccmag);
-        __m128 nccx = _mm_mul_ps(ccx, ccrmag);
-        __m128 nccy = _mm_mul_ps(ccy, ccrmag);
-
-        __m128 dot = _mm_mul_ps(_mm_mul_ps(ncx, nccx), _mm_mul_ps(ncy, nccy));
-        // iscand = (cmag > 30) & (ccmag > 30) & (numpy.minimum(cmag, ccmag) * 1.1 > numpy.maximum(cmag, ccmag)) & (abs(dot) < 0.25)
-        __m128 iscand = _mm_and_ps(_mm_cmpgt_ps(cmag, Kf(30)), _mm_cmpgt_ps(ccmag, Kf(30)));
-
-        iscand = _mm_and_ps(iscand, _mm_cmpgt_ps(_mm_mul_ps(_mm_min_ps(cmag, ccmag), Kf(1.1f)), _mm_max_ps(cmag, ccmag)));
-           iscand = _mm_and_ps(iscand, _mm_cmplt_ps(_mm_abs_ps(dot),  Kf(0.25f)));
-
-        unsigned int CV_DECL_ALIGNED(16) result[4];
-        _mm_store_ps((float*)result, iscand);
-        int ix;
-        CvPoint np;
-        for (ix = 0; ix < 4; ix++) {
-          if (result[ix]) {
-            np.x = x + ix;
-            np.y = y;
-            candidates.push_back(np);
-          }
-        }
-      }
-    }
-  }
-
-  deque <code> codes;
-  size_t i, j, k;
-  while (!candidates.empty()) {
-    CvPoint o = candidates.front();
-    candidates.pop_front();
-    deque<CvPoint> ptc = trailto(vc, o.x, o.y, cxy);
-    deque<CvPoint> ptcc = trailto(vcc, o.x, o.y, ccxy);
-    for (j = 0; j < ptc.size(); j++) {
-      for (k = 0; k < ptcc.size(); k++) {
-        code cc;
-        Sampler sa(im, o, ptc[j], ptcc[k]);
-        for (i = 0; i < codes.size(); i++) {
-          if (sa.overlap(codes[i].sa))
-            goto endo;
-        }
-        if (codes.size() > 0) {
-          printf("searching for more\n");
-        }
-        if (decode(sa, cc)) {
-          codes.push_back(cc);
-          goto endo;
-        }
-      }
-    }
-endo: ; // end search for this o
-  }
-
-  cvFree(&thresh);
-  cvFree(&vecpic);
-  cvFree(&vc);
-  cvFree(&vcc);
-  cvFree(&cxy);
-  cvFree(&ccxy);
-
-  deque <DataMatrixCode> rc;
-  for (i = 0; i < codes.size(); i++) {
-    DataMatrixCode cc;
-    strcpy(cc.msg, codes[i].msg);
-    cc.original = codes[i].original;
-    cc.corners = codes[i].sa.perim;
-    rc.push_back(cc);
-  }
-  return rc;
-#else
-  deque <DataMatrixCode> rc;
-  return rc;
-#endif
-}
diff --git a/modules/objdetect/src/followblk.h b/modules/objdetect/src/followblk.h
deleted file mode 100644 (file)
index 5277a7e..0000000
+++ /dev/null
@@ -1,5 +0,0 @@
-unsigned char cblk[256] = { 34,19,36,36,51,19,51,51,66,19,36,36,66,19,66,66,49,19,36,36,51,19,51,51,49,19,36,36,49,19,49,49,32,19,36,36,51,19,51,51,66,19,36,36,66,19,66,66,32,19,36,36,51,19,51,51,32,19,36,36,32,19,32,32,17,19,36,36,51,19,51,51,66,19,36,36,66,19,66,66,49,19,36,36,51,19,51,51,49,19,36,36,49,19,49,49,17,19,36,36,51,19,51,51,66,19,36,36,66,19,66,66,17,19,36,36,51,19,51,51,17,19,36,36,17,19,17,17,2,19,2,36,2,19,2,51,2,19,2,36,2,19,2,66,2,19,2,36,2,19,2,51,2,19,2,36,2,19,2,49,2,19,2,36,2,19,2,51,2,19,2,36,2,19,2,66,2,19,2,36,2,19,2,51,2,19,2,36,2,19,2,32,2,19,2,36,2,19,2,51,2,19,2,36,2,19,2,66,2,19,2,36,2,19,2,51,2,19,2,36,2,19,2,49,2,19,2,36,2,19,2,51,2,19,2,36,2,19,2,66,2,19,2,36,2,19,2,51,2,19,2,36,2,19,2,34 };
-unsigned char ccblk[256] = { 34,17,2,17,19,19,2,17,36,36,2,36,19,19,2,17,51,51,2,51,19,19,2,51,36,36,2,36,19,19,2,17,66,66,2,66,19,19,2,66,36,36,2,36,19,19,2,66,51,51,2,51,19,19,2,51,36,36,2,36,19,19,2,17,49,49,2,49,19,19,2,49,36,36,2,36,19,19,2,49,51,51,2,51,19,19,2,51,36,36,2,36,19,19,2,49,66,66,2,66,19,19,2,66,36,36,2,36,19,19,2,66,51,51,2,51,19,19,2,51,36,36,2,36,19,19,2,17,32,32,2,32,19,19,2,32,36,36,2,36,19,19,2,32,51,51,2,51,19,19,2,51,36,36,2,36,19,19,2,32,66,66,2,66,19,19,2,66,36,36,2,36,19,19,2,66,51,51,2,51,19,19,2,51,36,36,2,36,19,19,2,32,49,49,2,49,19,19,2,49,36,36,2,36,19,19,2,49,51,51,2,51,19,19,2,51,36,36,2,36,19,19,2,49,66,66,2,66,19,19,2,66,36,36,2,36,19,19,2,66,51,51,2,51,19,19,2,51,36,36,2,36,19,19,2,34 };
-static const CvPoint pickup[64] = { {7,6},{8,6},{7,5},{8,5},{1,5},{7,4},{8,4},{1,4},{1,8},{2,8},{1,7},{2,7},{3,7},{1,6},{2,6},{3,6},{3,2},{4,2},{3,1},{4,1},{5,1},{3,8},{4,8},{5,8},{6,1},{7,1},{6,8},{7,8},{8,8},{6,7},{7,7},{8,7},{4,7},{5,7},{4,6},{5,6},{6,6},{4,5},{5,5},{6,5},{2,5},{3,5},{2,4},{3,4},{4,4},{2,3},{3,3},{4,3},{8,3},{1,3},{8,2},{1,2},{2,2},{8,1},{1,1},{2,1},{5,4},{6,4},{5,3},{6,3},{7,3},{5,2},{6,2},{7,2} };
-static const uint8 Alog[256] = { 1,2,4,8,16,32,64,128,45,90,180,69,138,57,114,228,229,231,227,235,251,219,155,27,54,108,216,157,23,46,92,184,93,186,89,178,73,146,9,18,36,72,144,13,26,52,104,208,141,55,110,220,149,7,14,28,56,112,224,237,247,195,171,123,246,193,175,115,230,225,239,243,203,187,91,182,65,130,41,82,164,101,202,185,95,190,81,162,105,210,137,63,126,252,213,135,35,70,140,53,106,212,133,39,78,156,21,42,84,168,125,250,217,159,19,38,76,152,29,58,116,232,253,215,131,43,86,172,117,234,249,223,147,11,22,44,88,176,77,154,25,50,100,200,189,87,174,113,226,233,255,211,139,59,118,236,245,199,163,107,214,129,47,94,188,85,170,121,242,201,191,83,166,97,194,169,127,254,209,143,51,102,204,181,71,142,49,98,196,165,103,206,177,79,158,17,34,68,136,61,122,244,197,167,99,198,161,111,222,145,15,30,60,120,240,205,183,67,134,33,66,132,37,74,148,5,10,20,40,80,160,109,218,153,31,62,124,248,221,151,3,6,12,24,48,96,192,173,119,238,241,207,179,75,150,1 };
-static const uint8 Log[256] = { -255,255,1,240,2,225,241,53,3,38,226,133,242,43,54,210,4,195,39,114,227,106,134,28,243,140,44,23,55,118,211,234,5,219,196,96,40,222,115,103,228,78,107,125,135,8,29,162,244,186,141,180,45,99,24,49,56,13,119,153,212,199,235,91,6,76,220,217,197,11,97,184,41,36,223,253,116,138,104,193,229,86,79,171,108,165,126,145,136,34,9,74,30,32,163,84,245,173,187,204,142,81,181,190,46,88,100,159,25,231,50,207,57,147,14,67,120,128,154,248,213,167,200,63,236,110,92,176,7,161,77,124,221,102,218,95,198,90,12,152,98,48,185,179,42,209,37,132,224,52,254,239,117,233,139,22,105,27,194,113,230,206,87,158,80,189,172,203,109,175,166,62,127,247,146,66,137,192,35,252,10,183,75,216,31,83,33,73,164,144,85,170,246,65,174,61,188,202,205,157,143,169,82,72,182,215,191,251,47,178,89,151,101,94,160,123,26,112,232,21,51,238,208,131,58,69,148,18,15,16,68,17,121,149,129,19,155,59,249,70,214,250,168,71,201,156,64,60,237,130,111,20,93,122,177,150 };
index f52dd34..d0ec2cf 100644 (file)
@@ -654,8 +654,8 @@ double icvEvalHidHaarClassifier( CvHidHaarClassifier* classifier,
 
 
 CV_IMPL int
-cvRunHaarClassifierCascade( const CvHaarClassifierCascade* _cascade,
-                            CvPoint pt, int start_stage )
+cvRunHaarClassifierCascadeSum( const CvHaarClassifierCascade* _cascade,
+                               CvPoint pt, double& stage_sum, int start_stage )
 {
     int result = -1;
 
@@ -698,7 +698,7 @@ cvRunHaarClassifierCascade( const CvHaarClassifierCascade* _cascade,
 
         while( ptr )
         {
-            double stage_sum = 0;
+            stage_sum = 0.0;
 
             for( j = 0; j < ptr->count; j++ )
             {
@@ -724,7 +724,7 @@ cvRunHaarClassifierCascade( const CvHaarClassifierCascade* _cascade,
         for( i = start_stage; i < cascade->count; i++ )
         {
 #ifndef CV_HAAR_USE_SSE
-            double stage_sum = 0;
+            stage_sum = 0.0;
 #else
             __m128d stage_sum = _mm_setzero_pd();
 #endif
@@ -796,7 +796,7 @@ cvRunHaarClassifierCascade( const CvHaarClassifierCascade* _cascade,
     {
         for( i = start_stage; i < cascade->count; i++ )
         {
-            double stage_sum = 0;
+            stage_sum = 0.0;
 
             for( j = 0; j < cascade->stage_classifier[i].count; j++ )
             {
@@ -809,10 +809,16 @@ cvRunHaarClassifierCascade( const CvHaarClassifierCascade* _cascade,
                 return -i;
         }
     }
-
     return 1;
 }
 
+CV_IMPL int
+cvRunHaarClassifierCascade( const CvHaarClassifierCascade* _cascade,
+                            CvPoint pt, int start_stage )
+{
+    double stage_sum;
+    return cvRunHaarClassifierCascadeSum(_cascade, pt, stage_sum, start_stage);
+}
 
 namespace cv
 {
@@ -822,7 +828,9 @@ struct HaarDetectObjects_ScaleImage_Invoker
     HaarDetectObjects_ScaleImage_Invoker( const CvHaarClassifierCascade* _cascade,
                                           int _stripSize, double _factor,
                                           const Mat& _sum1, const Mat& _sqsum1, Mat* _norm1,
-                                          Mat* _mask1, Rect _equRect, ConcurrentRectVector& _vec )
+                                          Mat* _mask1, Rect _equRect, ConcurrentRectVector& _vec, 
+                                          std::vector<int>& _levels, std::vector<double>& _weights,
+                                          bool _outputLevels  )
     {
         cascade = _cascade;
         stripSize = _stripSize;
@@ -833,6 +841,8 @@ struct HaarDetectObjects_ScaleImage_Invoker
         mask1 = _mask1;
         equRect = _equRect;
         vec = &_vec;
+        rejectLevels = _outputLevels ? &_levels : 0;
+        levelWeights = _outputLevels ? &_weights : 0;
     }
     
     void operator()( const BlockedRange& range ) const
@@ -902,9 +912,26 @@ struct HaarDetectObjects_ScaleImage_Invoker
             for( y = y1; y < y2; y += ystep )
                 for( x = 0; x < ssz.width; x += ystep )
                 {
-                    if( cvRunHaarClassifierCascade( cascade, cvPoint(x,y), 0 ) > 0 )
-                        vec->push_back(Rect(cvRound(x*factor), cvRound(y*factor),
-                                            winSize.width, winSize.height)); 
+                    double gypWeight;
+                    int result = cvRunHaarClassifierCascadeSum( cascade, cvPoint(x,y), gypWeight, 0 );
+                    if( rejectLevels )
+                    {
+                        if( result == 1 )
+                            result = -1*cascade->count;
+                        if( cascade->count + result < 4 )
+                        {
+                            vec->push_back(Rect(cvRound(x*factor), cvRound(y*factor),
+                                           winSize.width, winSize.height));
+                            rejectLevels->push_back(-result);
+                            levelWeights->push_back(gypWeight);
+                        }
+                    }
+                    else
+                    {
+                        if( result > 0 )
+                            vec->push_back(Rect(cvRound(x*factor), cvRound(y*factor),
+                                           winSize.width, winSize.height)); 
+                    }
                 }
     }
     
@@ -914,6 +941,8 @@ struct HaarDetectObjects_ScaleImage_Invoker
     Mat sum1, sqsum1, *norm1, *mask1;
     Rect equRect;
     ConcurrentRectVector* vec;
+    std::vector<int>* rejectLevels;
+    std::vector<double>* levelWeights;
 };
     
 
@@ -983,10 +1012,11 @@ struct HaarDetectObjects_ScaleCascade_Invoker
     
 
 CV_IMPL CvSeq*
-cvHaarDetectObjects( const CvArr* _img,
-                     CvHaarClassifierCascade* cascade,
-                     CvMemStorage* storage, double scaleFactor,
-                     int minNeighbors, int flags, CvSize minSize, CvSize maxSize )
+cvHaarDetectObjectsForROC( const CvArr* _img, 
+                     CvHaarClassifierCascade* cascade, CvMemStorage* storage,
+                     std::vector<int>& rejectLevels, std::vector<double>& levelWeights,
+                     double scaleFactor, int minNeighbors, int flags, 
+                     CvSize minSize, CvSize maxSize, bool outputRejectLevels )
 {
     const double GROUP_EPS = 0.2;
     CvMat stub, *img = (CvMat*)_img;
@@ -1119,7 +1149,7 @@ cvHaarDetectObjects( const CvArr* _img,
                          cv::HaarDetectObjects_ScaleImage_Invoker(cascade,
                                 (((sz1.height + stripCount - 1)/stripCount + ystep-1)/ystep)*ystep,
                                 factor, cv::Mat(&sum1), cv::Mat(&sqsum1), &_norm1, &_mask1,
-                                cv::Rect(equRect), allCandidates));
+                                cv::Rect(equRect), allCandidates, rejectLevels, levelWeights, outputRejectLevels));
         }
     }
     else
@@ -1250,7 +1280,16 @@ cvHaarDetectObjects( const CvArr* _img,
         std::copy(allCandidates.begin(), allCandidates.end(), rectList.begin());
     
     if( minNeighbors != 0 || findBiggestObject )
-        groupRectangles(rectList, rweights, std::max(minNeighbors, 1), GROUP_EPS);
+    {
+        if( outputRejectLevels )
+        {
+            groupRectangles(rectList, rejectLevels, levelWeights, minNeighbors, GROUP_EPS );
+        }
+        else
+        {
+            groupRectangles(rectList, rweights, std::max(minNeighbors, 1), GROUP_EPS);
+        }
+    }
     else
         rweights.resize(rectList.size(),0);
         
@@ -1275,7 +1314,7 @@ cvHaarDetectObjects( const CvArr* _img,
         {
             CvAvgComp c;
             c.rect = rectList[i];
-            c.neighbors = rweights[i];
+            c.neighbors = !rweights.empty() ? rweights[i] : 0;
             cvSeqPush( result_seq, &c );
         }
     }
@@ -1283,6 +1322,19 @@ cvHaarDetectObjects( const CvArr* _img,
     return result_seq;
 }
 
+CV_IMPL CvSeq*
+cvHaarDetectObjects( const CvArr* _img, 
+                     CvHaarClassifierCascade* cascade, CvMemStorage* storage,
+                     double scaleFactor,
+                     int minNeighbors, int flags, CvSize minSize, CvSize maxSize )
+{
+    std::vector<int> fakeLevels;
+    std::vector<double> fakeWeights;
+    return cvHaarDetectObjectsForROC( _img, cascade, storage, fakeLevels, fakeWeights, 
+                                scaleFactor, minNeighbors, flags, minSize, maxSize, false );
+
+}
+
 
 static CvHaarClassifierCascade*
 icvLoadCascadeCART( const char** input_cascade, int n, CvSize orig_window_size )
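For the old-format (C API) path, the new entry point defined above can also be called directly. A minimal sketch, with placeholder file names:

    #include <opencv2/objdetect/objdetect.hpp>
    #include <opencv2/core/core_c.h>
    #include <opencv2/highgui/highgui.hpp>
    #include <cstdio>
    #include <vector>

    int main()
    {
        CvHaarClassifierCascade* cascade =
            (CvHaarClassifierCascade*)cvLoad( "haarcascade_frontalface_alt.xml" ); // placeholder file
        cv::Mat gray = cv::imread( "scene.png", 0 );                               // placeholder image
        if( !cascade || gray.empty() )
            return 1;

        CvMemStorage* storage = cvCreateMemStorage(0);
        CvMat c_img = gray;                         // the C entry point takes a CvArr*
        std::vector<int> rejectLevels;
        std::vector<double> levelWeights;

        // outputRejectLevels=true makes the function fill rejectLevels/levelWeights
        // for the windows it returns.
        CvSeq* objects = cvHaarDetectObjectsForROC( &c_img, cascade, storage,
                                                    rejectLevels, levelWeights,
                                                    1.1, 3, 0, cvSize(0,0), cvSize(0,0), true );

        int total = objects ? objects->total : 0;
        for( int i = 0; i < total && i < (int)rejectLevels.size(); i++ )
        {
            CvAvgComp* comp = (CvAvgComp*)cvGetSeqElem( objects, i );
            printf( "window %d: (%d,%d,%d,%d) level=%d weight=%f\n", i,
                    comp->rect.x, comp->rect.y, comp->rect.width, comp->rect.height,
                    rejectLevels[i], levelWeights[i] );
        }

        cvReleaseMemStorage( &storage );
        cvReleaseHaarClassifierCascade( &cascade );
        return 0;
    }

This mirrors what CascadeClassifier::detectMultiScale now does internally for old-format cascades, as seen in the call replaced near the end of cascadedetect.cpp above.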
index 3ac32d3..0280fcd 100644 (file)
@@ -3677,28 +3677,6 @@ static PyObject *pycvClipLine(PyObject *self, PyObject *args)
   }
 }
 
-static PyObject *pyfinddatamatrix(PyObject *self, PyObject *args)
-{
-  PyObject *pyim;
-  if (!PyArg_ParseTuple(args, "O", &pyim))
-    return NULL;
-
-  CvMat *image;
-  if (!convert_to_CvMat(pyim, &image, "image")) return NULL;
-
-  std::deque <DataMatrixCode> codes;
-  ERRWRAP(codes = cvFindDataMatrix(image));
-
-  PyObject *pycodes = PyList_New(codes.size());
-  int i;
-  for (i = 0; i < codes.size(); i++) {
-    DataMatrixCode *pc = &codes[i];
-    PyList_SetItem(pycodes, i, Py_BuildValue("(sOO)", pc->msg, FROM_CvMat(pc->corners), FROM_CvMat(pc->original)));
-  }
-
-  return pycodes;
-}
-
 static PyObject *temp_test(PyObject *self, PyObject *args)
 {
 #if 0
@@ -3993,7 +3971,6 @@ static PyMethodDef methods[] = {
   //{"_HOGDetect", (PyCFunction)pycvHOGDetect, METH_KEYWORDS, "_HOGDetect(image, svm_classifier, win_stride=block_stride, locations=None, padding=(0,0), win_size=(64,128), block_size=(16,16), block_stride=(8,8), cell_size=(8,8), nbins=9, gammaCorrection=true) -> list_of_points"},
   //{"_HOGDetectMultiScale", (PyCFunction)pycvHOGDetectMultiScale, METH_KEYWORDS, "_HOGDetectMultiScale(image, svm_classifier, win_stride=block_stride, scale=1.05, group_threshold=2, padding=(0,0), win_size=(64,128), block_size=(16,16), block_stride=(8,8), cell_size=(8,8), nbins=9, gammaCorrection=true) -> list_of_points"},
 
-  {"FindDataMatrix", pyfinddatamatrix, METH_VARARGS},
   {"temp_test", temp_test, METH_VARARGS},
 
 #include "generated1.i"
diff --git a/samples/c/JCB.png b/samples/c/JCB.png
deleted file mode 100644 (file)
index 243602a..0000000
Binary files a/samples/c/JCB.png and /dev/null differ