From: Leonid Beynenson
Date: Fri, 29 Jul 2011 14:18:45 +0000 (+0000)
Subject: Made additional internal header modules/objdetect/src/cascadedetect.hpp, and moved
X-Git-Tag: accepted/2.0/20130307.220821~2236
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=dc13ec205dcb108bcc285a8ee28fc0e98449cb84;p=profile%2Fivi%2Fopencv.git

Made additional internal header modules/objdetect/src/cascadedetect.hpp, and moved
some internal classes and template functions from cascadedetect.cpp to this header.
This makes it possible to derive child classes from classes such as LBPEvaluator.
Also changed all "private" fields in LBPEvaluator and HaarEvaluator to "protected".
---

diff --git a/modules/objdetect/src/cascadedetect.cpp b/modules/objdetect/src/cascadedetect.cpp
index 1760317..d67ad23 100644
--- a/modules/objdetect/src/cascadedetect.cpp
+++ b/modules/objdetect/src/cascadedetect.cpp
@@ -42,6 +42,8 @@
 #include "precomp.hpp"
 #include <cstdio>
 
+#include "cascadedetect.hpp"
+
 namespace cv
 {
 
@@ -379,60 +381,6 @@ void groupRectangles_meanshift(vector<Rect>& rectList, vector<double>& foundWeig
 }
 
-#define CC_CASCADE_PARAMS "cascadeParams"
-#define CC_STAGE_TYPE "stageType"
-#define CC_FEATURE_TYPE "featureType"
-#define CC_HEIGHT "height"
-#define CC_WIDTH "width"
-
-#define CC_STAGE_NUM "stageNum"
-#define CC_STAGES "stages"
-#define CC_STAGE_PARAMS "stageParams"
-
-#define CC_BOOST "BOOST"
-#define CC_MAX_DEPTH "maxDepth"
-#define CC_WEAK_COUNT "maxWeakCount"
-#define CC_STAGE_THRESHOLD "stageThreshold"
-#define CC_WEAK_CLASSIFIERS "weakClassifiers"
-#define CC_INTERNAL_NODES "internalNodes"
-#define CC_LEAF_VALUES "leafValues"
-
-#define CC_FEATURES "features"
-#define CC_FEATURE_PARAMS "featureParams"
-#define CC_MAX_CAT_COUNT "maxCatCount"
-
-#define CC_HAAR "HAAR"
-#define CC_RECTS "rects"
-#define CC_TILTED "tilted"
-
-#define CC_LBP "LBP"
-#define CC_RECT "rect"
-
-#define CV_SUM_PTRS( p0, p1, p2, p3, sum, rect, step ) \
-    /* (x, y) */ \
-    (p0) = sum + (rect).x + (step) * (rect).y, \
-    /* (x + w, y) */ \
-    (p1) = sum + (rect).x + (rect).width + (step) * (rect).y, \
-    /* (x, y + h) */ \
-    (p2) = sum + (rect).x + (step) * ((rect).y + (rect).height), \
-    /* (x + w, y + h) */ \
-    (p3) = sum + (rect).x + (rect).width + (step) * ((rect).y + (rect).height)
-
-#define CV_TILTED_PTRS( p0, p1, p2, p3, tilted, rect, step ) \
-    /* (x, y) */ \
-    (p0) = tilted + (rect).x + (step) * (rect).y, \
-    /* (x - h, y + h) */ \
-    (p1) = tilted + (rect).x - (rect).height + (step) * ((rect).y + (rect).height), \
-    /* (x + w, y + w) */ \
-    (p2) = tilted + (rect).x + (rect).width + (step) * ((rect).y + (rect).width), \
-    /* (x + w - h, y + w + h) */ \
-    (p3) = tilted + (rect).x + (rect).width - (rect).height \
-           + (step) * ((rect).y + (rect).width + (rect).height)
-
-#define CALC_SUM_(p0, p1, p2, p3, offset) \
-    ((p0)[offset] - (p1)[offset] - (p2)[offset] + (p3)[offset])
-
-#define CALC_SUM(rect,offset) CALC_SUM_((rect)[0], (rect)[1], (rect)[2], (rect)[3], offset)
 
 FeatureEvaluator::~FeatureEvaluator() {}
 bool FeatureEvaluator::read(const FileNode&) {return true;}
@@ -444,101 +392,6 @@ double FeatureEvaluator::calcOrd(int) const { return 0.; }
 int FeatureEvaluator::calcCat(int) const { return 0; }
 
 //---------------------------------------------- HaarEvaluator ---------------------------------------
-class HaarEvaluator : public FeatureEvaluator
-{
-public:
-    struct Feature
-    {
-        Feature();
-
-        float calc( int offset ) const;
-        void updatePtrs( const Mat& sum );
-        bool read( const FileNode& node );
-
-        bool tilted;
-
-        enum { RECT_NUM = 3 };
-
-        struct
-        {
-            Rect r;
-            float weight;
-        } rect[RECT_NUM];
-
-        const int* p[RECT_NUM][4];
-    };
-
-    HaarEvaluator();
-    virtual ~HaarEvaluator();
-
-    virtual bool read( const FileNode& node );
-    virtual Ptr<FeatureEvaluator> clone() const;
-    virtual int getFeatureType() const { return FeatureEvaluator::HAAR; }
-
-    virtual bool setImage(const Mat&, Size origWinSize);
-    virtual bool setWindow(Point pt);
-
-    double operator()(int featureIdx) const
-    { return featuresPtr[featureIdx].calc(offset) * varianceNormFactor; }
-    virtual double calcOrd(int featureIdx) const
-    { return (*this)(featureIdx); }
-
-private:
-    Size origWinSize;
-    Ptr<vector<Feature> > features;
-    Feature* featuresPtr; // optimization
-    bool hasTiltedFeatures;
-
-    Mat sum0, sqsum0, tilted0;
-    Mat sum, sqsum, tilted;
-
-    Rect normrect;
-    const int *p[4];
-    const double *pq[4];
-
-    int offset;
-    double varianceNormFactor;
-};
-
-inline HaarEvaluator::Feature :: Feature()
-{
-    tilted = false;
-    rect[0].r = rect[1].r = rect[2].r = Rect();
-    rect[0].weight = rect[1].weight = rect[2].weight = 0;
-    p[0][0] = p[0][1] = p[0][2] = p[0][3] =
-        p[1][0] = p[1][1] = p[1][2] = p[1][3] =
-        p[2][0] = p[2][1] = p[2][2] = p[2][3] = 0;
-}
-
-inline float HaarEvaluator::Feature :: calc( int offset ) const
-{
-    float ret = rect[0].weight * CALC_SUM(p[0], offset) + rect[1].weight * CALC_SUM(p[1], offset);
-
-    if( rect[2].weight != 0.0f )
-        ret += rect[2].weight * CALC_SUM(p[2], offset);
-
-    return ret;
-}
-
-inline void HaarEvaluator::Feature :: updatePtrs( const Mat& sum )
-{
-    const int* ptr = (const int*)sum.data;
-    size_t step = sum.step/sizeof(ptr[0]);
-    if (tilted)
-    {
-        CV_TILTED_PTRS( p[0][0], p[0][1], p[0][2], p[0][3], ptr, rect[0].r, step );
-        CV_TILTED_PTRS( p[1][0], p[1][1], p[1][2], p[1][3], ptr, rect[1].r, step );
-        if (rect[2].weight)
-            CV_TILTED_PTRS( p[2][0], p[2][1], p[2][2], p[2][3], ptr, rect[2].r, step );
-    }
-    else
-    {
-        CV_SUM_PTRS( p[0][0], p[0][1], p[0][2], p[0][3], ptr, rect[0].r, step );
-        CV_SUM_PTRS( p[1][0], p[1][1], p[1][2], p[1][3], ptr, rect[1].r, step );
-        if (rect[2].weight)
-            CV_SUM_PTRS( p[2][0], p[2][1], p[2][2], p[2][3], ptr, rect[2].r, step );
-    }
-}
 
 bool HaarEvaluator::Feature :: read( const FileNode& node )
 {
@@ -670,84 +523,6 @@ bool HaarEvaluator::setWindow( Point pt )
 }
 
 //---------------------------------------------- LBPEvaluator -------------------------------------
-
-class LBPEvaluator : public FeatureEvaluator
-{
-public:
-    struct Feature
-    {
-        Feature();
-        Feature( int x, int y, int _block_w, int _block_h ) :
-            rect(x, y, _block_w, _block_h) {}
-
-        int calc( int offset ) const;
-        void updatePtrs( const Mat& sum );
-        bool read(const FileNode& node );
-
-        Rect rect; // width and height of the block
-        const int* p[16]; // fast
-    };
-
-    LBPEvaluator();
-    virtual ~LBPEvaluator();
-
-    virtual bool read( const FileNode& node );
-    virtual Ptr<FeatureEvaluator> clone() const;
-    virtual int getFeatureType() const { return FeatureEvaluator::LBP; }
-
-    virtual bool setImage(const Mat& image, Size _origWinSize);
-    virtual bool setWindow(Point pt);
-
-    int operator()(int featureIdx) const
-    { return featuresPtr[featureIdx].calc(offset); }
-    virtual int calcCat(int featureIdx) const
-    { return (*this)(featureIdx); }
-private:
-    Size origWinSize;
-    Ptr<vector<Feature> > features;
-    Feature* featuresPtr; // optimization
-    Mat sum0, sum;
-    Rect normrect;
-
-    int offset;
-};
-
-
-inline LBPEvaluator::Feature :: Feature()
-{
-    rect = Rect();
-    for( int i = 0; i < 16; i++ )
-        p[i] = 0;
-}
-
-inline int LBPEvaluator::Feature :: calc( int offset ) const
-{
-    int cval = CALC_SUM_( p[5], p[6], p[9], p[10], offset );
-
-    return (CALC_SUM_( p[0], p[1], p[4], p[5], offset ) >= cval ? 128 : 0) |   // 0
-           (CALC_SUM_( p[1], p[2], p[5], p[6], offset ) >= cval ? 64 : 0) |    // 1
-           (CALC_SUM_( p[2], p[3], p[6], p[7], offset ) >= cval ? 32 : 0) |    // 2
-           (CALC_SUM_( p[6], p[7], p[10], p[11], offset ) >= cval ? 16 : 0) |  // 5
-           (CALC_SUM_( p[10], p[11], p[14], p[15], offset ) >= cval ? 8 : 0) | // 8
-           (CALC_SUM_( p[9], p[10], p[13], p[14], offset ) >= cval ? 4 : 0) |  // 7
-           (CALC_SUM_( p[8], p[9], p[12], p[13], offset ) >= cval ? 2 : 0) |   // 6
-           (CALC_SUM_( p[4], p[5], p[8], p[9], offset ) >= cval ? 1 : 0);      // 3
-}
-
-inline void LBPEvaluator::Feature :: updatePtrs( const Mat& sum )
-{
-    const int* ptr = (const int*)sum.data;
-    size_t step = sum.step/sizeof(ptr[0]);
-    Rect tr = rect;
-    CV_SUM_PTRS( p[0], p[1], p[4], p[5], ptr, tr, step );
-    tr.x += 2*rect.width;
-    CV_SUM_PTRS( p[2], p[3], p[6], p[7], ptr, tr, step );
-    tr.y += 2*rect.height;
-    CV_SUM_PTRS( p[10], p[11], p[14], p[15], ptr, tr, step );
-    tr.x -= 2*rect.width;
-    CV_SUM_PTRS( p[8], p[9], p[12], p[13], ptr, tr, step );
-}
-
 bool LBPEvaluator::Feature :: read(const FileNode& node )
 {
     FileNode rnode = node[CC_RECT];
@@ -862,171 +637,6 @@ bool CascadeClassifier::load(const string& filename)
     return !oldCascade.empty();
 }
 
-template<class FEval>
-inline int predictOrdered( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_featureEvaluator, double& sum )
-{
-    int nstages = (int)cascade.data.stages.size();
-    int nodeOfs = 0, leafOfs = 0;
-    FEval& featureEvaluator = (FEval&)*_featureEvaluator;
-    float* cascadeLeaves = &cascade.data.leaves[0];
-    CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
-    CascadeClassifier::Data::DTree* cascadeWeaks = &cascade.data.classifiers[0];
-    CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0];
-
-    for( int si = 0; si < nstages; si++ )
-    {
-        CascadeClassifier::Data::Stage& stage = cascadeStages[si];
-        int wi, ntrees = stage.ntrees;
-        sum = 0;
-
-        for( wi = 0; wi < ntrees; wi++ )
-        {
-            CascadeClassifier::Data::DTree& weak = cascadeWeaks[stage.first + wi];
-            int idx = 0, root = nodeOfs;
-
-            do
-            {
-                CascadeClassifier::Data::DTreeNode& node = cascadeNodes[root + idx];
-                double val = featureEvaluator(node.featureIdx);
-                idx = val < node.threshold ? node.left : node.right;
-            }
-            while( idx > 0 );
-            sum += cascadeLeaves[leafOfs - idx];
-            nodeOfs += weak.nodeCount;
-            leafOfs += weak.nodeCount + 1;
-        }
-        if( sum < stage.threshold )
-            return -si;
-    }
-    return 1;
-}
-
-template<class FEval>
-inline int predictCategorical( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_featureEvaluator, double& sum )
-{
-    int nstages = (int)cascade.data.stages.size();
-    int nodeOfs = 0, leafOfs = 0;
-    FEval& featureEvaluator = (FEval&)*_featureEvaluator;
-    size_t subsetSize = (cascade.data.ncategories + 31)/32;
-    int* cascadeSubsets = &cascade.data.subsets[0];
-    float* cascadeLeaves = &cascade.data.leaves[0];
-    CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
-    CascadeClassifier::Data::DTree* cascadeWeaks = &cascade.data.classifiers[0];
-    CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0];
-
-    for(int si = 0; si < nstages; si++ )
-    {
-        CascadeClassifier::Data::Stage& stage = cascadeStages[si];
-        int wi, ntrees = stage.ntrees;
-        sum = 0;
-
-        for( wi = 0; wi < ntrees; wi++ )
-        {
-            CascadeClassifier::Data::DTree& weak = cascadeWeaks[stage.first + wi];
-            int idx = 0, root = nodeOfs;
-            do
-            {
-                CascadeClassifier::Data::DTreeNode& node = cascadeNodes[root + idx];
-                int c = featureEvaluator(node.featureIdx);
-                const int* subset = &cascadeSubsets[(root + idx)*subsetSize];
-                idx = (subset[c>>5] & (1 << (c & 31))) ? node.left : node.right;
-            }
-            while( idx > 0 );
-            sum += cascadeLeaves[leafOfs - idx];
-            nodeOfs += weak.nodeCount;
-            leafOfs += weak.nodeCount + 1;
-        }
-        if( sum < stage.threshold )
-            return -si;
-    }
-    return 1;
-}
-
-template<class FEval>
-inline int predictOrderedStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_featureEvaluator, double& sum )
-{
-    int nodeOfs = 0, leafOfs = 0;
-    FEval& featureEvaluator = (FEval&)*_featureEvaluator;
-    float* cascadeLeaves = &cascade.data.leaves[0];
-    CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
-    CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0];
-
-    int nstages = (int)cascade.data.stages.size();
-    for( int stageIdx = 0; stageIdx < nstages; stageIdx++ )
-    {
-        CascadeClassifier::Data::Stage& stage = cascadeStages[stageIdx];
-        sum = 0.0;
-
-        int ntrees = stage.ntrees;
-        for( int i = 0; i < ntrees; i++, nodeOfs++, leafOfs+= 2 )
-        {
-            CascadeClassifier::Data::DTreeNode& node = cascadeNodes[nodeOfs];
-            double value = featureEvaluator(node.featureIdx);
-            sum += cascadeLeaves[ value < node.threshold ? leafOfs : leafOfs + 1 ];
-        }
-
-        if( sum < stage.threshold )
-            return -stageIdx;
-    }
-
-    return 1;
-}
-
-template<class FEval>
-inline int predictCategoricalStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_featureEvaluator, double& sum )
-{
-    int nstages = (int)cascade.data.stages.size();
-    int nodeOfs = 0, leafOfs = 0;
-    FEval& featureEvaluator = (FEval&)*_featureEvaluator;
-    size_t subsetSize = (cascade.data.ncategories + 31)/32;
-    int* cascadeSubsets = &cascade.data.subsets[0];
-    float* cascadeLeaves = &cascade.data.leaves[0];
-    CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
-    CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0];
-
-#ifdef HAVE_TEGRA_OPTIMIZATION
-    float tmp; // float accumulator -- float operations are quicker
-#endif
-    for( int si = 0; si < nstages; si++ )
-    {
-        CascadeClassifier::Data::Stage& stage = cascadeStages[si];
-        int wi, ntrees = stage.ntrees;
-#ifdef HAVE_TEGRA_OPTIMIZATION
-        tmp = 0;
-#else
-        sum = 0;
-#endif
-
-        for( wi = 0; wi < ntrees; wi++ )
-        {
-            CascadeClassifier::Data::DTreeNode& node = cascadeNodes[nodeOfs];
-            int c = featureEvaluator(node.featureIdx);
-            const int* subset = &cascadeSubsets[nodeOfs*subsetSize];
-#ifdef HAVE_TEGRA_OPTIMIZATION
-            tmp += cascadeLeaves[ subset[c>>5] & (1 << (c & 31)) ? leafOfs : leafOfs+1];
-#else
-            sum += cascadeLeaves[ subset[c>>5] & (1 << (c & 31)) ? leafOfs : leafOfs+1];
-#endif
-            nodeOfs++;
-            leafOfs += 2;
-        }
-#ifdef HAVE_TEGRA_OPTIMIZATION
-        if( tmp < stage.threshold ) {
-            sum = (double)tmp;
-            return -si;
-        }
-#else
-        if( sum < stage.threshold )
-            return -si;
-#endif
-    }
-
-#ifdef HAVE_TEGRA_OPTIMIZATION
-    sum = (double)tmp;
-#endif
-
-    return 1;
-}
 
 int CascadeClassifier::runAt( Ptr<FeatureEvaluator>& featureEvaluator, Point pt, double& weight )
 {
diff --git a/modules/objdetect/src/cascadedetect.hpp b/modules/objdetect/src/cascadedetect.hpp
new file mode 100644
index 0000000..e1080f9
--- /dev/null
+++ b/modules/objdetect/src/cascadedetect.hpp
@@ -0,0 +1,410 @@
+#pragma once
+
+namespace cv
+{
+
+#define CC_CASCADE_PARAMS "cascadeParams"
+#define CC_STAGE_TYPE "stageType"
+#define CC_FEATURE_TYPE "featureType"
+#define CC_HEIGHT "height"
+#define CC_WIDTH "width"
+
+#define CC_STAGE_NUM "stageNum"
+#define CC_STAGES "stages"
+#define CC_STAGE_PARAMS "stageParams"
+
+#define CC_BOOST "BOOST"
+#define CC_MAX_DEPTH "maxDepth"
+#define CC_WEAK_COUNT "maxWeakCount"
+#define CC_STAGE_THRESHOLD "stageThreshold"
+#define CC_WEAK_CLASSIFIERS "weakClassifiers"
+#define CC_INTERNAL_NODES "internalNodes"
+#define CC_LEAF_VALUES "leafValues"
+
+#define CC_FEATURES "features"
+#define CC_FEATURE_PARAMS "featureParams"
+#define CC_MAX_CAT_COUNT "maxCatCount"
+
+#define CC_HAAR "HAAR"
+#define CC_RECTS "rects"
+#define CC_TILTED "tilted"
+
+#define CC_LBP "LBP"
+#define CC_RECT "rect"
+
+#define CV_SUM_PTRS( p0, p1, p2, p3, sum, rect, step ) \
+    /* (x, y) */ \
+    (p0) = sum + (rect).x + (step) * (rect).y, \
+    /* (x + w, y) */ \
+    (p1) = sum + (rect).x + (rect).width + (step) * (rect).y, \
+    /* (x, y + h) */ \
+    (p2) = sum + (rect).x + (step) * ((rect).y + (rect).height), \
+    /* (x + w, y + h) */ \
+    (p3) = sum + (rect).x + (rect).width + (step) * ((rect).y + (rect).height)
+
+#define CV_TILTED_PTRS( p0, p1, p2, p3, tilted, rect, step ) \
+    /* (x, y) */ \
+    (p0) = tilted + (rect).x + (step) * (rect).y, \
+    /* (x - h, y + h) */ \
+    (p1) = tilted + (rect).x - (rect).height + (step) * ((rect).y + (rect).height), \
+    /* (x + w, y + w) */ \
+    (p2) = tilted + (rect).x + (rect).width + (step) * ((rect).y + (rect).width), \
+    /* (x + w - h, y + w + h) */ \
+    (p3) = tilted + (rect).x + (rect).width - (rect).height \
+           + (step) * ((rect).y + (rect).width + (rect).height)
+
+#define CALC_SUM_(p0, p1, p2, p3, offset) \
+    ((p0)[offset] - (p1)[offset] - (p2)[offset] + (p3)[offset])
+
+#define CALC_SUM(rect,offset) CALC_SUM_((rect)[0], (rect)[1], (rect)[2], (rect)[3], offset)
+
+
+//---------------------------------------------- HaarEvaluator ---------------------------------------
+class HaarEvaluator : public FeatureEvaluator
+{
+public:
+    struct Feature
+    {
+        Feature();
+
+        float calc( int offset ) const;
+        void updatePtrs( const Mat& sum );
+        bool read( const FileNode& node );
+
+        bool tilted;
+
+        enum { RECT_NUM = 3 };
+
+        struct
+        {
+            Rect r;
+            float weight;
+        } rect[RECT_NUM];
+
+        const int* p[RECT_NUM][4];
+    };
+
+    HaarEvaluator();
+    virtual ~HaarEvaluator();
+
+    virtual bool read( const FileNode& node );
+    virtual Ptr<FeatureEvaluator> clone() const;
+    virtual int getFeatureType() const { return FeatureEvaluator::HAAR; }
+
+    virtual bool setImage(const Mat&, Size origWinSize);
+    virtual bool setWindow(Point pt);
+
+    double operator()(int featureIdx) const
+    { return featuresPtr[featureIdx].calc(offset) * varianceNormFactor; }
+    virtual double calcOrd(int featureIdx) const
+    { return (*this)(featureIdx); }
+
+protected:
+    Size origWinSize;
+    Ptr<vector<Feature> > features;
+    Feature* featuresPtr; // optimization
+    bool hasTiltedFeatures;
+
+    Mat sum0, sqsum0, tilted0;
+    Mat sum, sqsum, tilted;
+
+    Rect normrect;
+    const int *p[4];
+    const double *pq[4];
+
+    int offset;
+    double varianceNormFactor;
+};
+
+inline HaarEvaluator::Feature :: Feature()
+{
+    tilted = false;
+    rect[0].r = rect[1].r = rect[2].r = Rect();
+    rect[0].weight = rect[1].weight = rect[2].weight = 0;
+    p[0][0] = p[0][1] = p[0][2] = p[0][3] =
+        p[1][0] = p[1][1] = p[1][2] = p[1][3] =
+        p[2][0] = p[2][1] = p[2][2] = p[2][3] = 0;
+}
+
+inline float HaarEvaluator::Feature :: calc( int offset ) const
+{
+    float ret = rect[0].weight * CALC_SUM(p[0], offset) + rect[1].weight * CALC_SUM(p[1], offset);
+
+    if( rect[2].weight != 0.0f )
+        ret += rect[2].weight * CALC_SUM(p[2], offset);
+
+    return ret;
+}
+
+inline void HaarEvaluator::Feature :: updatePtrs( const Mat& sum )
+{
+    const int* ptr = (const int*)sum.data;
+    size_t step = sum.step/sizeof(ptr[0]);
+    if (tilted)
+    {
+        CV_TILTED_PTRS( p[0][0], p[0][1], p[0][2], p[0][3], ptr, rect[0].r, step );
+        CV_TILTED_PTRS( p[1][0], p[1][1], p[1][2], p[1][3], ptr, rect[1].r, step );
+        if (rect[2].weight)
+            CV_TILTED_PTRS( p[2][0], p[2][1], p[2][2], p[2][3], ptr, rect[2].r, step );
+    }
+    else
+    {
+        CV_SUM_PTRS( p[0][0], p[0][1], p[0][2], p[0][3], ptr, rect[0].r, step );
+        CV_SUM_PTRS( p[1][0], p[1][1], p[1][2], p[1][3], ptr, rect[1].r, step );
+        if (rect[2].weight)
+            CV_SUM_PTRS( p[2][0], p[2][1], p[2][2], p[2][3], ptr, rect[2].r, step );
+    }
+}
+
+
+//---------------------------------------------- LBPEvaluator -------------------------------------
+
+class LBPEvaluator : public FeatureEvaluator
+{
+public:
+    struct Feature
+    {
+        Feature();
+        Feature( int x, int y, int _block_w, int _block_h ) :
+            rect(x, y, _block_w, _block_h) {}
+
+        int calc( int offset ) const;
+        void updatePtrs( const Mat& sum );
+        bool read(const FileNode& node );
+
+        Rect rect; // width and height of the block
+        const int* p[16]; // fast
+    };
+
+    LBPEvaluator();
+    virtual ~LBPEvaluator();
+
+    virtual bool read( const FileNode& node );
+    virtual Ptr<FeatureEvaluator> clone() const;
+    virtual int getFeatureType() const { return FeatureEvaluator::LBP; }
+
+    virtual bool setImage(const Mat& image, Size _origWinSize);
+    virtual bool setWindow(Point pt);
+
+    int operator()(int featureIdx) const
+    { return featuresPtr[featureIdx].calc(offset); }
+    virtual int calcCat(int featureIdx) const
+    { return (*this)(featureIdx); }
+protected:
+    Size origWinSize;
+    Ptr<vector<Feature> > features;
+    Feature* featuresPtr; // optimization
+    Mat sum0, sum;
+    Rect normrect;
+
+    int offset;
+};
+
+
+inline LBPEvaluator::Feature :: Feature()
+{
+    rect = Rect();
+    for( int i = 0; i < 16; i++ )
+        p[i] = 0;
+}
+
+inline int LBPEvaluator::Feature :: calc( int offset ) const
+{
+    int cval = CALC_SUM_( p[5], p[6], p[9], p[10], offset );
+
+    return (CALC_SUM_( p[0], p[1], p[4], p[5], offset ) >= cval ? 128 : 0) |   // 0
+           (CALC_SUM_( p[1], p[2], p[5], p[6], offset ) >= cval ? 64 : 0) |    // 1
+           (CALC_SUM_( p[2], p[3], p[6], p[7], offset ) >= cval ? 32 : 0) |    // 2
+           (CALC_SUM_( p[6], p[7], p[10], p[11], offset ) >= cval ? 16 : 0) |  // 5
+           (CALC_SUM_( p[10], p[11], p[14], p[15], offset ) >= cval ? 8 : 0) | // 8
+           (CALC_SUM_( p[9], p[10], p[13], p[14], offset ) >= cval ? 4 : 0) |  // 7
+           (CALC_SUM_( p[8], p[9], p[12], p[13], offset ) >= cval ? 2 : 0) |   // 6
+           (CALC_SUM_( p[4], p[5], p[8], p[9], offset ) >= cval ? 1 : 0);      // 3
+}
+
+inline void LBPEvaluator::Feature :: updatePtrs( const Mat& sum )
+{
+    const int* ptr = (const int*)sum.data;
+    size_t step = sum.step/sizeof(ptr[0]);
+    Rect tr = rect;
+    CV_SUM_PTRS( p[0], p[1], p[4], p[5], ptr, tr, step );
+    tr.x += 2*rect.width;
+    CV_SUM_PTRS( p[2], p[3], p[6], p[7], ptr, tr, step );
+    tr.y += 2*rect.height;
+    CV_SUM_PTRS( p[10], p[11], p[14], p[15], ptr, tr, step );
+    tr.x -= 2*rect.width;
+    CV_SUM_PTRS( p[8], p[9], p[12], p[13], ptr, tr, step );
+}
+
+
+
+
+//---------------------------------------------- predictor functions -------------------------------------
+
+template<class FEval>
+inline int predictOrdered( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_featureEvaluator, double& sum )
+{
+    int nstages = (int)cascade.data.stages.size();
+    int nodeOfs = 0, leafOfs = 0;
+    FEval& featureEvaluator = (FEval&)*_featureEvaluator;
+    float* cascadeLeaves = &cascade.data.leaves[0];
+    CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
+    CascadeClassifier::Data::DTree* cascadeWeaks = &cascade.data.classifiers[0];
+    CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0];
+
+    for( int si = 0; si < nstages; si++ )
+    {
+        CascadeClassifier::Data::Stage& stage = cascadeStages[si];
+        int wi, ntrees = stage.ntrees;
+        sum = 0;
+
+        for( wi = 0; wi < ntrees; wi++ )
+        {
+            CascadeClassifier::Data::DTree& weak = cascadeWeaks[stage.first + wi];
+            int idx = 0, root = nodeOfs;
+
+            do
+            {
+                CascadeClassifier::Data::DTreeNode& node = cascadeNodes[root + idx];
+                double val = featureEvaluator(node.featureIdx);
+                idx = val < node.threshold ? node.left : node.right;
+            }
+            while( idx > 0 );
+            sum += cascadeLeaves[leafOfs - idx];
+            nodeOfs += weak.nodeCount;
+            leafOfs += weak.nodeCount + 1;
+        }
+        if( sum < stage.threshold )
+            return -si;
+    }
+    return 1;
+}
+
+template<class FEval>
+inline int predictCategorical( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_featureEvaluator, double& sum )
+{
+    int nstages = (int)cascade.data.stages.size();
+    int nodeOfs = 0, leafOfs = 0;
+    FEval& featureEvaluator = (FEval&)*_featureEvaluator;
+    size_t subsetSize = (cascade.data.ncategories + 31)/32;
+    int* cascadeSubsets = &cascade.data.subsets[0];
+    float* cascadeLeaves = &cascade.data.leaves[0];
+    CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
+    CascadeClassifier::Data::DTree* cascadeWeaks = &cascade.data.classifiers[0];
+    CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0];
+
+    for(int si = 0; si < nstages; si++ )
+    {
+        CascadeClassifier::Data::Stage& stage = cascadeStages[si];
+        int wi, ntrees = stage.ntrees;
+        sum = 0;
+
+        for( wi = 0; wi < ntrees; wi++ )
+        {
+            CascadeClassifier::Data::DTree& weak = cascadeWeaks[stage.first + wi];
+            int idx = 0, root = nodeOfs;
+            do
+            {
+                CascadeClassifier::Data::DTreeNode& node = cascadeNodes[root + idx];
+                int c = featureEvaluator(node.featureIdx);
+                const int* subset = &cascadeSubsets[(root + idx)*subsetSize];
+                idx = (subset[c>>5] & (1 << (c & 31))) ? node.left : node.right;
+            }
+            while( idx > 0 );
+            sum += cascadeLeaves[leafOfs - idx];
+            nodeOfs += weak.nodeCount;
+            leafOfs += weak.nodeCount + 1;
+        }
+        if( sum < stage.threshold )
+            return -si;
+    }
+    return 1;
+}
+
+template<class FEval>
+inline int predictOrderedStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_featureEvaluator, double& sum )
+{
+    int nodeOfs = 0, leafOfs = 0;
+    FEval& featureEvaluator = (FEval&)*_featureEvaluator;
+    float* cascadeLeaves = &cascade.data.leaves[0];
+    CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
+    CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0];
+
+    int nstages = (int)cascade.data.stages.size();
+    for( int stageIdx = 0; stageIdx < nstages; stageIdx++ )
+    {
+        CascadeClassifier::Data::Stage& stage = cascadeStages[stageIdx];
+        sum = 0.0;
+
+        int ntrees = stage.ntrees;
+        for( int i = 0; i < ntrees; i++, nodeOfs++, leafOfs+= 2 )
+        {
+            CascadeClassifier::Data::DTreeNode& node = cascadeNodes[nodeOfs];
+            double value = featureEvaluator(node.featureIdx);
+            sum += cascadeLeaves[ value < node.threshold ? leafOfs : leafOfs + 1 ];
+        }
+
+        if( sum < stage.threshold )
+            return -stageIdx;
+    }
+
+    return 1;
+}
+
+template<class FEval>
+inline int predictCategoricalStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_featureEvaluator, double& sum )
+{
+    int nstages = (int)cascade.data.stages.size();
+    int nodeOfs = 0, leafOfs = 0;
+    FEval& featureEvaluator = (FEval&)*_featureEvaluator;
+    size_t subsetSize = (cascade.data.ncategories + 31)/32;
+    int* cascadeSubsets = &cascade.data.subsets[0];
+    float* cascadeLeaves = &cascade.data.leaves[0];
+    CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
+    CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0];
+
+#ifdef HAVE_TEGRA_OPTIMIZATION
+    float tmp; // float accumulator -- float operations are quicker
+#endif
+    for( int si = 0; si < nstages; si++ )
+    {
+        CascadeClassifier::Data::Stage& stage = cascadeStages[si];
+        int wi, ntrees = stage.ntrees;
+#ifdef HAVE_TEGRA_OPTIMIZATION
+        tmp = 0;
+#else
+        sum = 0;
+#endif
+
+        for( wi = 0; wi < ntrees; wi++ )
+        {
+            CascadeClassifier::Data::DTreeNode& node = cascadeNodes[nodeOfs];
+            int c = featureEvaluator(node.featureIdx);
+            const int* subset = &cascadeSubsets[nodeOfs*subsetSize];
+#ifdef HAVE_TEGRA_OPTIMIZATION
+            tmp += cascadeLeaves[ subset[c>>5] & (1 << (c & 31)) ? leafOfs : leafOfs+1];
+#else
+            sum += cascadeLeaves[ subset[c>>5] & (1 << (c & 31)) ? leafOfs : leafOfs+1];
+#endif
+            nodeOfs++;
+            leafOfs += 2;
+        }
+#ifdef HAVE_TEGRA_OPTIMIZATION
+        if( tmp < stage.threshold ) {
+            sum = (double)tmp;
+            return -si;
+        }
+#else
+        if( sum < stage.threshold )
+            return -si;
+#endif
+    }
+
+#ifdef HAVE_TEGRA_OPTIMIZATION
+    sum = (double)tmp;
+#endif
+
+    return 1;
+}
+}
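
With the evaluator declarations in a shared internal header and their fields changed from "private" to "protected", other source files inside the objdetect module can now derive from them, which is the stated purpose of this patch. Below is a minimal sketch of such a child class; the name MyLBPEvaluator and the body of its setWindow override are hypothetical illustrations, not part of the patch (the override simply mirrors the bounds-check-then-offset pattern of the base class):

#include "precomp.hpp"
#include "cascadedetect.hpp"

namespace cv
{

// Hypothetical derived evaluator, possible now that LBPEvaluator is declared
// in cascadedetect.hpp and its data members are protected.
class MyLBPEvaluator : public LBPEvaluator
{
public:
    virtual bool setWindow( Point pt )
    {
        // Inherited protected state (origWinSize, sum, offset) is directly
        // accessible here; reject windows falling outside the integral image.
        if( pt.x < 0 || pt.y < 0 ||
            pt.x + origWinSize.width >= sum.cols ||
            pt.y + origWinSize.height >= sum.rows )
            return false;
        // Recompute the offset of the window's top-left corner in the
        // integral image, row stride expressed in ints.
        offset = pt.y * ((int)(sum.step/sizeof(int))) + pt.x;
        return true;
    }
};

}

The same pattern applies to HaarEvaluator, whose members were likewise made protected, and to the predictor function templates, which derived evaluators can be plugged into via CascadeClassifier::runAt.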