revisions 8681 and 8688 restored. Warning fixed.
author    Alexander Smorkalov <no@email>
          Thu, 21 Jun 2012 14:37:28 +0000
committer Alexander Smorkalov <no@email>
          Thu, 21 Jun 2012 14:37:28 +0000
Warning: changes break binary compatibility

modules/contrib/include/opencv2/contrib/detection_based_tracker.hpp
modules/contrib/src/detection_based_tracker.cpp
samples/android/face-detection/jni/DetectionBasedTracker_jni.cpp
samples/android/face-detection/src/org/opencv/samples/fd/DetectionBasedTracker.java
samples/android/face-detection/src/org/opencv/samples/fd/FdActivity.java
samples/cpp/CMakeLists.txt
samples/cpp/core_vision_tracking_image.cpp [new file with mode: 0644]
samples/cpp/dbt_face_detection.cpp [new file with mode: 0644]
samples/cpp/detection_based_tracker_sample.cpp

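The binary compatibility break comes from the reworked DetectionBasedTracker API: the tracker no longer loads a cascade file itself, and the minimum/maximum object size, scale factor and neighbour settings moved from Parameters into the new IDetector interface. Below is a minimal migration sketch assembled from this patch, reusing the CascadeDetectorAdapter that the patch adds to samples/cpp/dbt_face_detection.cpp; the cascade path and the wrapper function name are placeholders, not part of the commit.

    #include <opencv2/contrib/detection_based_tracker.hpp>
    #include <opencv2/objdetect/objdetect.hpp>
    #include <string>
    #include <vector>

    using namespace cv;

    // Adapter from the samples in this patch: forwards the IDetector settings
    // (scaleFactor, minNeighbours, min/max object size) to CascadeClassifier.
    class CascadeDetectorAdapter : public DetectionBasedTracker::IDetector
    {
    public:
        CascadeDetectorAdapter(Ptr<CascadeClassifier> detector) : Detector(detector)
        {
            CV_Assert(!detector.empty());
        }

        void detect(const Mat& image, std::vector<Rect>& objects)
        {
            Detector->detectMultiScale(image, objects, scaleFactor, minNeighbours, 0, minObjSize, maxObjSize);
        }

    private:
        Ptr<CascadeClassifier> Detector;
    };

    void createTrackerExample()   // illustrative only
    {
        // Old API, removed by this commit:
        //     DetectionBasedTracker tracker("lbpcascade_frontalface.xml", params);

        std::string cascadePath = "lbpcascade_frontalface.xml";   // placeholder

        Ptr<CascadeClassifier> mainCascade = new CascadeClassifier(cascadePath);
        Ptr<DetectionBasedTracker::IDetector> mainDetector = new CascadeDetectorAdapter(mainCascade);

        Ptr<CascadeClassifier> trackingCascade = new CascadeClassifier(cascadePath);
        Ptr<DetectionBasedTracker::IDetector> trackingDetector = new CascadeDetectorAdapter(trackingCascade);

        // What used to be Parameters::minObjectSize/scaleFactor/minNeighbors
        // is now configured per detector:
        mainDetector->setMinObjectSize(Size(96, 96));
        mainDetector->setScaleFactor(1.1f);
        mainDetector->setMinNeighbours(2);

        DetectionBasedTracker::Parameters params;   // keeps only maxTrackLifetime and minDetectionPeriod
        DetectionBasedTracker tracker(mainDetector, trackingDetector, params);
        tracker.run();
        // ... per frame: tracker.process(grayFrame); tracker.getObjects(faces);
        tracker.stop();
    }

The Android sample keeps the same Java-facing API but routes setMinFaceSize to the detectors; its JNI create/set functions are still marked TODO in this revision.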
diff --git a/modules/contrib/include/opencv2/contrib/detection_based_tracker.hpp b/modules/contrib/include/opencv2/contrib/detection_based_tracker.hpp
index bef80bf..239011d 100644
@@ -7,22 +7,73 @@
 
 #include <vector>
 
+namespace cv
+{
 class DetectionBasedTracker
 {
     public:
         struct Parameters
         {
-            int minObjectSize;
-            int maxObjectSize;
-            double scaleFactor;
             int maxTrackLifetime;
-            int minNeighbors;
             int minDetectionPeriod; //the minimal time between run of the big object detector (on the whole frame) in ms (1000 mean 1 sec), default=0
 
             Parameters();
         };
 
-        DetectionBasedTracker(const std::string& cascadeFilename, const Parameters& params);
+        class IDetector
+        {
+            public:
+                IDetector():
+                    minObjSize(96, 96),
+                    maxObjSize(INT_MAX, INT_MAX),
+                    minNeighbours(2),
+                    scaleFactor(1.1f)
+                {}
+
+                virtual void detect(const cv::Mat& Image, std::vector<cv::Rect>& objects) = 0;
+
+                void setMinObjectSize(const cv::Size& min)
+                {
+                    minObjSize = min;
+                }
+                void setMaxObjectSize(const cv::Size& max)
+                {
+                    maxObjSize = max;
+                }
+                cv::Size getMinObjectSize() const
+                {
+                    return minObjSize;
+                }
+                cv::Size getMaxObjectSize() const
+                {
+                    return maxObjSize;
+                }
+                float getScaleFactor()
+                {
+                    return scaleFactor;
+                }
+                void setScaleFactor(float value)
+                {
+                    scaleFactor = value;
+                }
+                int getMinNeighbours()
+                {
+                    return minNeighbours;
+                }
+                void setMinNeighbours(int value)
+                {
+                    minNeighbours = value;
+                }
+                virtual ~IDetector() {}
+
+            protected:
+                cv::Size minObjSize;
+                cv::Size maxObjSize;
+                int minNeighbours;
+                float scaleFactor;
+        };
+
+        DetectionBasedTracker(cv::Ptr<IDetector> MainDetector, cv::Ptr<IDetector> TrackingDetector, const Parameters& params);
         virtual ~DetectionBasedTracker();
 
         virtual bool run();
@@ -44,7 +95,6 @@ class DetectionBasedTracker
         cv::Ptr<SeparateDetectionWork> separateDetectionWork;
         friend void* workcycleObjectDetectorFunction(void* p);
 
-
         struct InnerParameters
         {
             int numLastPositionsToTrack;
@@ -90,13 +140,11 @@ class DetectionBasedTracker
         std::vector<float> weightsPositionsSmoothing;
         std::vector<float> weightsSizesSmoothing;
 
-        cv::CascadeClassifier cascadeForTracking;
-
+        cv::Ptr<IDetector> cascadeForTracking;
 
         void updateTrackedObjects(const std::vector<cv::Rect>& detectedObjects);
         cv::Rect calcTrackedObjectPositionToShow(int i) const;
         void detectInRegion(const cv::Mat& img, const cv::Rect& r, std::vector<cv::Rect>& detectedObjectsInRegions);
 };
-
+} //end of cv namespace
 #endif
-
diff --git a/modules/contrib/src/detection_based_tracker.cpp b/modules/contrib/src/detection_based_tracker.cpp
index d65e9d9..9edec6d 100644
@@ -40,6 +40,7 @@ static inline cv::Point2f centerRect(const cv::Rect& r)
 {
     return cv::Point2f(r.x+((float)r.width)/2, r.y+((float)r.height)/2);
 };
+
 static inline cv::Rect scale_rect(const cv::Rect& r, float scale)
 {
     cv::Point2f m=centerRect(r);
@@ -51,11 +52,15 @@ static inline cv::Rect scale_rect(const cv::Rect& r, float scale)
     return cv::Rect(x, y, cvRound(width), cvRound(height));
 };
 
-void* workcycleObjectDetectorFunction(void* p);
-class DetectionBasedTracker::SeparateDetectionWork
+namespace cv
+{
+    void* workcycleObjectDetectorFunction(void* p);
+}
+
+class cv::DetectionBasedTracker::SeparateDetectionWork
 {
     public:
-        SeparateDetectionWork(DetectionBasedTracker& _detectionBasedTracker, const std::string& cascadeFilename);
+        SeparateDetectionWork(cv::DetectionBasedTracker& _detectionBasedTracker, cv::Ptr<DetectionBasedTracker::IDetector> _detector);
         virtual ~SeparateDetectionWork();
         bool communicateWithDetectingThread(const Mat& imageGray, vector<Rect>& rectsWhereRegions);
         bool run();
@@ -77,7 +82,7 @@ class DetectionBasedTracker::SeparateDetectionWork
     protected:
 
         DetectionBasedTracker& detectionBasedTracker;
-        cv::CascadeClassifier cascadeInThread;
+        cv::Ptr<DetectionBasedTracker::IDetector> cascadeInThread;
 
         pthread_t second_workthread;
         pthread_mutex_t mutex;
@@ -105,7 +110,7 @@ class DetectionBasedTracker::SeparateDetectionWork
         long long  timeWhenDetectingThreadStartedWork;
 };
 
-DetectionBasedTracker::SeparateDetectionWork::SeparateDetectionWork(DetectionBasedTracker& _detectionBasedTracker, const std::string& cascadeFilename)
+cv::DetectionBasedTracker::SeparateDetectionWork::SeparateDetectionWork(DetectionBasedTracker& _detectionBasedTracker, cv::Ptr<DetectionBasedTracker::IDetector> _detector)
     :detectionBasedTracker(_detectionBasedTracker),
     cascadeInThread(),
     isObjectDetectingReady(false),
@@ -113,9 +118,10 @@ DetectionBasedTracker::SeparateDetectionWork::SeparateDetectionWork(DetectionBas
     stateThread(STATE_THREAD_STOPPED),
     timeWhenDetectingThreadStartedWork(-1)
 {
-    if(!cascadeInThread.load(cascadeFilename)) {
-        CV_Error(CV_StsBadArg, "DetectionBasedTracker::SeparateDetectionWork::SeparateDetectionWork: Cannot load a cascade from the file '"+cascadeFilename+"'");
-    }
+    CV_Assert(!_detector.empty());
+
+    cascadeInThread = _detector;
+
     int res=0;
     res=pthread_mutex_init(&mutex, NULL);//TODO: should be attributes?
     if (res) {
@@ -137,7 +143,7 @@ DetectionBasedTracker::SeparateDetectionWork::SeparateDetectionWork(DetectionBas
     }
 }
 
-DetectionBasedTracker::SeparateDetectionWork::~SeparateDetectionWork()
+cv::DetectionBasedTracker::SeparateDetectionWork::~SeparateDetectionWork()
 {
     if(stateThread!=STATE_THREAD_STOPPED) {
         LOGE("\n\n\nATTENTION!!! dangerous algorithm error: destructor DetectionBasedTracker::DetectionBasedTracker::~SeparateDetectionWork is called before stopping the workthread");
@@ -147,7 +153,7 @@ DetectionBasedTracker::SeparateDetectionWork::~SeparateDetectionWork()
     pthread_cond_destroy(&objectDetectorRun);
     pthread_mutex_destroy(&mutex);
 }
-bool DetectionBasedTracker::SeparateDetectionWork::run()
+bool cv::DetectionBasedTracker::SeparateDetectionWork::run()
 {
     LOGD("DetectionBasedTracker::SeparateDetectionWork::run() --- start");
     pthread_mutex_lock(&mutex);
@@ -196,18 +202,18 @@ do {
 } while(0)
 #endif
 
-void* workcycleObjectDetectorFunction(void* p)
+void* cv::workcycleObjectDetectorFunction(void* p)
 {
-    CATCH_ALL_AND_LOG({ ((DetectionBasedTracker::SeparateDetectionWork*)p)->workcycleObjectDetector(); });
+    CATCH_ALL_AND_LOG({ ((cv::DetectionBasedTracker::SeparateDetectionWork*)p)->workcycleObjectDetector(); });
     try{
-        ((DetectionBasedTracker::SeparateDetectionWork*)p)->stateThread=DetectionBasedTracker::SeparateDetectionWork::STATE_THREAD_STOPPED;
+        ((cv::DetectionBasedTracker::SeparateDetectionWork*)p)->stateThread = cv::DetectionBasedTracker::SeparateDetectionWork::STATE_THREAD_STOPPED;
     } catch(...) {
         LOGE0("DetectionBasedTracker: workcycleObjectDetectorFunction: ERROR concerning pointer, received as the function parameter");
     }
     return NULL;
 }
 
-void DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector()
+void cv::DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector()
 {
     static double freq = getTickFrequency();
     LOGD0("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- start");
@@ -274,20 +280,17 @@ void DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector()
 
         int64 t1_detect=getTickCount();
 
-        int minObjectSize=detectionBasedTracker.parameters.minObjectSize;
-        Size min_objectSize=Size(minObjectSize, minObjectSize);
-
-        int maxObjectSize=detectionBasedTracker.parameters.maxObjectSize;
-        Size max_objectSize(maxObjectSize, maxObjectSize);
+        cascadeInThread->detect(imageSeparateDetecting, objects);
 
-
-        cascadeInThread.detectMultiScale( imageSeparateDetecting, objects,
+        /*cascadeInThread.detectMultiScale( imageSeparateDetecting, objects,
                 detectionBasedTracker.parameters.scaleFactor, detectionBasedTracker.parameters.minNeighbors, 0
                 |CV_HAAR_SCALE_IMAGE
                 ,
                 min_objectSize,
                 max_objectSize
                 );
+        */
+
         LOGD("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector() --- end handling imageSeparateDetecting");
 
         if (!isWorking()) {
@@ -333,7 +336,7 @@ void DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector()
     LOGI("DetectionBasedTracker::SeparateDetectionWork::workcycleObjectDetector: Returning");
 }
 
-void DetectionBasedTracker::SeparateDetectionWork::stop()
+void cv::DetectionBasedTracker::SeparateDetectionWork::stop()
 {
     //FIXME: TODO: should add quickStop functionality
     pthread_mutex_lock(&mutex);
@@ -350,7 +353,7 @@ void DetectionBasedTracker::SeparateDetectionWork::stop()
     pthread_mutex_unlock(&mutex);
 }
 
-void DetectionBasedTracker::SeparateDetectionWork::resetTracking()
+void cv::DetectionBasedTracker::SeparateDetectionWork::resetTracking()
 {
     LOGD("DetectionBasedTracker::SeparateDetectionWork::resetTracking");
     pthread_mutex_lock(&mutex);
@@ -371,7 +374,7 @@ void DetectionBasedTracker::SeparateDetectionWork::resetTracking()
 
 }
 
-bool DetectionBasedTracker::SeparateDetectionWork::communicateWithDetectingThread(const Mat& imageGray, vector<Rect>& rectsWhereRegions)
+bool cv::DetectionBasedTracker::SeparateDetectionWork::communicateWithDetectingThread(const Mat& imageGray, vector<Rect>& rectsWhereRegions)
 {
     static double freq = getTickFrequency();
 
@@ -420,19 +423,13 @@ bool DetectionBasedTracker::SeparateDetectionWork::communicateWithDetectingThrea
     return shouldHandleResult;
 }
 
-DetectionBasedTracker::Parameters::Parameters()
+cv::DetectionBasedTracker::Parameters::Parameters()
 {
-    minObjectSize=96;
-    maxObjectSize=INT_MAX;
-    scaleFactor=1.1;
     maxTrackLifetime=5;
-    minNeighbors=2;
     minDetectionPeriod=0;
 }
 
-
-
-DetectionBasedTracker::InnerParameters::InnerParameters()
+cv::DetectionBasedTracker::InnerParameters::InnerParameters()
 {
     numLastPositionsToTrack=4;
     numStepsToWaitBeforeFirstShow=6;
@@ -444,39 +441,32 @@ DetectionBasedTracker::InnerParameters::InnerParameters()
     coeffObjectSpeedUsingInPrediction=0.8;
 
 }
-DetectionBasedTracker::DetectionBasedTracker(const std::string& cascadeFilename, const Parameters& params)
+
+cv::DetectionBasedTracker::DetectionBasedTracker(cv::Ptr<IDetector> MainDetector, cv::Ptr<IDetector> TrackingDetector, const Parameters& params)
     :separateDetectionWork(),
+    parameters(params),
     innerParameters(),
-    numTrackedSteps(0)
+    numTrackedSteps(0),
+    cascadeForTracking(TrackingDetector)
 {
-    CV_Assert( (params.minObjectSize > 0)
-            && (params.maxObjectSize >= 0)
-            && (params.scaleFactor > 1.0)
-            && (params.maxTrackLifetime >= 0) );
-
-    if (!cascadeForTracking.load(cascadeFilename)) {
-        CV_Error(CV_StsBadArg, "DetectionBasedTracker::DetectionBasedTracker: Cannot load a cascade from the file '"+cascadeFilename+"'");
-    }
+    CV_Assert( (params.maxTrackLifetime >= 0)
+            && (!MainDetector.empty())
+            && (!TrackingDetector.empty()) );
 
-    parameters=params;
-
-    separateDetectionWork=new SeparateDetectionWork(*this, cascadeFilename);
+    separateDetectionWork = new SeparateDetectionWork(*this, MainDetector);
 
     weightsPositionsSmoothing.push_back(1);
     weightsSizesSmoothing.push_back(0.5);
     weightsSizesSmoothing.push_back(0.3);
     weightsSizesSmoothing.push_back(0.2);
-
 }
-DetectionBasedTracker::~DetectionBasedTracker()
+
+cv::DetectionBasedTracker::~DetectionBasedTracker()
 {
 }
 
-
-
 void DetectionBasedTracker::process(const Mat& imageGray)
 {
-
     CV_Assert(imageGray.type()==CV_8UC1);
 
     if (!separateDetectionWork->isWorking()) {
@@ -494,15 +484,9 @@ void DetectionBasedTracker::process(const Mat& imageGray)
 
     Mat imageDetect=imageGray;
 
-    int D=parameters.minObjectSize;
-    if (D < 1)
-        D=1;
-
     vector<Rect> rectsWhereRegions;
     bool shouldHandleResult=separateDetectionWork->communicateWithDetectingThread(imageGray, rectsWhereRegions);
 
-
-
     if (shouldHandleResult) {
         LOGD("DetectionBasedTracker::process: get _rectsWhereRegions were got from resultDetect");
     } else {
@@ -517,7 +501,6 @@ void DetectionBasedTracker::process(const Mat& imageGray)
                 continue;
             }
 
-
             //correction by speed of rectangle
             if (n > 1) {
                 Point2f center=centerRect(r);
@@ -547,7 +530,7 @@ void DetectionBasedTracker::process(const Mat& imageGray)
     updateTrackedObjects(detectedObjectsInRegions);
 }
 
-void DetectionBasedTracker::getObjects(std::vector<cv::Rect>& result) const
+void cv::DetectionBasedTracker::getObjects(std::vector<cv::Rect>& result) const
 {
     result.clear();
 
@@ -560,7 +543,8 @@ void DetectionBasedTracker::getObjects(std::vector<cv::Rect>& result) const
         LOGD("DetectionBasedTracker::process: found a object with SIZE %d x %d, rect={%d, %d, %d x %d}", r.width, r.height, r.x, r.y, r.width, r.height);
     }
 }
-void DetectionBasedTracker::getObjects(std::vector<Object>& result) const
+
+void cv::DetectionBasedTracker::getObjects(std::vector<Object>& result) const
 {
     result.clear();
 
@@ -574,25 +558,23 @@ void DetectionBasedTracker::getObjects(std::vector<Object>& result) const
     }
 }
 
-
-
-bool DetectionBasedTracker::run()
+bool cv::DetectionBasedTracker::run()
 {
     return separateDetectionWork->run();
 }
 
-void DetectionBasedTracker::stop()
+void cv::DetectionBasedTracker::stop()
 {
     separateDetectionWork->stop();
 }
 
-void DetectionBasedTracker::resetTracking()
+void cv::DetectionBasedTracker::resetTracking()
 {
     separateDetectionWork->resetTracking();
     trackedObjects.clear();
 }
 
-void DetectionBasedTracker::updateTrackedObjects(const vector<Rect>& detectedObjects)
+void cv::DetectionBasedTracker::updateTrackedObjects(const vector<Rect>& detectedObjects)
 {
     enum {
         NEW_RECTANGLE=-1,
@@ -711,7 +693,8 @@ void DetectionBasedTracker::updateTrackedObjects(const vector<Rect>& detectedObj
         }
     }
 }
-Rect DetectionBasedTracker::calcTrackedObjectPositionToShow(int i) const
+
+Rect cv::DetectionBasedTracker::calcTrackedObjectPositionToShow(int i) const
 {
     if ( (i < 0) || (i >= (int)trackedObjects.size()) ) {
         LOGE("DetectionBasedTracker::calcTrackedObjectPositionToShow: ERROR: wrong i=%d", i);
@@ -743,8 +726,8 @@ Rect DetectionBasedTracker::calcTrackedObjectPositionToShow(int i) const
         double sum=0;
         for(int j=0; j < Nsize; j++) {
             int k=N-j-1;
-            w+= lastPositions[k].width  * weightsSizesSmoothing[j];
-            h+= lastPositions[k].height * weightsSizesSmoothing[j];
+            w += lastPositions[k].width  * weightsSizesSmoothing[j];
+            h += lastPositions[k].height * weightsSizesSmoothing[j];
             sum+=weightsSizesSmoothing[j];
         }
         w /= sum;
@@ -762,7 +745,7 @@ Rect DetectionBasedTracker::calcTrackedObjectPositionToShow(int i) const
             Point br(lastPositions[k].br());
             Point2f c1;
             c1=tl;
-                c1=c1* 0.5f;
+            c1=c1* 0.5f;
             Point2f c2;
             c2=br;
             c2=c2*0.5f;
@@ -792,7 +775,7 @@ Rect DetectionBasedTracker::calcTrackedObjectPositionToShow(int i) const
     return res;
 }
 
-void DetectionBasedTracker::detectInRegion(const Mat& img, const Rect& r, vector<Rect>& detectedObjectsInRegions)
+void cv::DetectionBasedTracker::detectInRegion(const Mat& img, const Rect& r, vector<Rect>& detectedObjectsInRegions)
 {
     Rect r0(Point(), img.size());
     Rect r1=scale_rect(r, innerParameters.coeffTrackingWindowSize);
@@ -802,8 +785,7 @@ void DetectionBasedTracker::detectInRegion(const Mat& img, const Rect& r, vector
         return;
     }
 
-    int d=std::min(r.width, r.height);
-    d=cvRound(d * innerParameters.coeffObjectSizeToTrack);
+    int d = cvRound(std::min(r.width, r.height) * innerParameters.coeffObjectSizeToTrack);
 
     vector<Rect> tmpobjects;
 
@@ -811,17 +793,17 @@ void DetectionBasedTracker::detectInRegion(const Mat& img, const Rect& r, vector
     LOGD("DetectionBasedTracker::detectInRegion: img1.size()=%d x %d, d=%d",
             img1.size().width, img1.size().height, d);
 
-    int maxObjectSize=parameters.maxObjectSize;
-    Size max_objectSize(maxObjectSize, maxObjectSize);
-
-    cascadeForTracking.detectMultiScale( img1, tmpobjects,
+    cascadeForTracking->setMinObjectSize(Size(d, d));
+    cascadeForTracking->detect(img1, tmpobjects);
+            /*
+            detectMultiScale( img1, tmpobjects,
             parameters.scaleFactor, parameters.minNeighbors, 0
             |CV_HAAR_FIND_BIGGEST_OBJECT
             |CV_HAAR_SCALE_IMAGE
             ,
             Size(d,d),
             max_objectSize
-            );
+            );*/
 
     for(size_t i=0; i < tmpobjects.size(); i++) {
         Rect curres(tmpobjects[i].tl() + r1.tl(), tmpobjects[i].size());
@@ -829,12 +811,9 @@ void DetectionBasedTracker::detectInRegion(const Mat& img, const Rect& r, vector
     }
 }
 
-bool DetectionBasedTracker::setParameters(const Parameters& params)
+bool cv::DetectionBasedTracker::setParameters(const Parameters& params)
 {
-    if ( (params.minObjectSize <= 0)
-            || (params.maxObjectSize < 0)
-            || (params.scaleFactor <= 1.0)
-            || (params.maxTrackLifetime < 0) )
+    if ( params.maxTrackLifetime < 0 )
     {
         LOGE("DetectionBasedTracker::setParameters: ERROR: wrong parameters value");
         return false;
@@ -846,7 +825,7 @@ bool DetectionBasedTracker::setParameters(const Parameters& params)
     return true;
 }
 
-const DetectionBasedTracker::Parameters& DetectionBasedTracker::getParameters()
+const cv::DetectionBasedTracker::Parameters& DetectionBasedTracker::getParameters()
 {
     return parameters;
 }
diff --git a/samples/android/face-detection/jni/DetectionBasedTracker_jni.cpp b/samples/android/face-detection/jni/DetectionBasedTracker_jni.cpp
index 9f45a60..c78c5ac 100644
@@ -18,6 +18,29 @@ inline void vector_Rect_to_Mat(vector<Rect>& v_rect, Mat& mat)
     mat = Mat(v_rect, true);
 }
 
+class CascadeDetectorAdapter: public DetectionBasedTracker::IDetector
+{
+public:
+    CascadeDetectorAdapter(cv::Ptr<cv::CascadeClassifier> detector):
+    IDetector(),
+    Detector(detector)
+    {
+       CV_Assert(!detector.empty());
+    }
+    
+    void detect(const cv::Mat &Image, std::vector<cv::Rect> &objects)
+    {
+       Detector->detectMultiScale(Image, objects, scaleFactor, minNeighbours, 0, minObjSize, maxObjSize);
+    }
+    
+    virtual ~CascadeDetectorAdapter()
+    {}
+    
+private:
+    CascadeDetectorAdapter();
+    cv::Ptr<cv::CascadeClassifier> Detector;
+};
+
 JNIEXPORT jlong JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeCreateObject
 (JNIEnv * jenv, jclass, jstring jFileName, jint faceSize)
 {
@@ -27,25 +50,26 @@ JNIEXPORT jlong JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeC
 
     try
     {
-    DetectionBasedTracker::Parameters DetectorParams;
-    if (faceSize > 0)
-        DetectorParams.minObjectSize = faceSize;
-    result = (jlong)new DetectionBasedTracker(stdFileName, DetectorParams);
+       // TODO: Reimplement using adapter
+//     DetectionBasedTracker::Parameters DetectorParams;
+//     if (faceSize > 0)
+//         DetectorParams.minObjectSize = faceSize;
+//     result = (jlong)new DetectionBasedTracker(stdFileName, DetectorParams);
     }
     catch(cv::Exception e)
     {
-    LOGD("nativeCreateObject catched cv::Exception: %s", e.what());
-    jclass je = jenv->FindClass("org/opencv/core/CvException");
-    if(!je)
-        je = jenv->FindClass("java/lang/Exception");
-    jenv->ThrowNew(je, e.what());
+       LOGD("nativeCreateObject catched cv::Exception: %s", e.what());
+       jclass je = jenv->FindClass("org/opencv/core/CvException");
+       if(!je)
+           je = jenv->FindClass("java/lang/Exception");
+       jenv->ThrowNew(je, e.what());
     }
     catch (...)
     {
-    LOGD("nativeCreateObject catched unknown exception");
-    jclass je = jenv->FindClass("java/lang/Exception");
-    jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
-    return 0;
+       LOGD("nativeCreateObject catched unknown exception");
+       jclass je = jenv->FindClass("java/lang/Exception");
+       jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
+       return 0;
     }
 
     return result;
@@ -56,22 +80,22 @@ JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDe
 {
     try
     {
-    ((DetectionBasedTracker*)thiz)->stop();
-    delete (DetectionBasedTracker*)thiz;
+       ((DetectionBasedTracker*)thiz)->stop();
+       delete (DetectionBasedTracker*)thiz;
     }
     catch(cv::Exception e)
     {
-    LOGD("nativeestroyObject catched cv::Exception: %s", e.what());
-    jclass je = jenv->FindClass("org/opencv/core/CvException");
-    if(!je)
-        je = jenv->FindClass("java/lang/Exception");
-    jenv->ThrowNew(je, e.what());
+       LOGD("nativeestroyObject catched cv::Exception: %s", e.what());
+       jclass je = jenv->FindClass("org/opencv/core/CvException");
+       if(!je)
+           je = jenv->FindClass("java/lang/Exception");
+       jenv->ThrowNew(je, e.what());
     }
     catch (...)
     {
-    LOGD("nativeDestroyObject catched unknown exception");
-    jclass je = jenv->FindClass("java/lang/Exception");
-    jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
+       LOGD("nativeDestroyObject catched unknown exception");
+       jclass je = jenv->FindClass("java/lang/Exception");
+       jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
     }
 }
 
@@ -80,21 +104,21 @@ JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeSt
 {
     try
     {
-    ((DetectionBasedTracker*)thiz)->run();
+       ((DetectionBasedTracker*)thiz)->run();
     }
     catch(cv::Exception e)
     {
-    LOGD("nativeStart catched cv::Exception: %s", e.what());
-    jclass je = jenv->FindClass("org/opencv/core/CvException");
-    if(!je)
-        je = jenv->FindClass("java/lang/Exception");
-    jenv->ThrowNew(je, e.what());
+       LOGD("nativeStart catched cv::Exception: %s", e.what());
+       jclass je = jenv->FindClass("org/opencv/core/CvException");
+       if(!je)
+           je = jenv->FindClass("java/lang/Exception");
+       jenv->ThrowNew(je, e.what());
     }
     catch (...)
     {
-    LOGD("nativeStart catched unknown exception");
-    jclass je = jenv->FindClass("java/lang/Exception");
-    jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
+       LOGD("nativeStart catched unknown exception");
+       jclass je = jenv->FindClass("java/lang/Exception");
+       jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
     }
 }
 
@@ -103,21 +127,21 @@ JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeSt
 {
     try
     {
-    ((DetectionBasedTracker*)thiz)->stop();
+       ((DetectionBasedTracker*)thiz)->stop();
     }
     catch(cv::Exception e)
     {
-    LOGD("nativeStop catched cv::Exception: %s", e.what());
-    jclass je = jenv->FindClass("org/opencv/core/CvException");
-    if(!je)
-        je = jenv->FindClass("java/lang/Exception");
-    jenv->ThrowNew(je, e.what());
+       LOGD("nativeStop catched cv::Exception: %s", e.what());
+       jclass je = jenv->FindClass("org/opencv/core/CvException");
+       if(!je)
+           je = jenv->FindClass("java/lang/Exception");
+       jenv->ThrowNew(je, e.what());
     }
     catch (...)
     {
-    LOGD("nativeStop catched unknown exception");
-    jclass je = jenv->FindClass("java/lang/Exception");
-    jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
+       LOGD("nativeStop catched unknown exception");
+       jclass je = jenv->FindClass("java/lang/Exception");
+       jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
     }
 }
 
@@ -126,28 +150,27 @@ JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeSe
 {
     try
     {
-    if (faceSize > 0)
-    {
-        DetectionBasedTracker::Parameters DetectorParams = \
-        ((DetectionBasedTracker*)thiz)->getParameters();
-        DetectorParams.minObjectSize = faceSize;
-        ((DetectionBasedTracker*)thiz)->setParameters(DetectorParams);
-    }
-
+       if (faceSize > 0)
+       {
+       // TODO: Reimplement using adapter
+//        DetectionBasedTracker::Parameters DetectorParams = ((DetectionBasedTracker*)thiz)->getParameters();
+//        DetectorParams.minObjectSize = faceSize;
+//        ((DetectionBasedTracker*)thiz)->setParameters(DetectorParams);
+       }
     }
     catch(cv::Exception e)
     {
-    LOGD("nativeStop catched cv::Exception: %s", e.what());
-    jclass je = jenv->FindClass("org/opencv/core/CvException");
-    if(!je)
-        je = jenv->FindClass("java/lang/Exception");
-    jenv->ThrowNew(je, e.what());
+       LOGD("nativeStop catched cv::Exception: %s", e.what());
+       jclass je = jenv->FindClass("org/opencv/core/CvException");
+       if(!je)
+           je = jenv->FindClass("java/lang/Exception");
+       jenv->ThrowNew(je, e.what());
     }
     catch (...)
     {
-    LOGD("nativeSetFaceSize catched unknown exception");
-    jclass je = jenv->FindClass("java/lang/Exception");
-    jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
+       LOGD("nativeSetFaceSize catched unknown exception");
+       jclass je = jenv->FindClass("java/lang/Exception");
+       jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
     }
 }
 
@@ -157,23 +180,23 @@ JNIEXPORT void JNICALL Java_org_opencv_samples_fd_DetectionBasedTracker_nativeDe
 {
     try
     {
-    vector<Rect> RectFaces;
-    ((DetectionBasedTracker*)thiz)->process(*((Mat*)imageGray));
-    ((DetectionBasedTracker*)thiz)->getObjects(RectFaces);
-    vector_Rect_to_Mat(RectFaces, *((Mat*)faces));
+       vector<Rect> RectFaces;
+       ((DetectionBasedTracker*)thiz)->process(*((Mat*)imageGray));
+       ((DetectionBasedTracker*)thiz)->getObjects(RectFaces);
+       *((Mat*)faces) = Mat(RectFaces, true);
     }
     catch(cv::Exception e)
     {
-    LOGD("nativeCreateObject catched cv::Exception: %s", e.what());
-    jclass je = jenv->FindClass("org/opencv/core/CvException");
-    if(!je)
-        je = jenv->FindClass("java/lang/Exception");
-    jenv->ThrowNew(je, e.what());
+       LOGD("nativeCreateObject catched cv::Exception: %s", e.what());
+       jclass je = jenv->FindClass("org/opencv/core/CvException");
+       if(!je)
+           je = jenv->FindClass("java/lang/Exception");
+       jenv->ThrowNew(je, e.what());
     }
     catch (...)
     {
-    LOGD("nativeDetect catched unknown exception");
-    jclass je = jenv->FindClass("java/lang/Exception");
-    jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
+       LOGD("nativeDetect catched unknown exception");
+       jclass je = jenv->FindClass("java/lang/Exception");
+       jenv->ThrowNew(je, "Unknown exception in JNI code {highgui::VideoCapture_n_1VideoCapture__()}");
     }
-}
\ No newline at end of file
+}
diff --git a/samples/android/face-detection/src/org/opencv/samples/fd/DetectionBasedTracker.java b/samples/android/face-detection/src/org/opencv/samples/fd/DetectionBasedTracker.java
index 04350ca..0f76043 100644
@@ -7,7 +7,9 @@ public class DetectionBasedTracker
 {      
        public DetectionBasedTracker(String cascadeName, int minFaceSize)
        {
-               mNativeObj = nativeCreateObject(cascadeName, minFaceSize);
+               mMainDetector = nativeCreateDetector(cascadeName, minFaceSize);
+               mTrackingDetector = nativeCreateDetector(cascadeName, minFaceSize);
+               mNativeObj = nativeCreateTracker(mMainDetector, mTrackingDetector);
        }
        
        public void start()
@@ -22,7 +24,8 @@ public class DetectionBasedTracker
        
        public void setMinFaceSize(int size)
        {
-               nativeSetFaceSize(mNativeObj, size);
+               nativeSetFaceSize(mMainDetector, size);
+               nativeSetFaceSize(mTrackingDetector, size);
        }
        
        public void detect(Mat imageGray, MatOfRect faces)
@@ -32,17 +35,25 @@ public class DetectionBasedTracker
        
        public void release()
        {
-               nativeDestroyObject(mNativeObj);
+               nativeDestroyTracker(mNativeObj);
+               nativeDestroyDetector(mMainDetector);
+               nativeDestroyDetector(mTrackingDetector);
                mNativeObj = 0;
+               mMainDetector = 0;
+               mTrackingDetector = 0;
        }
        
        private long mNativeObj = 0;
+       private long mMainDetector = 0;
+       private long mTrackingDetector = 0;
        
-       private static native long nativeCreateObject(String cascadeName, int minFaceSize);
-       private static native void nativeDestroyObject(long thiz);
+       private static native long nativeCreateDetector(String cascadeName, int minFaceSize);
+       private static native long nativeCreateTracker(long mainDetector, long trackingDetector);
+       private static native void nativeDestroyTracker(long tracker);
+       private static native void nativeDestroyDetector(long detector);
        private static native void nativeStart(long thiz);
        private static native void nativeStop(long thiz);
-       private static native void nativeSetFaceSize(long thiz, int size);
+       private static native void nativeSetFaceSize(long detector, int size);
        private static native void nativeDetect(long thiz, long inputImage, long faces);
        
        static
diff --git a/samples/android/face-detection/src/org/opencv/samples/fd/FdActivity.java b/samples/android/face-detection/src/org/opencv/samples/fd/FdActivity.java
index 5494000..118bf55 100644
@@ -91,7 +91,7 @@ public class FdActivity extends Activity {
                        ad.setMessage("Fatal error: can't open camera!");  
                        ad.setButton("OK", new DialogInterface.OnClickListener() {  
                            public void onClick(DialogInterface dialog, int which) {  
-                               dialog.dismiss();                      
+                               dialog.dismiss();
                                        finish();
                            }  
                        });  
diff --git a/samples/cpp/CMakeLists.txt b/samples/cpp/CMakeLists.txt
index 3f4bb6a..a0e03fd 100644
@@ -3,7 +3,7 @@
 #
 # ----------------------------------------------------------------------------
 
-SET(OPENCV_CPP_SAMPLES_REQUIRED_DEPS opencv_core opencv_flann opencv_imgproc
+SET(OPENCV_CPP_SAMPLES_REQUIRED_DEPS opencv_core_vision_api opencv_core opencv_flann opencv_imgproc
     opencv_highgui opencv_ml opencv_video opencv_objdetect opencv_photo opencv_nonfree
     opencv_features2d opencv_calib3d opencv_legacy opencv_contrib opencv_stitching opencv_videostab)
 
diff --git a/samples/cpp/core_vision_tracking_image.cpp b/samples/cpp/core_vision_tracking_image.cpp
new file mode 100644
index 0000000..9de13ef
--- /dev/null
@@ -0,0 +1,55 @@
+#include <opencv2/core/core.hpp>        // Basic OpenCV structures (cv::Mat, Scalar)
+#include <opencv2/highgui/highgui.hpp>  // OpenCV window I/O
+#include <opencv2/core_vision_api/tracker.hpp>
+
+#include <stdio.h>
+#include <string>
+#include <vector>
+
+using namespace std;
+using namespace cv;
+
+const string WindowName = "Face Detection example";
+const Scalar RectColor = CV_RGB(0,255,0);
+
+int main()
+{
+    namedWindow(WindowName);
+    cv::moveWindow(WindowName, 100, 100);
+
+    Mat Viewport;
+    Mat ReferenceFrame = imread("board.jpg");
+    if (ReferenceFrame.empty())
+    {
+        printf("Error: Cannot load input image\n");
+        return 1;
+    }
+
+    cv::Ptr<nv::Tracker> tracker = nv::Algorithm::create<nv::Tracker>("nv::Tracker::OpticalFlow");
+
+    tracker->initialize();
+
+    // First frame for initialization
+    tracker->feed(ReferenceFrame);
+
+    nv::Tracker::TrackedObjectHandler obj = tracker->addObject(cv::Rect(100,100, 200, 200));
+
+    while(true)
+    {
+        tracker->feed(ReferenceFrame);
+
+        if (obj->getStatus() == nv::Tracker::LOST_STATUS)
+            break;
+
+        cv::Rect currentLocation = obj->getLocation();
+
+        ReferenceFrame.copyTo(Viewport);
+        rectangle(Viewport, currentLocation, RectColor);
+
+        imshow(WindowName, Viewport);
+
+        if (cvWaitKey(30) >= 0) break;
+    }
+
+    return 0;
+}
diff --git a/samples/cpp/dbt_face_detection.cpp b/samples/cpp/dbt_face_detection.cpp
new file mode 100644
index 0000000..7a19e30
--- /dev/null
@@ -0,0 +1,104 @@
+#if 0 //defined(__linux__) || defined(LINUX) || defined(__APPLE__) || defined(ANDROID)
+
+#include <opencv2/imgproc/imgproc.hpp>  // Gaussian Blur
+#include <opencv2/core/core.hpp>        // Basic OpenCV structures (cv::Mat, Scalar)
+#include <opencv2/highgui/highgui.hpp>  // OpenCV window I/O
+#include <opencv2/features2d/features2d.hpp>
+#include <opencv2/contrib/detection_based_tracker.hpp>
+
+#include <stdio.h>
+#include <string>
+#include <vector>
+
+using namespace std;
+using namespace cv;
+
+const string WindowName = "Face Detection example";
+
+class CascadeDetectorAdapter: public DetectionBasedTracker::IDetector
+{
+    public:
+        CascadeDetectorAdapter(cv::Ptr<cv::CascadeClassifier> detector):
+            IDetector(),
+            Detector(detector)
+        {
+            CV_Assert(!detector.empty());
+        }
+
+        void detect(const cv::Mat &Image, std::vector<cv::Rect> &objects)
+        {
+            Detector->detectMultiScale(Image, objects, scaleFactor, minNeighbours, 0, minObjSize, maxObjSize);
+        }
+
+        virtual ~CascadeDetectorAdapter()
+        {}
+
+    private:
+        CascadeDetectorAdapter();
+        cv::Ptr<cv::CascadeClassifier> Detector;
+ };
+
+int main(int argc, char* argv[])
+{
+    namedWindow(WindowName);
+
+    VideoCapture VideoStream(0);
+
+    if (!VideoStream.isOpened())
+    {
+        printf("Error: Cannot open video stream from camera\n");
+        return 1;
+    }
+
+    std::string cascadeFrontalfilename = "../../data/lbpcascades/lbpcascade_frontalface.xml";
+    cv::Ptr<cv::CascadeClassifier> cascade = new cv::CascadeClassifier(cascadeFrontalfilename);
+    cv::Ptr<DetectionBasedTracker::IDetector> MainDetector = new CascadeDetectorAdapter(cascade);
+
+    cascade = new cv::CascadeClassifier(cascadeFrontalfilename);
+    cv::Ptr<DetectionBasedTracker::IDetector> TrackingDetector = new CascadeDetectorAdapter(cascade);
+
+    DetectionBasedTracker::Parameters params;
+    DetectionBasedTracker Detector(MainDetector, TrackingDetector, params);
+
+    if (!Detector.run())
+    {
+        printf("Error: Detector initialization failed\n");
+        return 2;
+    }
+
+    Mat ReferenceFrame;
+    Mat GrayFrame;
+    vector<Rect> Faces;
+
+    while(true)
+    {
+        VideoStream >> ReferenceFrame;
+        cvtColor(ReferenceFrame, GrayFrame, COLOR_RGB2GRAY);
+        Detector.process(GrayFrame);
+        Detector.getObjects(Faces);
+
+        for (size_t i = 0; i < Faces.size(); i++)
+        {
+            rectangle(ReferenceFrame, Faces[i], CV_RGB(0,255,0));
+        }
+
+        imshow(WindowName, ReferenceFrame);
+
+        if (cvWaitKey(30) >= 0) break;
+    }
+
+    Detector.stop();
+
+    return 0;
+}
+
+#else
+
+#include <stdio.h>
+int main()
+{
+    printf("This sample works for UNIX or ANDROID only\n");
+    return 0;
+}
+
+#endif
diff --git a/samples/cpp/detection_based_tracker_sample.cpp b/samples/cpp/detection_based_tracker_sample.cpp
index 6aeb1b2..61106f3 100644
@@ -43,8 +43,6 @@
 #define LOGE(...) do{} while(0)
 #endif
 
-
-
 using namespace cv;
 using namespace std;
 
@@ -63,9 +61,31 @@ static void usage()
     LOGE0("\t       (e.g.\"opencv/data/lbpcascades/lbpcascade_frontalface.xml\" ");
 }
 
+class CascadeDetectorAdapter: public DetectionBasedTracker::IDetector
+{
+    public:
+        CascadeDetectorAdapter(cv::Ptr<cv::CascadeClassifier> detector):
+            Detector(detector)
+        {
+            CV_Assert(!detector.empty());
+        }
+
+        void detect(const cv::Mat &Image, std::vector<cv::Rect> &objects)
+        {
+            Detector->detectMultiScale(Image, objects, 1.1, 3, 0, minObjSize, maxObjSize);
+        }
+        virtual ~CascadeDetectorAdapter()
+        {}
+
+    private:
+        CascadeDetectorAdapter();
+        cv::Ptr<cv::CascadeClassifier> Detector;
+ };
+
 static int test_FaceDetector(int argc, char *argv[])
 {
-    if (argc < 4) {
+    if (argc < 4)
+    {
         usage();
         return -1;
     }
@@ -80,12 +100,14 @@ static int test_FaceDetector(int argc, char *argv[])
     vector<Mat> images;
     {
         char filename[256];
-        for(int n=1; ; n++) {
+        for(int n=1; ; n++)
+        {
             snprintf(filename, sizeof(filename), filepattern, n);
             LOGD("filename='%s'", filename);
             Mat m0;
             m0=imread(filename);
-            if (m0.empty()) {
+            if (m0.empty())
+            {
                 LOGI0("Cannot read the file --- break");
                 break;
             }
@@ -94,10 +116,15 @@ static int test_FaceDetector(int argc, char *argv[])
         LOGD("read %d images", (int)images.size());
     }
 
-    DetectionBasedTracker::Parameters params;
     std::string cascadeFrontalfilename=cascadefile;
+    cv::Ptr<cv::CascadeClassifier> cascade = new cv::CascadeClassifier(cascadeFrontalfilename);
+    cv::Ptr<DetectionBasedTracker::IDetector> MainDetector = new CascadeDetectorAdapter(cascade);
+
+    cascade = new cv::CascadeClassifier(cascadeFrontalfilename);
+    cv::Ptr<DetectionBasedTracker::IDetector> TrackingDetector = new CascadeDetectorAdapter(cascade);
 
-    DetectionBasedTracker fd(cascadeFrontalfilename, params);
+    DetectionBasedTracker::Parameters params;
+    DetectionBasedTracker fd(MainDetector, TrackingDetector, params);
 
     fd.run();
 
@@ -108,12 +135,13 @@ static int test_FaceDetector(int argc, char *argv[])
     double freq=getTickFrequency();
 
     int num_images=images.size();
-    for(int n=1; n <= num_images; n++) {
+    for(int n=1; n <= num_images; n++)
+    {
         int64 tcur=getTickCount();
         int64 dt=tcur-tprev;
         tprev=tcur;
         double t_ms=((double)dt)/freq * 1000.0;
-        LOGD("\n\nSTEP n=%d        from prev step %f ms\n\n", n, t_ms);
+        LOGD("\n\nSTEP n=%d        from prev step %f ms\n", n, t_ms);
         m=images[n-1];
         CV_Assert(! m.empty());
         cvtColor(m, gray, CV_BGR2GRAY);
@@ -123,11 +151,8 @@ static int test_FaceDetector(int argc, char *argv[])
         vector<Rect> result;
         fd.getObjects(result);
 
-
-
-
-
-        for(size_t i=0; i < result.size(); i++) {
+        for(size_t i=0; i < result.size(); i++)
+        {
             Rect r=result[i];
             CV_Assert(r.area() > 0);
             Point tl=r.tl();
@@ -136,14 +161,14 @@ static int test_FaceDetector(int argc, char *argv[])
             rectangle(m, tl, br, color, 3);
         }
     }
+
+    char outfilename[256];
+    for(int n=1; n <= num_images; n++)
     {
-        char outfilename[256];
-        for(int n=1; n <= num_images; n++) {
-            snprintf(outfilename, sizeof(outfilename), outfilepattern, n);
-            LOGD("outfilename='%s'", outfilename);
-            m=images[n-1];
-            imwrite(outfilename, m);
-        }
+        snprintf(outfilename, sizeof(outfilename), outfilepattern, n);
+        LOGD("outfilename='%s'", outfilename);
+        m=images[n-1];
+        imwrite(outfilename, m);
     }
 
     fd.stop();
@@ -151,8 +176,6 @@ static int test_FaceDetector(int argc, char *argv[])
     return 0;
 }
 
-
-
 int main(int argc, char *argv[])
 {
     return test_FaceDetector(argc, argv);