Added the first version of DetectionBasedTracker: it runs a cascade detector in a background thread and tracks the detected objects between detections (header, implementation, and sample).
author     Leonid Beynenson <no@email>
           Wed, 16 Nov 2011 17:29:10 +0000 (17:29 +0000)
committer  Leonid Beynenson <no@email>
           Wed, 16 Nov 2011 17:29:10 +0000 (17:29 +0000)
modules/contrib/include/opencv2/contrib/detection_based_tracker.hpp [new file with mode: 0644]
modules/contrib/src/detection_based_tracker.cpp [new file with mode: 0644]
samples/cpp/detection_based_tracker_sample.cpp [new file with mode: 0644]

diff --git a/modules/contrib/include/opencv2/contrib/detection_based_tracker.hpp b/modules/contrib/include/opencv2/contrib/detection_based_tracker.hpp
new file mode 100644 (file)
index 0000000..4d7be59
--- /dev/null
@@ -0,0 +1,128 @@
+#pragma once
+
+#if defined(__linux__) || defined(LINUX) || defined(__APPLE__) || defined(ANDROID)
+
+#include <opencv2/core/core.hpp>
+#include <opencv2/objdetect/objdetect.hpp>
+
+#include <vector>
+
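+// DetectionBasedTracker runs a cascade detector on whole frames in a separate
+// thread and, between these full detections, keeps tracking the found objects
+// by re-detecting them in small regions around their last known positions.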
+class DetectionBasedTracker
+{
+    public:
+        struct Parameters
+        {
+            int minObjectSize;
+            int maxObjectSize;
+            double scaleFactor;
+            int maxTrackLifetime;
+            int minNeighbors;
+            int minDetectionPeriod; // the minimal time between runs of the big object detector (on the whole frame), in ms (1000 means 1 sec); default=0
+
+            Parameters();
+        };
+
+        DetectionBasedTracker(const std::string& cascadeFilename, const Parameters& params);
+        virtual ~DetectionBasedTracker();
+
+        virtual bool run();
+        virtual void stop();
+        virtual void resetTracking();
+
+        virtual void process(const cv::Mat& imageGray);
+
+        bool setParameters(const Parameters& params);
+        const Parameters& getParameters();
+
+        
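+        // An Object is the pair (bounding rectangle, track id).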
+        typedef std::pair<cv::Rect, int> Object;
+        virtual void getObjects(std::vector<cv::Rect>& result) const;
+        virtual void getObjects(std::vector<Object>& result) const;
+
+    protected:
+
+        struct InnerParameters
+        {
+            int numLastPositionsToTrack;
+            int numStepsToWaitBeforeFirstShow;
+            int numStepsToTrackWithoutDetectingIfObjectHasNotBeenShown;
+            int numStepsToShowWithoutDetecting;
+
+            float coeffTrackingWindowSize;
+            float coeffObjectSizeToTrack;
+            float coeffObjectSpeedUsingInPrediction;
+
+            InnerParameters();
+        };
+        struct TrackedObject
+        {
+            typedef std::vector<cv::Rect> PositionsVector;
+
+            PositionsVector lastPositions;
+
+            int numDetectedFrames;
+            int numFramesNotDetected;
+            int id;
+
+            TrackedObject(const cv::Rect& rect):numDetectedFrames(1), numFramesNotDetected(0)
+            {
+                lastPositions.push_back(rect);
+                id=getNextId();
+            }
+
+            static int getNextId()
+            {
+                static int _id=0;
+                return _id++;
+            }
+        };
+
+        Parameters parameters;
+        InnerParameters innerParameters;
+
+        int numTrackedSteps;
+        std::vector<TrackedObject> trackedObjects;
+
+        std::vector<float> weightsPositionsSmoothing;
+        std::vector<float> weightsSizesSmoothing;
+
+        cv::CascadeClassifier cascadeInThread;
+        cv::CascadeClassifier cascadeForTracking;
+
+        cv::Mat imageSeparateDetecting;
+
+        void workcycleObjectDetector();
+        friend void* workcycleObjectDetectorFunction(void* p);
+
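+        // The detection thread and the primitives used to synchronize with it.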
+        pthread_t second_workthread;
+        pthread_mutex_t mutex;
+        pthread_cond_t objectDetectorRun;
+        pthread_cond_t objectDetectorThreadStartStop;
+
+        std::vector<cv::Rect> resultDetect;
+        volatile bool isObjectDetectingReady;
+        volatile bool shouldObjectDetectingResultsBeForgot;
+
+        enum StateSeparatedThread {
+            STATE_THREAD_STOPPED=0,
+            STATE_THREAD_WORKING,
+            STATE_THREAD_STOPPING
+        };
+        volatile StateSeparatedThread stateThread;
+
+        enum StateSeparatedObjectDetector{
+            OBJECT_DETECTOR_NOT_STARTED,
+            OBJECT_DETECTOR_WAITING_IMAGE,
+            OBJECT_DETECTOR_HANDLING_IMAGE
+        };
+        volatile StateSeparatedObjectDetector stateSeparatedObjectDetector;
+
+        void updateTrackedObjects(const std::vector<cv::Rect>& detectedObjects);
+
+        cv::Rect calcTrackedObjectPositionToShow(int i) const;
+
+        void detectInRegion(const cv::Mat& img, const cv::Rect& r, std::vector<cv::Rect>& detectedObjectsInRegions);
+};
+
+#endif
+
diff --git a/modules/contrib/src/detection_based_tracker.cpp b/modules/contrib/src/detection_based_tracker.cpp
new file mode 100644 (file)
index 0000000..146975a
--- /dev/null
@@ -0,0 +1,748 @@
+#if defined(__linux__) || defined(LINUX) || defined(__APPLE__) || defined(ANDROID)
+#include "opencv2/contrib/detection_based_tracker.hpp"
+
+#define DEBUGLOGS 1
+
+#ifdef ANDROID
+#include <android/log.h>
+#define LOG_TAG "OBJECT_DETECTOR"
+#define LOGD0(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__))
+#define LOGI0(...) ((void)__android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__))
+#define LOGW0(...) ((void)__android_log_print(ANDROID_LOG_WARN, LOG_TAG, __VA_ARGS__))
+#define LOGE0(...) ((void)__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__))
+#else
+
+#include <stdio.h>
+
+#define LOGD0(_str, ...) do{printf(_str , ## __VA_ARGS__); printf("\n");fflush(stdout);} while(0)
+#define LOGI0(_str, ...) do{printf(_str , ## __VA_ARGS__); printf("\n");fflush(stdout);} while(0)
+#define LOGW0(_str, ...) do{printf(_str , ## __VA_ARGS__); printf("\n");fflush(stdout);} while(0)
+#define LOGE0(_str, ...) do{printf(_str , ## __VA_ARGS__); printf("\n");fflush(stdout);} while(0)
+#endif
+
+#if DEBUGLOGS
+#define LOGD(_str, ...) LOGD0(_str , ## __VA_ARGS__)
+#define LOGI(_str, ...) LOGI0(_str , ## __VA_ARGS__)
+#define LOGW(_str, ...) LOGW0(_str , ## __VA_ARGS__)
+#define LOGE(_str, ...) LOGE0(_str , ## __VA_ARGS__)
+#else 
+#define LOGD(...) do{} while(0)
+#define LOGI(...) do{} while(0)
+#define LOGW(...) do{} while(0)
+#define LOGE(...) do{} while(0)
+#endif
+
+
+using namespace cv;
+using namespace std;
+
+static inline cv::Point2f centerRect(const cv::Rect& r)
+{
+    return cv::Point2f(r.x+((float)r.width)/2, r.y+((float)r.height)/2);
+}
+static inline cv::Rect scale_rect(const cv::Rect& r, float scale)
+{
+    cv::Point2f m=centerRect(r);
+    float width  = r.width  * scale;
+    float height = r.height * scale;
+    int x=cvRound(m.x - width/2);
+    int y=cvRound(m.y - height/2);
+
+    return cv::Rect(x, y, cvRound(width), cvRound(height));
+}
+
+DetectionBasedTracker::Parameters::Parameters()
+{
+    minObjectSize=96;
+    maxObjectSize=INT_MAX;
+    scaleFactor=1.1;
+    maxTrackLifetime=5;
+    minNeighbors=2;
+    minDetectionPeriod=0;
+}
+
+
+
+DetectionBasedTracker::InnerParameters::InnerParameters()
+{
+    numLastPositionsToTrack=4;
+    numStepsToWaitBeforeFirstShow=6;
+    numStepsToTrackWithoutDetectingIfObjectHasNotBeenShown=3;
+    numStepsToShowWithoutDetecting=3;
+
+    coeffTrackingWindowSize=2.0;
+    coeffObjectSizeToTrack=0.85;
+    coeffObjectSpeedUsingInPrediction=0.8;
+
+}
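+
+// The constructor loads the cascade twice (one copy for the detection thread,
+// one for tracking in the caller's thread) and initializes the pthread
+// primitives; the detection thread itself is started later by run().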
+DetectionBasedTracker::DetectionBasedTracker(const std::string& cascadeFilename, const Parameters& params)
+    :innerParameters(),
+    numTrackedSteps(0),
+    cascadeInThread(cascadeFilename),
+    cascadeForTracking(cascadeFilename),
+    isObjectDetectingReady(false),
+    shouldObjectDetectingResultsBeForgot(false),
+    stateThread(STATE_THREAD_STOPPED),
+    stateSeparatedObjectDetector(OBJECT_DETECTOR_NOT_STARTED)
+{
+    CV_Assert( (params.minObjectSize > 0)
+            && (params.maxObjectSize >= 0)
+            && (params.scaleFactor > 1.0)
+            && (params.maxTrackLifetime >= 0) );
+
+    parameters=params;
+
+    weightsPositionsSmoothing.push_back(1);
+    weightsSizesSmoothing.push_back(0.5);
+    weightsSizesSmoothing.push_back(0.3);
+    weightsSizesSmoothing.push_back(0.2);
+
+    int res=0;
+    res=pthread_mutex_init(&mutex, NULL);//TODO: should be attributes?
+    if (res) {
+        LOGE("ERROR in DetectionBasedTracker::DetectionBasedTracker in pthread_mutex_init(&mutex, NULL) is %d", res);
+        throw(std::exception());
+    }
+    res=pthread_cond_init (&objectDetectorRun, NULL);
+    if (res) {
+        LOGE("ERROR in DetectionBasedTracker::DetectionBasedTracker in pthread_cond_init(&objectDetectorRun,, NULL) is %d", res);
+        pthread_mutex_destroy(&mutex);
+        throw(std::exception());
+    }
+    res=pthread_cond_init (&objectDetectorThreadStartStop, NULL);
+    if (res) {
+        LOGE("ERROR in DetectionBasedTracker::DetectionBasedTracker in pthread_cond_init(&objectDetectorThreadStartStop,, NULL) is %d", res);
+        pthread_cond_destroy(&objectDetectorRun);
+        pthread_mutex_destroy(&mutex);
+        throw(std::exception());
+    }
+}
+DetectionBasedTracker::~DetectionBasedTracker()
+{
+    if(stateThread!=STATE_THREAD_STOPPED) {
+        LOGE("\n\n\nATTENTION!!! dangerous algorithm error: destructor DetectionBasedTracker::~DetectionBasedTracker is called before stopping the workthread");
+    }
+
+    pthread_cond_destroy(&objectDetectorThreadStartStop);
+    pthread_cond_destroy(&objectDetectorRun);
+    pthread_mutex_destroy(&mutex);
+}
+
+
+
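+// process() is called by the client for every new grayscale frame: when the
+// detection thread is waiting for an image, it collects the thread's results
+// (if ready) and passes it a copy of the current frame; then it re-detects the
+// tracked objects in small regions of the frame and updates the track list.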
+void DetectionBasedTracker::process(const Mat& imageGray)
+{
+    static long long  timeWhenDetectingThreadStartedWork=-1;
+
+    CV_Assert(imageGray.type()==CV_8UC1);
+
+    if (stateThread == STATE_THREAD_STOPPED) {
+        run();
+    }
+
+    static double freq = getTickFrequency();
+    static long long time_when_last_call_started=getTickCount();
+
+    {
+        double delta_time_from_prev_call=1000.0 * (((double)(getTickCount()  - time_when_last_call_started)) / freq);
+        LOGD("DetectionBasedTracker::process: time from the previous call is %f ms", (double)delta_time_from_prev_call);
+        time_when_last_call_started=getTickCount();
+    }
+
+    Mat imageDetect=imageGray;
+
+    Size sz=imageDetect.size();
+    int D=parameters.minObjectSize;
+    if (D < 1) 
+        D=1;
+    Size objectSize=Size(D,D);
+
+
+    bool shouldHandleResult=false;
+
+    vector<Rect> rectsWhereRegions;
+
+
+    bool shouldCommunicateWithDetectingThread=(stateSeparatedObjectDetector==OBJECT_DETECTOR_WAITING_IMAGE);
+    LOGD("DetectionBasedTracker::process: shouldCommunicateWithDetectingThread=%d", (shouldCommunicateWithDetectingThread?1:0));
+
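+    // If the detection thread is waiting for an image, grab its results (when
+    // they are ready) and, provided at least minDetectionPeriod ms have passed
+    // since its last launch, give it a copy of the current frame and wake it up.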
+    if (shouldCommunicateWithDetectingThread) {
+        pthread_mutex_lock(&mutex);
+
+        if (isObjectDetectingReady) {
+            shouldHandleResult=true;
+            rectsWhereRegions=resultDetect;
+            isObjectDetectingReady=false;
+
+            double lastBigDetectionDuration=1000.0 * (((double)(getTickCount()  - timeWhenDetectingThreadStartedWork )) / freq);
+            LOGD("DetectionBasedTracker::process: lastBigDetectionDuration=%f ms", (double)lastBigDetectionDuration);
+        }
+
+        bool shouldSendNewDataToWorkThread=true;
+        if (timeWhenDetectingThreadStartedWork > 0) {
+            double time_from_previous_launch_in_ms=1000.0 * (((double)(getTickCount()  - timeWhenDetectingThreadStartedWork )) / freq); //the same formula as for lastBigDetectionDuration
+            shouldSendNewDataToWorkThread = (time_from_previous_launch_in_ms >= parameters.minDetectionPeriod);
+            LOGD("DetectionBasedTracker::process: shouldSendNewDataToWorkThread was 1, now it is %d, since time_from_previous_launch_in_ms=%.2f, minDetectionPeriod=%d", 
+                    (shouldSendNewDataToWorkThread?1:0), time_from_previous_launch_in_ms, parameters.minDetectionPeriod);
+        }
+
+        if (shouldSendNewDataToWorkThread) {
+            
+            imageSeparateDetecting.create(imageGray.size(), CV_8UC1);
+            {
+                uchar* databefore=imageSeparateDetecting.data; //for debugging only
+
+                imageGray.copyTo(imageSeparateDetecting); // copyTo() could reallocate imageSeparateDetecting, but it should not, since create() above ensured the proper size and type
+
+                uchar* dataafter=imageSeparateDetecting.data; //for debugging only
+                CV_Assert(databefore == dataafter); // for debugging only
+            }
+
+            timeWhenDetectingThreadStartedWork = getTickCount() ;
+
+            pthread_cond_signal(&objectDetectorRun);
+        }
+
+        pthread_mutex_unlock(&mutex);
+    }
+
+    if (shouldHandleResult) {
+        LOGD("DetectionBasedTracker::process: get _rectsWhereRegions were got from resultDetect");
+    } else {
+        LOGD("DetectionBasedTracker::process: get _rectsWhereRegions from previous positions");
+        for(size_t i=0; i < trackedObjects.size(); i++) {
+            int n=trackedObjects[i].lastPositions.size();
+            CV_Assert(n > 0);
+
+            Rect r=trackedObjects[i].lastPositions[n-1];
+            if(r.area()==0) {
+                LOGE("DetectionBasedTracker::process: ERROR: ATTENTION: strange algorithm's behavior: trackedObjects[i].rect() is empty");
+                continue;
+            }
+
+
+            //correction by speed of rectangle
+            if (n > 1) {
+                Point2f center=centerRect(r);
+                Point2f center_prev=centerRect(trackedObjects[i].lastPositions[n-2]);
+                Point2f shift=(center - center_prev) * innerParameters.coeffObjectSpeedUsingInPrediction;
+
+                r.x+=cvRound(shift.x);
+                r.y+=cvRound(shift.y);
+            }
+
+
+            rectsWhereRegions.push_back(r);
+        }
+    }
+    LOGI("DetectionBasedTracker::process: tracked objects num==%d", (int)trackedObjects.size());
+
+    vector<Rect> detectedObjectsInRegions;
+
+    LOGD("DetectionBasedTracker::process: rectsWhereRegions.size()=%d", (int)rectsWhereRegions.size());
+    for(size_t i=0; i < rectsWhereRegions.size(); i++) {
+        Rect r=rectsWhereRegions[i];
+
+        detectInRegion(imageDetect, r, detectedObjectsInRegions);
+    }
+    LOGD("DetectionBasedTracker::process: detectedObjectsInRegions.size()=%d", (int)detectedObjectsInRegions.size());
+
+    updateTrackedObjects(detectedObjectsInRegions);
+}
+
+void DetectionBasedTracker::getObjects(std::vector<cv::Rect>& result) const
+{
+    result.clear();
+
+    for(size_t i=0; i < trackedObjects.size(); i++) {
+        Rect r=calcTrackedObjectPositionToShow(i);
+        if (r.area()==0) {
+            continue;
+        }
+        result.push_back(r);
+        LOGD("DetectionBasedTracker::process: found a object with SIZE %d x %d, rect={%d, %d, %d x %d}", r.width, r.height, r.x, r.y, r.width, r.height);
+    }
+}
+void DetectionBasedTracker::getObjects(std::vector<Object>& result) const
+{
+    result.clear();
+
+    for(size_t i=0; i < trackedObjects.size(); i++) {
+        Rect r=calcTrackedObjectPositionToShow(i);
+        if (r.area()==0) {
+            continue;
+        }
+        result.push_back(Object(r, trackedObjects[i].id));
+        LOGD("DetectionBasedTracker::process: found a object with SIZE %d x %d, rect={%d, %d, %d x %d}", r.width, r.height, r.x, r.y, r.width, r.height);
+    }
+}
+
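+// Runs _block repeatedly until it finishes without throwing; any exception is
+// logged and the block is retried.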
+#ifdef __GNUC__
+#define CATCH_ALL_AND_REPEAT(_block)                                                        \
+while(true) {                                                                               \
+    try {                                                                                   \
+        _block;                                                                             \
+        break;                                                                              \
+    }                                                                                       \
+    catch(cv::Exception& e) {                                                               \
+        LOGE("\n %s: ERROR: OpenCV Exception caught: \n'%s'\n\n", __func__, e.what());      \
+    } catch(std::exception& e) {                                                            \
+        LOGE("\n %s: ERROR: Exception caught: \n'%s'\n\n", __func__, e.what());             \
+    } catch(...) {                                                                          \
+        LOGE("\n %s: ERROR: UNKNOWN Exception caught\n\n", __func__);                       \
+    }                                                                                       \
+}
+#else
+#define CATCH_ALL_AND_REPEAT(_block)                                                        \
+while(true) {                                                                               \
+    try {                                                                                   \
+        _block;                                                                             \
+        break;                                                                              \
+    }                                                                                       \
+    catch(cv::Exception& e) {                                                               \
+        LOGE("\n ERROR: OpenCV Exception caught: \n'%s'\n\n", e.what());                    \
+    } catch(std::exception& e) {                                                            \
+        LOGE("\n ERROR: Exception caught: \n'%s'\n\n", e.what());                           \
+    } catch(...) {                                                                          \
+        LOGE("\n ERROR: UNKNOWN Exception caught\n\n");                                     \
+    }                                                                                       \
+}
+#endif
+
+void* workcycleObjectDetectorFunction(void* p)
+{
+    CATCH_ALL_AND_REPEAT({ ((DetectionBasedTracker*)p)->workcycleObjectDetector(); });
+    return NULL;
+}
+
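+// run() starts the detection thread and waits until that thread signals that
+// it has started; returns false if it is already running.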
+bool DetectionBasedTracker::run()
+{
+    LOGD("DetectionBasedTracker::run() --- start");
+    pthread_mutex_lock(&mutex);
+    if (stateThread != STATE_THREAD_STOPPED) {
+        LOGE("DetectionBasedTracker::run is called while the previous run is not stopped");
+        pthread_mutex_unlock(&mutex);
+        return false;
+    }
+    stateThread=STATE_THREAD_WORKING;
+    pthread_create(&second_workthread, NULL, workcycleObjectDetectorFunction, (void*)this); //TODO: add attributes?
+    pthread_cond_wait(&objectDetectorThreadStartStop, &mutex);
+    pthread_mutex_unlock(&mutex);
+    LOGD("DetectionBasedTracker::run() --- end");
+    return true;
+}
+
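+// Body of the detection thread: it signals that it has started, then in a loop
+// waits until process() provides a frame, runs the cascade on the whole frame,
+// and publishes the detections in resultDetect under the mutex.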
+void DetectionBasedTracker::workcycleObjectDetector()
+{
+    static double freq = getTickFrequency();
+    LOGD0("DetectionBasedTracker::workcycleObjectDetector() --- start");
+    vector<Rect> objects;
+
+    pthread_mutex_lock(&mutex); 
+    {
+        pthread_cond_signal(&objectDetectorThreadStartStop);
+
+        LOGD("DetectionBasedTracker::workcycleObjectDetector() --- before waiting");
+        stateSeparatedObjectDetector=OBJECT_DETECTOR_WAITING_IMAGE;
+        pthread_cond_wait(&objectDetectorRun, &mutex);
+        stateSeparatedObjectDetector=OBJECT_DETECTOR_HANDLING_IMAGE;
+        LOGD("DetectionBasedTracker::workcycleObjectDetector() --- after waiting");
+    }
+    pthread_mutex_unlock(&mutex);
+
+    bool isFirstStep=true;
+
+    isObjectDetectingReady=false;
+
+    while(stateThread == STATE_THREAD_WORKING)
+    {
+        LOGD("DetectionBasedTracker::workcycleObjectDetector() --- next step");
+
+        if (! isFirstStep) {
+            LOGD("DetectionBasedTracker::workcycleObjectDetector() --- before waiting");
+            pthread_mutex_lock(&mutex);
+            stateSeparatedObjectDetector=OBJECT_DETECTOR_WAITING_IMAGE;
+            pthread_cond_wait(&objectDetectorRun, &mutex);
+            stateSeparatedObjectDetector=OBJECT_DETECTOR_HANDLING_IMAGE;
+            pthread_mutex_unlock(&mutex);
+            LOGD("DetectionBasedTracker::workcycleObjectDetector() --- after waiting");
+        } else {
+            isFirstStep=false;
+        }
+
+        if (stateThread != STATE_THREAD_WORKING) {
+            LOGD("DetectionBasedTracker::workcycleObjectDetector() --- go out from the workcycle just after waiting");
+            break;
+        }
+
+
+        if (imageSeparateDetecting.empty()) {
+            LOGD("DetectionBasedTracker::workcycleObjectDetector() --- imageSeparateDetecting is empty, continue");
+            continue;
+        }
+        LOGD("DetectionBasedTracker::workcycleObjectDetector() --- start handling imageSeparateDetecting, img.size=%dx%d, img.data=0x%p", 
+                imageSeparateDetecting.size().width, imageSeparateDetecting.size().height, (void*)imageSeparateDetecting.data);
+
+
+        int64 t1_detect=getTickCount();
+
+        int minObjectSize=parameters.minObjectSize;
+        Size min_objectSize=Size(minObjectSize, minObjectSize);
+
+        int maxObjectSize=parameters.maxObjectSize;
+        Size max_objectSize(maxObjectSize, maxObjectSize);
+
+
+        cascadeInThread.detectMultiScale( imageSeparateDetecting, objects,
+                parameters.scaleFactor, parameters.minNeighbors, 0
+                |CV_HAAR_SCALE_IMAGE
+                ,
+                min_objectSize,
+                max_objectSize
+                );
+        LOGD("DetectionBasedTracker::workcycleObjectDetector() --- end handling imageSeparateDetecting");
+
+        if (stateThread != STATE_THREAD_WORKING) {
+            LOGD("DetectionBasedTracker::workcycleObjectDetector() --- go out from the workcycle just after detecting");
+            break;
+        }
+
+        int64 t2_detect=getTickCount();
+        int64 dt_detect=t2_detect-t1_detect;
+        double dt_detect_ms=((double)dt_detect)/freq * 1000.0;
+
+        LOGI("DetectionBasedTracker::workcycleObjectDetector() --- objects num==%d, t_ms=%.4f", (int)objects.size(), dt_detect_ms);
+
+        pthread_mutex_lock(&mutex);
+        if (!shouldObjectDetectingResultsBeForgot) {
+            resultDetect=objects;
+            isObjectDetectingReady=true;
+        } else { //shouldObjectDetectingResultsBeForgot==true
+            resultDetect.clear();
+            isObjectDetectingReady=false;
+            shouldObjectDetectingResultsBeForgot=false;
+        }
+        pthread_mutex_unlock(&mutex);
+
+        objects.clear();
+    }// while(stateThread == STATE_THREAD_WORKING)
+
+
+    pthread_mutex_lock(&mutex);
+
+    if (stateThread == STATE_THREAD_STOPPING) {
+        stateThread=STATE_THREAD_STOPPED;
+    }
+    stateSeparatedObjectDetector=OBJECT_DETECTOR_NOT_STARTED;
+    isObjectDetectingReady=false;
+    shouldObjectDetectingResultsBeForgot=false;
+
+    pthread_cond_signal(&objectDetectorThreadStartStop);
+
+    pthread_mutex_unlock(&mutex);
+
+    LOGI("DetectionBasedTracker::workcycleObjectDetector: Returning");
+}
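+
+// stop() asks the detection thread to finish, wakes it up and waits until the
+// thread confirms that it has stopped.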
+void DetectionBasedTracker::stop()
+{
+    //FIXME: TODO: should add quickStop functionality
+    pthread_mutex_lock(&mutex);
+    if (stateThread != STATE_THREAD_WORKING) {
+        pthread_mutex_unlock(&mutex);
+        LOGE("SimpleHighguiDemoCore::stop is called but the SimpleHighguiDemoCore pthread is not active");
+        return;
+    }
+    stateThread=STATE_THREAD_STOPPING;
+    LOGD("DetectionBasedTracker::stop: before going to sleep to wait for the signal from the workthread");
+    pthread_cond_signal(&objectDetectorRun);
+    pthread_cond_wait(&objectDetectorThreadStartStop, &mutex);
+    LOGD("DetectionBasedTracker::stop: after receiving the signal from the workthread, stateThread=%d", (int)stateThread);
+    pthread_mutex_unlock(&mutex);
+}
+
+void DetectionBasedTracker::resetTracking()
+{
+    LOGD("DetectionBasedTracker::resetTracking");
+    pthread_mutex_lock(&mutex);
+
+    if ((stateThread == STATE_THREAD_WORKING) && (stateSeparatedObjectDetector == OBJECT_DETECTOR_HANDLING_IMAGE)) {
+        LOGD("DetectionBasedTracker::resetTracking: since workthread is detecting objects at the moment, we should make cascadeInThread stop detecting and forget the detecting results");
+        shouldObjectDetectingResultsBeForgot=true;
+        //cascadeInThread.setStopFlag();//FIXME: TODO: this feature also should be contributed to OpenCV
+    } else {
+        LOGD("DetectionBasedTracker::resetTracking: since workthread is NOT detecting objects at the moment, we should NOT make any additional actions");
+    }
+
+    resultDetect.clear();
+    trackedObjects.clear();
+    isObjectDetectingReady=false;
+
+
+    pthread_mutex_unlock(&mutex);
+    
+}
+
+void DetectionBasedTracker::updateTrackedObjects(const vector<Rect>& detectedObjects)
+{
+    enum {
+        NEW_RECTANGLE=-1,
+        INTERSECTED_RECTANGLE=-2
+    };
+
+    int N1=trackedObjects.size();
+    int N2=detectedObjects.size();
+    LOGD("DetectionBasedTracker::updateTrackedObjects: N1=%d, N2=%d", N1, N2);
+
+    for(int i=0; i < N1; i++) {
+        trackedObjects[i].numDetectedFrames++;
+    }
+
+    vector<int> correspondence(detectedObjects.size(), NEW_RECTANGLE);
+
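+    // First pass: for every tracked object, pick the detected rectangle whose
+    // intersection with the object's last position has the largest area; the
+    // other detections overlapping that position (or the chosen detection) are
+    // marked as intersected so that they do not start new tracks.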
+    for(int i=0; i < N1; i++) {
+        LOGD("DetectionBasedTracker::updateTrackedObjects: i=%d", i);
+        TrackedObject& curObject=trackedObjects[i];
+
+        int bestIndex=-1;
+        int bestArea=-1;
+
+        int numpositions=curObject.lastPositions.size();
+        CV_Assert(numpositions > 0);
+        Rect prevRect=curObject.lastPositions[numpositions-1];
+        LOGD("DetectionBasedTracker::updateTrackedObjects: prevRect[%d]={%d, %d, %d x %d}", i, prevRect.x, prevRect.y, prevRect.width, prevRect.height);
+
+        for(int j=0; j < N2; j++) {
+            LOGD("DetectionBasedTracker::updateTrackedObjects: j=%d", j);
+            if (correspondence[j] >= 0) {
+                LOGD("DetectionBasedTracker::updateTrackedObjects: j=%d is rejected, because it has correspondence=%d", j, correspondence[j]);
+                continue;
+            }
+            if (correspondence[j] !=NEW_RECTANGLE) {
+                LOGD("DetectionBasedTracker::updateTrackedObjects: j=%d is rejected, because it is intersected with another rectangle", j);
+                continue;
+            }
+            LOGD("DetectionBasedTracker::updateTrackedObjects: detectedObjects[%d]={%d, %d, %d x %d}", 
+                    j, detectedObjects[j].x, detectedObjects[j].y, detectedObjects[j].width, detectedObjects[j].height);
+
+            Rect r=prevRect & detectedObjects[j];
+            if ( (r.width > 0) && (r.height > 0) ) {
+                LOGD("DetectionBasedTracker::updateTrackedObjects: There is intersection between prevRect and detectedRect, r={%d, %d, %d x %d}",
+                        r.x, r.y, r.width, r.height);
+                correspondence[j]=INTERSECTED_RECTANGLE;
+
+                if ( r.area() > bestArea) {
+                    LOGD("DetectionBasedTracker::updateTrackedObjects: The area of intersection is %d, it is better than bestArea=%d", r.area(), bestArea);
+                    bestIndex=j;
+                    bestArea=r.area();
+                }
+            }
+        }
+        if (bestIndex >= 0) {
+            LOGD("DetectionBasedTracker::updateTrackedObjects: The best correspondence for i=%d is j=%d", i, bestIndex);
+            correspondence[bestIndex]=i;
+
+            for(int j=0; j < N2; j++) {
+                if (correspondence[j] >= 0)
+                    continue;
+
+                Rect r=detectedObjects[j] & detectedObjects[bestIndex];
+                if ( (r.width > 0) && (r.height > 0) ) {
+                    LOGD("DetectionBasedTracker::updateTrackedObjects: Found intersection between "
+                            "rectangles j=%d and bestIndex=%d, rectangle j=%d is marked as intersected", j, bestIndex, j);
+                    correspondence[j]=INTERSECTED_RECTANGLE;
+                }
+            }
+        } else {
+            LOGD("DetectionBasedTracker::updateTrackedObjects: There is no correspondence for i=%d ", i);
+            curObject.numFramesNotDetected++;
+        }
+    }
+
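+    // Second pass: append matched detections to their tracks, start a new track
+    // for every detection that did not intersect anything, then drop the tracks
+    // that have not been re-detected for too long.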
+    LOGD("DetectionBasedTracker::updateTrackedObjects: start second cycle");
+    for(int j=0; j < N2; j++) {
+        LOGD("DetectionBasedTracker::updateTrackedObjects: j=%d", j);
+        int i=correspondence[j];
+        if (i >= 0) {//add position
+            LOGD("DetectionBasedTracker::updateTrackedObjects: add position");
+            trackedObjects[i].lastPositions.push_back(detectedObjects[j]);
+            while ((int)trackedObjects[i].lastPositions.size() > (int) innerParameters.numLastPositionsToTrack) {
+                trackedObjects[i].lastPositions.erase(trackedObjects[i].lastPositions.begin());
+            }
+            trackedObjects[i].numFramesNotDetected=0;
+        } else if (i==NEW_RECTANGLE){ //new object
+            LOGD("DetectionBasedTracker::updateTrackedObjects: new object");
+            trackedObjects.push_back(detectedObjects[j]);
+        } else {
+            LOGD("DetectionBasedTracker::updateTrackedObjects: was auxiliary intersection");
+        }
+    }
+
+    std::vector<TrackedObject>::iterator it=trackedObjects.begin();
+    while( it != trackedObjects.end() ) {
+        if ( (it->numFramesNotDetected > parameters.maxTrackLifetime) 
+                ||
+                ( 
+                 (it->numDetectedFrames <= innerParameters.numStepsToWaitBeforeFirstShow)
+                 &&
+                 (it->numFramesNotDetected > innerParameters.numStepsToTrackWithoutDetectingIfObjectHasNotBeenShown)
+                )
+           )
+        {
+            int numpos=it->lastPositions.size();
+            CV_Assert(numpos > 0);
+            Rect r = it->lastPositions[numpos-1];
+            LOGD("DetectionBasedTracker::updateTrackedObjects: deleted object {%d, %d, %d x %d}",
+                    r.x, r.y, r.width, r.height);
+            it=trackedObjects.erase(it);
+        } else {
+            it++;
+        }
+    }
+}
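+
+// Computes the rectangle to report for track i: an empty Rect is returned for
+// tracks that are too young or that have been lost for too long; otherwise the
+// size and the center are smoothed over the last few positions using the
+// weight vectors set up in the constructor.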
+Rect DetectionBasedTracker::calcTrackedObjectPositionToShow(int i) const
+{
+    if ( (i < 0) || (i >= (int)trackedObjects.size()) ) {
+        LOGE("DetectionBasedTracker::calcTrackedObjectPositionToShow: ERROR: wrong i=%d", i);
+        return Rect();
+    }
+    if (trackedObjects[i].numDetectedFrames <= innerParameters.numStepsToWaitBeforeFirstShow){
+        LOGI("DetectionBasedTracker::calcTrackedObjectPositionToShow: trackedObjects[%d].numDetectedFrames=%d <= numStepsToWaitBeforeFirstShow=%d --- return empty Rect()", 
+                i, trackedObjects[i].numDetectedFrames, innerParameters.numStepsToWaitBeforeFirstShow);
+        return Rect();
+    }
+    if (trackedObjects[i].numFramesNotDetected > innerParameters.numStepsToShowWithoutDetecting) {
+        return Rect();
+    }
+
+    const TrackedObject::PositionsVector& lastPositions=trackedObjects[i].lastPositions;
+
+    int N=lastPositions.size();
+    if (N<=0) {
+        LOGE("DetectionBasedTracker::calcTrackedObjectPositionToShow: ERROR: no positions for i=%d", i);
+        return Rect();
+    }
+
+    int Nsize=std::min(N, (int)weightsSizesSmoothing.size());
+    int Ncenter= std::min(N, (int)weightsPositionsSmoothing.size());
+
+    Point2f center;
+    double w=0, h=0;
+    if (Nsize > 0) {
+        double sum=0;
+        for(int j=0; j < Nsize; j++) {
+            int k=N-j-1;
+            w+= lastPositions[k].width  * weightsSizesSmoothing[j];
+            h+= lastPositions[k].height * weightsSizesSmoothing[j];
+            sum+=weightsSizesSmoothing[j];
+        }
+        w /= sum;
+        h /= sum;
+    } else {
+        w=lastPositions[N-1].width;
+        h=lastPositions[N-1].height;
+    }
+
+    if (Ncenter > 0) {
+        double sum=0;
+        for(int j=0; j < Ncenter; j++) {
+            int k=N-j-1;
+            Point tl(lastPositions[k].tl());
+            Point br(lastPositions[k].br());
+            Point2f c1;
+            c1=tl;
+            c1=c1*0.5f;
+            Point2f c2;
+            c2=br;
+            c2=c2*0.5f;
+            c1=c1+c2;
+
+            center=center+  (c1  * weightsPositionsSmoothing[j]);
+            sum+=weightsPositionsSmoothing[j];
+        }
+        center *= (float)(1 / sum);
+    } else {
+        int k=N-1;
+        Point tl(lastPositions[k].tl());
+        Point br(lastPositions[k].br());
+        Point2f c1;
+        c1=tl;
+        c1=c1* 0.5f;
+        Point2f c2;
+        c2=br;
+        c2=c2*0.5f;
+
+        center=c1+c2;
+    }
+    Point2f tl=center-(Point2f(w,h)*0.5);
+    Rect res(cvRound(tl.x), cvRound(tl.y), cvRound(w), cvRound(h));
+    LOGD("DetectionBasedTracker::calcTrackedObjectPositionToShow: Result for i=%d: {%d, %d, %d x %d}", i, res.x, res.y, res.width, res.height);
+
+    return res;
+}
+
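+// Enlarges the tracking window around the last known position, clips it to the
+// image and runs the cascade only inside that sub-image; the found rectangles
+// are translated back to whole-image coordinates.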
+void DetectionBasedTracker::detectInRegion(const Mat& img, const Rect& r, vector<Rect>& detectedObjectsInRegions)
+{
+    Rect r0(Point(), img.size());
+    Rect r1=scale_rect(r, innerParameters.coeffTrackingWindowSize);
+    r1=r1 & r0;
+    if ( (r1.width <=0) || (r1.height <= 0) ) {
+        LOGD("DetectionBasedTracker::detectInRegion: Empty intersection");
+        return;
+    }
+
+    int d=std::min(r.width, r.height);
+    d=cvRound(d * innerParameters.coeffObjectSizeToTrack);
+
+    vector<Rect> tmpobjects;
+
+    Mat img1(img, r1);//subimage for rectangle -- without data copying
+    LOGD("DetectionBasedTracker::detectInRegion: img1.size()=%d x %d, d=%d",
+            img1.size().width, img1.size().height, d);
+
+    int maxObjectSize=parameters.maxObjectSize;
+    Size max_objectSize(maxObjectSize, maxObjectSize);
+
+    cascadeForTracking.detectMultiScale( img1, tmpobjects,
+            parameters.scaleFactor, parameters.minNeighbors, 0
+            |CV_HAAR_FIND_BIGGEST_OBJECT
+            |CV_HAAR_SCALE_IMAGE
+            ,
+            Size(d,d),
+            max_objectSize
+            );
+
+    for(size_t i=0; i < tmpobjects.size(); i++) {
+        Rect curres(tmpobjects[i].tl() + r1.tl(), tmpobjects[i].size());
+        detectedObjectsInRegions.push_back(curres);
+    }
+}
+
+bool DetectionBasedTracker::setParameters(const Parameters& params)
+{
+    if ( (params.minObjectSize <= 0)
+            || (params.maxObjectSize < 0)
+            || (params.scaleFactor <= 1.0)
+            || (params.maxTrackLifetime < 0) )
+    {
+        LOGE("DetectionBasedTracker::setParameters: ERROR: wrong parameters value");
+        return false;
+    }
+
+    pthread_mutex_lock(&mutex);
+
+
+    parameters=params;
+
+    pthread_mutex_unlock(&mutex);
+    return true;
+}
+
+const DetectionBasedTracker::Parameters& DetectionBasedTracker::getParameters()
+{
+    return parameters;
+}
+
+#endif
+
diff --git a/samples/cpp/detection_based_tracker_sample.cpp b/samples/cpp/detection_based_tracker_sample.cpp
new file mode 100644 (file)
index 0000000..33b9b20
--- /dev/null
@@ -0,0 +1,170 @@
+#if defined(__linux__) || defined(LINUX) || defined(__APPLE__) || defined(ANDROID)
+
+#include <opencv2/core/core.hpp>
+#include <opencv2/core/internal.hpp>
+#include <opencv2/imgproc/imgproc.hpp>
+#include <opencv2/highgui/highgui.hpp>
+#include <opencv2/objdetect/objdetect.hpp>
+#include "opencv2/contrib/detection_based_tracker.hpp"
+
+#include <vector>
+#include <iostream>
+#include <stdio.h>
+
+#define DEBUGLOGS 1
+
+
+#ifdef ANDROID
+#include <android/log.h>
+#define LOG_TAG "DETECTIONBASEDTRACKER__TEST_APPLICAT"
+#define LOGD0(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__))
+#define LOGI0(...) ((void)__android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__))
+#define LOGW0(...) ((void)__android_log_print(ANDROID_LOG_WARN, LOG_TAG, __VA_ARGS__))
+#define LOGE0(...) ((void)__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__))
+#else
+
+#include <stdio.h>
+
+#define LOGD0(_str, ...) do{printf(_str , ## __VA_ARGS__); printf("\n");fflush(stdout);} while(0)
+#define LOGI0(_str, ...) do{printf(_str , ## __VA_ARGS__); printf("\n");fflush(stdout);} while(0)
+#define LOGW0(_str, ...) do{printf(_str , ## __VA_ARGS__); printf("\n");fflush(stdout);} while(0)
+#define LOGE0(_str, ...) do{printf(_str , ## __VA_ARGS__); printf("\n");fflush(stdout);} while(0)
+#endif
+
+#if DEBUGLOGS
+#define LOGD(_str, ...) LOGD0(_str , ## __VA_ARGS__)
+#define LOGI(_str, ...) LOGI0(_str , ## __VA_ARGS__)
+#define LOGW(_str, ...) LOGW0(_str , ## __VA_ARGS__)
+#define LOGE(_str, ...) LOGE0(_str , ## __VA_ARGS__)
+#else 
+#define LOGD(...) do{} while(0)
+#define LOGI(...) do{} while(0)
+#define LOGW(...) do{} while(0)
+#define LOGE(...) do{} while(0)
+#endif
+
+
+
+using namespace cv;
+using namespace std;
+
+#define ORIGINAL 0
+#define SHOULD_USE_EXTERNAL_BUFFERS 1
+
+void usage()
+{
+    LOGE0("usage: filepattern outfilepattern cascadefile");
+    LOGE0("\t where ");
+    LOGE0("\t filepattern --- pattern for the paths to the source images");
+    LOGE0("\t       (e.g.\"./Videos/FACESJPG2/Faces2_%%08d.jpg\" ");
+    LOGE0("\t outfilepattern --- pattern for the paths for images which will be generated");
+    LOGE0("\t       (e.g.\"./resFaces2_%%08d.jpg\" ");
+    LOGE0("\t cascadefile --- path to the cascade file");
+    LOGE0("\t       (e.g.\"opencv/data/lbpcascades/lbpcascade_frontalface.xml\" ");
+}
+
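+// Reads all frames matching filepattern, runs the detection-based tracker on
+// each frame, draws the reported rectangles, and writes the annotated frames
+// to outfilepattern.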
+int test_FaceDetector(int argc, char *argv[])
+{
+    if (argc < 4) {
+        usage();
+        return -1;
+    }
+
+    const char* filepattern=argv[1];
+    const char* outfilepattern=argv[2];
+    const char* cascadefile=argv[3];
+    LOGD0("filepattern='%s'", filepattern);
+    LOGD0("outfilepattern='%s'", outfilepattern);
+    LOGD0("cascadefile='%s'", cascadefile);
+
+    vector<Mat> images;
+    {
+        char filename[256];
+        for(int n=1; ; n++) {
+            snprintf(filename, sizeof(filename), filepattern, n);
+            LOGD("filename='%s'", filename);
+            Mat m0;
+            m0=imread(filename);
+            if (m0.empty()) {
+                LOGI0("Cannot read the file --- break");
+                break;
+            }
+            images.push_back(m0);
+        }
+        LOGD("read %d images", (int)images.size());
+    }
+
+    DetectionBasedTracker::Parameters params;
+    std::string cascadeFrontalfilename=cascadefile;
+
+    DetectionBasedTracker fd(cascadeFrontalfilename, params);
+
+    fd.run();
+
+    Mat gray;
+    Mat m;
+
+    int64 tprev=getTickCount();
+    double freq=getTickFrequency();
+
+    int num_images=images.size();
+    for(int n=1; n <= num_images; n++) {
+        int64 tcur=getTickCount();
+        int64 dt=tcur-tprev;
+        tprev=tcur;
+        double t_ms=((double)dt)/freq * 1000.0;
+        LOGD("\n\nSTEP n=%d        from prev step %f ms\n\n", n, t_ms);
+        m=images[n-1];
+        CV_Assert(! m.empty());
+        cvtColor(m, gray, CV_BGR2GRAY);
+
+        fd.process(gray);
+
+        vector<Rect> result;
+        fd.getObjects(result);
+
+        for(size_t i=0; i < result.size(); i++) {
+            Rect r=result[i];
+            CV_Assert(r.area() > 0);
+            Point tl=r.tl();
+            Point br=r.br();
+            Scalar color=Scalar(0, 250, 0);
+            rectangle(m, tl, br, color, 3);
+        }
+    }
+    {
+        char outfilename[256];
+        for(int n=1; n <= num_images; n++) {
+            snprintf(outfilename, sizeof(outfilename), outfilepattern, n);
+            LOGD("outfilename='%s'", outfilename);
+            m=images[n-1];
+            imwrite(outfilename, m);
+        }
+    }
+
+    fd.stop();
+
+    return 0;
+}
+
+
+
+int main(int argc, char *argv[])
+{
+    return test_FaceDetector(argc, argv);
+}
+
+#else // #if defined(__linux__) || defined(LINUX) || defined(__APPLE__) || defined(ANDROID)
+
+#include <stdio.h>
+int main()
+{
+    printf("This sample works for UNIX or ANDROID only\n");
+    return 0;
+}
+
+#endif