stateThread(STATE_THREAD_STOPPED),
timeWhenDetectingThreadStartedWork(-1)
{
- CV_Assert(!_detector.empty());
+ CV_Assert(_detector);
cascadeInThread = _detector;
cascadeForTracking(trackingDetector)
{
CV_Assert( (params.maxTrackLifetime >= 0)
-// && (!mainDetector.empty())
- && (!trackingDetector.empty()) );
+// && mainDetector
+ && trackingDetector );
- if (!mainDetector.empty()) {
- separateDetectionWork = new SeparateDetectionWork(*this, mainDetector);
+ if (mainDetector) {
+ separateDetectionWork.reset(new SeparateDetectionWork(*this, mainDetector));
}
weightsPositionsSmoothing.push_back(1);
{
CV_Assert(imageGray.type()==CV_8UC1);
- if ( (!separateDetectionWork.empty()) && (!separateDetectionWork->isWorking()) ) {
+ if ( separateDetectionWork && !separateDetectionWork->isWorking() ) {
separateDetectionWork->run();
}
std::vector<Rect> rectsWhereRegions;
bool shouldHandleResult=false;
- if (!separateDetectionWork.empty()) {
+ if (separateDetectionWork) {
shouldHandleResult = separateDetectionWork->communicateWithDetectingThread(imageGray, rectsWhereRegions);
}
bool cv::DetectionBasedTracker::run()
{
- if (!separateDetectionWork.empty()) {
+ if (separateDetectionWork) {
return separateDetectionWork->run();
}
return false;
void cv::DetectionBasedTracker::stop()
{
- if (!separateDetectionWork.empty()) {
+ if (separateDetectionWork) {
separateDetectionWork->stop();
}
}
void cv::DetectionBasedTracker::resetTracking()
{
- if (!separateDetectionWork.empty()) {
+ if (separateDetectionWork) {
separateDetectionWork->resetTracking();
}
trackedObjects.clear();
return false;
}
- if (!separateDetectionWork.empty()) {
+ if (separateDetectionWork) {
separateDetectionWork->lock();
}
parameters=params;
- if (!separateDetectionWork.empty()) {
+ if (separateDetectionWork) {
separateDetectionWork->unlock();
}
return true;
// Factory for the Eigenfaces recognizer.
// @param num_components  number of principal components kept by the PCA
// @param threshold       prediction distance threshold
Ptr<FaceRecognizer> createEigenFaceRecognizer(int num_components, double threshold)
{
    // makePtr<> replaces raw `new`: the object is reference-counted from birth
    // and exception-safe (no leak if Ptr construction were to throw).
    return makePtr<Eigenfaces>(num_components, threshold);
}
// Factory for the Fisherfaces (LDA-based) recognizer.
// @param num_components  number of discriminant components kept
// @param threshold       prediction distance threshold
Ptr<FaceRecognizer> createFisherFaceRecognizer(int num_components, double threshold)
{
    // makePtr<> replaces raw `new` for exception-safe shared ownership.
    return makePtr<Fisherfaces>(num_components, threshold);
}
// Factory for the Local Binary Patterns Histograms recognizer.
// @param radius,neighbors  LBP operator parameters
// @param grid_x,grid_y     histogram grid resolution
// @param threshold         prediction distance threshold
Ptr<FaceRecognizer> createLBPHFaceRecognizer(int radius, int neighbors,
                                             int grid_x, int grid_y, double threshold)
{
    // makePtr<> replaces raw `new` for exception-safe shared ownership.
    return makePtr<LBPH>(radius, neighbors, grid_x, grid_y, threshold);
}
CV_INIT_ALGORITHM(Eigenfaces, "FaceRecognizer.Eigenfaces",
{
case CvFeatureTrackerParams::SIFT:
dd = Algorithm::create<Feature2D>("Feature2D.SIFT");
- if( dd.empty() )
+ if( !dd )
CV_Error(CV_StsNotImplemented, "OpenCV has been compiled without SIFT support");
dd->set("nOctaveLayers", 5);
dd->set("contrastThreshold", 0.04);
break;
case CvFeatureTrackerParams::SURF:
dd = Algorithm::create<Feature2D>("Feature2D.SURF");
- if( dd.empty() )
+ if( !dd )
CV_Error(CV_StsNotImplemented, "OpenCV has been compiled without SURF support");
dd->set("hessianThreshold", 400);
dd->set("nOctaves", 3);
break;
}
- matcher = new BFMatcher(NORM_L2);
+ matcher = makePtr<BFMatcher>(int(NORM_L2));
}
CvFeatureTracker::~CvFeatureTracker()