From: Eric Zavesky
Date: Mon, 15 Feb 2016 10:27:28 +0000 (-0600)
Subject: - add ROI selection, allow device input, use BB for area exclusion
X-Git-Tag: accepted/tizen/6.0/unified/20201030.111113~1970^2~3
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=4c5af4f3618c68f3c2bc2a940f8bf2256f50acb9;p=platform%2Fupstream%2Fopencv.git

- add ROI selection, allow device input, use BB for area exclusion

- previously BB was ignored for first frame, now masks analysis area
- also allow real-time feedback of processing
---

diff --git a/samples/cpp/tutorial_code/features2D/AKAZE_tracking/planar_tracking.cpp b/samples/cpp/tutorial_code/features2D/AKAZE_tracking/planar_tracking.cpp
index a146f1e..85ca003 100755
--- a/samples/cpp/tutorial_code/features2D/AKAZE_tracking/planar_tracking.cpp
+++ b/samples/cpp/tutorial_code/features2D/AKAZE_tracking/planar_tracking.cpp
@@ -1,6 +1,8 @@
 #include <opencv2/features2d.hpp>
 #include <opencv2/videoio.hpp>
 #include <opencv2/opencv.hpp>
+#include <opencv2/imgproc.hpp>      //for ROI
+#include <opencv2/highgui.hpp>      //for imshow
 #include <vector>
 #include <iostream>
 #include <iomanip>
@@ -17,6 +19,7 @@ const double nn_match_ratio = 0.8f; // Nearest-neighbour matching ratio
 const int bb_min_inliers = 100; // Minimal number of inliers to draw bounding box
 const int stats_update_period = 10; // On-screen statistics are updated every 10 frames
 
+namespace example {
 class Tracker
 {
 public:
@@ -40,12 +43,22 @@ protected:
 
 void Tracker::setFirstFrame(const Mat frame, vector<Point2f> bb, string title, Stats& stats)
 {
+    cv::Point *ptMask = new cv::Point[bb.size()];
+    const Point* ptContain = { &ptMask[0] };
+    int iSize = static_cast<int>(bb.size());
+    for (size_t i=0; i<bb.size(); i++) {
+        ptMask[i] = bb[i];
+    }
+
     first_frame = frame.clone();
-    detector->detectAndCompute(first_frame, noArray(), first_kp, first_desc);
+    cv::Mat matMask = cv::Mat::zeros(frame.size(), CV_8UC1);
+    cv::fillPoly(matMask, &ptContain, &iSize, 1, cv::Scalar::all(255));
+    detector->detectAndCompute(first_frame, matMask, first_kp, first_desc);
     stats.keypoints = (int)first_kp.size();
     drawBoundingBox(first_frame, bb);
     putText(first_frame, title, Point(0, 60), FONT_HERSHEY_PLAIN, 5, Scalar::all(0), 4);
     object_bb = bb;
+    delete ptMask;
 }
 
 Mat Tracker::process(const Mat frame, Stats& stats)
@@ -104,17 +117,35 @@ Mat Tracker::process(const Mat frame, Stats& stats)
                 Scalar(255, 0, 0), Scalar(255, 0, 0));
     return res;
 }
+}
 
 int main(int argc, char **argv)
 {
-    if(argc < 4) {
+    if(argc < 3) {
         cerr << "Usage: " << endl <<
-                "akaze_track input_path output_path bounding_box" << endl;
+                "akaze_track input_path output_path [bounding_box_path]" << endl <<
+                "  (for camera input_path=N for camera N)" << endl;
         return 1;
     }
-    VideoCapture video_in(argv[1]);
-    VideoWriter  video_out(argv[2],
-                           (int)video_in.get(CAP_PROP_FOURCC),
+
+    std::string video_name = argv[1];
+    std::stringstream ssFormat;
+    ssFormat << atoi(argv[1]);
+    VideoCapture video_in;
+    int iFourCC = 0, frame_count = 0;
+    if (video_name.compare(ssFormat.str())==0) {    //test str==str(num)
+        video_in.open(atoi(argv[1]));
+        cerr << "Capturing for 10 seconds from camera..." << endl;
+        iFourCC = CV_FOURCC('D', 'I', 'V', 'X');    //default to mp4 (sample)
+        frame_count = 10*static_cast<int>(video_in.get(CAP_PROP_FPS));
+    }
+    else {
+        video_in.open(video_name);
+        iFourCC = static_cast<int>(video_in.get(CAP_PROP_FOURCC));
+        frame_count = static_cast<int>(video_in.get(CAP_PROP_FRAME_COUNT));
+    }
+
+    VideoWriter  video_out(argv[2], iFourCC,
                            (int)video_in.get(CAP_PROP_FPS),
                            Size(2 * (int)video_in.get(CAP_PROP_FRAME_WIDTH),
                                 2 * (int)video_in.get(CAP_PROP_FRAME_HEIGHT)));
@@ -128,33 +159,43 @@ int main(int argc, char **argv)
         return 1;
     }
 
-    vector<Point2f> bb;
-    FileStorage fs(argv[3], FileStorage::READ);
-    if(fs["bounding_box"].empty()) {
-        cerr << "Couldn't read bounding_box from " << argv[3] << endl;
-        return 1;
-    }
-    fs["bounding_box"] >> bb;
-
     Stats stats, akaze_stats, orb_stats;
     Ptr<AKAZE> akaze = AKAZE::create();
     akaze->setThreshold(akaze_thresh);
     Ptr<ORB> orb = ORB::create();
     Ptr<DescriptorMatcher> matcher = DescriptorMatcher::create("BruteForce-Hamming");
-    Tracker akaze_tracker(akaze, matcher);
-    Tracker orb_tracker(orb, matcher);
+    example::Tracker akaze_tracker(akaze, matcher);
+    example::Tracker orb_tracker(orb, matcher);
 
     Mat frame;
     video_in >> frame;
+    vector<Point2f> bb;
+    if (argc < 4) {     //attempt to allow GUI selection
+        cv::Rect2d uBox = selectROI(video_name, frame);
+        bb.push_back(cv::Point2f(uBox.x, uBox.y));
+        bb.push_back(cv::Point2f(uBox.x+uBox.width, uBox.y));
+        bb.push_back(cv::Point2f(uBox.x+uBox.width, uBox.y+uBox.height));
+        bb.push_back(cv::Point2f(uBox.x, uBox.y+uBox.height));
+    }
+    else {
+        FileStorage fs(argv[3], FileStorage::READ);
+        if(fs["bounding_box"].empty()) {
+            cerr << "Couldn't read bounding_box from " << argv[3] << endl;
+            return 1;
+        }
+        fs["bounding_box"] >> bb;
+    }
     akaze_tracker.setFirstFrame(frame, bb, "AKAZE", stats);
     orb_tracker.setFirstFrame(frame, bb, "ORB", stats);
 
     Stats akaze_draw_stats, orb_draw_stats;
-    int frame_count = (int)video_in.get(CAP_PROP_FRAME_COUNT);
     Mat akaze_res, orb_res, res_frame;
-    for(int i = 1; i < frame_count; i++) {
+    int i = 1;
+    for(i = 1; i < frame_count; i++) {
         bool update_stats = (i % stats_update_period == 0);
         video_in >> frame;
+        // stop the program if no more images
+        if(frame.rows==0 || frame.cols==0) break;
 
         akaze_res = akaze_tracker.process(frame, stats);
         akaze_stats += stats;
@@ -173,10 +214,14 @@ int main(int argc, char **argv)
         drawStatistics(orb_res, orb_draw_stats);
         vconcat(akaze_res, orb_res, res_frame);
         video_out << res_frame;
+        cv::imshow(video_name, res_frame);
+        if (i==1)   //resize for easier display
+            cv::resizeWindow(video_name, frame.cols, frame.rows);
+        if(cv::waitKey(1)==27) break;   //quit on ESC button
         cout << i << "/" << frame_count - 1 << endl;
     }
-    akaze_stats /= frame_count - 1;
-    orb_stats /= frame_count - 1;
+    akaze_stats /= i - 1;
+    orb_stats /= i - 1;
     printStatistics("AKAZE", akaze_stats);
     printStatistics("ORB", orb_stats);
     return 0;
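
Note (not part of the patch): a minimal standalone sketch of the masking idea used in Tracker::setFirstFrame() above, where the bounding-box polygon is rasterised into a binary mask with fillPoly() and passed to detectAndCompute() so keypoints outside the selected region are excluded. The image path "object.png" and the hard-coded rectangle are placeholders standing in for the first video frame and the user-selected ROI.

    #include <opencv2/core.hpp>
    #include <opencv2/imgproc.hpp>     // fillPoly
    #include <opencv2/imgcodecs.hpp>   // imread
    #include <opencv2/features2d.hpp>  // AKAZE
    #include <iostream>
    #include <vector>

    int main()
    {
        // Placeholder input image and bounding box.
        cv::Mat frame = cv::imread("object.png");
        if (frame.empty()) return 1;
        cv::Rect roi(50, 40, 200, 150);

        // Turn the rectangle into a closed polygon, as the sample does with bb.
        std::vector<std::vector<cv::Point> > poly(1);
        poly[0].push_back(roi.tl());
        poly[0].push_back(cv::Point(roi.x + roi.width, roi.y));
        poly[0].push_back(roi.br());
        poly[0].push_back(cv::Point(roi.x, roi.y + roi.height));

        // White polygon on a black mask: only this area is analysed.
        cv::Mat mask = cv::Mat::zeros(frame.size(), CV_8UC1);
        cv::fillPoly(mask, poly, cv::Scalar::all(255));

        // Passing the mask instead of noArray() restricts detection to the ROI.
        cv::Ptr<cv::AKAZE> akaze = cv::AKAZE::create();
        std::vector<cv::KeyPoint> kp;
        cv::Mat desc;
        akaze->detectAndCompute(frame, mask, kp, desc);

        std::cout << kp.size() << " keypoints found inside the ROI" << std::endl;
        return 0;
    }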