From: edgarriba
Date: Sat, 9 Aug 2014 15:14:24 +0000 (+0200)
Subject: fixed warnings
X-Git-Tag: submit/tizen_ivi/20141117.190038~2^2~165^2~35
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=8c4b8cc0b43de69ad3642d676a53bbfa07ba19c4;p=profile%2Fivi%2Fopencv.git

fixed warnings
---

diff --git a/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/PnPProblem.cpp b/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/PnPProblem.cpp
index 48c8be5..c469537 100644
--- a/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/PnPProblem.cpp
+++ b/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/PnPProblem.cpp
@@ -13,9 +13,15 @@
 
 #include
 
+/* Functions headers */
+cv::Point3f CROSS(cv::Point3f v1, cv::Point3f v2);
+double DOT(cv::Point3f v1, cv::Point3f v2);
+cv::Point3f SUB(cv::Point3f v1, cv::Point3f v2);
+cv::Point3f get_nearest_3D_point(std::vector<cv::Point3f> &points_list, cv::Point3f origin);
+
+
+/* Functions for Möller–Trumbore intersection algorithm */
 
-/* Functions for Möller–Trumbore intersection algorithm
- * */
 cv::Point3f CROSS(cv::Point3f v1, cv::Point3f v2)
 {
   cv::Point3f tmp_p;
diff --git a/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/PnPProblem.h b/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/PnPProblem.h
index da189d0..61277a7 100644
--- a/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/PnPProblem.h
+++ b/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/PnPProblem.h
@@ -50,4 +50,9 @@ private:
   cv::Mat _P_matrix;
 };
 
+// Functions for Möller–Trumbore intersection algorithm
+cv::Point3f CROSS(cv::Point3f v1, cv::Point3f v2);
+double DOT(cv::Point3f v1, cv::Point3f v2);
+cv::Point3f SUB(cv::Point3f v1, cv::Point3f v2);
+
 #endif /* PNPPROBLEM_H_ */
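CROSS, DOT and SUB are the small vector helpers used by the Möller–Trumbore ray/triangle intersection code in PnPProblem.cpp; declaring them before use, and here in the header, is presumably what addresses the missing-declaration warnings this commit targets. A minimal sketch of what such helpers typically compute, consistent with the declared signatures but not necessarily the tutorial's exact bodies:

    #include <opencv2/core/core.hpp>

    // Cross product of two 3D vectors.
    cv::Point3f CROSS(cv::Point3f v1, cv::Point3f v2)
    {
      cv::Point3f tmp_p;
      tmp_p.x = v1.y*v2.z - v1.z*v2.y;
      tmp_p.y = v1.z*v2.x - v1.x*v2.z;
      tmp_p.z = v1.x*v2.y - v1.y*v2.x;
      return tmp_p;
    }

    // Dot product of two 3D vectors.
    double DOT(cv::Point3f v1, cv::Point3f v2)
    {
      return v1.x*v2.x + v1.y*v2.y + v1.z*v2.z;
    }

    // Component-wise subtraction v1 - v2.
    cv::Point3f SUB(cv::Point3f v1, cv::Point3f v2)
    {
      return cv::Point3f(v1.x - v2.x, v1.y - v2.y, v1.z - v2.z);
    }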
diff --git a/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/RobustMatcher.cpp b/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/RobustMatcher.cpp
index 7ef9f88..862bdd1 100644
--- a/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/RobustMatcher.cpp
+++ b/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/RobustMatcher.cpp
@@ -113,9 +113,9 @@ void RobustMatcher::robustMatch( const cv::Mat& frame, std::vector<cv::DMatch>& good_matches,
 
   // 3. Remove matches for which NN ratio is > than threshold
   // clean image 1 -> image 2 matches
-  int removed1 = ratioTest(matches12);
+  ratioTest(matches12);
   // clean image 2 -> image 1 matches
-  int removed2 = ratioTest(matches21);
+  ratioTest(matches21);
 
   // 4. Remove non-symmetrical matches
   symmetryTest(matches12, matches21, good_matches);
@@ -140,7 +140,7 @@ void RobustMatcher::fastRobustMatch( const cv::Mat& frame, std::vector<cv::DMatch>& good_matches,
   matcher_->knnMatch(descriptors_frame, descriptors_model, matches, 2);
 
   // 3. Remove matches for which NN ratio is > than threshold
-  int removed = ratioTest(matches);
+  ratioTest(matches);
 
   // 4. Fill good matches container
   for ( std::vector<std::vector<cv::DMatch> >::iterator
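The return value of ratioTest() (how many matches it pruned) is informational only, so dropping it simply silences the unused-variable warnings. For context, a minimal sketch of a Lowe-style ratio test over knnMatch output, in the same spirit as the tutorial's RobustMatcher but as a standalone function with a hypothetical name and the threshold passed in explicitly:

    #include <vector>
    #include <opencv2/features2d/features2d.hpp>

    // Clears every knn-match pair whose best/second-best distance ratio exceeds
    // max_ratio; returns how many pairs were removed.
    int pruneByRatio(std::vector<std::vector<cv::DMatch> > &matches, float max_ratio)
    {
      int removed = 0;
      for (size_t i = 0; i < matches.size(); ++i)
      {
        if (matches[i].size() < 2)   // no second neighbour to compare against
        {
          matches[i].clear();
          ++removed;
          continue;
        }
        if (matches[i][0].distance / matches[i][1].distance > max_ratio)
        {
          matches[i].clear();        // ambiguous match: drop it
          ++removed;
        }
      }
      return removed;
    }

Matches whose two nearest neighbours have similar descriptor distances are treated as ambiguous and discarded; the tutorial uses a threshold of 0.70.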
diff --git a/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/Utils.cpp b/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/Utils.cpp
index 915c4e1..74649fc 100644
--- a/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/Utils.cpp
+++ b/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/Utils.cpp
@@ -114,7 +114,7 @@ void drawArrow(cv::Mat image, cv::Point2i p, cv::Point2i q, cv::Scalar color, in
 {
   //Draw the principle line
   cv::line(image, p, q, color, thickness, line_type, shift);
-  const double PI = 3.141592653;
+  const double PI = CV_PI;
   //compute the angle alpha
   double angle = atan2((double)p.y-q.y, (double)p.x-q.x);
   //compute the coordinates of the first segment
@@ -137,9 +137,6 @@ void draw3DCoordinateAxes(cv::Mat image, const std::vector<cv::Point2i> &list_po
   cv::Scalar blue(255,0,0);
   cv::Scalar black(0,0,0);
 
-  const double PI = 3.141592653;
-  int length = 50;
-
   cv::Point2i origin = list_points2d[0];
   cv::Point2i pointX = list_points2d[1];
   cv::Point2i pointY = list_points2d[2];
@@ -196,13 +193,11 @@ cv::Mat rot2euler(const cv::Mat & rotationMatrix)
   cv::Mat euler(3,1,CV_64F);
 
   double m00 = rotationMatrix.at<double>(0,0);
-  double m01 = rotationMatrix.at<double>(0,1);
   double m02 = rotationMatrix.at<double>(0,2);
   double m10 = rotationMatrix.at<double>(1,0);
   double m11 = rotationMatrix.at<double>(1,1);
   double m12 = rotationMatrix.at<double>(1,2);
   double m20 = rotationMatrix.at<double>(2,0);
-  double m21 = rotationMatrix.at<double>(2,1);
   double m22 = rotationMatrix.at<double>(2,2);
 
   double x, y, z;
diff --git a/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/main_detection.cpp b/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/main_detection.cpp
index e5cf965..82d97c4 100644
--- a/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/main_detection.cpp
+++ b/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/main_detection.cpp
@@ -1,12 +1,14 @@
+// C++
 #include
 #include
-
+// OpenCV
 #include
+#include
 #include
 #include
 #include
 #include
-
+// PnP Tutorial
 #include "Mesh.h"
 #include "Model.h"
 #include "PnPProblem.h"
@@ -14,35 +16,15 @@
 #include "ModelRegistration.h"
 #include "Utils.h"
 
-std::string tutorial_path = "../../samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/";
+/** GLOBAL VARIABLES **/
 
-// COOKIES VIDEO
-std::string video_read_path = tutorial_path + "Data/box.mp4"; // mesh
+std::string tutorial_path = "../../samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/"; // path to tutorial
 
-// COOKIES BOX - ORB
+std::string video_read_path = tutorial_path + "Data/box.mp4";       // recorded video
 std::string yml_read_path = tutorial_path + "Data/cookies_ORB.yml"; // 3dpts + descriptors
+std::string ply_read_path = tutorial_path + "Data/box.ply";         // mesh
 
-// COOKIES BOX MESH
-std::string ply_read_path = tutorial_path + "Data/box.ply"; // mesh
-
-void help()
-{
-std::cout
-<< "--------------------------------------------------------------------------" << std::endl
-<< "This program shows how to detect an object given its 3D textured model. You can choose to "
-<< "use a recorded video or the webcam." << std::endl
-<< "Usage:" << std::endl
-<< "./pnp_detection ~/path_to_video/box.mp4" << std::endl
-<< "./pnp_detection " << std::endl
-<< "--------------------------------------------------------------------------" << std::endl
-<< std::endl;
-}
-
-
-/*
- * Set up the intrinsic camera parameters: UVC WEBCAM
- */
-
+// Intrinsic camera parameters: UVC WEBCAM
 double f = 55;                    // focal length in mm
 double sx = 22.3, sy = 14.9;      // sensor size
 double width = 640, height = 480; // image size
@@ -52,10 +34,7 @@ double params_WEBCAM[] = { width*f/sx,   // fx
                            width/2,      // cx
                            height/2};    // cy
-
-/*
- * Set up some basic colors
- */
+// Some basic colors
 cv::Scalar red(0, 0, 255);
 cv::Scalar green(0,255,0);
 cv::Scalar blue(255,0,0);
 
@@ -63,59 +42,82 @@ cv::Scalar yellow(0,255,255);
 
 // Robust Matcher parameters
-
 int numKeyPoints = 2000;      // number of detected keypoints
 float ratio = 0.70f;          // ratio test
 bool fast_match = true;       // fastRobustMatch() or robustMatch()
 
-
 // RANSAC parameters
-
 int iterationsCount = 500;      // number of Ransac iterations.
 float reprojectionError = 2.0;  // maximum allowed distance to consider it an inlier.
 float confidence = 0.95;        // ransac successful confidence.
 
-
 // Kalman Filter parameters
-
 int minInliersKalman = 30;    // Kalman threshold updating
 
+// PnP parameters
+int pnpMethod = cv::ITERATIVE;
 
-/**********************************************************************************************************/
+/** Functions headers **/
+void help();
 void initKalmanFilter( cv::KalmanFilter &KF, int nStates, int nMeasurements, int nInputs, double dt);
-
-
-/**********************************************************************************************************/
-
-
 void updateKalmanFilter( cv::KalmanFilter &KF, cv::Mat &measurements, cv::Mat &translation_estimated, cv::Mat &rotation_estimated );
-
-/**********************************************************************************************************/
-
-
 void fillMeasurements( cv::Mat &measurements, const cv::Mat &translation_measured, const cv::Mat &rotation_measured);
 
-/**********************************************************************************************************/
-
-
+/** Main program **/
 int main(int argc, char *argv[])
 {
   help();
 
+  const cv::String keys =
+      "{help h | | print this message }"
+      "{camera c | | use real time camera }"
+      "{video v | | path to recorded video }"
+      "{model | | path to yml model }"
+      "{mesh | | path to ply mesh }"
+      "{keypoints k |2000 | number of keypoints to detect }"
+      "{ratio r |0.7 | threshold for ratio test }"
+      "{iterations it |500 | RANSAC maximum iterations count }"
+      "{error e |2.0 | RANSAC reprojection errror }"
+      "{confidence c |0.95 | RANSAC confidence }"
+      "{inliers in |30 | minimum inliers for Kalman update }"
+      "{method pnp |0 | PnP method: (0) ITERATIVE - (1) EPNP - (2) P3P - (3) DLS}"
+      "{fast f |true | use of robust fast match }"
+      ;
+  cv::CommandLineParser parser(argc, argv, keys);
+
+  if (parser.has("help"))
+  {
+    parser.printMessage();
+    return 0;
+  }
+  else
+  {
+    video_read_path = parser.has("video") ? parser.get<std::string>(0) : video_read_path;
+    yml_read_path = parser.has("model") ? parser.get<std::string>(1) : yml_read_path;
+    ply_read_path = parser.has("mesh") ? parser.get<std::string>(2) : ply_read_path;
+    numKeyPoints = !parser.has("keypoints") ? parser.get<int>("keypoints") : numKeyPoints;
+    ratio = !parser.has("ratio") ? parser.get<float>("ratio") : ratio;
+    fast_match = !parser.has("fast") ? parser.get<bool>("fast") : fast_match;
+    iterationsCount = !parser.has("iterations") ? parser.get<int>("iterations") : iterationsCount;
+    reprojectionError = !parser.has("error") ? parser.get<float>("error") : reprojectionError;
+    confidence = !parser.has("confidence") ? parser.get<float>("confidence") : confidence;
+    minInliersKalman = !parser.has("inliers") ? parser.get<int>("inliers") : minInliersKalman;
+    pnpMethod = !parser.has("method") ? parser.get<int>("method") : pnpMethod;
+  }
+
   PnPProblem pnp_detection(params_WEBCAM);
   PnPProblem pnp_detection_est(params_WEBCAM);
 
   Model model;               // instantiate Model object
   model.load(yml_read_path); // load a 3D textured object model
 
-  Mesh mesh;                         // instantiate Mesh object
-  mesh.load(ply_read_path);          // load an object mesh
-
+  Mesh mesh;                 // instantiate Mesh object
+  mesh.load(ply_read_path);  // load an object mesh
 
   RobustMatcher rmatcher;    // instantiate RobustMatcher
@@ -125,33 +127,25 @@ int main(int argc, char *argv[])
   rmatcher.setFeatureDetector(detector);        // set feature detector
   rmatcher.setDescriptorExtractor(extractor);   // set descriptor extractor
-
   cv::Ptr<cv::flann::IndexParams> indexParams = cv::makePtr<cv::flann::LshIndexParams>(6, 12, 1); // instantiate LSH index parameters
   cv::Ptr<cv::flann::SearchParams> searchParams = cv::makePtr<cv::flann::SearchParams>(50);       // instantiate flann search parameters
   cv::DescriptorMatcher * matcher = new cv::FlannBasedMatcher(indexParams, searchParams);         // instantiate FlannBased matcher
   rmatcher.setDescriptorMatcher(matcher);       // set matcher
-
-
   rmatcher.setRatio(ratio); // set ratio test parameter
-
   cv::KalmanFilter KF;         // instantiate Kalman Filter
-
   int nStates = 18;            // the number of states
   int nMeasurements = 6;       // the number of measured states
-  int nInputs = 0;             // the number of action control
-
+  int nInputs = 0;             // the number of control actions
   double dt = 0.125;           // time between measurements (1/FPS)
 
   initKalmanFilter(KF, nStates, nMeasurements, nInputs, dt);    // init function
-
   cv::Mat measurements(nMeasurements, 1, CV_64F); measurements.setTo(cv::Scalar(0));
   bool good_measurement = false;
 
   // Get the MODEL INFO
-
   std::vector<cv::Point3f> list_points3d_model = model.get_points3d();  // list with model 3D coordinates
   cv::Mat descriptors_model = model.get_descriptors();                  // list with descriptors of each 3D coordinate
@@ -161,8 +155,7 @@ int main(int argc, char *argv[])
 
   cv::VideoCapture cap;        // instantiate VideoCapture
-  (argc < 2) ? cap.open(video_read_path) : cap.open(argv[1]);  // open the default camera device
-                                                               // or a recorder video
+  cap.open(video_read_path);   // open a recorded video
 
   if(!cap.isOpened())   // check if we succeeded
   {
     return -1;
@@ -170,25 +163,23 @@ int main(int argc, char *argv[])
   }
 
+  // Input parameters
+  if(argc > 2) pnpMethod = 0;
+
   // start and end times
   time_t start, end;
 
   // fps calculated using number of frames / seconds
-  double fps;
+  // floating point seconds elapsed since start
+  double fps, sec;
 
   // frame counter
   int counter = 0;
 
-  // floating point seconds elapsed since start
-  double sec;
-
   // start the clock
   time(&start);
 
-  double tstart2, tstop2, ttime2; // algorithm metrics
-  double tstart, tstop, ttime; // algorithm metrics
-
   cv::Mat frame, frame_vis;
 
   while(cap.read(frame) && cv::waitKey(30) != 27) // capture frame until ESC is pressed
@@ -236,10 +227,9 @@ int main(int argc, char *argv[])
 
     if(good_matches.size() > 0) // None matches, then RANSAC crashes
     {
-
       // -- Step 3: Estimate the pose using RANSAC approach
       pnp_detection.estimatePoseRANSAC( list_points3d_model_match, list_points2d_scene_match,
-                                        cv::ITERATIVE, inliers_idx,
+                                        pnpMethod, inliers_idx,
                                         iterationsCount, reprojectionError, confidence );
 
       // -- Step 4: Catch the inliers keypoints to draw
@@ -322,8 +312,8 @@ int main(int argc, char *argv[])
     fps = counter / sec;
     drawFPS(frame_vis, fps, yellow);
 
     // frame ratio
-    double ratio = ((double)inliers_idx.rows/(double)good_matches.size())*100;
-    drawConfidence(frame_vis, ratio, yellow);
+    double detection_ratio = ((double)inliers_idx.rows/(double)good_matches.size())*100;
+    drawConfidence(frame_vis, detection_ratio, yellow);
 
     // -- Step X: Draw some debugging text
@@ -341,9 +331,6 @@ int main(int argc, char *argv[])
     drawText2(frame_vis, text2, red);
 
     cv::imshow("REAL TIME DEMO", frame_vis);
-
-    //cv::waitKey(0);
-
   }
 
   // Close and Destroy Window
@@ -354,6 +341,21 @@ int main(int argc, char *argv[])
 }
 
 /**********************************************************************************************************/
+void help()
+{
+std::cout
+<< "--------------------------------------------------------------------------" << std::endl
+<< "This program shows how to detect an object given its 3D textured model. You can choose to "
+<< "use a recorded video or the webcam." << std::endl
+<< "Usage:" << std::endl
+<< "./cpp-tutorial-pnp_detection []" << std::endl
+<< "Keys:" << std::endl
+<< "(0) ITERATIVE - (1) EPNP - (2) P3P - (3) DLS" << std::endl
+<< "--------------------------------------------------------------------------" << std::endl
+<< std::endl;
+}
+
+/**********************************************************************************************************/
 void initKalmanFilter(cv::KalmanFilter &KF, int nStates, int nMeasurements, int nInputs, double dt)
 {
 
@@ -424,9 +426,6 @@ void initKalmanFilter(cv::KalmanFilter &KF, int nStates, int nMeasurements, int
   KF.measurementMatrix.at<double>(4,10) = 1;  // pitch
   KF.measurementMatrix.at<double>(5,11) = 1;  // yaw
 
-  //std::cout << "A " << std::endl << KF.transitionMatrix << std::endl;
-  //std::cout << "C " << std::endl << KF.measurementMatrix << std::endl;
-
 }
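The keys string added to main_detection.cpp uses cv::CommandLineParser's "{long short | default | help text}" format, and the new pnpMethod option is what replaces the hard-coded cv::ITERATIVE in estimatePoseRANSAC. A minimal, self-contained sketch of the same parsing pattern, with a hypothetical binary name and only a trimmed-down subset of the tutorial's options:

    #include <iostream>
    #include <opencv2/core/utility.hpp>

    int main(int argc, char *argv[])
    {
      const cv::String keys =
          "{help h      |     | print this message            }"
          "{video v     |     | path to recorded video        }"
          "{keypoints k |2000 | number of keypoints to detect }";
      cv::CommandLineParser parser(argc, argv, keys);

      if (parser.has("help"))
      {
        parser.printMessage();   // prints the formatted option table
        return 0;
      }

      cv::String video_path = parser.get<cv::String>("video");  // empty if not given
      int num_keypoints = parser.get<int>("keypoints");         // falls back to the default (2000)

      std::cout << "video: " << video_path.c_str()
                << "  keypoints: " << num_keypoints << std::endl;
      return 0;
    }

It would be invoked as, for example, ./pnp_example -v=/path/to/box.mp4 -keypoints=3000; options omitted on the command line keep the defaults given in the keys string.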
diff --git a/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/main_registration.cpp b/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/main_registration.cpp
index c3f6ab6..3b2517b 100644
--- a/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/main_registration.cpp
+++ b/samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/src/main_registration.cpp
@@ -17,16 +17,14 @@
  * Set up the images paths
  */
 
-std::string tutorial_path = "../../samples/cpp/tutorial_code/calib3d/real_time_pose_estimation/";
-
 // COOKIES BOX [718x480]
-std::string img_path = tutorial_path + "Data/resized_IMG_3875.JPG"; // f 55
+std::string img_path = "../Data/resized_IMG_3875.JPG"; // f 55
 
 // COOKIES BOX MESH
-std::string ply_read_path = tutorial_path + "Data/box.ply";
+std::string ply_read_path = "../Data/box.ply";
 
 // YAML writting path
-std::string write_path = tutorial_path + "Data/cookies_ORB.yml";
+std::string write_path = "../Data/cookies_ORB.yml";
 
 void help()
 {