1 /*********************************************************************
2 * Software License Agreement (BSD License)
4 * Copyright (c) 2008-2010, Willow Garage, Inc.
7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions
11 * * Redistributions of source code must retain the above copyright
12 * notice, this list of conditions and the following disclaimer.
13 * * Redistributions in binary form must reproduce the above
14 * copyright notice, this list of conditions and the following
15 * disclaimer in the documentation and/or other materials provided
16 * with the distribution.
17 * * Neither the name of the Willow Garage nor the names of its
18 * contributors may be used to endorse or promote products derived
19 * from this software without specific prior written permission.
21 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
22 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
23 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
24 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
25 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
26 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
27 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
28 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
29 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
30 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
31 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
32 * POSSIBILITY OF SUCH DAMAGE.
33 *********************************************************************/
36 // The original code was written by
38 // and later modified and prepared
39 // for integration into OpenCV by
40 // Antonella Cascitelli,
41 // Marco Di Stefano and
46 #include "precomp.hpp"
47 #include "opencv2/opencv_modules.hpp"
48 #ifdef HAVE_OPENCV_HIGHGUI
49 # include "opencv2/highgui/highgui.hpp"
// Basic vocabulary types for chamfer matching:
//   coordinate_t      - (x, y) pixel coordinate stored as pair<int,int> (first = x, second = y)
//   orientation_t     - edge tangent angle in radians
//   location_scale_t  - a candidate match position plus the template scale tried there
59 typedef std::pair<int,int> coordinate_t;
60 typedef float orientation_t;
61 typedef std::vector<coordinate_t> template_coords_t;
62 typedef std::vector<orientation_t> template_orientations_t;
63 typedef std::pair<Point, float> location_scale_t;
// Minimum allowed distance (in pixels, L1) between two reported matches;
// closer candidates are treated as duplicates of one another.
71 float min_match_distance_;
73 ///////////////////////// Image iterators ////////////////////////////
// Abstract iterator over candidate (location, scale) pairs to evaluate.
// NOTE(review): listing is sampled here — class headers/braces are not visible in this view.
78 virtual ~ImageIterator() {}
79 virtual bool hasNext() const = 0;
80 virtual location_scale_t next() = 0;
// Abstract factory for ImageIterator; caller owns the returned pointer
// (concrete iterator() implementations below return `new ...`).
86 virtual ImageIterator* iterator() const = 0;
87 virtual ~ImageRange() {}
// Dense scan: every x_step/y_step grid position, at `scales_` scales
// spread between min_scale and max_scale.
92 class SlidingWindowImageRange : public ImageRange
103 SlidingWindowImageRange(int width, int height, int x_step = 3, int y_step = 3, int _scales = 5, float min_scale = 0.6, float max_scale = 1.6) :
104 width_(width), height_(height), x_step_(x_step),y_step_(y_step), scales_(_scales), min_scale_(min_scale), max_scale_(max_scale)
109 ImageIterator* iterator() const;
// Scan restricted to an explicit list of candidate locations, each tried
// at `scales_` scales between min_scale and max_scale.
112 class LocationImageRange : public ImageRange
// Reference member: the caller-owned vector must outlive this range object.
114 const std::vector<Point>& locations_;
// Non-copyable (declared, not defined) because of the reference member.
120 LocationImageRange(const LocationImageRange&);
121 LocationImageRange& operator=(const LocationImageRange&);
124 LocationImageRange(const std::vector<Point>& locations, int _scales = 5, float min_scale = 0.6, float max_scale = 1.6) :
125 locations_(locations), scales_(_scales), min_scale_(min_scale), max_scale_(max_scale)
// Caller owns the returned iterator (heap-allocated).
129 ImageIterator* iterator() const
131 return new LocationImageIterator(locations_, scales_, min_scale_, max_scale_);
// Scan over explicit (location, scale) pairs: locations_[i] is tried at
// exactly scales_[i]; the two vectors must be the same length (asserted).
136 class LocationScaleImageRange : public ImageRange
// Reference members: caller-owned vectors must outlive this object.
138 const std::vector<Point>& locations_;
139 const std::vector<float>& scales_;
// Non-copyable (declared, not defined).
141 LocationScaleImageRange(const LocationScaleImageRange&);
142 LocationScaleImageRange& operator=(const LocationScaleImageRange&);
144 LocationScaleImageRange(const std::vector<Point>& locations, const std::vector<float>& _scales) :
145 locations_(locations), scales_(_scales)
// NOTE(review): assert compiles out in release builds; a mismatched pair of
// vectors would then go undetected.
147 assert(locations.size()==_scales.size());
// Caller owns the returned iterator (heap-allocated).
150 ImageIterator* iterator() const
152 return new LocationScaleImageIterator(locations_, scales_);
161 * Class that represents a template for chamfer matching.
165 friend class ChamferMatcher::Matching;
166 friend class ChamferMatcher;
// Cache of rescaled copies of this template; owned here and deleted in the
// destructor body visible below.
170 std::vector<Template*> scaled_templates;
// Flattened pixel offsets (y*width + x) for a particular image width;
// valid only while addr_width matches the queried width (see getTemplateAddresses).
171 std::vector<int> addr;
// Contour points relative to the template center, plus per-point tangents.
174 template_coords_t coords;
176 template_orientations_t orientations;
// addr_width == -1 marks the address cache as not yet computed.
181 Template() : addr_width(-1)
185 Template(Mat& edge_image, float scale_ = 1);
// Destructor fragment: releases the owned scaled copies.
189 for (size_t i=0;i<scaled_templates.size();++i) {
190 delete scaled_templates[i];
192 scaled_templates.clear();
194 orientations.clear();
204 * @param scale Scale to be resized to
// Returns `this` or a cached/new scaled copy owned by this template —
// caller must NOT delete the result.
206 Template* rescale(float scale);
208 std::vector<int>& getTemplateAddresses(int width);
214 * Used to represent a matching result.
225 typedef std::vector<Match> Matches;
229 * Implements the chamfer matching algorithm on images taking into account both distance from
230 * the template pixels to the nearest pixels and orientation alignment between template and image
236 bool use_orientation_;
// Owned templates; released in the destructor loop visible at line 246.
238 std::vector<Template*> templates;
240 Matching(bool use_orientation = true, float _truncate = 10) : truncate_(_truncate), use_orientation_(use_orientation)
246 for (size_t i = 0; i<templates.size(); i++) {
252 * Add a template to the detector from an edge image.
253 * @param templ An edge image
// NOTE(review): the Template ctor consumes the edge image destructively
// (findContour erases pixels as it walks) — presumably `templ` is modified;
// confirm against the Template ctor at line 787.
255 void addTemplateFromImage(Mat& templ, float scale = 1.0);
258 * Run matching using an edge image.
259 * @param edge_img Edge image
260 * @return a match object
// Caller owns the returned Matches*.
262 ChamferMatcher::Matches* matchEdgeImage(Mat& edge_img, const ImageRange& range, float orientation_weight = 0.5, int max_matches = 20, float min_match_distance = 10.0);
// NOTE(review): stores &template_ into the owned `templates` vector
// (see line 947) — mixing externally-owned and internally-owned pointers;
// double-delete risk if the destructor frees all entries.
264 void addTemplate(Template& template_);
268 float orientation_diff(float o1, float o2)
274 * Computes the chamfer matching cost for one position in the target image.
275 * @param offset Offset where to compute cost
276 * @param dist_img Distance transform image.
277 * @param orientation_img Orientation image.
278 * @param tpl Template
279 * @param templ_orientations Orientations of the target points.
280 * @return matching result
282 ChamferMatcher::Match* localChamferDistance(Point offset, Mat& dist_img, Mat& orientation_img, Template* tpl, float orientation_weight);
286 * Matches all templates.
287 * @param dist_img Distance transform image.
288 * @param orientation_img Orientation image.
290 ChamferMatcher::Matches* matchTemplates(Mat& dist_img, Mat& orientation_img, const ImageRange& range, float orientation_weight);
292 void computeDistanceTransform(Mat& edges_img, Mat& dist_img, Mat& annotate_img, float truncate_dt, float a, float b);
293 void computeEdgeOrientations(Mat& edge_img, Mat& orientation_img);
294 void fillNonContourOrientations(Mat& annotated_img, Mat& orientation_img);
299 * Finds a contour in an edge image. The original image is altered by removing the found contour.
300 * @param templ_img Edge image
301 * @param coords Coordinates forming the contour.
302 * @return True while a contour is still found in the image.
304 static bool findContour(Mat& templ_img, template_coords_t& coords);
307 * Computes contour points orientations using the approach from:
309 * Matas, Shao and Kittler - Estimation of Curvature and Tangent Direction by
310 * Median Filtered Differencing
312 * @param coords Contour points
313 * @param orientations Contour points orientations
315 static void findContourOrientations(const template_coords_t& coords, template_orientations_t& orientations);
319 * Computes the angle of a line segment.
321 * @param a One end of the line segment
322 * @param b The other end.
325 * @return Angle in radians.
327 static float getAngle(coordinate_t a, coordinate_t b, int& dx, int& dy);
330 * Finds a point in the image from which to start contour following.
336 static bool findFirstContourPoint(Mat& templ_img, coordinate_t& p);
338 * Method that extracts a single continuous contour from an image given a starting point.
339 * When it extracts the contour it tries to maintain the same direction (at a T-join for example).
345 static void followContour(Mat& templ_img, template_coords_t& coords, int direction);
// Iterates the cross product of explicit locations x generated scales.
353 class LocationImageIterator : public ImageIterator
// Reference member — caller-owned vector must outlive the iterator.
355 const std::vector<Point>& locations_;
// Non-copyable (declared, not defined).
369 LocationImageIterator(const LocationImageIterator&);
370 LocationImageIterator& operator=(const LocationImageIterator&);
373 LocationImageIterator(const std::vector<Point>& locations, int _scales, float min_scale, float max_scale);
375 bool hasNext() const {
379 location_scale_t next();
// Iterates paired (location, scale) entries; i-th location uses i-th scale.
382 class LocationScaleImageIterator : public ImageIterator
// Reference members — caller-owned vectors must outlive the iterator.
384 const std::vector<Point>& locations_;
385 const std::vector<float>& scales_;
// Non-copyable (declared, not defined).
391 LocationScaleImageIterator(const LocationScaleImageIterator&);
392 LocationScaleImageIterator& operator=(const LocationScaleImageIterator&);
395 LocationScaleImageIterator(const std::vector<Point>& locations, const std::vector<float>& _scales) :
396 locations_(locations), scales_(_scales)
// NOTE(review): assert only — disappears under NDEBUG.
398 assert(locations.size()==_scales.size());
// Empty location list means the iterator starts exhausted.
405 has_next_ = (locations_.size()==0 ? false : true);
408 bool hasNext() const {
412 location_scale_t next();
// Walks a regular grid over the image (x_step/y_step), repeating the whole
// grid for each scale step.
415 class SlidingWindowImageIterator : public ImageIterator
436 SlidingWindowImageIterator(int width, int height, int x_step, int y_step, int scales, float min_scale, float max_scale);
438 bool hasNext() const {
442 location_scale_t next();
// Weight of the orientation term vs. the distance term in the final cost
// (see localChamferDistance: cost = beta*dist + alpha*orientation).
455 float orientation_weight;
460 ChamferMatcher(int _max_matches = 20, float _min_match_distance = 1.0, int _pad_x = 3,
461 int _pad_y = 3, int _scales = 5, float _minScale = 0.6, float _maxScale = 1.6,
462 float _orientation_weight = 0.5, float _truncate = 20)
464 max_matches_ = _max_matches;
465 min_match_distance_ = _min_match_distance;
469 minScale = _minScale;
470 maxScale = _maxScale;
471 orientation_weight = _orientation_weight;
472 truncate = _truncate;
// Pre-size the match buffer; addMatch() fills it up to max_matches_.
475 matches.resize(max_matches_);
// NOTE(review): raw owning `new` — presumably deleted in the destructor,
// which is outside this view; confirm to rule out a leak.
476 chamfer_ = new Matching(true);
484 void showMatch(Mat& img, int index = 0);
485 void showMatch(Mat& img, Match match_);
487 const Matches& matching(Template&, Mat&);
// Non-copyable (declared, not defined) — owns chamfer_ raw pointer.
490 ChamferMatcher(const ChamferMatcher&);
491 ChamferMatcher& operator=(const ChamferMatcher&);
492 void addMatch(float cost, Point offset, const Template* tpl);
498 ///////////////////// implementation ///////////////////////////
498 ///////////////////// implementation ///////////////////////////
500 ChamferMatcher::SlidingWindowImageIterator::SlidingWindowImageIterator( int width,
// NOTE(review): default arguments repeated on the out-of-class definition —
// defaults belong on the declaration only; most compilers reject duplicates.
505 float min_scale = 0.6,
506 float max_scale = 1.6) :
513 min_scale_(min_scale),
514 max_scale_(max_scale)
// NOTE(review): dividing by scales_ (not scales_-1) means max_scale_ itself
// is never reached; last scale tried is max - step. Confirm intended.
521 scale_step_ = (max_scale_-min_scale_)/scales_;
// Returns current (position, scale), then advances x -> y -> scale.
524 location_scale_t ChamferMatcher::SlidingWindowImageIterator::next()
526 location_scale_t next_val = std::make_pair(Point(x_,y_),scale_);
536 scale_ += scale_step_;
539 if (scale_cnt_ == scales_) {
// Factory: caller owns the returned iterator.
552 ChamferMatcher::ImageIterator* ChamferMatcher::SlidingWindowImageRange::iterator() const
554 return new SlidingWindowImageIterator(width_, height_, x_step_, y_step_, scales_, min_scale_, max_scale_);
559 ChamferMatcher::LocationImageIterator::LocationImageIterator(const std::vector<Point>& locations,
// NOTE(review): default arguments repeated on the out-of-class definition
// (see also line 505) — should appear only on the declaration.
561 float min_scale = 0.6,
562 float max_scale = 1.6) :
563 locations_(locations),
565 min_scale_(min_scale),
566 max_scale_(max_scale)
571 has_next_ = (locations_.size()==0 ? false : true);
// Same divisor note as line 521: step = range/scales_, so max_scale_ is excluded.
572 scale_step_ = (max_scale_-min_scale_)/scales_;
// Advances through all locations at the current scale, then bumps the scale.
575 location_scale_t ChamferMatcher::LocationImageIterator:: next()
577 location_scale_t next_val = std::make_pair(locations_[iter_],scale_);
580 if (iter_==locations_.size()) {
582 scale_ += scale_step_;
585 if (scale_cnt_ == scales_) {
// Paired iteration: i-th location with i-th scale; exhausted when iter_
// wraps past the end.
596 location_scale_t ChamferMatcher::LocationScaleImageIterator::next()
598 location_scale_t next_val = std::make_pair(locations_[iter_],scales_[iter_]);
601 if (iter_==locations_.size()) {
// Raster-scans for the first non-zero edge pixel; used as the seed for
// contour following. Returns false when the image has no edges left.
612 bool ChamferMatcher::Matching::findFirstContourPoint(Mat& templ_img, coordinate_t& p)
614 for (int y=0;y<templ_img.rows;++y) {
615 for (int x=0;x<templ_img.cols;++x) {
616 if (templ_img.at<uchar>(y,x)!=0) {
// Recursively walks a contour from coords.back(), erasing visited pixels.
// direction==-1 means "initial call": after finding the first neighbor it
// follows both ways (forward, then reversed coords in the opposite direction).
// NOTE(review): recursion depth equals contour length — long contours risk
// stack overflow; an iterative rewrite would be safer.
628 void ChamferMatcher::Matching::followContour(Mat& templ_img, template_coords_t& coords, int direction = -1)
// 8-connected neighborhood as {dy, dx} pairs, clockwise from top-left.
// Note the swapped convention: dir[j][0] is the y offset, dir[j][1] the x offset.
630 const int dir[][2] = { {-1,-1}, {-1,0}, {-1,1}, {0,1}, {1,1}, {1,0}, {1,-1}, {0,-1} };
634 assert (direction==-1 || !coords.empty());
636 coordinate_t crt = coords.back();
638 // mark the current pixel as visited
639 templ_img.at<uchar>(crt.second,crt.first) = 0;
// Initial-direction search.
// NOTE(review): j<7 scans only 7 of the 8 neighbors — dir[7] = {0,-1}
// (the left neighbor) is never tried on the initial call; looks like an
// off-by-one, confirm against upstream history before changing.
641 for (int j = 0; j<7; ++j) {
642 next.first = crt.first + dir[j][1];
643 next.second = crt.second + dir[j][0];
644 if (next.first >= 0 && next.first < templ_img.cols &&
645 next.second >= 0 && next.second < templ_img.rows){
646 ptr = templ_img.at<uchar>(next.second, next.first);
648 coords.push_back(next);
649 followContour(templ_img, coords,j);
650 // try to continue contour in the other direction
651 reverse(coords.begin(), coords.end());
// (j+4)%8 is the 180-degree opposite direction in the 8-neighborhood.
652 followContour(templ_img, coords, (j+4)%8);
// Directed continuation: prefer the same direction k == `direction`,
// cost = |dy|+|dx| (1 for axial, 2 for diagonal steps).
661 next.first = crt.first + dir[k][1];
662 next.second = crt.second + dir[k][0];
663 if (next.first >= 0 && next.first < templ_img.cols &&
664 next.second >= 0 && next.second < templ_img.rows){
665 ptr = templ_img.at<uchar>(next.second, next.first);
667 k_cost = std::abs(dir[k][1]) + std::abs(dir[k][0]);
// Otherwise try directions fanning out around `direction`
// (p = previous/counter-clockwise, n = next/clockwise candidates).
672 for (int j = 0 ;j<3; ++j) {
675 next.first = crt.first + dir[p][1];
676 next.second = crt.second + dir[p][0];
677 if (next.first >= 0 && next.first < templ_img.cols &&
678 next.second >= 0 && next.second < templ_img.rows){
679 ptr = templ_img.at<uchar>(next.second, next.first);
681 int p_cost = std::abs(dir[p][1]) + std::abs(dir[p][0]);
687 next.first = crt.first + dir[n][1];
688 next.second = crt.second + dir[n][0];
689 if (next.first >= 0 && next.first < templ_img.cols &&
690 next.second >= 0 && next.second < templ_img.rows){
691 ptr = templ_img.at<uchar>(next.second, next.first);
693 int n_cost = std::abs(dir[n][1]) + std::abs(dir[n][0]);
// Follow the cheapest valid neighbor k found above.
704 next.first = crt.first + dir[k][1];
705 next.second = crt.second + dir[k][0];
706 if (next.first >= 0 && next.first < templ_img.cols &&
707 next.second >= 0 && next.second < templ_img.rows) {
708 coords.push_back(next);
709 followContour(templ_img, coords, k);
// Extracts one contour from the edge image (destructively: followContour
// zeroes visited pixels). Returns true while contours remain, so callers
// loop `while (findContour(...))` to consume all of them.
717 bool ChamferMatcher::Matching::findContour(Mat& templ_img, template_coords_t& coords)
719 coordinate_t start_point;
721 bool found = findFirstContourPoint(templ_img,start_point);
723 coords.push_back(start_point);
724 followContour(templ_img, coords);
// Angle of segment a->b in radians, with dx/dy returned through out-params.
// dy is negated to convert from image coordinates (y grows downward) to
// conventional math coordinates before atan2.
732 float ChamferMatcher::Matching::getAngle(coordinate_t a, coordinate_t b, int& dx, int& dy)
734 dx = b.first-a.first;
735 dy = -(b.second-a.second); // in image coordinated Y axis points downward
736 float angle = atan2((float)dy,(float)dx);
// Estimates a tangent angle at each contour point by median-filtered
// differencing (Matas/Shao/Kittler): take the 2M secant angles around the
// point, pick the two middle ones with nth_element, and average them.
747 void ChamferMatcher::Matching::findContourOrientations(const template_coords_t& coords, template_orientations_t& orientations)
750 int coords_size = (int)coords.size();
752 std::vector<float> angles(2*M);
// -3*pi is the "invalid orientation" sentinel used throughout this file
// (cf. orientation_img.setTo(3*(-CV_PI)) at line 1037).
753 orientations.insert(orientations.begin(), coords_size, float(-3*CV_PI)); // mark as invalid in the beginning
755 if (coords_size<2*M+1) { // if contour not long enough to estimate orientations, abort
// The first/last M points keep the invalid sentinel — only interior points
// have enough neighbors for the 2M-sample window.
759 for (int i=M;i<coords_size-M;++i) {
760 coordinate_t crt = coords[i];
764 // compute previous M angles
765 for (int j=M;j>0;--j) {
767 angles[k++] = getAngle(other,crt, dx, dy);
769 // compute next M angles
770 for (int j=1;j<=M;++j) {
772 angles[k++] = getAngle(crt, other, dx, dy);
775 // get the middle two angles
// Two partial selections are cheaper than a full sort for just the median pair.
776 std::nth_element(angles.begin(), angles.begin()+M-1, angles.end());
777 std::nth_element(angles.begin()+M-1, angles.begin()+M, angles.end());
778 // sort(angles.begin(), angles.end());
780 // average them to compute tangent
// NOTE(review): plain averaging of angles is wrong across the +/-pi wrap
// (e.g. mean of 3.1 and -3.1 should be ~pi, not 0) — confirm contours
// never straddle the discontinuity or use circular mean.
781 orientations[i] = (angles[M-1]+angles[M])/2;
785 //////////////////////// Template /////////////////////////////////////
// Builds a template from an edge image: extracts every contour (destroying
// edge_image in the process), computes orientations, then recenters all
// coordinates on the centroid and records the bounding-box size.
787 ChamferMatcher::Template::Template(Mat& edge_image, float scale_) : addr_width(-1), scale(scale_)
789 template_coords_t local_coords;
790 template_orientations_t local_orientations;
// findContour consumes pixels, so this loop terminates when the image is empty.
792 while (ChamferMatcher::Matching::findContour(edge_image, local_coords)) {
793 ChamferMatcher::Matching::findContourOrientations(local_coords, local_orientations);
795 coords.insert(coords.end(), local_coords.begin(), local_coords.end());
796 orientations.insert(orientations.end(), local_orientations.begin(), local_orientations.end());
797 local_coords.clear();
798 local_orientations.clear();
802 size = edge_image.size();
// Accumulate centroid and track the tight bounding box of all contour points.
810 for (size_t i=0;i<coords.size();++i) {
811 center.x += coords[i].first;
812 center.y += coords[i].second;
814 if (min.x>coords[i].first) min.x = coords[i].first;
815 if (min.y>coords[i].second) min.y = coords[i].second;
816 if (max.x<coords[i].first) max.x = coords[i].first;
817 if (max.y<coords[i].second) max.y = coords[i].second;
820 size.width = max.x - min.x;
821 size.height = max.y - min.y;
822 int coords_size = (int)coords.size();
// MAX(...,1) guards the division when the edge image had no contours at all.
824 center.x /= MAX(coords_size, 1);
825 center.y /= MAX(coords_size, 1);
// Store coordinates relative to the centroid so the template can be placed
// at any offset during matching.
827 for (int i=0;i<coords_size;++i) {
828 coords[i].first -= center.x;
829 coords[i].second -= center.y;
// Lazily builds (and caches, keyed by addr_width) the flattened pixel
// offsets y*width + x of every template point, so localChamferDistance can
// index a row pointer directly. Recomputed only when `width` changes.
834 vector<int>& ChamferMatcher::Template::getTemplateAddresses(int width)
836 if (addr_width!=width) {
837 addr.resize(coords.size());
840 for (size_t i=0; i<coords.size();++i) {
841 addr[i] = coords[i].second*width+coords[i].first;
851 * @param scale Scale to be resized to
// Returns this template resized to new_scale. Results are memoized in
// scaled_templates (owned by this object); callers must not delete the
// returned pointer. Float scales are compared with a 1e-6 tolerance.
853 ChamferMatcher::Template* ChamferMatcher::Template::rescale(float new_scale)
856 if (fabs(scale-new_scale)<1e-6) return this;
// Cache hit: reuse a previously built copy at (almost) the same scale.
858 for (size_t i=0;i<scaled_templates.size();++i) {
859 if (fabs(scaled_templates[i]->scale-new_scale)<1e-6) {
860 return scaled_templates[i];
864 float scale_factor = new_scale/scale;
866 Template* tpl = new Template();
867 tpl->scale = new_scale;
// +0.5 before int() rounds to nearest (values here are non-negative for
// center/size; coords can be negative — truncation then rounds toward zero).
869 tpl->center.x = int(center.x*scale_factor+0.5);
870 tpl->center.y = int(center.y*scale_factor+0.5);
872 tpl->size.width = int(size.width*scale_factor+0.5);
873 tpl->size.height = int(size.height*scale_factor+0.5);
875 tpl->coords.resize(coords.size());
876 tpl->orientations.resize(orientations.size());
877 for (size_t i=0;i<coords.size();++i) {
878 tpl->coords[i].first = int(coords[i].first*scale_factor+0.5);
879 tpl->coords[i].second = int(coords[i].second*scale_factor+0.5);
// Orientations are scale-invariant, so they are copied unchanged.
880 tpl->orientations[i] = orientations[i];
882 scaled_templates.push_back(tpl);
// Debug visualization: draws the template contour (green), per-point
// orientation ticks (red) and the center (green circle) in a padded canvas,
// shown via highgui when available.
890 void ChamferMatcher::Template::show() const
893 //Attention size is not correct
894 Mat templ_color (Size(size.width+(pad*2), size.height+(pad*2)), CV_8UC3);
895 templ_color.setTo(0);
897 for (size_t i=0;i<coords.size();++i) {
// coords are centroid-relative; shift back by center and pad for drawing.
899 int x = center.x+coords[i].first+pad;
900 int y = center.y+coords[i].second+pad;
901 templ_color.at<Vec3b>(y,x)[1]=255;
902 //CV_PIXEL(unsigned char, templ_color,x,y)[1] = 255;
// Orientations below -pi are the "invalid" sentinel (-3*pi) — skipped.
905 if (orientations[i] < -CV_PI) {
// NOTE(review): sin used for x and cos for y draws the tick perpendicular
// to the usual convention — presumably intentional (normal direction);
// confirm before "fixing".
912 p2.x = x + pad*(int)(sin(orientations[i])*100)/100;
913 p2.y = y + pad*(int)(cos(orientations[i])*100)/100;
915 line(templ_color, p1,p2, CV_RGB(255,0,0));
919 circle(templ_color,Point(center.x + pad, center.y + pad),1,CV_RGB(0,255,0));
921 #ifdef HAVE_OPENCV_HIGHGUI
922 namedWindow("templ",1);
923 imshow("templ",templ_color);
927 CV_Error(CV_StsNotImplemented, "OpenCV has been compiled without GUI support");
// Explicit release is redundant (Mat releases in its destructor) but harmless.
930 templ_color.release();
934 //////////////////////// Matching /////////////////////////////////////
// Builds an owned Template from the edge image and stores it.
// NOTE(review): the Template ctor destructively consumes `templ`'s edge
// pixels (via findContour) — callers should pass a clone if they need it.
937 void ChamferMatcher::Matching::addTemplateFromImage(Mat& templ, float scale)
939 Template* cmt = new Template(templ, scale);
941 templates.push_back(cmt);
// NOTE(review): stores a pointer to a caller-owned Template in the same
// vector as internally-owned ones — if the destructor deletes all entries,
// this is a double-free hazard; confirm ownership contract.
945 void ChamferMatcher::Matching::addTemplate(Template& template_){
947 templates.push_back(&template_);
950 * Alternative version of computeDistanceTransform, will probably be used to compute distance
951 * transform annotated with edge orientation.
// BFS (chamfer) distance transform: seeds the queue with edge pixels at
// distance 0 and propagates with cost `a` for axial and `b` for diagonal
// steps. annotate_img records, per pixel, the (x,y) of its nearest edge.
// NOTE(review): default arguments on an out-of-class definition (a, b) —
// defaults belong on the declaration (line 292 lists none); most compilers
// reject this placement.
953 void ChamferMatcher::Matching::computeDistanceTransform(Mat& edges_img, Mat& dist_img, Mat& annotate_img, float truncate_dt, float a = 1.0, float b = 1.5)
955 int d[][2] = { {-1,-1}, { 0,-1}, { 1,-1},
957 {-1,1}, { 0,1}, { 1,1} };
960 Size s = edges_img.size();
963 // set distance to the edge pixels to 0 and put them in the queue
964 std::queue<std::pair<int,int> > q;
966 for (int y=0;y<h;++y) {
967 for (int x=0;x<w;++x) {
// NOTE(review): `&annotate_img != NULL` takes the address of a reference,
// which can never be null — the condition is always true (and binding a
// reference to *NULL upstream would be UB anyway). Should use a pointer
// parameter or cv::Mat::empty() to express "optional annotation".
969 if (&annotate_img!=NULL) {
970 annotate_img.at<Vec2i>(y,x)[0]=x;
971 annotate_img.at<Vec2i>(y,x)[1]=y;
974 uchar edge_val = edges_img.at<uchar>(y,x);
975 if( (edge_val!=0) ) {
976 q.push(std::make_pair(x,y));
977 dist_img.at<float>(y,x)= 0;
// -1 marks "not yet reached" (distances are otherwise >= 0).
980 dist_img.at<float>(y,x)=-1;
985 // breadth first computation of distance transform
986 std::pair<int,int> crt;
994 float dist_orig = dist_img.at<float>(y,x);
997 for (size_t i=0;i<sizeof(d)/sizeof(d[0]);++i) {
998 int nx = x + d[i][0];
999 int ny = y + d[i][1];
1001 if (nx<0 || ny<0 || nx>=w || ny>=h) continue;
// |dx+dy|==1 holds exactly for the four axial neighbors (diagonals sum to
// 0 or +/-2), selecting cost a vs. b.
1003 if (std::abs(d[i][0]+d[i][1])==1) {
1004 dist = (dist_orig)+a;
1007 dist = (dist_orig)+b;
// Relax: overwrite unreached (-1) or longer paths and re-enqueue.
1010 float dt = dist_img.at<float>(ny,nx);
1012 if (dt==-1 || dt>dist) {
1013 dist_img.at<float>(ny,nx) = dist;
1014 q.push(std::make_pair(nx,ny));
// Same always-true reference-address check as line 969.
1016 if (&annotate_img!=NULL) {
// Propagate the nearest-edge coordinates from the parent pixel.
1017 annotate_img.at<Vec2i>(ny,nx)[0]=annotate_img.at<Vec2i>(y,x)[0];
1018 annotate_img.at<Vec2i>(ny,nx)[1]=annotate_img.at<Vec2i>(y,x)[1];
// Clamp distances to truncate_dt (THRESH_TRUNC caps values above threshold).
1025 if (truncate_dt>0) {
1026 Mat dist_img_thr = dist_img.clone();
1027 threshold(dist_img, dist_img_thr, truncate_dt,0.0 ,THRESH_TRUNC);
1028 dist_img_thr.copyTo(dist_img);
// Rasterizes per-pixel edge orientations: extracts contours from edge_img
// (destructively), estimates each point's tangent, and writes it into
// orientation_img. Non-contour pixels keep the -3*pi invalid sentinel.
1033 void ChamferMatcher::Matching::computeEdgeOrientations(Mat& edge_img, Mat& orientation_img)
// NOTE(review): contour_img is allocated but never initialized before the
// scattered writes at line 1052 — untouched pixels hold garbage; only used
// for the (commented-out) debug dump, so harmless but sloppy.
1035 Mat contour_img(edge_img.size(), CV_8UC1);
1037 orientation_img.setTo(3*(-CV_PI));
1038 template_coords_t coords;
1039 template_orientations_t orientations;
1041 while (ChamferMatcher::Matching::findContour(edge_img, coords)) {
1043 ChamferMatcher::Matching::findContourOrientations(coords, orientations);
1045 // set orientation pixel in orientation image
1046 for (size_t i = 0; i<coords.size();++i) {
1047 int x = coords[i].first;
1048 int y = coords[i].second;
1049 // if (orientations[i]>-CV_PI)
1051 //CV_PIXEL(unsigned char, contour_img, x, y)[0] = 255;
1052 contour_img.at<uchar>(y,x)=255;
1054 //CV_PIXEL(float, orientation_img, x, y)[0] = orientations[i];
1055 orientation_img.at<float>(y,x)=orientations[i];
1060 orientations.clear();
1063 //imwrite("contours.pgm", contour_img);
// Propagates each edge pixel's orientation to every non-edge pixel: the
// annotated image (from computeDistanceTransform) stores the nearest edge
// point (xorig, yorig) for every pixel, so each pixel simply copies its
// nearest edge's orientation.
1067 void ChamferMatcher::Matching::fillNonContourOrientations(Mat& annotated_img, Mat& orientation_img)
1069 int cols = annotated_img.cols;
1070 int rows = annotated_img.rows;
1072 assert(orientation_img.cols==cols && orientation_img.rows==rows);
1074 for (int y=0;y<rows;++y) {
1075 for (int x=0;x<cols;++x) {
1076 int xorig = annotated_img.at<Vec2i>(y,x)[0];
1077 int yorig = annotated_img.at<Vec2i>(y,x)[1];
// Skip edge pixels themselves (they point at their own coordinates).
1079 if (x!=xorig || y!=yorig) {
1080 //orientation_img.at<float>(yorig,xorig)=orientation_img.at<float>(y,x);
1081 orientation_img.at<float>(y,x)=orientation_img.at<float>(yorig,xorig);
// Cost of placing `tpl` at `offset`: mean truncated distance-transform value
// under the template points, optionally blended with the mean orientation
// difference: cost = (1-alpha)*dist_cost + alpha*orient_cost.
// Returns a heap-allocated Match the caller owns.
1088 ChamferMatcher::Match* ChamferMatcher::Matching::localChamferDistance(Point offset, Mat& dist_img, Mat& orientation_img,
1089 ChamferMatcher::Template* tpl, float alpha)
1094 float beta = 1-alpha;
1096 std::vector<int>& addr = tpl->getTemplateAddresses(dist_img.cols);
// Row pointer at the offset; template addresses are added as flat offsets.
1098 float* ptr = dist_img.ptr<float>(y)+x;
1101 float sum_distance = 0;
// NOTE(review): only the upper bound is checked — a template point above/left
// of the image (negative addr[i] + offset) would read out of bounds; the
// caller's bounds pre-check (lines 1160-1161) presumably prevents this, but
// the guard here is one-sided.
1102 for (size_t i=0; i<addr.size();++i) {
1103 if(addr[i] < (dist_img.cols*dist_img.rows) - (offset.y*dist_img.cols + offset.x)){
1104 sum_distance += *(ptr+addr[i]);
1108 float cost = (sum_distance/truncate_)/addr.size();
// NOTE(review): `&orientation_img != NULL` — address of a reference is never
// null, so this branch is always taken; an empty-Mat or pointer check was
// probably intended (same defect as line 969).
1111 if (&orientation_img!=NULL) {
1112 float* optr = orientation_img.ptr<float>(y)+x;
1113 float sum_orientation = 0;
1114 int cnt_orientation = 0;
1116 for (size_t i=0;i<addr.size();++i) {
1118 if(addr[i] < (orientation_img.cols*orientation_img.rows) - (offset.y*orientation_img.cols + offset.x)){
// Only pairs where both template and image orientations are valid
// (>= -pi, i.e. not the -3*pi sentinel) contribute.
1119 if (tpl->orientations[i]>=-CV_PI && (*(optr+addr[i]))>=-CV_PI) {
1120 sum_orientation += orientation_diff(tpl->orientations[i], (*(optr+addr[i])));
1126 if (cnt_orientation>0) {
// Orientation differences are normalized by 2*pi before blending.
1127 cost = (float)(beta*cost+alpha*(sum_orientation/(2*CV_PI))/cnt_orientation);
1133 ChamferMatcher::Match* istance = new ChamferMatcher::Match();
1134 istance->cost = cost;
1135 istance->offset = offset;
// Evaluates every template at every (location, scale) produced by `range`;
// returns a heap-allocated Matches vector the caller owns.
1145 ChamferMatcher::Matches* ChamferMatcher::Matching::matchTemplates(Mat& dist_img, Mat& orientation_img, const ImageRange& range, float _orientation_weight)
1148 ChamferMatcher::Matches* pmatches(new Matches());
1149 // try each template
1150 for(size_t i = 0; i < templates.size(); i++) {
// NOTE(review): `it` is heap-allocated each template iteration; its delete
// is not visible in this view — confirm it is freed after the while loop.
1151 ImageIterator* it = range.iterator();
1152 while (it->hasNext()) {
1153 location_scale_t crt = it->next();
1155 Point loc = crt.first;
1156 float scale = crt.second;
// rescale() returns a cached pointer owned by the template — do not free.
1157 Template* tpl = templates[i]->rescale(scale);
// Reject placements where the template would poke outside the image.
// NOTE(review): asymmetric test — left/top use center, right/bottom use
// size/2; combined with localChamferDistance's one-sided address check,
// confirm this really excludes all out-of-range template points.
1160 if (loc.x-tpl->center.x<0 || loc.x+tpl->size.width/2>=dist_img.cols) continue;
1161 if (loc.y-tpl->center.y<0 || loc.y+tpl->size.height/2>=dist_img.rows) continue;
1163 ChamferMatcher::Match* is = localChamferDistance(loc, dist_img, orientation_img, tpl, _orientation_weight);
// Match is copied into the vector; the heap `is` must be deleted after this
// (the delete is outside this sampled view — confirm, else it leaks per call).
1166 pmatches->push_back(*is);
1179 * Run matching using an edge image.
1180 * @param edge_img Edge image
1181 * @return a match object
// Pipeline entry: builds the distance transform (+ nearest-edge annotation),
// optionally the dense orientation image, then delegates to matchTemplates.
// max_matches / min_match_distance parameters are accepted but unused here
// (filtering happens in ChamferMatcher::addMatch instead).
1183 ChamferMatcher::Matches* ChamferMatcher::Matching::matchEdgeImage(Mat& edge_img, const ImageRange& range,
1184 float _orientation_weight, int /*max_matches*/, float /*min_match_distance*/)
1186 CV_Assert(edge_img.channels()==1);
1190 Mat orientation_img;
// CV_32SC2 per pixel: (x, y) of the nearest edge point.
1192 annotated_img.create(edge_img.size(), CV_32SC2);
1193 dist_img.create(edge_img.size(),CV_32FC1);
1195 // Computing distance transform
1196 computeDistanceTransform(edge_img,dist_img, annotated_img, truncate_);
1199 //orientation_img = NULL;
1200 if (use_orientation_) {
1201 orientation_img.create(edge_img.size(), CV_32FC1);
1202 orientation_img.setTo(0);
// Work on a clone: computeEdgeOrientations destroys its input edge image.
1203 Mat edge_clone = edge_img.clone();
1204 computeEdgeOrientations(edge_clone, orientation_img );
1205 edge_clone.release();
1206 fillNonContourOrientations(annotated_img, orientation_img);
1210 // Template matching
1211 ChamferMatcher::Matches* pmatches = matchTemplates( dist_img,
1214 _orientation_weight);
// Explicit releases are redundant (Mat destructors handle this) but harmless.
1217 if (use_orientation_) {
1218 orientation_img.release();
1221 annotated_img.release();
// Inserts a candidate match into the fixed-size, cost-sorted `matches`
// buffer with non-maximum suppression: candidates within min_match_distance_
// (L1 metric on offsets) of an existing match replace it only if cheaper.
1227 void ChamferMatcher::addMatch(float cost, Point offset, const Template* tpl)
1229 bool new_match = true;
1230 for (int i=0; i<count; ++i) {
// L1 (Manhattan) distance between the offsets defines "too close".
1231 if (std::abs(matches[i].offset.x-offset.x)+std::abs(matches[i].offset.y-offset.y)<min_match_distance_) {
1232 // too close, not a new match
1234 // if better cost, replace existing match
1235 if (cost<matches[i].cost) {
1236 matches[i].cost = cost;
1237 matches[i].offset = offset;
1238 matches[i].tpl = tpl;
1240 // re-bubble to keep ordered
// After lowering a cost, bubble the entry toward the front to restore the
// ascending-cost invariant.
1243 if (matches[k-1].cost>matches[k].cost) {
1244 std::swap(matches[k-1],matches[k]);
1254 // if we don't have enough matches yet, add it to the array
1255 if (count<max_matches_) {
1256 matches[count].cost = cost;
1257 matches[count].offset = offset;
1258 matches[count].tpl = tpl;
1261 // otherwise find the right position to insert it
1263 // if higher cost than the worst current match, just ignore it
1264 if (matches[count-1].cost<cost) {
1269 // skip all matches better than current one
1270 while (matches[j].cost<cost) j++;
1272 // shift matches one position
1275 matches[k+1] = matches[k];
1279 matches[j].cost = cost;
1280 matches[j].offset = offset;
1281 matches[j].tpl = tpl;
// Paints the index-th stored match's template contour in green onto `img`
// (must be a 3-channel image). Out-of-bounds points are skipped.
1286 void ChamferMatcher::showMatch(Mat& img, int index)
// NOTE(review): message goes to stdout and execution presumably returns
// early (the branch body is sampled out); std::cerr would be more appropriate.
1289 std::cout << "Index too big.\n" << std::endl;
1292 assert(img.channels()==3);
1294 Match match = matches[index];
1296 const template_coords_t& templ_coords = match.tpl->coords;
1298 for (size_t i=0;i<templ_coords.size();++i) {
1299 x = match.offset.x + templ_coords[i].first;
1300 y = match.offset.y + templ_coords[i].second;
1302 if ( x > img.cols-1 || x < 0 || y > img.rows-1 || y < 0) continue;
// BGR: zero blue/red, saturate green.
1303 img.at<Vec3b>(y,x)[0]=0;
1304 img.at<Vec3b>(y,x)[2]=0;
1305 img.at<Vec3b>(y,x)[1]=255;
// Overload taking the Match by value; same green-contour rendering as the
// index-based variant above.
1309 void ChamferMatcher::showMatch(Mat& img, Match match)
1311 assert(img.channels()==3);
1313 const template_coords_t& templ_coords = match.tpl->coords;
1314 for (size_t i=0;i<templ_coords.size();++i) {
1315 int x = match.offset.x + templ_coords[i].first;
1316 int y = match.offset.y + templ_coords[i].second;
1317 if ( x > img.cols-1 || x < 0 || y > img.rows-1 || y < 0) continue;
1318 img.at<Vec3b>(y,x)[0]=0;
1319 img.at<Vec3b>(y,x)[2]=0;
1320 img.at<Vec3b>(y,x)[1]=255;
// High-level convenience: registers `tpl` (caller-owned — see addTemplate
// ownership note), runs a sliding-window match over image_, funnels raw
// matches through addMatch() for NMS/ranking, and returns the internal
// `matches` buffer trimmed to the surviving count.
1325 const ChamferMatcher::Matches& ChamferMatcher::matching(Template& tpl, Mat& image_){
1326 chamfer_->addTemplate(tpl);
1329 matches.resize(max_matches_);
// NOTE(review): matchEdgeImage returns an owned heap vector; its delete is
// outside this sampled view — confirm `matches_` is freed, else it leaks.
1333 Matches* matches_ = chamfer_->matchEdgeImage( image_,
1335 SlidingWindowImageRange(image_.cols,
1344 min_match_distance_);
1348 for(int i = 0; i < (int)matches_->size(); i++){
1349 addMatch(matches_->at(i).cost, matches_->at(i).offset, matches_->at(i).tpl);
1356 matches.resize(count);
// Public C-style API (note: "chamerMatching" spelling is the established
// OpenCV symbol — do not rename). Runs chamfer matching of `templ` against
// `img` (both 8-bit single-channel edge images), returning per-match contour
// point lists in `results` and costs in `costs`.
// NOTE(review): both img and templ are consumed destructively downstream
// (contour extraction erases pixels) — callers should pass clones if they
// still need the originals; confirm against the Template ctor and
// matchEdgeImage.
1364 int chamerMatching( Mat& img, Mat& templ,
1365 std::vector<std::vector<Point> >& results, std::vector<float>& costs,
1366 double templScale, int maxMatches, double minMatchDistance, int padX,
1367 int padY, int scales, double minScale, double maxScale,
1368 double orientationWeight, double truncate )
1370 CV_Assert(img.type() == CV_8UC1 && templ.type() == CV_8UC1);
1372 ChamferMatcher matcher_(maxMatches, (float)minMatchDistance, padX, padY, scales,
1373 (float)minScale, (float)maxScale,
1374 (float)orientationWeight, (float)truncate);
1376 ChamferMatcher::Template template_(templ, (float)templScale);
1377 ChamferMatcher::Matches match_instances = matcher_.matching(template_, img);
1379 size_t i, nmatches = match_instances.size();
1381 results.resize(nmatches);
1382 costs.resize(nmatches);
// Track the minimum cost (presumably to return the best match's index —
// the return statement is outside this sampled view).
1385 double minCost = DBL_MAX;
1387 for( i = 0; i < nmatches; i++ )
1389 const ChamferMatcher::Match& match = match_instances[i];
1390 double cval = match.cost;
1396 costs[i] = (float)cval;
// Reconstruct absolute pixel coordinates from centroid-relative template
// coords plus the match offset.
1398 const template_coords_t& templ_coords = match.tpl->coords;
1399 std::vector<Point>& templPoints = results[i];
1400 size_t j, npoints = templ_coords.size();
1401 templPoints.resize(npoints);
1403 for (j = 0; j < npoints; j++ )
1405 int x = match.offset.x + templ_coords[j].first;
1406 int y = match.offset.y + templ_coords[j].second;
1407 templPoints[j] = Point(x,y);