1 /*********************************************************************
2 * Software License Agreement (BSD License)
4 * Copyright (c) 2008-2010, Willow Garage, Inc.
7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions
11 * * Redistributions of source code must retain the above copyright
12 * notice, this list of conditions and the following disclaimer.
13 * * Redistributions in binary form must reproduce the above
14 * copyright notice, this list of conditions and the following
15 * disclaimer in the documentation and/or other materials provided
16 * with the distribution.
17 * * Neither the name of the Willow Garage nor the names of its
18 * contributors may be used to endorse or promote products derived
19 * from this software without specific prior written permission.
21 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
22 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
23 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
24 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
25 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
26 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
27 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
28 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
29 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
30 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
31 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
32 * POSSIBILITY OF SUCH DAMAGE.
33 *********************************************************************/
36 // The original code was written by
38 // and later modified and prepared
39 // for integration into OpenCV by
40 // Antonella Cascitelli,
41 // Marco Di Stefano and
46 #include "precomp.hpp"
47 #include "opencv2/opencv_modules.hpp"
48 #ifdef HAVE_OPENCV_HIGHGUI
49 # include "opencv2/highgui.hpp"
// Basic vocabulary types for chamfer matching: a contour point (x,y),
// its edge orientation in radians, and collections of both.
57 typedef std::pair<int,int> coordinate_t;
58 typedef float orientation_t;
59 typedef std::vector<coordinate_t> template_coords_t;
60 typedef std::vector<orientation_t> template_orientations_t;
// One match candidate: an image location plus the template scale to try there.
61 typedef std::pair<Point, float> location_scale_t;
// Minimum pixel distance required between two reported matches (see addMatch).
69 float min_match_distance_;
71 ///////////////////////// Image iterators ////////////////////////////
// Abstract iterator producing (location, scale) candidates to evaluate.
76 virtual ~ImageIterator() {}
77 virtual bool hasNext() const = 0;
78 virtual location_scale_t next() = 0;
// Abstract factory for an ImageIterator. Implementations return a
// heap-allocated iterator (see the `new ...Iterator` in iterator() bodies);
// NOTE(review): ownership/deletion of the returned iterator is not visible
// here — confirm the caller deletes it.
84 virtual ImageIterator* iterator() const = 0;
85 virtual ~ImageRange() {}
// Range that scans the whole image with a sliding window over a set of
// scales (width/height of the image, step sizes, scale count and bounds).
90 class SlidingWindowImageRange : public ImageRange
101 SlidingWindowImageRange(int width, int height, int x_step = 3, int y_step = 3, int _scales = 5, float min_scale = 0.6, float max_scale = 1.6) :
102 width_(width), height_(height), x_step_(x_step),y_step_(y_step), scales_(_scales), min_scale_(min_scale), max_scale_(max_scale)
107 ImageIterator* iterator() const;
// Range restricted to an explicit list of candidate locations, each tried
// at `_scales` scales between min_scale and max_scale. Holds the location
// vector by reference — the caller must keep it alive.
110 class LocationImageRange : public ImageRange
112 const std::vector<Point>& locations_;
// Copy construction/assignment declared (private) but not defined: the
// class is intentionally non-copyable because of the reference member.
118 LocationImageRange(const LocationImageRange&);
119 LocationImageRange& operator=(const LocationImageRange&);
122 LocationImageRange(const std::vector<Point>& locations, int _scales = 5, float min_scale = 0.6, float max_scale = 1.6) :
123 locations_(locations), scales_(_scales), min_scale_(min_scale), max_scale_(max_scale)
127 ImageIterator* iterator() const
129 return new LocationImageIterator(locations_, scales_, min_scale_, max_scale_);
// Range with one explicit (location, scale) pair per entry; both vectors
// are held by reference and must stay alive and be the same length.
134 class LocationScaleImageRange : public ImageRange
136 const std::vector<Point>& locations_;
137 const std::vector<float>& scales_;
// Non-copyable (reference members), same pattern as LocationImageRange.
139 LocationScaleImageRange(const LocationScaleImageRange&);
140 LocationScaleImageRange& operator=(const LocationScaleImageRange&);
142 LocationScaleImageRange(const std::vector<Point>& locations, const std::vector<float>& _scales) :
143 locations_(locations), scales_(_scales)
145 CV_Assert(locations.size()==_scales.size());
148 ImageIterator* iterator() const
150 return new LocationScaleImageIterator(locations_, scales_);
159 * Class that represents a template for chamfer matching.
163 friend class ChamferMatcher::Matching;
164 friend class ChamferMatcher;
// Cache of this template resized to other scales (see rescale()); entries
// are owned by this object and deleted in the destructor below.
168 std::vector<Template*> scaled_templates;
// Per-point linear offsets into a row-major image of width addr_width,
// computed lazily by getTemplateAddresses().
169 std::vector<int> addr;
// Contour points (centered on the template center) and their orientations.
172 template_coords_t coords;
174 template_orientations_t orientations;
// addr_width == -1 marks the address cache as not yet computed.
179 Template() : addr_width(-1)
183 Template(Mat& edge_image, float scale_ = 1);
// Destructor body fragment: free the cached scaled copies.
187 for (size_t i=0;i<scaled_templates.size();++i) {
188 delete scaled_templates[i];
190 scaled_templates.clear();
192 orientations.clear();
202 * @param scale Scale to be resized to
204 Template* rescale(float scale);
206 std::vector<int>& getTemplateAddresses(int width);
212 * Used to represent a matching result.
223 typedef std::vector<Match> Matches;
227 * Implements the chamfer matching algorithm on images taking into account both distance from
228 * the template pixels to the nearest pixels and orientation alignment between template and image
// When true, matching cost blends distance with orientation difference.
234 bool use_orientation_;
// Templates to match. NOTE(review): both addTemplate (caller-owned) and
// addTemplateFromImage (heap-allocated here) push into this vector; the
// destructor loop below appears to free entries — confirm ownership rules.
236 std::vector<Template*> templates;
238 Matching(bool use_orientation = true, float _truncate = 10) : truncate_(_truncate), use_orientation_(use_orientation)
// Destructor fragment: iterate owned templates for cleanup.
244 for (size_t i = 0; i<templates.size(); i++) {
250 * Add a template to the detector from an edge image.
251 * @param templ An edge image
253 void addTemplateFromImage(Mat& templ, float scale = 1.0);
256 * Run matching using an edge image.
257 * @param edge_img Edge image
258 * @return a match object
260 ChamferMatcher::Matches* matchEdgeImage(Mat& edge_img, const ImageRange& range, float orientation_weight = 0.5, int max_matches = 20, float min_match_distance = 10.0);
262 void addTemplate(Template& template_);
// Absolute angular difference between two orientations (helper for cost).
266 float orientation_diff(float o1, float o2)
272 * Computes the chamfer matching cost for one position in the target image.
273 * @param offset Offset where to compute cost
274 * @param dist_img Distance transform image.
275 * @param orientation_img Orientation image.
276 * @param tpl Template
277 * @param templ_orientations Orientations of the target points.
278 * @return matching result
280 ChamferMatcher::Match* localChamferDistance(Point offset, Mat& dist_img, Mat& orientation_img, Template* tpl, float orientation_weight);
284 * Matches all templates.
285 * @param dist_img Distance transform image.
286 * @param orientation_img Orientation image.
288 ChamferMatcher::Matches* matchTemplates(Mat& dist_img, Mat& orientation_img, const ImageRange& range, float orientation_weight);
290 void computeDistanceTransform(Mat& edges_img, Mat& dist_img, Mat& annotate_img, float truncate_dt, float a, float b);
291 void computeEdgeOrientations(Mat& edge_img, Mat& orientation_img);
292 void fillNonContourOrientations(Mat& annotated_img, Mat& orientation_img);
297 * Finds a contour in an edge image. The original image is altered by removing the found contour.
298 * @param templ_img Edge image
299 * @param coords Coordinates forming the contour.
300 * @return True while a contour is still found in the image.
302 static bool findContour(Mat& templ_img, template_coords_t& coords);
305 * Computes contour points orientations using the approach from:
307 * Matas, Shao and Kittler - Estimation of Curvature and Tangent Direction by
308 * Median Filtered Differencing
310 * @param coords Contour points
311 * @param orientations Contour points orientations
313 static void findContourOrientations(const template_coords_t& coords, template_orientations_t& orientations);
317 * Computes the angle of a line segment.
319 * @param a One end of the line segment
320 * @param b The other end.
323 * @return Angle in radians.
325 static float getAngle(coordinate_t a, coordinate_t b, int& dx, int& dy);
328 * Finds a point in the image from which to start contour following.
334 static bool findFirstContourPoint(Mat& templ_img, coordinate_t& p);
336 * Method that extracts a single continuous contour from an image given a starting point.
337 * When it extracts the contour it tries to maintain the same direction (at a T-join for example).
343 static void followContour(Mat& templ_img, template_coords_t& coords, int direction);
// Iterator over an explicit list of locations, cycling each location
// through a set of scales. Holds the vector by reference (non-copyable).
351 class LocationImageIterator : public ImageIterator
353 const std::vector<Point>& locations_;
// Declared private and left undefined: disables copying (pre-C++11 idiom).
367 LocationImageIterator(const LocationImageIterator&);
368 LocationImageIterator& operator=(const LocationImageIterator&);
371 LocationImageIterator(const std::vector<Point>& locations, int _scales, float min_scale, float max_scale);
373 bool hasNext() const {
377 location_scale_t next();
// Iterator over paired (location, scale) vectors of equal length.
380 class LocationScaleImageIterator : public ImageIterator
382 const std::vector<Point>& locations_;
383 const std::vector<float>& scales_;
389 LocationScaleImageIterator(const LocationScaleImageIterator&);
390 LocationScaleImageIterator& operator=(const LocationScaleImageIterator&);
393 LocationScaleImageIterator(const std::vector<Point>& locations, const std::vector<float>& _scales) :
394 locations_(locations), scales_(_scales)
396 CV_Assert(locations.size()==_scales.size());
// There is something to iterate only when the location list is non-empty.
403 has_next_ = (locations_.size()==0 ? false : true);
406 bool hasNext() const {
410 location_scale_t next();
// Iterator that slides a window over the whole image in x/y steps,
// repeating the scan for each scale in [min_scale, max_scale].
413 class SlidingWindowImageIterator : public ImageIterator
434 SlidingWindowImageIterator(int width, int height, int x_step, int y_step, int scales, float min_scale, float max_scale);
436 bool hasNext() const {
440 location_scale_t next();
// Weight of the orientation term vs. the distance term in the match cost.
453 float orientation_weight;
458 ChamferMatcher(int _max_matches = 20, float _min_match_distance = 1.0, int _pad_x = 3,
459 int _pad_y = 3, int _scales = 5, float _minScale = 0.6, float _maxScale = 1.6,
460 float _orientation_weight = 0.5, float _truncate = 20)
462 max_matches_ = _max_matches;
463 min_match_distance_ = _min_match_distance;
467 minScale = _minScale;
468 maxScale = _maxScale;
469 orientation_weight = _orientation_weight;
470 truncate = _truncate;
// Pre-size the bounded match list; `count` (elsewhere) tracks how many
// slots are actually filled.
473 matches.resize(max_matches_);
// Owned Matching engine, created with orientation matching enabled.
474 chamfer_ = new Matching(true);
// Draw the index-th stored match (default: the best one) into img.
482 void showMatch(Mat& img, int index = 0);
483 void showMatch(Mat& img, Match match_);
// Run the full pipeline for one template against one edge image.
485 const Matches& matching(Template&, Mat&);
// Non-copyable: private, undefined copy operations (pre-C++11 idiom).
488 ChamferMatcher(const ChamferMatcher&);
489 ChamferMatcher& operator=(const ChamferMatcher&);
490 void addMatch(float cost, Point offset, const Template* tpl);
496 ///////////////////// implementation ///////////////////////////
498 ChamferMatcher::SlidingWindowImageIterator::SlidingWindowImageIterator( int width,
503 float min_scale = 0.6,
504 float max_scale = 1.6) :
511 min_scale_(min_scale),
512 max_scale_(max_scale)
// Scale increment so that `scales_` steps span [min_scale_, max_scale_).
519 scale_step_ = (max_scale_-min_scale_)/scales_;
// Return current (position, scale), then advance the window; when the
// image is exhausted, bump the scale and restart the scan.
522 location_scale_t ChamferMatcher::SlidingWindowImageIterator::next()
524 location_scale_t next_val = std::make_pair(Point(x_,y_),scale_);
534 scale_ += scale_step_;
537 if (scale_cnt_ == scales_) {
550 ChamferMatcher::ImageIterator* ChamferMatcher::SlidingWindowImageRange::iterator() const
552 return new SlidingWindowImageIterator(width_, height_, x_step_, y_step_, scales_, min_scale_, max_scale_);
557 ChamferMatcher::LocationImageIterator::LocationImageIterator(const std::vector<Point>& locations,
559 float min_scale = 0.6,
560 float max_scale = 1.6) :
561 locations_(locations),
563 min_scale_(min_scale),
564 max_scale_(max_scale)
569 has_next_ = (locations_.size()==0 ? false : true);
570 scale_step_ = (max_scale_-min_scale_)/scales_;
// Return current location at the current scale; after one full pass over
// the locations, move to the next scale.
573 location_scale_t ChamferMatcher::LocationImageIterator:: next()
575 location_scale_t next_val = std::make_pair(locations_[iter_],scale_);
578 if (iter_==locations_.size()) {
580 scale_ += scale_step_;
583 if (scale_cnt_ == scales_) {
// Paired iteration: each location comes with its own explicit scale.
594 location_scale_t ChamferMatcher::LocationScaleImageIterator::next()
596 location_scale_t next_val = std::make_pair(locations_[iter_],scales_[iter_]);
599 if (iter_==locations_.size()) {
// Row-major scan for the first non-zero (edge) pixel; used as the seed
// point for contour following.
610 bool ChamferMatcher::Matching::findFirstContourPoint(Mat& templ_img, coordinate_t& p)
612 for (int y=0;y<templ_img.rows;++y) {
613 for (int x=0;x<templ_img.cols;++x) {
614 if (templ_img.at<uchar>(y,x)!=0) {
// Walk a contour from coords.back(), erasing visited pixels so each
// contour is extracted once. direction == -1 means "no preferred
// direction" (fresh start); otherwise the walk prefers to keep heading
// the same way (e.g. across a T-junction).
626 void ChamferMatcher::Matching::followContour(Mat& templ_img, template_coords_t& coords, int direction = -1)
// 8-neighborhood offsets stored as {dy, dx} — note below that [1] is
// added to x and [0] to y.
628 const int dir[][2] = { {-1,-1}, {-1,0}, {-1,1}, {0,1}, {1,1}, {1,0}, {1,-1}, {0,-1} };
632 CV_Assert (direction==-1 || !coords.empty());
634 coordinate_t crt = coords.back();
636 // mark the current pixel as visited
637 templ_img.at<uchar>(crt.second,crt.first) = 0;
// Fresh start: probe neighbors for any edge pixel, then follow the
// contour both ways (reverse + opposite direction (j+4)%8).
// NOTE(review): j runs 0..6, so neighbor index 7 ({0,-1}) is never
// probed here — confirm whether this is intentional.
639 for (int j = 0; j<7; ++j) {
640 next.first = crt.first + dir[j][1];
641 next.second = crt.second + dir[j][0];
642 if (next.first >= 0 && next.first < templ_img.cols &&
643 next.second >= 0 && next.second < templ_img.rows){
644 ptr = templ_img.at<uchar>(next.second, next.first);
646 coords.push_back(next);
647 followContour(templ_img, coords,j);
648 // try to continue contour in the other direction
649 reverse(coords.begin(), coords.end());
650 followContour(templ_img, coords, (j+4)%8);
// Directed continuation: first try straight ahead (index k), using a
// step cost of |dy|+|dx| to prefer orthogonal moves.
659 next.first = crt.first + dir[k][1];
660 next.second = crt.second + dir[k][0];
661 if (next.first >= 0 && next.first < templ_img.cols &&
662 next.second >= 0 && next.second < templ_img.rows){
663 ptr = templ_img.at<uchar>(next.second, next.first);
665 k_cost = std::abs(dir[k][1]) + std::abs(dir[k][0]);
// Then probe the directions adjacent to the preferred one (p/n below),
// keeping whichever continuation has the lowest cost.
670 for (int j = 0 ;j<3; ++j) {
673 next.first = crt.first + dir[p][1];
674 next.second = crt.second + dir[p][0];
675 if (next.first >= 0 && next.first < templ_img.cols &&
676 next.second >= 0 && next.second < templ_img.rows){
677 ptr = templ_img.at<uchar>(next.second, next.first);
679 int p_cost = std::abs(dir[p][1]) + std::abs(dir[p][0]);
685 next.first = crt.first + dir[n][1];
686 next.second = crt.second + dir[n][0];
687 if (next.first >= 0 && next.first < templ_img.cols &&
688 next.second >= 0 && next.second < templ_img.rows){
689 ptr = templ_img.at<uchar>(next.second, next.first);
691 int n_cost = std::abs(dir[n][1]) + std::abs(dir[n][0]);
// Recurse on the chosen direction k to continue the contour.
702 next.first = crt.first + dir[k][1];
703 next.second = crt.second + dir[k][0];
704 if (next.first >= 0 && next.first < templ_img.cols &&
705 next.second >= 0 && next.second < templ_img.rows) {
706 coords.push_back(next);
707 followContour(templ_img, coords, k);
// Extract one contour: seed at the first edge pixel, then follow it.
// Consumes the contour from templ_img (followContour zeroes visited
// pixels), so repeated calls enumerate all contours until none remain.
715 bool ChamferMatcher::Matching::findContour(Mat& templ_img, template_coords_t& coords)
717 coordinate_t start_point;
719 bool found = findFirstContourPoint(templ_img,start_point);
721 coords.push_back(start_point);
722 followContour(templ_img, coords);
// Angle of segment a->b in radians; dx/dy are output parameters.
// dy is negated to convert from image coordinates (Y down) to the usual
// mathematical convention (Y up) before atan2.
730 float ChamferMatcher::Matching::getAngle(coordinate_t a, coordinate_t b, int& dx, int& dy)
732 dx = b.first-a.first;
733 dy = -(b.second-a.second); // in image coordinated Y axis points downward
734 float angle = atan2((float)dy,(float)dx);
// Median-filtered differencing (Matas/Shao/Kittler): for each interior
// contour point, gather 2*M segment angles (M behind, M ahead; M defined
// above, not visible in this excerpt) and take the median as the tangent.
745 void ChamferMatcher::Matching::findContourOrientations(const template_coords_t& coords, template_orientations_t& orientations)
748 int coords_size = (int)coords.size();
750 std::vector<float> angles(2*M);
751 orientations.insert(orientations.begin(), coords_size, float(-3*CV_PI)); // mark as invalid in the beginning
753 if (coords_size<2*M+1) { // if contour not long enough to estimate orientations, abort
// Only points with M neighbors on each side get a valid orientation;
// the first/last M points keep the -3*PI "invalid" marker.
757 for (int i=M;i<coords_size-M;++i) {
758 coordinate_t crt = coords[i];
762 // compute previous M angles
763 for (int j=M;j>0;--j) {
765 angles[k++] = getAngle(other,crt, dx, dy);
767 // compute next M angles
768 for (int j=1;j<=M;++j) {
770 angles[k++] = getAngle(crt, other, dx, dy);
773 // get the middle two angles
// Two nth_element calls place the (M-1)-th and M-th order statistics of
// the 2*M angles; averaging them gives the median without a full sort.
774 std::nth_element(angles.begin(), angles.begin()+M-1, angles.end());
775 std::nth_element(angles.begin()+M-1, angles.begin()+M, angles.end());
776 // sort(angles.begin(), angles.end());
778 // average them to compute tangent
779 orientations[i] = (angles[M-1]+angles[M])/2;
783 //////////////////////// Template /////////////////////////////////////
// Build a template from an edge image: extract every contour (note that
// findContour erases contours from edge_image as it goes), compute the
// orientations, then recenter all points on the template centroid.
785 ChamferMatcher::Template::Template(Mat& edge_image, float scale_) : addr_width(-1), scale(scale_)
787 template_coords_t local_coords;
788 template_orientations_t local_orientations;
790 while (ChamferMatcher::Matching::findContour(edge_image, local_coords)) {
791 ChamferMatcher::Matching::findContourOrientations(local_coords, local_orientations);
793 coords.insert(coords.end(), local_coords.begin(), local_coords.end());
794 orientations.insert(orientations.end(), local_orientations.begin(), local_orientations.end());
795 local_coords.clear();
796 local_orientations.clear();
800 size = edge_image.size();
// Accumulate the centroid and the bounding box of all contour points.
808 for (size_t i=0;i<coords.size();++i) {
809 center.x += coords[i].first;
810 center.y += coords[i].second;
812 if (min.x>coords[i].first) min.x = coords[i].first;
813 if (min.y>coords[i].second) min.y = coords[i].second;
814 if (max.x<coords[i].first) max.x = coords[i].first;
815 if (max.y<coords[i].second) max.y = coords[i].second;
818 size.width = max.x - min.x;
819 size.height = max.y - min.y;
820 int coords_size = (int)coords.size();
// MAX(...,1) guards against division by zero when no contour was found.
822 center.x /= MAX(coords_size, 1);
823 center.y /= MAX(coords_size, 1);
// Store coordinates relative to the centroid so the template can be
// placed at any image offset.
825 for (int i=0;i<coords_size;++i) {
826 coords[i].first -= center.x;
827 coords[i].second -= center.y;
// Lazily compute (and cache, keyed on addr_width) the linear row-major
// offset of each template point for an image of the given width, so the
// matching inner loop can use pointer arithmetic instead of (x,y) access.
832 std::vector<int>& ChamferMatcher::Template::getTemplateAddresses(int width)
834 if (addr_width!=width) {
835 addr.resize(coords.size());
838 for (size_t i=0; i<coords.size();++i) {
839 addr[i] = coords[i].second*width+coords[i].first;
849 * @param scale Scale to be resized to
// Return this template resized to new_scale. Returns `this` if the scale
// already matches (within 1e-6), otherwise reuses a cached copy or builds
// a new one. The new copy is stored in scaled_templates (owned by this
// template), so callers must NOT delete the returned pointer.
851 ChamferMatcher::Template* ChamferMatcher::Template::rescale(float new_scale)
854 if (fabs(scale-new_scale)<1e-6) return this;
856 for (size_t i=0;i<scaled_templates.size();++i) {
857 if (fabs(scaled_templates[i]->scale-new_scale)<1e-6) {
858 return scaled_templates[i];
862 float scale_factor = new_scale/scale;
864 Template* tpl = new Template();
865 tpl->scale = new_scale;
// +0.5 before int truncation implements round-to-nearest for positives.
867 tpl->center.x = int(center.x*scale_factor+0.5);
868 tpl->center.y = int(center.y*scale_factor+0.5);
870 tpl->size.width = int(size.width*scale_factor+0.5);
871 tpl->size.height = int(size.height*scale_factor+0.5);
// Scale every contour point; orientations are scale-invariant and copied.
873 tpl->coords.resize(coords.size());
874 tpl->orientations.resize(orientations.size());
875 for (size_t i=0;i<coords.size();++i) {
876 tpl->coords[i].first = int(coords[i].first*scale_factor+0.5);
877 tpl->coords[i].second = int(coords[i].second*scale_factor+0.5);
878 tpl->orientations[i] = orientations[i];
880 scaled_templates.push_back(tpl);
// Debug visualization: render the template's contour points (green), an
// orientation tick per point (blue), and the center (green circle) into a
// padded color image, then display it if OpenCV was built with highgui.
888 void ChamferMatcher::Template::show() const
891 //Attention size is not correct
892 Mat templ_color (Size(size.width+(pad*2), size.height+(pad*2)), CV_8UC3);
893 templ_color.setTo(0);
895 for (size_t i=0;i<coords.size();++i) {
897 int x = center.x+coords[i].first+pad;
898 int y = center.y+coords[i].second+pad;
899 templ_color.at<Vec3b>(y,x)[1]=255;
900 //CV_PIXEL(unsigned char, templ_color,x,y)[1] = 255;
// Orientations below -PI are the "invalid" marker set by
// findContourOrientations and are skipped.
903 if (orientations[i] < -CV_PI) {
910 p2.x = x + pad*(int)(sin(orientations[i])*100)/100;
911 p2.y = y + pad*(int)(cos(orientations[i])*100)/100;
913 line(templ_color, p1,p2, Scalar(255,0,0));
917 circle(templ_color,Point(center.x + pad, center.y + pad),1,Scalar(0,255,0));
919 #ifdef HAVE_OPENCV_HIGHGUI
920 namedWindow("templ",1);
921 imshow("templ",templ_color);
// Without GUI support this debug helper raises instead of silently
// doing nothing.
924 CV_Error(Error::StsNotImplemented, "OpenCV has been compiled without GUI support");
927 templ_color.release();
931 //////////////////////// Matching /////////////////////////////////////
// Build a Template from an edge image (heap-allocated; note the Template
// constructor consumes the edge pixels of `templ`) and register it.
934 void ChamferMatcher::Matching::addTemplateFromImage(Mat& templ, float scale)
936 Template* cmt = new Template(templ, scale);
938 templates.push_back(cmt);
// Register an existing, caller-provided template by pointer.
// NOTE(review): this stores &template_ alongside heap-allocated templates
// from addTemplateFromImage — confirm the destructor does not delete
// caller-owned entries.
942 void ChamferMatcher::Matching::addTemplate(Template& template_){
944 templates.push_back(&template_);
947 * Alternative version of computeDistanceTransform, will probably be used to compute distance
948 * transform annotated with edge orientation.
// BFS (chamfer-style) distance transform seeded at edge pixels.
// a = step cost for the 4 orthogonal neighbors, b = diagonal cost.
// annotate_img records, for every pixel, the (x,y) of the edge pixel it
// inherited its distance from (used later by fillNonContourOrientations).
950 void ChamferMatcher::Matching::computeDistanceTransform(Mat& edges_img, Mat& dist_img, Mat& annotate_img, float truncate_dt, float a = 1.0, float b = 1.5)
952 int d[][2] = { {-1,-1}, { 0,-1}, { 1,-1},
954 {-1,1}, { 0,1}, { 1,1} };
957 Size s = edges_img.size();
960 // set distance to the edge pixels to 0 and put them in the queue
961 std::queue<std::pair<int,int> > q;
963 for (int y=0;y<h;++y) {
964 for (int x=0;x<w;++x) {
// NOTE(review): the address of a reference is never null, so this
// condition is always true (and a "null reference" would be UB anyway);
// an explicit boolean flag would express the intent better.
966 if (&annotate_img!=NULL) {
967 annotate_img.at<Vec2i>(y,x)[0]=x;
968 annotate_img.at<Vec2i>(y,x)[1]=y;
971 uchar edge_val = edges_img.at<uchar>(y,x);
972 if( (edge_val!=0) ) {
973 q.push(std::make_pair(x,y));
974 dist_img.at<float>(y,x)= 0;
// -1 marks "distance not yet computed".
977 dist_img.at<float>(y,x)=-1;
982 // breadth first computation of distance transform
983 std::pair<int,int> crt;
991 float dist_orig = dist_img.at<float>(y,x);
994 for (size_t i=0;i<sizeof(d)/sizeof(d[0]);++i) {
995 int nx = x + d[i][0];
996 int ny = y + d[i][1];
998 if (nx<0 || ny<0 || nx>=w || ny>=h) continue;
// |dx+dy|==1 holds exactly for the 4 orthogonal neighbors ({0,±1},
// {±1,0}); all diagonals give 0 or 2, so they take cost b.
1000 if (std::abs(d[i][0]+d[i][1])==1) {
1001 dist = (dist_orig)+a;
1004 dist = (dist_orig)+b;
1007 float dt = dist_img.at<float>(ny,nx);
// Relax: update if unvisited (-1) or if we found a shorter path.
1009 if (dt==-1 || dt>dist) {
1010 dist_img.at<float>(ny,nx) = dist;
1011 q.push(std::make_pair(nx,ny));
// Same always-true reference-address check as above.
1013 if (&annotate_img!=NULL) {
// Propagate the originating edge pixel of the neighbor we relaxed from.
1014 annotate_img.at<Vec2i>(ny,nx)[0]=annotate_img.at<Vec2i>(y,x)[0];
1015 annotate_img.at<Vec2i>(ny,nx)[1]=annotate_img.at<Vec2i>(y,x)[1];
// Clamp distances at truncate_dt (THRESH_TRUNC caps values, keeps rest).
1022 if (truncate_dt>0) {
1023 Mat dist_img_thr = dist_img.clone();
1024 threshold(dist_img, dist_img_thr, truncate_dt,0.0 ,THRESH_TRUNC);
1025 dist_img_thr.copyTo(dist_img);
// Write each contour point's orientation into orientation_img (all other
// pixels keep the -3*PI "invalid" marker). Contours are consumed from
// edge_img by findContour, so callers pass a clone (see matchEdgeImage).
1030 void ChamferMatcher::Matching::computeEdgeOrientations(Mat& edge_img, Mat& orientation_img)
1032 Mat contour_img(edge_img.size(), CV_8UC1);
1034 orientation_img.setTo(3*(-CV_PI));
1035 template_coords_t coords;
1036 template_orientations_t orientations;
1038 while (ChamferMatcher::Matching::findContour(edge_img, coords)) {
1040 ChamferMatcher::Matching::findContourOrientations(coords, orientations);
1042 // set orientation pixel in orientation image
1043 for (size_t i = 0; i<coords.size();++i) {
1044 int x = coords[i].first;
1045 int y = coords[i].second;
1046 // if (orientations[i]>-CV_PI)
1048 //CV_PIXEL(unsigned char, contour_img, x, y)[0] = 255;
1049 contour_img.at<uchar>(y,x)=255;
1051 //CV_PIXEL(float, orientation_img, x, y)[0] = orientations[i];
1052 orientation_img.at<float>(y,x)=orientations[i];
1057 orientations.clear();
1060 //imwrite("contours.pgm", contour_img);
// Propagate orientations to non-edge pixels: annotated_img holds, per
// pixel, the (x,y) of its nearest edge pixel (from the distance
// transform), so each non-edge pixel copies that edge pixel's orientation.
1064 void ChamferMatcher::Matching::fillNonContourOrientations(Mat& annotated_img, Mat& orientation_img)
1066 int cols = annotated_img.cols;
1067 int rows = annotated_img.rows;
1069 CV_Assert(orientation_img.cols==cols && orientation_img.rows==rows);
1071 for (int y=0;y<rows;++y) {
1072 for (int x=0;x<cols;++x) {
1073 int xorig = annotated_img.at<Vec2i>(y,x)[0];
1074 int yorig = annotated_img.at<Vec2i>(y,x)[1];
// (x,y)==(xorig,yorig) means this IS an edge pixel; keep its own value.
1076 if (x!=xorig || y!=yorig) {
1077 //orientation_img.at<float>(yorig,xorig)=orientation_img.at<float>(y,x);
1078 orientation_img.at<float>(y,x)=orientation_img.at<float>(yorig,xorig);
// Chamfer cost of placing `tpl` at `offset`: mean truncated distance over
// the template points, optionally blended with the mean orientation
// difference (alpha = orientation weight, beta = 1-alpha).
// Returns a heap-allocated Match.
1085 ChamferMatcher::Match* ChamferMatcher::Matching::localChamferDistance(Point offset, Mat& dist_img, Mat& orientation_img,
1086 ChamferMatcher::Template* tpl, float alpha)
1091 float beta = 1-alpha;
// Template points as precomputed linear offsets for this image width.
1093 std::vector<int>& addr = tpl->getTemplateAddresses(dist_img.cols);
1095 float* ptr = dist_img.ptr<float>(y)+x;
1098 float sum_distance = 0;
1099 for (size_t i=0; i<addr.size();++i) {
// Guard against reading past the end of the image buffer.
1100 if(addr[i] < (dist_img.cols*dist_img.rows) - (offset.y*dist_img.cols + offset.x)){
1101 sum_distance += *(ptr+addr[i]);
// Normalize by the truncation cap and the number of template points.
1105 float cost = (sum_distance/truncate_)/addr.size();
// NOTE(review): address-of-reference is never null — always true; an
// explicit use_orientation flag would express the intent.
1108 if (&orientation_img!=NULL) {
1109 float* optr = orientation_img.ptr<float>(y)+x;
1110 float sum_orientation = 0;
1111 int cnt_orientation = 0;
1113 for (size_t i=0;i<addr.size();++i) {
1115 if(addr[i] < (orientation_img.cols*orientation_img.rows) - (offset.y*orientation_img.cols + offset.x)){
// Only pairs where BOTH orientations are valid (>= -PI; invalid pixels
// carry the -3*PI marker) contribute to the orientation term.
1116 if (tpl->orientations[i]>=-CV_PI && (*(optr+addr[i]))>=-CV_PI) {
1117 sum_orientation += orientation_diff(tpl->orientations[i], (*(optr+addr[i])));
1123 if (cnt_orientation>0) {
// Blend: beta*distance + alpha*normalized mean orientation difference.
1124 cost = (float)(beta*cost+alpha*(sum_orientation/(2*CV_PI))/cnt_orientation);
1130 ChamferMatcher::Match* istance = new ChamferMatcher::Match();
1131 istance->cost = cost;
1132 istance->offset = offset;
// Evaluate every registered template at every (location, scale) produced
// by the range iterator; collects all non-rejected candidate matches.
// NOTE(review): deletion of the iterator from range.iterator() and of the
// Match returned by localChamferDistance is not visible in this excerpt —
// confirm both are freed to avoid leaks.
1142 ChamferMatcher::Matches* ChamferMatcher::Matching::matchTemplates(Mat& dist_img, Mat& orientation_img, const ImageRange& range, float _orientation_weight)
1145 ChamferMatcher::Matches* pmatches(new Matches());
1146 // try each template
1147 for(size_t i = 0; i < templates.size(); i++) {
1148 ImageIterator* it = range.iterator();
1149 while (it->hasNext()) {
1150 location_scale_t crt = it->next();
1152 Point loc = crt.first;
1153 float scale = crt.second;
// Cached rescale; the returned pointer is owned by templates[i].
1154 Template* tpl = templates[i]->rescale(scale);
// Skip placements where the template would fall outside the image.
1157 if (loc.x-tpl->center.x<0 || loc.x+tpl->size.width/2>=dist_img.cols) continue;
1158 if (loc.y-tpl->center.y<0 || loc.y+tpl->size.height/2>=dist_img.rows) continue;
1160 ChamferMatcher::Match* is = localChamferDistance(loc, dist_img, orientation_img, tpl, _orientation_weight);
1163 pmatches->push_back(*is);
1176 * Run matching using an edge image.
1177 * @param edge_img Edge image
1178 * @return a match object
// Full pipeline: distance transform (+ nearest-edge annotation), optional
// edge-orientation image, then template matching over the given range.
// max_matches/min_match_distance parameters are accepted but unused here
// (filtering happens in ChamferMatcher::addMatch).
1180 ChamferMatcher::Matches* ChamferMatcher::Matching::matchEdgeImage(Mat& edge_img, const ImageRange& range,
1181 float _orientation_weight, int /*max_matches*/, float /*min_match_distance*/)
1183 CV_Assert(edge_img.channels()==1);
1187 Mat orientation_img;
// CV_32SC2: per pixel, the (x,y) of the nearest edge pixel.
1189 annotated_img.create(edge_img.size(), CV_32SC2);
1190 dist_img.create(edge_img.size(),CV_32FC1);
1192 // Computing distance transform
1193 computeDistanceTransform(edge_img,dist_img, annotated_img, truncate_);
1196 //orientation_img = NULL;
1197 if (use_orientation_) {
1198 orientation_img.create(edge_img.size(), CV_32FC1);
1199 orientation_img.setTo(0);
// Work on a clone: computeEdgeOrientations (via findContour) erases the
// edge pixels it visits.
1200 Mat edge_clone = edge_img.clone();
1201 computeEdgeOrientations(edge_clone, orientation_img );
1202 edge_clone.release();
1203 fillNonContourOrientations(annotated_img, orientation_img);
1207 // Template matching
1208 ChamferMatcher::Matches* pmatches = matchTemplates( dist_img,
1211 _orientation_weight);
1214 if (use_orientation_) {
1215 orientation_img.release();
1218 annotated_img.release();
// Insert a candidate into the bounded, cost-sorted match list.
// Candidates closer than min_match_distance_ (L1 distance on offsets) to
// an existing match are merged: the existing entry is replaced only if
// the new cost is lower, then bubbled back into sorted position.
1224 void ChamferMatcher::addMatch(float cost, Point offset, const Template* tpl)
1226 bool new_match = true;
1227 for (int i=0; i<count; ++i) {
1228 if (std::abs(matches[i].offset.x-offset.x)+std::abs(matches[i].offset.y-offset.y)<min_match_distance_) {
1229 // too close, not a new match
1231 // if better cost, replace existing match
1232 if (cost<matches[i].cost) {
1233 matches[i].cost = cost;
1234 matches[i].offset = offset;
1235 matches[i].tpl = tpl;
1237 // re-bubble to keep ordered
1240 if (matches[k-1].cost>matches[k].cost) {
1241 std::swap(matches[k-1],matches[k]);
1251 // if we don't have enough matches yet, add it to the array
1252 if (count<max_matches_) {
1253 matches[count].cost = cost;
1254 matches[count].offset = offset;
1255 matches[count].tpl = tpl;
1258 // otherwise find the right position to insert it
1260 // if higher cost than the worst current match, just ignore it
1261 if (matches[count-1].cost<cost) {
1266 // skip all matches better than current one
1267 while (matches[j].cost<cost) j++;
1269 // shift matches one position
1272 matches[k+1] = matches[k];
1276 matches[j].cost = cost;
1277 matches[j].offset = offset;
1278 matches[j].tpl = tpl;
// Overlay the index-th stored match onto a 3-channel image by painting
// its template's contour points pure green (B=0, R=0, G=255).
1283 void ChamferMatcher::showMatch(Mat& img, int index)
1286 std::cout << "Index too big.\n" << std::endl;
1289 CV_Assert(img.channels()==3);
1291 Match match = matches[index];
1293 const template_coords_t& templ_coords = match.tpl->coords;
1295 for (size_t i=0;i<templ_coords.size();++i) {
1296 x = match.offset.x + templ_coords[i].first;
1297 y = match.offset.y + templ_coords[i].second;
// Skip template points that fall outside the image.
1299 if ( x > img.cols-1 || x < 0 || y > img.rows-1 || y < 0) continue;
1300 img.at<Vec3b>(y,x)[0]=0;
1301 img.at<Vec3b>(y,x)[2]=0;
1302 img.at<Vec3b>(y,x)[1]=255;
// Same visualization as the index-based overload, but for a caller-
// supplied Match object: paint its contour points green, clipped to img.
1306 void ChamferMatcher::showMatch(Mat& img, Match match)
1308 CV_Assert(img.channels()==3);
1310 const template_coords_t& templ_coords = match.tpl->coords;
1311 for (size_t i=0;i<templ_coords.size();++i) {
1312 int x = match.offset.x + templ_coords[i].first;
1313 int y = match.offset.y + templ_coords[i].second;
1314 if ( x > img.cols-1 || x < 0 || y > img.rows-1 || y < 0) continue;
1315 img.at<Vec3b>(y,x)[0]=0;
1316 img.at<Vec3b>(y,x)[2]=0;
1317 img.at<Vec3b>(y,x)[1]=255;
// Top-level driver: register the template, run edge matching over a
// sliding-window range covering the image, then filter/rank the raw
// candidates through addMatch and shrink `matches` to the `count` kept.
// NOTE(review): deletion of the Matches* returned by matchEdgeImage is
// not visible in this excerpt — confirm it is freed.
1322 const ChamferMatcher::Matches& ChamferMatcher::matching(Template& tpl, Mat& image_){
1323 chamfer_->addTemplate(tpl);
1326 matches.resize(max_matches_);
1330 Matches* matches_ = chamfer_->matchEdgeImage( image_,
1332 SlidingWindowImageRange(image_.cols,
1341 min_match_distance_);
1345 for(int i = 0; i < (int)matches_->size(); i++){
1346 addMatch(matches_->at(i).cost, matches_->at(i).offset, matches_->at(i).tpl);
1353 matches.resize(count);
// Public C-style entry point (note the historical "chamer" spelling is
// part of the public API and must not be changed). Runs chamfer matching
// of `templ` against `img` (both single-channel 8-bit edge images) and
// fills `results` with the matched template point sets and `costs` with
// the per-match costs. This excerpt ends mid-function; the remaining
// body (including the return value) is not visible here.
1361 int chamerMatching( Mat& img, Mat& templ,
1362 std::vector<std::vector<Point> >& results, std::vector<float>& costs,
1363 double templScale, int maxMatches, double minMatchDistance, int padX,
1364 int padY, int scales, double minScale, double maxScale,
1365 double orientationWeight, double truncate )
1367 CV_Assert(img.type() == CV_8UC1 && templ.type() == CV_8UC1);
1369 ChamferMatcher matcher_(maxMatches, (float)minMatchDistance, padX, padY, scales,
1370 (float)minScale, (float)maxScale,
1371 (float)orientationWeight, (float)truncate);
1373 ChamferMatcher::Template template_(templ, (float)templScale);
1374 ChamferMatcher::Matches match_instances = matcher_.matching(template_, img);
1376 size_t i, nmatches = match_instances.size();
1378 results.resize(nmatches);
1379 costs.resize(nmatches);
// Track the minimum cost across matches (used after the visible excerpt).
1382 double minCost = DBL_MAX;
1384 for( i = 0; i < nmatches; i++ )
1386 const ChamferMatcher::Match& match = match_instances[i];
1387 double cval = match.cost;
1393 costs[i] = (float)cval;
// Convert the match's centered template coordinates into absolute image
// points for the caller.
1395 const template_coords_t& templ_coords = match.tpl->coords;
1396 std::vector<Point>& templPoints = results[i];
1397 size_t j, npoints = templ_coords.size();
1398 templPoints.resize(npoints);
1400 for (j = 0; j < npoints; j++ )
1402 int x = match.offset.x + templ_coords[j].first;
1403 int y = match.offset.y + templ_coords[j].second;
1404 templPoints[j] = Point(x,y);