BRISK::operator()( InputArray _image, InputArray _mask, vector<KeyPoint>& keypoints,
OutputArray _descriptors, bool useProvidedKeypoints) const
{
+ // Descriptor-computing overload: delegates to the shared worker with
+ // doDescriptors=true. Orientation is only computed when we detect the
+ // keypoints ourselves; caller-provided keypoints are assumed to already
+ // carry their own angle.
+ // NOTE(review): initialize doOrientation — the previous code left it
+ // indeterminate on the !useProvidedKeypoints path and then copied it into
+ // the callee, which is undefined behavior even though the callee happens
+ // to overwrite it on that path.
+ bool doOrientation = true;
+ if (useProvidedKeypoints)
+ doOrientation = false;
+ computeDescriptorsAndOrOrientation(_image, _mask, keypoints, _descriptors, true, doOrientation,
+ useProvidedKeypoints);
+}
+
+void
+BRISK::computeDescriptorsAndOrOrientation(InputArray _image, InputArray _mask, vector<KeyPoint>& keypoints,
+ OutputArray _descriptors, bool doDescriptors, bool doOrientation,
+ bool useProvidedKeypoints) const
+{
+ // Worker shared by both operator() overloads: optionally computes each
+ // keypoint's orientation (doOrientation) and/or its binary descriptor
+ // (doDescriptors). When useProvidedKeypoints is false, keypoints are
+ // detected here first and orientation is forced on.
+ // NOTE(review): this diff hunk elides part of the function body; t1/t2,
+ // _integral and kscales must be declared in the elided context — confirm
+ // against the full file.
Mat image = _image.getMat(), mask = _mask.getMat();
if( image.type() != CV_8UC1 )
cvtColor(image, image, CV_BGR2GRAY);
if (!useProvidedKeypoints)
- detectImpl(image, keypoints, mask);
+ {
+ doOrientation = true;
+ computeKeypointsNoOrientation(_image, _mask, keypoints);
+ }
//Remove keypoints very close to the border
size_t ksize = keypoints.size();
int* _values = new int[points_]; // for temporary use
// resize the descriptors:
- _descriptors.create(ksize, strings_, CV_8U);
- cv::Mat descriptors = _descriptors.getMat();
- descriptors.setTo(0);
+ cv::Mat descriptors;
+ if (doDescriptors)
+ {
+ _descriptors.create(ksize, strings_, CV_8U);
+ descriptors = _descriptors.getMat();
+ descriptors.setTo(0);
+ }
// now do the extraction for all keypoints:
+ // NOTE(review): when doDescriptors is false, `descriptors` is empty and
+ // `ptr` is null — the loop body must `continue` before touching it (it does,
+ // via the !doDescriptors guard below).
uchar* ptr = descriptors.data;
for (size_t k = 0; k < ksize; k++)
{
- int theta;
cv::KeyPoint& kp = keypoints[k];
const int& scale = kscales[k];
- int shifter = 0;
int* pvalues = _values;
const float& x = kp.pt.x;
const float& y = kp.pt.y;
+
+ if (doOrientation)
+ {
+ // get the gray values in the unrotated pattern
+ for (unsigned int i = 0; i < points_; i++)
+ {
+ *(pvalues++) = smoothedIntensity(image, _integral, x, y, scale, 0, i);
+ }
+
+ int direction0 = 0;
+ int direction1 = 0;
+ // now iterate through the long pairings
+ const BriskLongPair* max = longPairs_ + noLongPairs_;
+ for (BriskLongPair* iter = longPairs_; iter < max; ++iter)
+ {
+ t1 = *(_values + iter->i);
+ t2 = *(_values + iter->j);
+ const int delta_t = (t1 - t2);
+ // update the direction:
+ const int tmp0 = delta_t * (iter->weighted_dx) / 1024;
+ const int tmp1 = delta_t * (iter->weighted_dy) / 1024;
+ direction0 += tmp0;
+ direction1 += tmp1;
+ }
+ // convert the accumulated gradient to a [0, 360) angle in degrees
+ kp.angle = atan2((float) direction1, (float) direction0) / M_PI * 180.0;
+ if (kp.angle < 0)
+ kp.angle += 360;
+ }
+
+ // orientation-only mode: skip the descriptor extraction below
+ if (!doDescriptors)
+ continue;
+
+ int theta;
if (kp.angle==-1)
{
// don't compute the gradient direction, just assign a rotation of 0°
// now also extract the stuff for the actual direction:
// let us compute the smoothed values
- shifter = 0;
+ int shifter = 0;
//unsigned int mean=0;
pvalues = _values;
}
void
-BRISK::operator()(InputArray _image, InputArray _mask, vector<KeyPoint>& keypoints) const
+BRISK::operator()(InputArray image, InputArray mask, vector<KeyPoint>& keypoints) const
+{
+ // Detect-only overload: find keypoints and assign their orientations, but
+ // write no descriptors (doDescriptors=false, doOrientation=true).
+ // useProvidedKeypoints=true because detection already ran on the previous
+ // line, so the worker must not detect again.
+ computeKeypointsNoOrientation(image, mask, keypoints);
+ computeDescriptorsAndOrOrientation(image, mask, keypoints, cv::noArray(), false, true, true);
+}
+
+void
+BRISK::computeKeypointsNoOrientation(InputArray _image, InputArray _mask, vector<KeyPoint>& keypoints) const
{
+ // Detection-only helper: finds BRISK keypoints without assigning an
+ // orientation. The large removed section below is the orientation
+ // computation, which this patch relocates into
+ // computeDescriptorsAndOrOrientation() (see the identical loop added there).
Mat image = _image.getMat(), mask = _mask.getMat();
if( image.type() != CV_8UC1 )
+ // NOTE(review): the body of this `if` is elided by the hunk — presumably
+ // the cvtColor(...) call as in the sibling function; confirm in the full file.
// remove invalid points
removeInvalidPoints(mask, keypoints);
-
- // Compute the orientations of the keypoints
- //Remove keypoints very close to the border
- size_t ksize = keypoints.size();
- std::vector<int> kscales; // remember the scale per keypoint
- kscales.resize(ksize);
- static const float log2 = 0.693147180559945;
- static const float lb_scalerange = log(scalerange_) / (log2);
- std::vector<cv::KeyPoint>::iterator beginning = keypoints.begin();
- std::vector<int>::iterator beginningkscales = kscales.begin();
- static const float basicSize06 = basicSize_ * 0.6;
- for (size_t k = 0; k < ksize; k++)
- {
- unsigned int scale;
- scale = std::max((int) (scales_ / lb_scalerange * (log(keypoints[k].size / (basicSize06)) / log2) + 0.5), 0);
- // saturate
- if (scale >= scales_)
- scale = scales_ - 1;
- kscales[k] = scale;
- const int border = sizeList_[scale];
- const int border_x = image.cols - border;
- const int border_y = image.rows - border;
- if (RoiPredicate(border, border, border_x, border_y, keypoints[k]))
- {
- keypoints.erase(beginning + k);
- kscales.erase(beginningkscales + k);
- if (k == 0)
- {
- beginning = keypoints.begin();
- beginningkscales = kscales.begin();
- }
- ksize--;
- k--;
- }
- }
-
- // first, calculate the integral image over the whole image:
- // current integral image
- cv::Mat _integral; // the integral image
- cv::integral(image, _integral);
-
- int* _values = new int[points_]; // for temporary use
-
- // now do the extraction for all keypoints:
-
- // temporary variables containing gray values at sample points:
- int t1;
- int t2;
-
- // the feature orientation
- int direction0;
- int direction1;
-
- for (size_t k = 0; k < ksize; k++)
- {
- cv::KeyPoint& kp = keypoints[k];
- const int& scale = kscales[k];
- int* pvalues = _values;
- const float& x = kp.pt.x;
- const float& y = kp.pt.y;
- // get the gray values in the unrotated pattern
- for (unsigned int i = 0; i < points_; i++)
- {
- *(pvalues++) = smoothedIntensity(image, _integral, x, y, scale, 0, i);
- }
-
- direction0 = 0;
- direction1 = 0;
- // now iterate through the long pairings
- const BriskLongPair* max = longPairs_ + noLongPairs_;
- for (BriskLongPair* iter = longPairs_; iter < max; ++iter)
- {
- t1 = *(_values + iter->i);
- t2 = *(_values + iter->j);
- const int delta_t = (t1 - t2);
- // update the direction:
- const int tmp0 = delta_t * (iter->weighted_dx) / 1024;
- const int tmp1 = delta_t * (iter->weighted_dy) / 1024;
- direction0 += tmp0;
- direction1 += tmp1;
- }
- kp.angle = atan2((float) direction1, (float) direction0) / M_PI * 180.0;
- if (kp.angle < 0)
- kp.angle += 360;
- }
}