Refactor of KAZE and AKAZE:

1) Cleaned up unused code
2) Removed the SURF extraction method
3) Enabled threading for KAZE extraction
4) Exposed new properties for runtime configuration
pull/2673/head
Ievgen Khvedchenia 11 years ago
parent 220de14077
commit 1a5fcd715d
  1. modules/features2d/include/opencv2/features2d.hpp (25 lines changed)
  2. modules/features2d/src/akaze.cpp (68 lines changed)
  3. modules/features2d/src/akaze/AKAZEConfig.h (33 lines changed)
  4. modules/features2d/src/akaze/AKAZEFeatures.cpp (285 lines changed)
  5. modules/features2d/src/features2d_init.cpp (2 lines changed)
  6. modules/features2d/src/kaze.cpp (19 lines changed)
  7. modules/features2d/src/kaze/KAZEConfig.h (135 lines changed)
  8. modules/features2d/src/kaze/KAZEFeatures.cpp (926 lines changed)
  9. modules/features2d/src/kaze/KAZEFeatures.h (123 lines changed)
  10. modules/features2d/test/test_keypoints.cpp (14 lines changed)

@@ -893,7 +893,15 @@ KAZE implementation
class CV_EXPORTS_W KAZE : public Feature2D
{
public:
CV_WRAP explicit KAZE(bool _extended = false);
/// AKAZE Descriptor Type
enum DESCRIPTOR_TYPE {
DESCRIPTOR_MSURF = 1,
DESCRIPTOR_GSURF = 2
};
CV_WRAP KAZE();
CV_WRAP explicit KAZE(DESCRIPTOR_TYPE type, bool _extended, bool _upright);
virtual ~KAZE();
@@ -917,7 +925,9 @@ protected:
void detectImpl(InputArray image, std::vector<KeyPoint>& keypoints, InputArray mask) const;
void computeImpl(InputArray image, std::vector<KeyPoint>& keypoints, OutputArray descriptors) const;
CV_PROP int descriptor;
CV_PROP bool extended;
CV_PROP bool upright;
};
/*!
@@ -926,7 +936,16 @@ AKAZE implementation
class CV_EXPORTS_W AKAZE : public Feature2D
{
public:
CV_WRAP explicit AKAZE(int _descriptor = 5, int _descriptor_size = 0, int _descriptor_channels = 3);
/// AKAZE Descriptor Type
enum DESCRIPTOR_TYPE {
DESCRIPTOR_KAZE_UPRIGHT = 2, ///< Upright descriptors, not invariant to rotation
DESCRIPTOR_KAZE = 3,
DESCRIPTOR_MLDB_UPRIGHT = 4, ///< Upright descriptors, not invariant to rotation
DESCRIPTOR_MLDB = 5
};
CV_WRAP AKAZE();
CV_WRAP explicit AKAZE(DESCRIPTOR_TYPE _descriptor, int _descriptor_size = 0, int _descriptor_channels = 3);
virtual ~AKAZE();
@@ -951,8 +970,8 @@ protected:
void computeImpl(InputArray image, std::vector<KeyPoint>& keypoints, OutputArray descriptors) const;
void detectImpl(InputArray image, std::vector<KeyPoint>& keypoints, InputArray mask = noArray()) const;
CV_PROP int descriptor_channels;
CV_PROP int descriptor;
CV_PROP int descriptor_channels;
CV_PROP int descriptor_size;
};
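For reference, a minimal usage sketch of the constructors and enums exposed above. This is not part of the patch: the synthetic input image only keeps the snippet self-contained, and the detect/compute calls assume the Feature2D detector/extractor interface of this branch.

#include <opencv2/core.hpp>
#include <opencv2/features2d.hpp>
#include <vector>

int main()
{
    // Synthetic grayscale image so the sketch has no file I/O dependency.
    cv::Mat gray(480, 640, CV_8UC1);
    cv::randu(gray, cv::Scalar::all(0), cv::Scalar::all(255));

    // KAZE with the G-SURF descriptor, 64-float variant, rotation invariant.
    cv::KAZE kaze(cv::KAZE::DESCRIPTOR_GSURF, /*_extended=*/false, /*_upright=*/false);

    // AKAZE with the full-length binary MLDB descriptor and 3 channels.
    cv::AKAZE akaze(cv::AKAZE::DESCRIPTOR_MLDB, /*_descriptor_size=*/0, /*_descriptor_channels=*/3);

    std::vector<cv::KeyPoint> kazeKpts, akazeKpts;
    cv::Mat kazeDesc, akazeDesc;
    kaze.detect(gray, kazeKpts);
    kaze.compute(gray, kazeKpts, kazeDesc);      // 64-float G-SURF descriptors
    akaze.detect(gray, akazeKpts);
    akaze.compute(gray, akazeKpts, akazeDesc);   // binary MLDB descriptors
    return 0;
}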

@@ -53,10 +53,16 @@ http://www.robesafe.com/personal/pablo.alcantarilla/papers/Alcantarilla13bmvc.pd
namespace cv
{
AKAZE::AKAZE()
: descriptor(DESCRIPTOR_MLDB)
, descriptor_channels(3)
, descriptor_size(0)
{
}
AKAZE::AKAZE(int _descriptor, int _descriptor_size, int _descriptor_channels)
: descriptor_channels(_descriptor_channels)
, descriptor(_descriptor)
AKAZE::AKAZE(DESCRIPTOR_TYPE _descriptor, int _descriptor_size, int _descriptor_channels)
: descriptor(_descriptor)
, descriptor_channels(_descriptor_channels)
, descriptor_size(_descriptor_size)
{
@@ -70,12 +76,14 @@ namespace cv
// returns the descriptor size in bytes
int AKAZE::descriptorSize() const
{
if (descriptor < MLDB_UPRIGHT)
switch (descriptor)
{
case cv::AKAZE::DESCRIPTOR_KAZE:
case cv::AKAZE::DESCRIPTOR_KAZE_UPRIGHT:
return 64;
}
else
{
case cv::AKAZE::DESCRIPTOR_MLDB:
case cv::AKAZE::DESCRIPTOR_MLDB_UPRIGHT:
// We use the full length binary descriptor -> 486 bits
if (descriptor_size == 0)
{
@@ -87,32 +95,45 @@ namespace cv
// We use the random bit selection length binary descriptor
return (int)ceil(descriptor_size / 8.);
}
default:
return -1;
}
}
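For orientation, the 486-bit figure in the full-length branch above lines up with the three M-LDB comparison grids used further down in AKAZEFeatures.cpp (2x2, 3x3 and 4x4 cells): C(4,2) + C(9,2) + C(16,2) = 6 + 36 + 120 = 162 binary comparisons per channel, and with the default 3 channels that is 486 bits, i.e. (int)ceil(486 / 8.) = 61 bytes. A non-zero descriptor_size instead yields ceil(descriptor_size / 8.) bytes, as in the return statement above.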
// returns the descriptor type
int AKAZE::descriptorType() const
{
if (descriptor < MLDB_UPRIGHT)
{
return CV_32F;
}
else
switch (descriptor)
{
return CV_8U;
case cv::AKAZE::DESCRIPTOR_KAZE:
case cv::AKAZE::DESCRIPTOR_KAZE_UPRIGHT:
return CV_32F;
case cv::AKAZE::DESCRIPTOR_MLDB:
case cv::AKAZE::DESCRIPTOR_MLDB_UPRIGHT:
return CV_8U;
default:
return -1;
}
}
// returns the default norm type
int AKAZE::defaultNorm() const
{
if (descriptor < MLDB_UPRIGHT)
switch (descriptor)
{
return NORM_L2;
}
else
{
return NORM_HAMMING;
case cv::AKAZE::DESCRIPTOR_KAZE:
case cv::AKAZE::DESCRIPTOR_KAZE_UPRIGHT:
return cv::NORM_L2;
case cv::AKAZE::DESCRIPTOR_MLDB:
case cv::AKAZE::DESCRIPTOR_MLDB_UPRIGHT:
return cv::NORM_HAMMING;
default:
return -1;
}
}
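Because descriptorType() and defaultNorm() now track the configured descriptor, callers can derive a compatible matcher instead of hard-coding the norm. A small sketch under that assumption; the helper name is made up and the matching boilerplate is omitted.

#include <opencv2/features2d.hpp>

// Sketch: pick the brute-force matcher norm from the AKAZE configuration.
// DESCRIPTOR_MLDB(_UPRIGHT) -> CV_8U descriptors, NORM_HAMMING
// DESCRIPTOR_KAZE(_UPRIGHT) -> CV_32F descriptors (64 floats), NORM_L2
cv::Ptr<cv::DescriptorMatcher> makeMatcherFor(const cv::AKAZE& akaze)
{
    return cv::Ptr<cv::DescriptorMatcher>(new cv::BFMatcher(akaze.defaultNorm()));
}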
@@ -132,6 +153,9 @@ namespace cv
cv::Mat& desc = descriptors.getMatRef();
AKAZEOptions options;
options.descriptor = static_cast<DESCRIPTOR_TYPE>(descriptor);
options.descriptor_channels = descriptor_channels;
options.descriptor_size = descriptor_size;
options.img_width = img.cols;
options.img_height = img.rows;
@@ -164,6 +188,9 @@ namespace cv
img.convertTo(img1_32, CV_32F, 1.0 / 255.0, 0);
AKAZEOptions options;
options.descriptor = static_cast<DESCRIPTOR_TYPE>(descriptor);
options.descriptor_channels = descriptor_channels;
options.descriptor_size = descriptor_size;
options.img_width = img.cols;
options.img_height = img.rows;
@@ -189,6 +216,9 @@ namespace cv
cv::Mat& desc = descriptors.getMatRef();
AKAZEOptions options;
options.descriptor = static_cast<DESCRIPTOR_TYPE>(descriptor);
options.descriptor_channels = descriptor_channels;
options.descriptor_size = descriptor_size;
options.img_width = img.cols;
options.img_height = img.rows;

@@ -10,6 +10,7 @@
/* ************************************************************************* */
// OpenCV
#include "precomp.hpp"
#include <opencv2/features2d.hpp>
/* ************************************************************************* */
/// Lookup table for 2d gaussian (sigma = 2.5) where (0,0) is top left and (6,6) is bottom right
@@ -23,30 +24,18 @@ const float gauss25[7][7] = {
{ 0.00142946f, 0.00131956f, 0.00103800f, 0.00069579f, 0.00039744f, 0.00019346f, 0.00008024f }
};
/* ************************************************************************* */
/// AKAZE Descriptor Type
enum DESCRIPTOR_TYPE {
SURF_UPRIGHT = 0, ///< Upright descriptors, not invariant to rotation
SURF = 1,
MSURF_UPRIGHT = 2, ///< Upright descriptors, not invariant to rotation
MSURF = 3,
MLDB_UPRIGHT = 4, ///< Upright descriptors, not invariant to rotation
MLDB = 5
};
/* ************************************************************************* */
/// AKAZE Diffusivities
enum DIFFUSIVITY_TYPE {
PM_G1 = 0,
PM_G2 = 1,
WEICKERT = 2,
CHARBONNIER = 3
};
/* ************************************************************************* */
/// AKAZE configuration options structure
struct AKAZEOptions {
/// AKAZE Diffusivities
enum DIFFUSIVITY_TYPE {
PM_G1 = 0,
PM_G2 = 1,
WEICKERT = 2,
CHARBONNIER = 3
};
AKAZEOptions()
: omax(4)
, nsublevels(4)
@@ -60,7 +49,7 @@ struct AKAZEOptions {
, dthreshold(0.001f)
, min_dthreshold(0.00001f)
, descriptor(MLDB)
, descriptor(cv::AKAZE::DESCRIPTOR_MLDB)
, descriptor_size(0)
, descriptor_channels(3)
, descriptor_pattern_size(10)
@@ -83,7 +72,7 @@ struct AKAZEOptions {
float dthreshold; ///< Detector response threshold to accept point
float min_dthreshold; ///< Minimum detector threshold to accept a point
DESCRIPTOR_TYPE descriptor; ///< Type of descriptor
cv::AKAZE::DESCRIPTOR_TYPE descriptor; ///< Type of descriptor
int descriptor_size; ///< Size of the descriptor in bits. 0->Full size
int descriptor_channels; ///< Number of channels in the descriptor (1, 2, 3)
int descriptor_pattern_size; ///< Actual patch size is 2*pattern_size*point.scale

@@ -25,7 +25,8 @@ AKAZEFeatures::AKAZEFeatures(const AKAZEOptions& options) : options_(options) {
ncycles_ = 0;
reordering_ = true;
if (options_.descriptor_size > 0 && options_.descriptor >= MLDB_UPRIGHT) {
if (options_.descriptor_size > 0 && options_.descriptor >= cv::AKAZE::DESCRIPTOR_MLDB_UPRIGHT)
{
generateDescriptorSubsample(descriptorSamples_, descriptorBits_, options_.descriptor_size,
options_.descriptor_pattern_size, options_.descriptor_channels);
}
@@ -124,16 +125,16 @@ int AKAZEFeatures::Create_Nonlinear_Scale_Space(const cv::Mat& img)
// Compute the conductivity equation
switch (options_.diffusivity) {
case PM_G1:
case AKAZEOptions::PM_G1:
pm_g1(evolution_[i].Lx, evolution_[i].Ly, evolution_[i].Lflow, options_.kcontrast);
break;
case PM_G2:
case AKAZEOptions::PM_G2:
pm_g2(evolution_[i].Lx, evolution_[i].Ly, evolution_[i].Lflow, options_.kcontrast);
break;
case WEICKERT:
case AKAZEOptions::WEICKERT:
weickert_diffusivity(evolution_[i].Lx, evolution_[i].Ly, evolution_[i].Lflow, options_.kcontrast);
break;
case CHARBONNIER:
case AKAZEOptions::CHARBONNIER:
charbonnier_diffusivity(evolution_[i].Lx, evolution_[i].Ly, evolution_[i].Lflow, options_.kcontrast);
break;
default:
@@ -170,8 +171,8 @@ class MultiscaleDerivativesInvoker : public cv::ParallelLoopBody
{
public:
explicit MultiscaleDerivativesInvoker(std::vector<TEvolution>& ev, const AKAZEOptions& opt)
: evolution_(&ev)
, options_(opt)
: evolution_(&ev)
, options_(opt)
{
}
@@ -210,7 +211,7 @@ private:
void AKAZEFeatures::Compute_Multiscale_Derivatives(void)
{
cv::parallel_for_(cv::Range(0, (int)evolution_.size()),
MultiscaleDerivativesInvoker(evolution_, options_));
MultiscaleDerivativesInvoker(evolution_, options_));
}
/* ************************************************************************* */
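The multiscale-derivative work above is dispatched through cv::parallel_for_; the KAZE path (whose large diff is suppressed below) presumably gets its new threading from the same mechanism. A stripped-down sketch of the ParallelLoopBody pattern, with a placeholder payload instead of the real derivative code:

#include <opencv2/core.hpp>
#include <opencv2/core/utility.hpp>
#include <vector>

// One range index per evolution level; cv::parallel_for_ splits the range across threads.
class LevelInvoker : public cv::ParallelLoopBody
{
public:
    explicit LevelInvoker(std::vector<cv::Mat>& levels) : levels_(&levels) {}

    void operator()(const cv::Range& range) const
    {
        for (int i = range.start; i < range.end; i++)
        {
            // Placeholder per-level work; the real invokers fill Lx, Ly, Lxx, Lxy, Lyy.
            (*levels_)[i].convertTo((*levels_)[i], CV_32F);
        }
    }

private:
    std::vector<cv::Mat>* levels_;
};

// Usage: cv::parallel_for_(cv::Range(0, (int)levels.size()), LevelInvoker(levels));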
@@ -255,11 +256,10 @@ void AKAZEFeatures::Find_Scale_Space_Extrema(std::vector<cv::KeyPoint>& kpts)
vector<cv::KeyPoint> kpts_aux;
// Set maximum size
if (options_.descriptor == SURF_UPRIGHT || options_.descriptor == SURF ||
options_.descriptor == MLDB_UPRIGHT || options_.descriptor == MLDB) {
if (options_.descriptor == cv::AKAZE::DESCRIPTOR_MLDB_UPRIGHT || options_.descriptor == cv::AKAZE::DESCRIPTOR_MLDB) {
smax = 10.0f*sqrtf(2.0f);
}
else if (options_.descriptor == MSURF_UPRIGHT || options_.descriptor == MSURF) {
else if (options_.descriptor == cv::AKAZE::DESCRIPTOR_KAZE_UPRIGHT || options_.descriptor == cv::AKAZE::DESCRIPTOR_KAZE) {
smax = 12.0f*sqrtf(2.0f);
}
@@ -574,15 +574,15 @@ class Upright_MLDB_Descriptor_Subset_Invoker : public cv::ParallelLoopBody
{
public:
Upright_MLDB_Descriptor_Subset_Invoker(std::vector<cv::KeyPoint>& kpts,
cv::Mat& desc,
std::vector<TEvolution>& evolution,
AKAZEOptions& options,
cv::Mat descriptorSamples,
cv::Mat descriptorBits)
: keypoints_(&kpts)
, descriptors_(&desc)
, evolution_(&evolution)
, options_(&options)
cv::Mat& desc,
std::vector<TEvolution>& evolution,
AKAZEOptions& options,
cv::Mat descriptorSamples,
cv::Mat descriptorBits)
: keypoints_(&kpts)
, descriptors_(&desc)
, evolution_(&evolution)
, options_(&options)
, descriptorSamples_(descriptorSamples)
, descriptorBits_(descriptorBits)
{
@@ -641,15 +641,15 @@ class MLDB_Descriptor_Subset_Invoker : public cv::ParallelLoopBody
{
public:
MLDB_Descriptor_Subset_Invoker(std::vector<cv::KeyPoint>& kpts,
cv::Mat& desc,
std::vector<TEvolution>& evolution,
AKAZEOptions& options,
cv::Mat descriptorSamples,
cv::Mat descriptorBits)
: keypoints_(&kpts)
, descriptors_(&desc)
, evolution_(&evolution)
, options_(&options)
cv::Mat& desc,
std::vector<TEvolution>& evolution,
AKAZEOptions& options,
cv::Mat descriptorSamples,
cv::Mat descriptorBits)
: keypoints_(&kpts)
, descriptors_(&desc)
, evolution_(&evolution)
, options_(&options)
, descriptorSamples_(descriptorSamples)
, descriptorBits_(descriptorBits)
{
@@ -684,7 +684,7 @@ private:
void AKAZEFeatures::Compute_Descriptors(std::vector<cv::KeyPoint>& kpts, cv::Mat& desc)
{
// Allocate memory for the matrix with the descriptors
if (options_.descriptor < MLDB_UPRIGHT) {
if (options_.descriptor < cv::AKAZE::DESCRIPTOR_MLDB_UPRIGHT) {
desc = cv::Mat::zeros((int)kpts.size(), 64, CV_32FC1);
}
else {
@@ -699,29 +699,19 @@ void AKAZEFeatures::Compute_Descriptors(std::vector<cv::KeyPoint>& kpts, cv::Mat
}
}
switch (options_.descriptor) {
case SURF_UPRIGHT: // Upright descriptors, not invariant to rotation
switch (options_.descriptor)
{
cv::parallel_for_(cv::Range(0, (int)kpts.size()), SURF_Descriptor_Upright_64_Invoker(kpts, desc, evolution_));
}
break;
case SURF:
{
cv::parallel_for_(cv::Range(0, (int)kpts.size()), SURF_Descriptor_64_Invoker(kpts, desc, evolution_));
}
break;
case MSURF_UPRIGHT: // Upright descriptors, not invariant to rotation
case cv::AKAZE::DESCRIPTOR_KAZE_UPRIGHT: // Upright descriptors, not invariant to rotation
{
cv::parallel_for_(cv::Range(0, (int)kpts.size()), MSURF_Upright_Descriptor_64_Invoker(kpts, desc, evolution_));
}
break;
case MSURF:
case cv::AKAZE::DESCRIPTOR_KAZE:
{
cv::parallel_for_(cv::Range(0, (int)kpts.size()), MSURF_Descriptor_64_Invoker(kpts, desc, evolution_));
}
break;
case MLDB_UPRIGHT: // Upright descriptors, not invariant to rotation
case cv::AKAZE::DESCRIPTOR_MLDB_UPRIGHT: // Upright descriptors, not invariant to rotation
{
if (options_.descriptor_size == 0)
cv::parallel_for_(cv::Range(0, (int)kpts.size()), Upright_MLDB_Full_Descriptor_Invoker(kpts, desc, evolution_, options_));
@@ -729,7 +719,7 @@ void AKAZEFeatures::Compute_Descriptors(std::vector<cv::KeyPoint>& kpts, cv::Mat
cv::parallel_for_(cv::Range(0, (int)kpts.size()), Upright_MLDB_Descriptor_Subset_Invoker(kpts, desc, evolution_, options_, descriptorSamples_, descriptorBits_));
}
break;
case MLDB:
case cv::AKAZE::DESCRIPTOR_MLDB:
{
if (options_.descriptor_size == 0)
cv::parallel_for_(cv::Range(0, (int)kpts.size()), MLDB_Full_Descriptor_Invoker(kpts, desc, evolution_, options_));
@@ -783,7 +773,7 @@ void AKAZEFeatures::Compute_Main_Orientation(cv::KeyPoint& kpt, const std::vecto
// Loop slides pi/3 window around feature point
for (ang1 = 0; ang1 < (float)(2.0 * CV_PI); ang1 += 0.15f) {
ang2 = (ang1 + (float)(CV_PI / 3.0) > (float)(2.0*CV_PI) ? ang1 - (float)(5.0*CV_PI / 3.0) : ang1 + (float)(CV_PI / 3.0));
ang2 = (ang1 + (float)(CV_PI / 3.0) >(float)(2.0*CV_PI) ? ang1 - (float)(5.0*CV_PI / 3.0) : ang1 + (float)(CV_PI / 3.0));
sumX = sumY = 0.f;
for (size_t k = 0; k < Ang.size(); ++k) {
@@ -812,195 +802,6 @@ void AKAZEFeatures::Compute_Main_Orientation(cv::KeyPoint& kpt, const std::vecto
}
}
/* ************************************************************************* */
/**
* @brief This method computes the upright descriptor of the provided keypoint
* @param kpt Input keypoint
* @note Rectangular grid of 20 s x 20 s. Descriptor Length 64. No additional
* Gaussian weighting is performed. The descriptor is inspired from Bay et al.,
* Speeded Up Robust Features, ECCV, 2006
*/
void SURF_Descriptor_Upright_64_Invoker::Get_SURF_Descriptor_Upright_64(const cv::KeyPoint& kpt, float *desc) const {
float dx = 0.0, dy = 0.0, mdx = 0.0, mdy = 0.0;
float rx = 0.0, ry = 0.0, len = 0.0, xf = 0.0, yf = 0.0;
float sample_x = 0.0, sample_y = 0.0;
float fx = 0.0, fy = 0.0, ratio = 0.0, res1 = 0.0, res2 = 0.0, res3 = 0.0, res4 = 0.0;
int x1 = 0, y1 = 0, x2 = 0, y2 = 0, sample_step = 0, pattern_size = 0, dcount = 0;
int scale = 0, dsize = 0, level = 0;
const std::vector<TEvolution>& evolution = *evolution_;
// Set the descriptor size and the sample and pattern sizes
dsize = 64;
sample_step = 5;
pattern_size = 10;
// Get the information from the keypoint
ratio = (float)(1 << kpt.octave);
scale = fRound(0.5f*kpt.size / ratio);
level = kpt.class_id;
yf = kpt.pt.y / ratio;
xf = kpt.pt.x / ratio;
// Calculate descriptor for this interest point
for (int i = -pattern_size; i < pattern_size; i += sample_step) {
for (int j = -pattern_size; j < pattern_size; j += sample_step) {
dx = dy = mdx = mdy = 0.0;
for (int k = i; k < i + sample_step; k++) {
for (int l = j; l < j + sample_step; l++) {
// Get the coordinates of the sample point on the rotated axis
sample_y = yf + l*scale;
sample_x = xf + k*scale;
y1 = (int)(sample_y - 0.5f);
x1 = (int)(sample_x - 0.5f);
y2 = (int)(sample_y + 0.5f);
x2 = (int)(sample_x + 0.5f);
fx = sample_x - x1;
fy = sample_y - y1;
res1 = *(evolution[level].Lx.ptr<float>(y1)+x1);
res2 = *(evolution[level].Lx.ptr<float>(y1)+x2);
res3 = *(evolution[level].Lx.ptr<float>(y2)+x1);
res4 = *(evolution[level].Lx.ptr<float>(y2)+x2);
rx = (1.0f - fx)*(1.0f - fy)*res1 + fx*(1.0f - fy)*res2 + (1.0f - fx)*fy*res3 + fx*fy*res4;
res1 = *(evolution[level].Ly.ptr<float>(y1)+x1);
res2 = *(evolution[level].Ly.ptr<float>(y1)+x2);
res3 = *(evolution[level].Ly.ptr<float>(y2)+x1);
res4 = *(evolution[level].Ly.ptr<float>(y2)+x2);
ry = (1.0f - fx)*(1.0f - fy)*res1 + fx*(1.0f - fy)*res2 + (1.0f - fx)*fy*res3 + fx*fy*res4;
// Sum the derivatives to the cumulative descriptor
dx += rx;
dy += ry;
mdx += fabs(rx);
mdy += fabs(ry);
}
}
// Add the values to the descriptor vector
desc[dcount++] = dx;
desc[dcount++] = dy;
desc[dcount++] = mdx;
desc[dcount++] = mdy;
// Store the current length^2 of the vector
len += dx*dx + dy*dy + mdx*mdx + mdy*mdy;
}
}
// convert to unit vector
len = sqrt(len);
for (int i = 0; i < dsize; i++) {
desc[i] /= len;
}
}
/* ************************************************************************* */
/**
* @brief This method computes the descriptor of the provided keypoint given the
* main orientation
* @param kpt Input keypoint
* @param desc Descriptor vector
* @note Rectangular grid of 20 s x 20 s. Descriptor Length 64. No additional
* Gaussian weighting is performed. The descriptor is inspired from Bay et al.,
* Speeded Up Robust Features, ECCV, 2006
*/
void SURF_Descriptor_64_Invoker::Get_SURF_Descriptor_64(const cv::KeyPoint& kpt, float *desc) const {
float dx = 0.0, dy = 0.0, mdx = 0.0, mdy = 0.0;
float rx = 0.0, ry = 0.0, rrx = 0.0, rry = 0.0, len = 0.0, xf = 0.0, yf = 0.0;
float sample_x = 0.0, sample_y = 0.0, co = 0.0, si = 0.0, angle = 0.0;
float fx = 0.0, fy = 0.0, ratio = 0.0, res1 = 0.0, res2 = 0.0, res3 = 0.0, res4 = 0.0;
int x1 = 0, y1 = 0, x2 = 0, y2 = 0, sample_step = 0, pattern_size = 0, dcount = 0;
int scale = 0, dsize = 0, level = 0;
// Set the descriptor size and the sample and pattern sizes
dsize = 64;
sample_step = 5;
pattern_size = 10;
const std::vector<TEvolution>& evolution = *evolution_;
// Get the information from the keypoint
ratio = (float)(1 << kpt.octave);
scale = fRound(0.5f*kpt.size / ratio);
angle = kpt.angle;
level = kpt.class_id;
yf = kpt.pt.y / ratio;
xf = kpt.pt.x / ratio;
co = cos(angle);
si = sin(angle);
// Calculate descriptor for this interest point
for (int i = -pattern_size; i < pattern_size; i += sample_step) {
for (int j = -pattern_size; j < pattern_size; j += sample_step) {
dx = dy = mdx = mdy = 0.0;
for (int k = i; k < i + sample_step; k++) {
for (int l = j; l < j + sample_step; l++) {
// Get the coordinates of the sample point on the rotated axis
sample_y = yf + (l*scale*co + k*scale*si);
sample_x = xf + (-l*scale*si + k*scale*co);
y1 = (int)(sample_y - 0.5f);
x1 = (int)(sample_x - 0.5f);
y2 = (int)(sample_y + 0.5f);
x2 = (int)(sample_x + 0.5f);
fx = sample_x - x1;
fy = sample_y - y1;
res1 = *(evolution[level].Lx.ptr<float>(y1)+x1);
res2 = *(evolution[level].Lx.ptr<float>(y1)+x2);
res3 = *(evolution[level].Lx.ptr<float>(y2)+x1);
res4 = *(evolution[level].Lx.ptr<float>(y2)+x2);
rx = (1.0f - fx)*(1.0f - fy)*res1 + fx*(1.0f - fy)*res2 + (1.0f - fx)*fy*res3 + fx*fy*res4;
res1 = *(evolution[level].Ly.ptr<float>(y1)+x1);
res2 = *(evolution[level].Ly.ptr<float>(y1)+x2);
res3 = *(evolution[level].Ly.ptr<float>(y2)+x1);
res4 = *(evolution[level].Ly.ptr<float>(y2)+x2);
ry = (1.0f - fx)*(1.0f - fy)*res1 + fx*(1.0f - fy)*res2 + (1.0f - fx)*fy*res3 + fx*fy*res4;
// Get the x and y derivatives on the rotated axis
rry = rx*co + ry*si;
rrx = -rx*si + ry*co;
// Sum the derivatives to the cumulative descriptor
dx += rrx;
dy += rry;
mdx += fabs(rrx);
mdy += fabs(rry);
}
}
// Add the values to the descriptor vector
desc[dcount++] = dx;
desc[dcount++] = dy;
desc[dcount++] = mdx;
desc[dcount++] = mdy;
// Store the current length^2 of the vector
len += dx*dx + dy*dy + mdx*mdx + mdy*mdy;
}
}
// convert to unit vector
len = sqrt(len);
for (int i = 0; i < dsize; i++) {
desc[i] /= len;
}
}
/* ************************************************************************* */
/**
* @brief This method computes the upright descriptor (not rotation invariant) of
@@ -1271,8 +1072,8 @@ void Upright_MLDB_Full_Descriptor_Invoker::Get_Upright_MLDB_Full_Descriptor(cons
const std::vector<TEvolution>& evolution = *evolution_;
// Matrices for the M-LDB descriptor
cv::Mat values_1 = cv::Mat::zeros(4, options.descriptor_channels, CV_32FC1);
cv::Mat values_2 = cv::Mat::zeros(9, options.descriptor_channels, CV_32FC1);
cv::Mat values_1 = cv::Mat::zeros(4, options.descriptor_channels, CV_32FC1);
cv::Mat values_2 = cv::Mat::zeros(9, options.descriptor_channels, CV_32FC1);
cv::Mat values_3 = cv::Mat::zeros(16, options.descriptor_channels, CV_32FC1);
// Get the information from the keypoint
@@ -1484,12 +1285,12 @@ void MLDB_Full_Descriptor_Invoker::Get_MLDB_Full_Descriptor(const cv::KeyPoint&
int level = 0, nsamples = 0, scale = 0;
int dcount1 = 0, dcount2 = 0;
const AKAZEOptions & options = *options_;
const AKAZEOptions & options = *options_;
const std::vector<TEvolution>& evolution = *evolution_;
// Matrices for the M-LDB descriptor
cv::Mat values_1 = cv::Mat::zeros(4, options.descriptor_channels, CV_32FC1);
cv::Mat values_2 = cv::Mat::zeros(9, options.descriptor_channels, CV_32FC1);
cv::Mat values_1 = cv::Mat::zeros(4, options.descriptor_channels, CV_32FC1);
cv::Mat values_2 = cv::Mat::zeros(9, options.descriptor_channels, CV_32FC1);
cv::Mat values_3 = cv::Mat::zeros(16, options.descriptor_channels, CV_32FC1);
// Get the information from the keypoint
@@ -2077,11 +1878,11 @@ inline float get_angle(float x, float y) {
}
if (x < 0 && y >= 0) {
return static_cast<float>(CV_PI) - atanf(-y / x);
return static_cast<float>(CV_PI)-atanf(-y / x);
}
if (x < 0 && y < 0) {
return static_cast<float>(CV_PI) + atanf(y / x);
return static_cast<float>(CV_PI)+atanf(y / x);
}
if (x >= 0 && y < 0) {

@@ -126,6 +126,8 @@ CV_INIT_ALGORITHM(GFTTDetector, "Feature2D.GFTT",
///////////////////////////////////////////////////////////////////////////////////////////////////////////
CV_INIT_ALGORITHM(KAZE, "Feature2D.KAZE",
obj.info()->addParam(obj, "descriptor", obj.descriptor);
obj.info()->addParam(obj, "upright", obj.upright);
obj.info()->addParam(obj, "extended", obj.extended))
///////////////////////////////////////////////////////////////////////////////////////////////////////////
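With "descriptor", "upright" and "extended" registered as parameters, a KAZE instance obtained from the Algorithm factory can be reconfigured at runtime. A sketch assuming the generic Algorithm set() interface of this branch; the function name is illustrative only.

#include <opencv2/features2d.hpp>

void configureKazeAtRuntime()
{
    cv::Ptr<cv::FeatureDetector> kaze =
        cv::Algorithm::create<cv::FeatureDetector>("Feature2D.KAZE");

    kaze->set("descriptor", (int)cv::KAZE::DESCRIPTOR_GSURF); // switch descriptor type
    kaze->set("upright", true);                               // skip orientation estimation
    kaze->set("extended", false);                             // 64-float descriptor
}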

@@ -52,11 +52,20 @@ http://www.robesafe.com/personal/pablo.alcantarilla/papers/Alcantarilla12eccv.pd
namespace cv
{
KAZE::KAZE(bool _extended /* = false */)
: extended(_extended)
KAZE::KAZE()
: descriptor(DESCRIPTOR_MSURF)
, extended(false)
, upright(false)
{
}
KAZE::KAZE(DESCRIPTOR_TYPE type, bool _extended, bool _upright)
: descriptor(type)
, extended(_extended)
, upright(_upright)
{
}
KAZE::~KAZE()
{
@@ -102,7 +111,9 @@ namespace cv
KAZEOptions options;
options.img_width = img.cols;
options.img_height = img.rows;
options.descriptor = static_cast<DESCRIPTOR_TYPE>(descriptor);
options.extended = extended;
options.upright = upright;
KAZEFeatures impl(options);
impl.Create_Nonlinear_Scale_Space(img1_32);
@@ -135,7 +146,9 @@ namespace cv
KAZEOptions options;
options.img_width = img.cols;
options.img_height = img.rows;
options.descriptor = static_cast<DESCRIPTOR_TYPE>(descriptor);
options.extended = extended;
options.upright = upright;
KAZEFeatures impl(options);
impl.Create_Nonlinear_Scale_Space(img1_32);
@@ -161,7 +174,9 @@ namespace cv
KAZEOptions options;
options.img_width = img.cols;
options.img_height = img.rows;
options.descriptor = static_cast<DESCRIPTOR_TYPE>(descriptor);
options.extended = extended;
options.upright = upright;
KAZEFeatures impl(options);
impl.Create_Nonlinear_Scale_Space(img1_32);

@@ -5,92 +5,81 @@
* @author Pablo F. Alcantarilla
*/
#ifndef __OPENCV_FEATURES_2D_KAZE_CONFIG_HPP__
#define __OPENCV_FEATURES_2D_KAZE_CONFIG_HPP__
//******************************************************************************
//******************************************************************************
#pragma once
// OpenCV Includes
#include "precomp.hpp"
#include <opencv2/features2d.hpp>
//*************************************************************************************
//*************************************************************************************
// Some defines
#define NMAX_CHAR 400
struct KAZEOptions {
// Some default options
static const float DEFAULT_SCALE_OFFSET = 1.60f; // Base scale offset (sigma units)
static const float DEFAULT_OCTAVE_MAX = 4.0f; // Maximum octave evolution of the image 2^sigma (coarsest scale sigma units)
static const int DEFAULT_NSUBLEVELS = 4; // Default number of sublevels per scale level
static const float DEFAULT_DETECTOR_THRESHOLD = 0.001f; // Detector response threshold to accept point
static const float DEFAULT_MIN_DETECTOR_THRESHOLD = 0.00001f; // Minimum Detector response threshold to accept point
static const int DEFAULT_DESCRIPTOR_MODE = 1; // Descriptor Mode 0->SURF, 1->M-SURF
static const bool DEFAULT_USE_FED = true; // 0->AOS, 1->FED
static const bool DEFAULT_UPRIGHT = false; // Upright descriptors, not invariant to rotation
static const bool DEFAULT_EXTENDED = false; // Extended descriptor, dimension 128
enum DIFFUSIVITY_TYPE {
PM_G1 = 0,
PM_G2 = 1,
WEICKERT = 2
};
// Some important configuration variables
static const float DEFAULT_SIGMA_SMOOTHING_DERIVATIVES = 1.0f;
static const float DEFAULT_KCONTRAST = 0.01f;
static const float KCONTRAST_PERCENTILE = 0.7f;
static const int KCONTRAST_NBINS = 300;
static const bool COMPUTE_KCONTRAST = true;
static const int DEFAULT_DIFFUSIVITY_TYPE = 1; // 0 -> PM G1, 1 -> PM G2, 2 -> Weickert
static const bool USE_CLIPPING_NORMALIZATION = false;
static const float CLIPPING_NORMALIZATION_RATIO = 1.6f;
static const int CLIPPING_NORMALIZATION_NITER = 5;
KAZEOptions()
: descriptor(cv::KAZE::DESCRIPTOR_MSURF)
, diffusivity(PM_G2)
//*************************************************************************************
//*************************************************************************************
, soffset(1.60f)
, omax(4)
, nsublevels(4)
, img_width(0)
, img_height(0)
, sderivatives(1.0f)
, dthreshold(0.001f)
, kcontrast(0.01f)
, kcontrast_percentille(0.7f)
, kcontrast_bins(300)
struct KAZEOptions {
, use_fed(true)
, upright(false)
, extended(false)
, use_clipping_normalilzation(false)
, clipping_normalization_ratio(1.6f)
, clipping_normalization_niter(5)
{
}
KAZEOptions() {
// Load the default options
soffset = DEFAULT_SCALE_OFFSET;
omax = static_cast<int>(DEFAULT_OCTAVE_MAX);
nsublevels = DEFAULT_NSUBLEVELS;
dthreshold = DEFAULT_DETECTOR_THRESHOLD;
use_fed = DEFAULT_USE_FED;
upright = DEFAULT_UPRIGHT;
extended = DEFAULT_EXTENDED;
descriptor = DEFAULT_DESCRIPTOR_MODE;
diffusivity = DEFAULT_DIFFUSIVITY_TYPE;
sderivatives = DEFAULT_SIGMA_SMOOTHING_DERIVATIVES;
}
cv::KAZE::DESCRIPTOR_TYPE descriptor;
DIFFUSIVITY_TYPE diffusivity;
float soffset;
int omax;
int nsublevels;
int img_width;
int img_height;
int diffusivity;
float sderivatives;
float dthreshold;
bool use_fed;
bool upright;
bool extended;
int descriptor;
float soffset;
int omax;
int nsublevels;
int img_width;
int img_height;
float sderivatives;
float dthreshold;
float kcontrast;
float kcontrast_percentille;
int kcontrast_bins;
bool use_fed;
bool upright;
bool extended;
bool use_clipping_normalilzation;
float clipping_normalization_ratio;
int clipping_normalization_niter;
};
struct TEvolution {
cv::Mat Lx, Ly; // First order spatial derivatives
cv::Mat Lxx, Lxy, Lyy; // Second order spatial derivatives
cv::Mat Lflow; // Diffusivity image
cv::Mat Lt; // Evolution image
cv::Mat Lsmooth; // Smoothed image
cv::Mat Lstep; // Evolution step update
cv::Mat Ldet; // Detector response
float etime; // Evolution time
float esigma; // Evolution sigma. For linear diffusion t = sigma^2 / 2
float octave; // Image octave
float sublevel; // Image sublevel in each octave
int sigma_size; // Integer esigma. For computing the feature detector responses
cv::Mat Lx, Ly; // First order spatial derivatives
cv::Mat Lxx, Lxy, Lyy; // Second order spatial derivatives
cv::Mat Lflow; // Diffusivity image
cv::Mat Lt; // Evolution image
cv::Mat Lsmooth; // Smoothed image
cv::Mat Lstep; // Evolution step update
cv::Mat Ldet; // Detector response
float etime; // Evolution time
float esigma; // Evolution sigma. For linear diffusion t = sigma^2 / 2
float octave; // Image octave
float sublevel; // Image sublevel in each octave
int sigma_size; // Integer esigma. For computing the feature detector responses
};
//*************************************************************************************
//*************************************************************************************
#endif

File diff suppressed because it is too large.

@@ -26,97 +26,52 @@ class KAZEFeatures {
private:
// Parameters of the Nonlinear diffusion class
float soffset_; // Base scale offset
float sderivatives_; // Standard deviation of the Gaussian for the nonlinear diff. derivatives
int omax_; // Maximum octave level
int nsublevels_; // Number of sublevels per octave level
int img_width_; // Width of the original image
int img_height_; // Height of the original image
std::vector<TEvolution> evolution_; // Vector of nonlinear diffusion evolution
float kcontrast_; // The contrast parameter for the scalar nonlinear diffusion
float dthreshold_; // Feature detector threshold response
int diffusivity_; // Diffusivity type, 0->PM G1, 1->PM G2, 2-> Weickert
int descriptor_mode_; // Descriptor mode
bool use_fed_; // Set to true in case we want to use FED for the nonlinear diffusion filtering. Set false for using AOS
bool use_upright_; // Set to true in case we want to use the upright version of the descriptors
bool use_extended_; // Set to true in case we want to use the extended version of the descriptors
bool use_normalization;
// Vector of keypoint vectors for finding extrema in multiple threads
std::vector<std::vector<cv::KeyPoint> > kpts_par_;
// FED parameters
int ncycles_; // Number of cycles
bool reordering_; // Flag for reordering time steps
std::vector<std::vector<float > > tsteps_; // Vector of FED dynamic time steps
std::vector<int> nsteps_; // Vector of number of steps per cycle
// Computation times variables in ms
//double tkcontrast_; // Kcontrast factor computation
//double tnlscale_; // Nonlinear Scale space generation
//double tdetector_; // Feature detector
//double tmderivatives_; // Multiscale derivatives computation
//double tdresponse_; // Detector response computation
//double tdescriptor_; // Feature descriptor
//double tsubpixel_; // Subpixel refinement
// Some auxiliary variables used in the AOS step
cv::Mat Ltx_, Lty_, px_, py_, ax_, ay_, bx_, by_, qr_, qc_;
KAZEOptions options;
// Parameters of the Nonlinear diffusion class
std::vector<TEvolution> evolution_; // Vector of nonlinear diffusion evolution
// Vector of keypoint vectors for finding extrema in multiple threads
std::vector<std::vector<cv::KeyPoint> > kpts_par_;
// FED parameters
int ncycles_; // Number of cycles
bool reordering_; // Flag for reordering time steps
std::vector<std::vector<float > > tsteps_; // Vector of FED dynamic time steps
std::vector<int> nsteps_; // Vector of number of steps per cycle
// Some auxiliary variables used in the AOS step
cv::Mat Ltx_, Lty_, px_, py_, ax_, ay_, bx_, by_, qr_, qc_;
public:
// Constructor
KAZEFeatures(KAZEOptions& options);
// Constructor
KAZEFeatures(KAZEOptions& options);
// Public methods for KAZE interface
void Allocate_Memory_Evolution(void);
int Create_Nonlinear_Scale_Space(const cv::Mat& img);
void Feature_Detection(std::vector<cv::KeyPoint>& kpts);
void Feature_Description(std::vector<cv::KeyPoint>& kpts, cv::Mat& desc);
// Public methods for KAZE interface
void Allocate_Memory_Evolution(void);
int Create_Nonlinear_Scale_Space(const cv::Mat& img);
void Feature_Detection(std::vector<cv::KeyPoint>& kpts);
void Feature_Description(std::vector<cv::KeyPoint>& kpts, cv::Mat& desc);
static void Compute_Main_Orientation(cv::KeyPoint& kpt, const std::vector<TEvolution>& evolution_, const KAZEOptions& options);
private:
// Feature Detection Methods
void Compute_KContrast(const cv::Mat& img, const float& kper);
void Compute_Multiscale_Derivatives(void);
void Compute_Detector_Response(void);
void Determinant_Hessian_Parallel(std::vector<cv::KeyPoint>& kpts);
void Find_Extremum_Threading(const int& level);
void Do_Subpixel_Refinement(std::vector<cv::KeyPoint>& kpts);
// AOS Methods
void AOS_Step_Scalar(cv::Mat &Ld, const cv::Mat &Ldprev, const cv::Mat &c, const float& stepsize);
void AOS_Rows(const cv::Mat &Ldprev, const cv::Mat &c, const float& stepsize);
void AOS_Columns(const cv::Mat &Ldprev, const cv::Mat &c, const float& stepsize);
void Thomas(const cv::Mat &a, const cv::Mat &b, const cv::Mat &Ld, cv::Mat &x);
// Feature Description methods
void Compute_Main_Orientation_SURF(cv::KeyPoint& kpt);
// Descriptor Mode -> 0 SURF 64
void Get_SURF_Upright_Descriptor_64(const cv::KeyPoint& kpt, float* desc);
void Get_SURF_Descriptor_64(const cv::KeyPoint& kpt, float* desc);
// Descriptor Mode -> 0 SURF 128
void Get_SURF_Upright_Descriptor_128(const cv::KeyPoint& kpt, float* desc);
void Get_SURF_Descriptor_128(const cv::KeyPoint& kpt, float* desc);
// Descriptor Mode -> 1 M-SURF 64
void Get_MSURF_Upright_Descriptor_64(const cv::KeyPoint& kpt, float* desc);
void Get_MSURF_Descriptor_64(const cv::KeyPoint& kpt, float* desc);
// Descriptor Mode -> 1 M-SURF 128
void Get_MSURF_Upright_Descriptor_128(const cv::KeyPoint& kpt, float* desc);
void Get_MSURF_Descriptor_128(const cv::KeyPoint& kpt, float *desc);
// Descriptor Mode -> 2 G-SURF 64
void Get_GSURF_Upright_Descriptor_64(const cv::KeyPoint& kpt, float* desc);
void Get_GSURF_Descriptor_64(const cv::KeyPoint& kpt, float *desc);
// Descriptor Mode -> 2 G-SURF 128
void Get_GSURF_Upright_Descriptor_128(const cv::KeyPoint& kpt, float* desc);
void Get_GSURF_Descriptor_128(const cv::KeyPoint& kpt, float* desc);
// Feature Detection Methods
void Compute_KContrast(const cv::Mat& img, const float& kper);
void Compute_Multiscale_Derivatives(void);
void Compute_Detector_Response(void);
void Determinant_Hessian_Parallel(std::vector<cv::KeyPoint>& kpts);
void Find_Extremum_Threading(const int& level);
void Do_Subpixel_Refinement(std::vector<cv::KeyPoint>& kpts);
// AOS Methods
void AOS_Step_Scalar(cv::Mat &Ld, const cv::Mat &Ldprev, const cv::Mat &c, const float& stepsize);
void AOS_Rows(const cv::Mat &Ldprev, const cv::Mat &c, const float& stepsize);
void AOS_Columns(const cv::Mat &Ldprev, const cv::Mat &c, const float& stepsize);
void Thomas(const cv::Mat &a, const cv::Mat &b, const cv::Mat &Ld, cv::Mat &x);
};
//*************************************************************************************

@@ -169,12 +169,18 @@ TEST(Features2d_Detector_Keypoints_Dense, validation)
TEST(Features2d_Detector_Keypoints_KAZE, validation)
{
CV_FeatureDetectorKeypointsTest test(Algorithm::create<FeatureDetector>("Feature2D.KAZE"));
test.safe_run();
CV_FeatureDetectorKeypointsTest test_gsurf(cv::Ptr<FeatureDetector>(new cv::KAZE(cv::KAZE::DESCRIPTOR_GSURF, false, false)));
test_gsurf.safe_run();
CV_FeatureDetectorKeypointsTest test_msurf(cv::Ptr<FeatureDetector>(new cv::KAZE(cv::KAZE::DESCRIPTOR_MSURF, false, false)));
test_msurf.safe_run();
}
TEST(Features2d_Detector_Keypoints_AKAZE, validation)
{
CV_FeatureDetectorKeypointsTest test(Algorithm::create<FeatureDetector>("Feature2D.AKAZE"));
test.safe_run();
CV_FeatureDetectorKeypointsTest test_kaze(cv::Ptr<FeatureDetector>(new cv::AKAZE(cv::AKAZE::DESCRIPTOR_KAZE)));
test_kaze.safe_run();
CV_FeatureDetectorKeypointsTest test_mldb(cv::Ptr<FeatureDetector>(new cv::AKAZE(cv::AKAZE::DESCRIPTOR_MLDB)));
test_mldb.safe_run();
}
