FlannBasedMatcher (LshIndex) in features2d: optimization to support additional train() calls on an existing index

The current miniflann implementation releases the trained index and rebuilds it from scratch every time new training data is added.
However, some indexing algorithms, such as LSH, can add new data to an already built index.
This branch implements that optimization for the LSH index used by FlannBasedMatcher in the features2d module.
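For context, the pattern this speeds up looks roughly like the sketch below (illustrative only, not part of the patch; the header paths, ORB-style binary descriptors and LSH parameter values are assumptions): a matcher backed by an LSH index keeps receiving new descriptor batches, and train() is called after every add().

    #include <vector>
    #include <opencv2/core.hpp>
    #include <opencv2/features2d.hpp>

    void matchIncrementally(const std::vector<cv::Mat>& descriptorBatches,
                            const cv::Mat& queryDescriptors)
    {
        // Binary descriptors (e.g. ORB) matched through an LSH-backed FlannBasedMatcher.
        cv::FlannBasedMatcher matcher(new cv::flann::LshIndexParams(12, 20, 2));

        for (size_t i = 0; i < descriptorBatches.size(); ++i)
        {
            matcher.add(std::vector<cv::Mat>(1, descriptorBatches[i]));
            // Without this branch, train() releases the LSH index and rebuilds it from
            // every descriptor added so far; with it, only descriptorBatches[i] is
            // hashed into the existing tables.
            matcher.train();

            std::vector<cv::DMatch> matches;
            matcher.match(queryDescriptors, matches);
        }
    }

With the rebuild-from-scratch behaviour, each train() call above costs time proportional to all descriptors added so far; with this branch it only hashes the latest batch.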
pull/3829/head
ippei ito 10 years ago
parent 29e7eb7719
commit cd42e38013
  1. modules/features2d/src/matchers.cpp (14 lines changed)
  2. modules/flann/include/opencv2/flann/autotuned_index.h (7 lines changed)
  3. modules/flann/include/opencv2/flann/composite_index.h (7 lines changed)
  4. modules/flann/include/opencv2/flann/flann_base.hpp (10 lines changed)
  5. modules/flann/include/opencv2/flann/hierarchical_clustering_index.h (8 lines changed)
  6. modules/flann/include/opencv2/flann/kdtree_index.h (7 lines changed)
  7. modules/flann/include/opencv2/flann/kdtree_single_index.h (7 lines changed)
  8. modules/flann/include/opencv2/flann/kmeans_index.h (7 lines changed)
  9. modules/flann/include/opencv2/flann/linear_index.h (7 lines changed)
  10. modules/flann/include/opencv2/flann/lsh_index.h (18 lines changed)
  11. modules/flann/include/opencv2/flann/lsh_table.h (4 lines changed)
  12. modules/flann/include/opencv2/flann/miniflann.hpp (3 lines changed)
  13. modules/flann/include/opencv2/flann/nn_index.h (5 lines changed)
  14. modules/flann/src/miniflann.cpp (49 lines changed)

modules/features2d/src/matchers.cpp
@@ -531,10 +531,20 @@ void FlannBasedMatcher::clear()
 void FlannBasedMatcher::train()
 {
-    if( flannIndex.empty() || mergedDescriptors.size() < addedDescCount )
+    int trained = mergedDescriptors.size();
+    if (flannIndex.empty() || trained < addedDescCount)
     {
         mergedDescriptors.set( trainDescCollection );
-        flannIndex = new flann::Index( mergedDescriptors.getDescriptors(), *indexParams );
+        // construct flannIndex class, if empty or Algorithm not equal FLANN_INDEX_LSH
+        if (flannIndex.empty() || flannIndex->getAlgorithm() != cvflann::FLANN_INDEX_LSH)
+        {
+            flannIndex = new flann::Index(mergedDescriptors.getDescriptors(), *indexParams);
+        }
+        else
+        {
+            flannIndex->build(mergedDescriptors.getDescriptors(), mergedDescriptors.getDescriptors().rowRange(trained, mergedDescriptors.size()), *indexParams, cvflann::FLANN_DIST_HAMMING);
+        }
     }
 }

modules/flann/include/opencv2/flann/autotuned_index.h
@@ -94,6 +94,13 @@ public:
         }
     }
 
+    /**
+     * Dummy implementation for other algorithms of addable indexes after that.
+     */
+    void addIndex(const Matrix<ElementType>& wholeData, const Matrix<ElementType>& additionalData)
+    {
+    }
+
     /**
      * Method responsible with building the index.
      */

modules/flann/include/opencv2/flann/composite_index.h
@@ -130,6 +130,13 @@ public:
         return kmeans_index_->usedMemory() + kdtree_index_->usedMemory();
     }
 
+    /**
+     * Dummy implementation for other algorithms of addable indexes after that.
+     */
+    void addIndex(const Matrix<ElementType>& wholeData, const Matrix<ElementType>& additionalData)
+    {
+    }
+
     /**
      * \brief Builds the index
      */

modules/flann/include/opencv2/flann/flann_base.hpp
@@ -124,6 +124,16 @@ public:
         delete nnIndex_;
     }
 
+    /**
+     * implementation for algorithms of addable indexes after that.
+     */
+    void addIndex(const Matrix<ElementType>& wholeData, const Matrix<ElementType>& additionalData)
+    {
+        if (!loaded_) {
+            nnIndex_->addIndex(wholeData, additionalData);
+        }
+    }
+
     /**
      * Builds the index.
      */

modules/flann/include/opencv2/flann/hierarchical_clustering_index.h
@@ -378,6 +378,14 @@ public:
         return pool.usedMemory+pool.wastedMemory+memoryCounter;
     }
 
+    /**
+     * Dummy implementation for other algorithms of addable indexes after that.
+     */
+    void addIndex(const Matrix<ElementType>& wholeData, const Matrix<ElementType>& additionalData)
+    {
+    }
+
     /**
      * Builds the index
      */

modules/flann/include/opencv2/flann/kdtree_index.h
@@ -117,6 +117,13 @@ public:
         delete[] var_;
     }
 
+    /**
+     * Dummy implementation for other algorithms of addable indexes after that.
+     */
+    void addIndex(const Matrix<ElementType>& wholeData, const Matrix<ElementType>& additionalData)
+    {
+    }
+
     /**
      * Builds the index
      */

modules/flann/include/opencv2/flann/kdtree_single_index.h
@@ -110,6 +110,13 @@ public:
         if (reorder_) delete[] data_.data;
     }
 
+    /**
+     * Dummy implementation for other algorithms of addable indexes after that.
+     */
+    void addIndex(const Matrix<ElementType>& wholeData, const Matrix<ElementType>& additionalData)
+    {
+    }
+
     /**
      * Builds the index
      */

modules/flann/include/opencv2/flann/kmeans_index.h
@@ -362,6 +362,13 @@ public:
         return pool_.usedMemory+pool_.wastedMemory+memoryCounter_;
     }
 
+    /**
+     * Dummy implementation for other algorithms of addable indexes after that.
+     */
+    void addIndex(const Matrix<ElementType>& wholeData, const Matrix<ElementType>& additionalData)
+    {
+    }
+
     /**
      * Builds the index
      */

modules/flann/include/opencv2/flann/linear_index.h
@@ -85,6 +85,13 @@ public:
         return 0;
     }
 
+    /**
+     * Dummy implementation for other algorithms of addable indexes after that.
+     */
+    void addIndex(const Matrix<ElementType>& wholeData, const Matrix<ElementType>& additionalData)
+    {
+    }
+
     void buildIndex()
     {
         /* nothing to do here for linear search */

modules/flann/include/opencv2/flann/lsh_index.h
@@ -104,6 +104,20 @@ public:
     LshIndex(const LshIndex&);
     LshIndex& operator=(const LshIndex&);
 
+    /**
+     * Implementation for the LSH addable indexes after that.
+     */
+    void addIndex(const Matrix<ElementType>& wholeData, const Matrix<ElementType>& additionalData)
+    {
+        tables_.resize(table_number_);
+        for (unsigned int i = 0; i < table_number_; ++i) {
+            lsh::LshTable<ElementType>& table = tables_[i];
+            // Add the features to the table with indexed offset
+            table.add(wholeData.rows - additionalData.rows, additionalData);
+        }
+        dataset_ = wholeData;
+    }
+
     /**
      * Builds the index
      */
@@ -126,8 +140,8 @@ public:
             lsh::LshTable<ElementType>& table = tables_[i];
             table = lsh::LshTable<ElementType>(feature_size_, key_size_, indices);
-            // Add the features to the table
-            table.add(dataset_);
+            // Add the features to the table with offset 0
+            table.add(0, dataset_);
         }
     }

modules/flann/include/opencv2/flann/lsh_table.h
@@ -194,13 +194,13 @@ public:
     /** Add a set of features to the table
      * @param dataset the values to store
      */
-    void add(Matrix<ElementType> dataset)
+    void add(int indexed_ofst, Matrix<ElementType> dataset)
     {
 #if USE_UNORDERED_MAP
         buckets_space_.rehash((buckets_space_.size() + dataset.rows) * 1.2);
 #endif
         // Add the features to the table
-        for (unsigned int i = 0; i < dataset.rows; ++i) add(i, dataset[i]);
+        for (unsigned int i = 0; i < dataset.rows; ++i) add(i + indexed_ofst, dataset[i]);
         // Now that the table is full, optimize it for speed/space
         optimize();
     }
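To make the offset arithmetic above concrete (a toy sketch, not library code; the helper name is made up): a feature's bucket entry must hold its global row index in the merged dataset, so LshIndex::addIndex passes the index of the first appended row and LshTable::add shifts each local row by it.

    // Toy illustration of the index mapping used above (hypothetical helper).
    int globalRowIndex(int wholeRows, int additionalRows, int localRow)
    {
        int indexed_ofst = wholeRows - additionalRows; // offset passed by LshIndex::addIndex
        return localRow + indexed_ofst;                // value stored by LshTable::add
    }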

modules/flann/include/opencv2/flann/miniflann.hpp
@@ -134,7 +134,8 @@ public:
     CV_WRAP Index(InputArray features, const IndexParams& params, cvflann::flann_distance_t distType=cvflann::FLANN_DIST_L2);
     virtual ~Index();
 
-    CV_WRAP virtual void build(InputArray features, const IndexParams& params, cvflann::flann_distance_t distType=cvflann::FLANN_DIST_L2);
+    CV_WRAP virtual void build(InputArray wholefeatures, InputArray additionalfeatures, const IndexParams& params, cvflann::flann_distance_t distType=cvflann::FLANN_DIST_L2);
     CV_WRAP virtual void knnSearch(InputArray query, OutputArray indices,
                OutputArray dists, int knn, const SearchParams& params=SearchParams());

modules/flann/include/opencv2/flann/nn_index.h
@@ -59,6 +59,11 @@ public:
      */
     virtual void buildIndex() = 0;
 
+    /**
+     * \brief implementation for algorithms of addable indexes after that.
+     */
+    virtual void addIndex(const Matrix<ElementType>& wholeData, const Matrix<ElementType>& additionalData) = 0;
+
     /**
      * \brief Perform k-nearest neighbor search
      * \param[in] queries The query points for which to find the nearest neighbors

modules/flann/src/miniflann.cpp
@@ -308,7 +308,7 @@ SearchParams::SearchParams( int checks, float eps, bool sorted )
 
 template<typename Distance, typename IndexType> void
-buildIndex_(void*& index, const Mat& data, const IndexParams& params, const Distance& dist = Distance())
+buildIndex_(void*& index, const Mat& wholedata, const Mat& data, const IndexParams& params, const Distance& dist = Distance())
 {
     typedef typename Distance::ElementType ElementType;
     if(DataType<ElementType>::type != data.type())
@@ -317,15 +317,25 @@ buildIndex_(void*& index, const Mat& data, const IndexParams& params, const Dist
         CV_Error(CV_StsBadArg, "Only continuous arrays are supported");
 
     ::cvflann::Matrix<ElementType> dataset((ElementType*)data.data, data.rows, data.cols);
-    IndexType* _index = new IndexType(dataset, get_params(params), dist);
-    _index->buildIndex();
-    index = _index;
+
+    IndexType* _index = NULL;
+    if( !index || getParam<flann_algorithm_t>(params, "algorithm", FLANN_INDEX_LINEAR) != FLANN_INDEX_LSH) // currently, additional index support is the lsh algorithm only.
+    {
+        _index = new IndexType(dataset, get_params(params), dist);
+        _index->buildIndex();
+        index = _index;
+    }
+    else // build additional lsh index
+    {
+        ::cvflann::Matrix<ElementType> wholedataset((ElementType*)wholedata.data, wholedata.rows, wholedata.cols);
+        ((IndexType*)index)->addIndex(wholedataset, dataset);
+    }
 }
 
 template<typename Distance> void
-buildIndex(void*& index, const Mat& data, const IndexParams& params, const Distance& dist = Distance())
+buildIndex(void*& index, const Mat& wholedata, const Mat& data, const IndexParams& params, const Distance& dist = Distance())
 {
-    buildIndex_<Distance, ::cvflann::Index<Distance> >(index, data, params, dist);
+    buildIndex_<Distance, ::cvflann::Index<Distance> >(index, wholedata, data, params, dist);
 }
 
 #if CV_NEON
@@ -348,21 +358,28 @@ Index::Index(InputArray _data, const IndexParams& params, flann_distance_t _dist
     featureType = CV_32F;
     algo = FLANN_INDEX_LINEAR;
     distType = FLANN_DIST_L2;
-    build(_data, params, _distType);
+    build(_data, _data, params, _distType);
 }
 
-void Index::build(InputArray _data, const IndexParams& params, flann_distance_t _distType)
+void Index::build(InputArray &_wholedata, InputArray &_data, const IndexParams& params, flann_distance_t _distType)
 {
-    release();
     algo = getParam<flann_algorithm_t>(params, "algorithm", FLANN_INDEX_LINEAR);
-    if( algo == FLANN_INDEX_SAVED )
+
+    if (algo != FLANN_INDEX_LSH) // do not release if algo == FLANN_INDEX_LSH
+    {
+        release();
+    }
+
+    if (algo == FLANN_INDEX_SAVED)
     {
         load(_data, getParam<std::string>(params, "filename", std::string()));
         return;
     }
 
     Mat data = _data.getMat();
-    index = 0;
+    if (algo != FLANN_INDEX_LSH) // do not clear if algo == FLANN_INDEX_LSH
+    {
+        index = 0;
+    }
     featureType = data.type();
     distType = _distType;
@@ -374,29 +391,29 @@ void Index::build(InputArray _data, const IndexParams& params, flann_distance_t
     switch( distType )
     {
     case FLANN_DIST_HAMMING:
-        buildIndex< HammingDistance >(index, data, params);
+        buildIndex< HammingDistance >(index, _wholedata.getMat(), data, params);
         break;
     case FLANN_DIST_L2:
-        buildIndex< ::cvflann::L2<float> >(index, data, params);
+        buildIndex< ::cvflann::L2<float> >(index, _wholedata.getMat(), data, params);
        break;
     case FLANN_DIST_L1:
-        buildIndex< ::cvflann::L1<float> >(index, data, params);
+        buildIndex< ::cvflann::L1<float> >(index, _wholedata.getMat(), data, params);
        break;
 #if MINIFLANN_SUPPORT_EXOTIC_DISTANCE_TYPES
     case FLANN_DIST_MAX:
-        buildIndex< ::cvflann::MaxDistance<float> >(index, data, params);
+        buildIndex< ::cvflann::MaxDistance<float> >(index, _wholedata.getMat(), data, params);
        break;
     case FLANN_DIST_HIST_INTERSECT:
-        buildIndex< ::cvflann::HistIntersectionDistance<float> >(index, data, params);
+        buildIndex< ::cvflann::HistIntersectionDistance<float> >(index, _wholedata.getMat(), data, params);
        break;
     case FLANN_DIST_HELLINGER:
-        buildIndex< ::cvflann::HellingerDistance<float> >(index, data, params);
+        buildIndex< ::cvflann::HellingerDistance<float> >(index, _wholedata.getMat(), data, params);
        break;
     case FLANN_DIST_CHI_SQUARE:
-        buildIndex< ::cvflann::ChiSquareDistance<float> >(index, data, params);
+        buildIndex< ::cvflann::ChiSquareDistance<float> >(index, _wholedata.getMat(), data, params);
        break;
     case FLANN_DIST_KL:
-        buildIndex< ::cvflann::KL_Divergence<float> >(index, data, params);
+        buildIndex< ::cvflann::KL_Divergence<float> >(index, _wholedata.getMat(), data, params);
        break;
 #endif
     default:
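Taken together, the miniflann changes let a caller keep one cv::flann::Index and grow an LSH index in place, which is what FlannBasedMatcher::train() now does with mergedDescriptors. A rough usage sketch under the modified build() signature (the accumulation scheme, helper name and LSH parameter values are illustrative, not part of the patch):

    #include <opencv2/core.hpp>
    #include <opencv2/flann/miniflann.hpp>

    // 'allDesc' accumulates every binary descriptor indexed so far; 'newDesc' is the latest batch.
    void growLshIndex(cv::flann::Index& index, cv::Mat& allDesc, const cv::Mat& newDesc)
    {
        int firstNewRow = allDesc.rows;
        if (allDesc.empty())
            allDesc = newDesc.clone();  // first batch: whole data == new data
        else
            allDesc.push_back(newDesc); // whole data = previously indexed rows + new rows

        cv::flann::LshIndexParams params(12, 20, 2); // illustrative LSH parameters
        // First argument: the whole dataset; second: only the rows appended in this batch.
        // On the first call the index is still empty, so a fresh LSH index is built;
        // afterwards only the new rows are hashed into the existing tables.
        index.build(allDesc, allDesc.rowRange(firstNewRow, allDesc.rows),
                    params, cvflann::FLANN_DIST_HAMMING);
    }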
