Merge pull request #480 from cbalint13:hdf

Alexander Alekhin 9 years ago
commit 6b1483ecdb
Changed files:
  modules/hdf/include/opencv2/hdf/hdf5.hpp (61 changed lines)
  modules/hdf/src/hdf5.cpp (150 changed lines)

modules/hdf/include/opencv2/hdf/hdf5.hpp

@@ -115,14 +115,19 @@ public:
     /* @overload */
     CV_WRAP virtual void dscreate( const int rows, const int cols, const int type,
-                  String dslabel, const int compresslevel = HDF5::H5_NONE,
-                  const vector<int>& dims_chunks = vector<int>() ) const = 0;
+                  String dslabel ) const = 0;
+    /* @overload */
+    CV_WRAP virtual void dscreate( const int rows, const int cols, const int type,
+                  String dslabel, const int compresslevel ) const = 0;
+    /* @overload */
+    CV_WRAP virtual void dscreate( const int rows, const int cols, const int type,
+                  String dslabel, const int compresslevel, const vector<int>& dims_chunks ) const = 0;
     /** @brief Create and allocate storage for two dimensional single or multi channel dataset.
     @param rows declare amount of rows
     @param cols declare amount of cols
     @param type type to be used
     @param dslabel specify the hdf5 dataset label, any existing dataset with the same label will be overwritten.
-    @param compresslevel specify the compression level 0-9 to be used, by default H5_NONE means none at all.
+    @param compresslevel specify the compression level 0-9 to be used, H5_NONE is default and means no compression.
     @param dims_chunks each array member specify chunking sizes to be used for block i/o,
     by default NULL means none at all.
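Not part of the patch, but for orientation: a minimal usage sketch of the new explicit dscreate() overloads. The file name "test.h5" and the dataset labels are placeholders, and availability of cv::hdf::open() via <opencv2/hdf.hpp> is assumed.

    #include <opencv2/core.hpp>
    #include <opencv2/hdf.hpp>

    int main()
    {
        // open (or create) an HDF5 file
        cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "test.h5" );

        // new short overload: 100 x 50 CV_64F dataset, no compression, no chunking
        if ( !h5io->hlexists( "mydata" ) )
            h5io->dscreate( 100, 50, CV_64F, "mydata" );

        // new overload with an explicit gzip compression level (0-9)
        if ( !h5io->hlexists( "mydata_gz" ) )
            h5io->dscreate( 100, 50, CV_64F, "mydata_gz", 9 );

        h5io->close();
        return 0;
    }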
@@ -181,17 +186,24 @@ public:
     Multiple datasets inside single hdf5 file is allowed.
     */
     CV_WRAP virtual void dscreate( const int rows, const int cols, const int type,
-                  String dslabel, const int compresslevel = HDF5::H5_NONE, const int* dims_chunks = NULL ) const = 0;
+                  String dslabel, const int compresslevel, const int* dims_chunks ) const = 0;
     /* @overload */
-    CV_WRAP virtual void dscreate( const vector<int>& sizes, const int type, String dslabel,
-                  const int compresslevel = HDF5::H5_NONE, const vector<int>& dims_chunks = vector<int>() ) const = 0;
+    CV_WRAP virtual void dscreate( const int n_dims, const int* sizes, const int type,
+                  String dslabel ) const = 0;
+    /* @overload */
+    CV_WRAP virtual void dscreate( const int n_dims, const int* sizes, const int type,
+                  String dslabel, const int compresslevel ) const = 0;
+    /* @overload */
+    CV_WRAP virtual void dscreate( const vector<int>& sizes, const int type,
+                  String dslabel, const int compresslevel = HDF5::H5_NONE,
+                  const vector<int>& dims_chunks = vector<int>() ) const = 0;
     /** @brief Create and allocate storage for n-dimensional dataset, single or mutichannel type.
     @param n_dims declare number of dimensions
     @param sizes array containing sizes for each dimensions
     @param type type to be used
     @param dslabel specify the hdf5 dataset label, any existing dataset with the same label will be overwritten.
-    @param compresslevel specify the compression level 0-9 to be used, by default H5_NONE means none at all.
+    @param compresslevel specify the compression level 0-9 to be used, H5_NONE is default and means no compression.
     @param dims_chunks each array member specify chunking sizes to be used for block i/o,
     by default NULL means none at all.
     @note If the dataset already exists an exception will be thrown. Existence of the dataset can be checked
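A corresponding sketch for the new n-dimensional overloads; again, the file name and labels are placeholders, not taken from the patch.

    #include <opencv2/core.hpp>
    #include <opencv2/hdf.hpp>

    void create_nd_datasets()
    {
        cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "test.h5" );

        int sizes[3] = { 100, 100, 20 };
        // shortest new overload: no compression, no chunking
        h5io->dscreate( 3, sizes, CV_64FC2, "nd_data" );
        // new overload with an explicit compression level only
        h5io->dscreate( 3, sizes, CV_64FC2, "nd_data_gz", 6 );

        h5io->close();
    }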
@@ -254,7 +266,7 @@ public:
     @endcode
     */
     CV_WRAP virtual void dscreate( const int n_dims, const int* sizes, const int type,
-                  String dslabel, const int compresslevel = HDF5::H5_NONE, const int* dims_chunks = NULL ) const = 0;
+                  String dslabel, const int compresslevel, const int* dims_chunks ) const = 0;
     /** @brief Fetch dataset sizes
     @param dslabel specify the hdf5 dataset label to be measured.
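The fully-specified variant keeps explicit chunking for block i/o; a sketch combining it with dsgetsize() and dsgettype() follows. Sizes, chunk shape, and the "big_data" label are illustrative assumptions.

    #include <vector>
    #include <opencv2/core.hpp>
    #include <opencv2/hdf.hpp>

    void create_chunked_dataset()
    {
        cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "test.h5" );

        int sizes[2]  = { 1000, 1000 };
        int chunks[2] = {  100,  100 };   // chunk sizes used for block i/o
        h5io->dscreate( 2, sizes, CV_32F, "big_data", 6, chunks );

        std::vector<int> dims = h5io->dsgetsize( "big_data" );   // { 1000, 1000 }
        int type = h5io->dsgettype( "big_data" );                // CV_32F
        (void)dims; (void)type;

        h5io->close();
    }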
@@ -281,9 +293,14 @@ public:
     */
     CV_WRAP virtual int dsgettype( String dslabel ) const = 0;
+    /* @overload */
+    CV_WRAP virtual void dswrite( InputArray Array, String dslabel ) const = 0;
+    /* @overload */
+    CV_WRAP virtual void dswrite( InputArray Array, String dslabel,
+                  const int* dims_offset ) const = 0;
     /* @overload */
     CV_WRAP virtual void dswrite( InputArray Array, String dslabel,
-                  const vector<int>& dims_offset = vector<int>(),
+                  const vector<int>& dims_offset,
                   const vector<int>& dims_counts = vector<int>() ) const = 0;
     /** @brief Write or overwrite a Mat object into specified dataset of hdf5 file.
     @param Array specify Mat data array to be written.
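A sketch of the new dswrite() overloads: the short form writes the whole Mat into the labelled dataset, the offset/counts form writes a continuous sub-block. Labels, sizes, and offsets are illustrative only.

    #include <opencv2/core.hpp>
    #include <opencv2/hdf.hpp>

    void write_dataset()
    {
        cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "test.h5" );

        cv::Mat A = cv::Mat::eye( 100, 50, CV_64F );
        // shortest new overload: write the whole Mat into dataset "A"
        h5io->dswrite( A, "A" );

        // write a 10x10 block at row/col offset (50, 20); data must be continuous
        cv::Mat block = A( cv::Rect( 0, 0, 10, 10 ) ).clone();
        int offset[2] = { 50, 20 };
        int counts[2] = { 10, 10 };
        h5io->dswrite( block, "A", offset, counts );

        h5io->close();
    }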
@@ -348,11 +365,16 @@ public:
     @endcode
     */
     CV_WRAP virtual void dswrite( InputArray Array, String dslabel,
-                  const int* dims_offset = NULL, const int* dims_counts = NULL ) const = 0;
+                  const int* dims_offset, const int* dims_counts ) const = 0;
     /* @overload */
-    CV_WRAP virtual void dsinsert( InputArray Array, String dslabel,
-                  const vector<int>& dims_offset = vector<int>(),
+    CV_WRAP virtual void dsinsert( InputArray Array, String dslabel ) const = 0;
+    /* @overload */
+    CV_WRAP virtual void dsinsert( InputArray Array,
+                  String dslabel, const int* dims_offset ) const = 0;
+    /* @overload */
+    CV_WRAP virtual void dsinsert( InputArray Array,
+                  String dslabel, const vector<int>& dims_offset,
                   const vector<int>& dims_counts = vector<int>() ) const = 0;
     /** @brief Insert or overwrite a Mat object into specified dataset and autoexpand dataset size if **unlimited** property allows.
     @param Array specify Mat data array to be written.
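A sketch of dsinsert() against a chunked dataset with an unlimited dimension, the case where autoexpansion applies; dims_counts is left at its default and taken from the Mat size. The chunk shape, offsets, and label are assumptions for illustration.

    #include <opencv2/core.hpp>
    #include <opencv2/hdf.hpp>

    void append_rows()
    {
        cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "test.h5" );

        // an unlimited row dimension needs chunking so the dataset can grow
        int chunks[2] = { 50, 100 };
        if ( !h5io->hlexists( "seq" ) )
            h5io->dscreate( cv::hdf::HDF5::H5_UNLIMITED, 100, CV_64F, "seq",
                            cv::hdf::HDF5::H5_NONE, chunks );

        cv::Mat rows( 50, 100, CV_64F, cv::Scalar( 1.0 ) );
        int offset[2] = { 0, 0 };
        h5io->dsinsert( rows, "seq", offset );   // rows 0..49
        offset[0] = 50;
        h5io->dsinsert( rows, "seq", offset );   // rows 50..99, dataset autoexpands

        h5io->close();
    }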
@@ -402,12 +424,17 @@ public:
     @endcode
     */
     CV_WRAP virtual void dsinsert( InputArray Array, String dslabel,
-                  const int* dims_offset = NULL, const int* dims_counts = NULL ) const = 0;
+                  const int* dims_offset, const int* dims_counts ) const = 0;
+    /* @overload */
+    CV_WRAP virtual void dsread( OutputArray Array, String dslabel ) const = 0;
+    /* @overload */
+    CV_WRAP virtual void dsread( OutputArray Array,
+                  String dslabel, const int* dims_offset ) const = 0;
     /* @overload */
     CV_WRAP virtual void dsread( OutputArray Array, String dslabel,
-                  const vector<int>& dims_offset = vector<int>(),
+                  const vector<int>& dims_offset,
                   const vector<int>& dims_counts = vector<int>() ) const = 0;
     /** @brief Read specific dataset from hdf5 file into Mat object.
     @param Array Mat container where data reads will be returned.
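A sketch of the matching dsread() overloads, assuming a 2-D dataset labelled "A" such as the one written in the dswrite() sketch above.

    #include <opencv2/core.hpp>
    #include <opencv2/hdf.hpp>

    void read_dataset()
    {
        cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "test.h5" );

        cv::Mat full, block;
        // shortest new overload: read the whole dataset
        h5io->dsread( full, "A" );

        // read a 10x10 sub-block starting at row 5, col 5
        int offset[2] = { 5, 5 };
        int counts[2] = { 10, 10 };
        h5io->dsread( block, "A", offset, counts );

        h5io->close();
    }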
@@ -449,7 +476,7 @@ public:
     @endcode
     */
     CV_WRAP virtual void dsread( OutputArray Array, String dslabel,
-                  const int* dims_offset = NULL, const int* dims_counts = NULL ) const = 0;
+                  const int* dims_offset, const int* dims_counts ) const = 0;
     /** @brief Fetch keypoint dataset size
     @param kplabel specify the hdf5 dataset label to be measured.
@@ -468,9 +495,9 @@ public:
     /** @brief Create and allocate special storage for cv::KeyPoint dataset.
     @param size declare fixed number of KeyPoints
     @param kplabel specify the hdf5 dataset label, any existing dataset with the same label will be overwritten.
-    @param compresslevel specify the compression level 0-9 to be used, by default H5_NONE means none at all.
+    @param compresslevel specify the compression level 0-9 to be used, H5_NONE is default and means no compression.
     @param chunks each array member specify chunking sizes to be used for block i/o,
-    by default H5_NONE means none at all.
+    H5_NONE is default and means no compression.
     @note If the dataset already exists an exception will be thrown. Existence of the dataset can be checked
     using hlexists().
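For completeness, a sketch of the cv::KeyPoint storage described by this documentation block (kpcreate/kpwrite/kpread); the "keypoints" label is a placeholder and the default H5_NONE compression and chunking are assumed.

    #include <vector>
    #include <opencv2/core.hpp>
    #include <opencv2/hdf.hpp>

    void store_keypoints()
    {
        cv::Ptr<cv::hdf::HDF5> h5io = cv::hdf::open( "test.h5" );

        std::vector<cv::KeyPoint> kps;
        kps.push_back( cv::KeyPoint( 10.f, 20.f, 1.f ) );
        kps.push_back( cv::KeyPoint( 30.f, 40.f, 1.f ) );

        // fixed-size keypoint dataset, then write and read it back
        if ( !h5io->hlexists( "keypoints" ) )
            h5io->kpcreate( (int)kps.size(), "keypoints" );
        h5io->kpwrite( kps, "keypoints" );

        std::vector<cv::KeyPoint> back;
        h5io->kpread( back, "keypoints" );

        h5io->close();
    }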

modules/hdf/src/hdf5.cpp

@@ -75,52 +75,81 @@ public:
     // get sizes of dataset
     virtual vector<int> dsgetsize( String dslabel, int dims_flag = H5_GETDIMS ) const;
-    // get data type of dataset
+    /* get data type of dataset */
     virtual int dsgettype( String dslabel ) const;
-    // overload dscreate()
-    virtual void dscreate( const int rows, const int cols, const int type,
-                  String dslabel, const int compresslevel = H5_NONE,
-                  const vector<int>& dims_chunks = vector<int>() ) const;
-    // create two dimensional single or mutichannel dataset
-    virtual void dscreate( const int rows, const int cols, const int type,
-                  String dslabel, const int compresslevel = H5_NONE, const int* dims_chunks = NULL ) const;
-    // overload dscreate()
+    // overload dscreate() #1
+    virtual void dscreate( const int rows, const int cols, const int type, String dslabel ) const;
+    // overload dscreate() #2
+    virtual void dscreate( const int rows, const int cols, const int type, String dslabel,
+                  const int compresslevel ) const;
+    // overload dscreate() #3
+    virtual void dscreate( const int rows, const int cols, const int type, String dslabel,
+                  const int compresslevel, const vector<int>& dims_chunks ) const;
+    /* create two dimensional single or mutichannel dataset */
+    virtual void dscreate( const int rows, const int cols, const int type, String dslabel,
+                  const int compresslevel, const int* dims_chunks ) const;
+    // overload dscreate() #1
+    virtual void dscreate( const int n_dims, const int* sizes, const int type,
+                  String dslabel ) const;
+    // overload dscreate() #2
+    virtual void dscreate( const int n_dims, const int* sizes, const int type,
+                  String dslabel, const int compresslevel ) const;
+    // overload dscreate() #3
     virtual void dscreate( const vector<int>& sizes, const int type, String dslabel,
                   const int compresslevel = H5_NONE, const vector<int>& dims_chunks = vector<int>() ) const;
-    // create n-dimensional single or mutichannel dataset
+    /* create n-dimensional single or mutichannel dataset */
     virtual void dscreate( const int n_dims, const int* sizes, const int type,
-                  String dslabel, const int compresslevel = H5_NONE, const int* dims_chunks = NULL ) const;
+                  String dslabel, const int compresslevel, const int* dims_chunks ) const;
-    // overload dswrite()
-    virtual void dswrite( InputArray Array, String dslabel,
-                  const vector<int>& dims_offset = vector<int>(),
+    // overload dswrite() #1
+    virtual void dswrite( InputArray Array, String dslabel ) const;
+    // overload dswrite() #2
+    virtual void dswrite( InputArray Array, String dslabel, const int* dims_offset ) const;
+    // overload dswrite() #3
+    virtual void dswrite( InputArray Array, String dslabel, const vector<int>& dims_offset,
                   const vector<int>& dims_counts = vector<int>() ) const;
-    // write into dataset
+    /* write into dataset */
     virtual void dswrite( InputArray Array, String dslabel,
-                  const int* dims_offset = NULL, const int* dims_counts = NULL ) const;
+                  const int* dims_offset, const int* dims_counts ) const;
-    // overload dsinsert()
+    // overload dsinsert() #1
+    virtual void dsinsert( InputArray Array, String dslabel ) const;
+    // overload dsinsert() #2
+    virtual void dsinsert( InputArray Array, String dslabel, const int* dims_offset ) const;
+    // overload dsinsert() #3
     virtual void dsinsert( InputArray Array, String dslabel,
-                  const vector<int>& dims_offset = vector<int>(),
-                  const vector<int>& dims_counts = vector<int>() ) const;
+                  const vector<int>& dims_offset, const vector<int>& dims_counts = vector<int>() ) const;
-    // append / merge into dataset
+    /* append / merge into dataset */
     virtual void dsinsert( InputArray Array, String dslabel,
                   const int* dims_offset = NULL, const int* dims_counts = NULL ) const;
-    // overload dsread()
+    // overload dsread() #1
+    virtual void dsread( OutputArray Array, String dslabel ) const;
+    // overload dsread() #2
+    virtual void dsread( OutputArray Array, String dslabel, const int* dims_offset ) const;
+    // overload dsread() #3
     virtual void dsread( OutputArray Array, String dslabel,
-                  const vector<int>& dims_offset = vector<int>(),
-                  const vector<int>& dims_counts = vector<int>() ) const;
+                  const vector<int>& dims_offset, const vector<int>& dims_counts = vector<int>() ) const;
     // read from dataset
     virtual void dsread( OutputArray Array, String dslabel,
-                  const int* dims_offset = NULL, const int* dims_counts = NULL ) const;
+                  const int* dims_offset, const int* dims_counts ) const;

     /*
      * std::vector<cv::KeyPoint>
@@ -351,6 +380,28 @@ int HDF5Impl::dsgettype( String dslabel ) const
     return CV_MAKETYPE( cvtype, channs );
 }

+// overload
+void HDF5Impl::dscreate( const int rows, const int cols, const int type,
+                         String dslabel ) const
+{
+    // dataset dims
+    int dsizes[2] = { rows, cols };
+
+    // create the two dim array
+    dscreate( 2, dsizes, type, dslabel, HDF5::H5_NONE, NULL );
+}
+
+// overload
+void HDF5Impl::dscreate( const int rows, const int cols, const int type,
+                         String dslabel, const int compresslevel ) const
+{
+    // dataset dims
+    int dsizes[2] = { rows, cols };
+
+    // create the two dim array
+    dscreate( 2, dsizes, type, dslabel, compresslevel, NULL );
+}
+
 // overload
 void HDF5Impl::dscreate( const int rows, const int cols, const int type,
                          String dslabel, const int compresslevel,
@@ -370,6 +421,20 @@ void HDF5Impl::dscreate( const int rows, const int cols, const int type,
     dscreate( 2, dsizes, type, dslabel, compresslevel, dims_chunks );
 }

+// overload
+void HDF5Impl::dscreate( const int n_dims, const int* sizes, const int type,
+                         String dslabel ) const
+{
+    dscreate( n_dims, sizes, type, dslabel, H5_NONE, NULL );
+}
+
+// overload
+void HDF5Impl::dscreate( const int n_dims, const int* sizes, const int type,
+                         String dslabel, const int compresslevel ) const
+{
+    dscreate( n_dims, sizes, type, dslabel, compresslevel, NULL );
+}
+
 // overload
 void HDF5Impl::dscreate( const vector<int>& sizes, const int type,
                          String dslabel, const int compresslevel,
@@ -455,6 +520,19 @@ void HDF5Impl::dscreate( const int n_dims, const int* sizes, const int type,
     H5Sclose( dspace );
 }

+// overload
+void HDF5Impl::dsread( OutputArray Array, String dslabel ) const
+{
+    dsread( Array, dslabel, NULL, NULL );
+}
+
+// overload
+void HDF5Impl::dsread( OutputArray Array, String dslabel,
+                       const int* dims_offset ) const
+{
+    dsread( Array, dslabel, dims_offset, NULL );
+}
+
 // overload
 void HDF5Impl::dsread( OutputArray Array, String dslabel,
                        const vector<int>& dims_offset,
@@ -557,6 +635,17 @@ void HDF5Impl::dsread( OutputArray Array, String dslabel,
     H5Dclose( dsdata );
 }

+// overload
+void HDF5Impl::dswrite( InputArray Array, String dslabel ) const
+{
+    dswrite( Array, dslabel, NULL, NULL );
+}
+
+// overload
+void HDF5Impl::dswrite( InputArray Array, String dslabel,
+                        const int* dims_offset ) const
+{
+    dswrite( Array, dslabel, dims_offset, NULL );
+}
+
 // overload
 void HDF5Impl::dswrite( InputArray Array, String dslabel,
                         const vector<int>& dims_offset,
@@ -641,6 +730,19 @@ void HDF5Impl::dswrite( InputArray Array, String dslabel,
     H5Dclose( dsdata );
 }

+// overload
+void HDF5Impl::dsinsert( InputArray Array, String dslabel ) const
+{
+    dsinsert( Array, dslabel, NULL, NULL );
+}
+
+// overload
+void HDF5Impl::dsinsert( InputArray Array, String dslabel,
+                         const int* dims_offset ) const
+{
+    dsinsert( Array, dslabel, dims_offset, NULL );
+}
+
 // overload
 void HDF5Impl::dsinsert( InputArray Array, String dslabel,
                          const vector<int>& dims_offset,
