Add docs for remaining functions

pull/265/head
Vitaliy Lyudvichenko 9 years ago
parent c681508872
commit bd242d1c85
  1. modules/dnn/include/opencv2/dnn/dnn.hpp (54)
  2. modules/dnn/include/opencv2/dnn/layer.hpp (81)
  3. modules/dnn/src/dnn.cpp (14)
  4. modules/dnn/test/test_layers.cpp (2)

@@ -74,10 +74,14 @@ namespace dnn //! This namespace is used for dnn module functionality.
String type; //!< Type name which was used for creating layer by layer factory (optional).
};
/** @brief This interface class allows to build new Layers - are building blocks of networks. */
/** @brief This interface class allows to build new Layers - are building blocks of networks.
*
* Each class, derived from Layer, must implement the allocate() method to declare its own outputs and forward() to compute them.
* Also, before using the new layer in a network, you must register it by using one of the @ref LayerFactoryModule "LayerFactory" macros.
*/
struct CV_EXPORTS Layer
{
///List of learned parameters must be stored here to allow read them by using Net::getParam().
//! List of learned parameters must be stored here to allow reading them with Net::getParam().
std::vector<Blob> blobs;
/** @brief Allocates internal buffers and output blobs with respect to the shape of inputs.
@@ -90,7 +94,11 @@ namespace dnn //! This namespace is used for dnn module functionality.
*/
virtual void allocate(const std::vector<Blob*> &input, std::vector<Blob> &output) = 0;
virtual void forward(std::vector<Blob*> &inputs, std::vector<Blob> &outputs) = 0;
/** @brief Given the @p input blobs, computes the @p output blobs.
* @param[in] input the input blobs.
* @param[out] output allocated output blobs, which will store results of the computation.
*/
virtual void forward(std::vector<Blob*> &input, std::vector<Blob> &output) = 0;
/** @brief Returns index of input blob into the input array.
* @param inputName label of input blob
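For illustration, here is a rough sketch of a custom layer written against the interface documented above. The class name IdentityLayer is hypothetical, and a real layer would normally shape its outputs through the Blob API rather than just copying the inputs; this only shows where allocate() and forward() fit.

#include <opencv2/dnn/dnn.hpp>

using namespace cv;
using namespace cv::dnn;

// Hypothetical pass-through layer: every output is a plain copy of the corresponding input.
class IdentityLayer : public Layer
{
public:
    // The factory creates layers from LayerParams, so a constructor with this
    // signature is expected (see _layerDynamicRegisterer in layer.hpp).
    IdentityLayer(LayerParams &params) { (void)params; }

    // Declare and allocate one output blob per input blob.
    virtual void allocate(const std::vector<Blob*> &input, std::vector<Blob> &output)
    {
        output.resize(input.size());
        for (size_t i = 0; i < input.size(); i++)
            output[i] = *input[i];
    }

    // Compute the outputs; in this sketch the computation is a trivial copy.
    virtual void forward(std::vector<Blob*> &input, std::vector<Blob> &output)
    {
        for (size_t i = 0; i < input.size(); i++)
            output[i] = *input[i];
    }
};

Before such a layer can be referenced by type name in a network, it has to be registered through one of the LayerFactory macros shown further below in layer.hpp.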
@@ -118,6 +126,7 @@ namespace dnn //! This namespace is used for dnn module functionality.
* and edges specify relationships between layers' inputs and outputs.
*
* Each network layer has unique integer id and unique string name inside its network.
* LayerId can store either a layer name or a layer id.
*
* This class supports reference counting of its instances, i. e. copies point to the same instance.
*/
@@ -125,8 +134,8 @@ namespace dnn //! This namespace is used for dnn module functionality.
{
public:
Net();
~Net();
Net(); //!< Default constructor.
~Net(); //!< Destructor frees the net only if there are no other references to it.
/** @brief Adds a new layer to the net.
* @param name unique name of the layer being added.
@@ -181,19 +190,52 @@ namespace dnn //! This namespace is used for dnn module functionality.
*/
void setNetInputs(const std::vector<String> &inputBlobNames);
/** @brief Runs forward pass for the whole network */
void forward();
/** @brief Runs forward pass to compute output of layer @p toLayer */
void forward(LayerId toLayer);
/** @brief Runs forward pass to compute output of layer @p toLayer, but computations start from @p startLayer */
void forward(LayerId startLayer, LayerId toLayer);
/** @overload */
void forward(const std::vector<LayerId> &startLayers, const std::vector<LayerId> &toLayers);
//[Wished feature] Optimized forward: makes forward only for layers which wasn't changed after previous forward().
//TODO:
/** @brief Optimized forward.
* @warning Not implemented yet.
* @details Runs the forward pass only for those layers which weren't changed after the previous forward().
*/
void forwardOpt(LayerId toLayer);
/** @overload */
void forwardOpt(const std::vector<LayerId> &toLayers);
/** @brief Sets the new value for the layer output blob.
* @param outputName descriptor of the layer output blob being updated.
* @param blob new blob.
* @see connect(String, String) to know the format of the descriptor.
* @note If the blob being updated is not empty then @p blob must have the same shape,
* because network reshaping is not implemented yet.
*/
void setBlob(String outputName, const Blob &blob);
/** @brief Returns the layer output blob.
* @param outputName the descriptor of the layer output blob to return.
* @see connect(String, String)
*/
Blob getBlob(String outputName);
/** @brief Sets the new value for the learned parameter of the layer.
* @param layer name or id of the layer.
* @param numParam index of the layer parameter in the Layer::blobs array.
* @param blob the new value.
* @see Layer::blobs
* @note If the shape of the new blob differs from the previous shape,
* then the following forward pass may fail.
*/
void setParam(LayerId layer, int numParam, const Blob &blob);
/** @brief Returns the parameter blob of the layer.
* @param layer name or id of the layer.
* @param numParam index of the layer parameter in the Layer::blobs array.
* @see Layer::blobs
*/
Blob getParam(LayerId layer, int numParam = 0);
private:
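Taken together, the Net methods documented in this hunk can be exercised roughly as in the sketch below. The blob descriptors "data" and "prob" and the layer name "fc8" are hypothetical placeholders, and the exact descriptor format is described by connect(String, String), which is not shown in this diff.

#include <opencv2/dnn/dnn.hpp>

using namespace cv;
using namespace cv::dnn;

void runNetSketch(Net &net, const Blob &inputBlob)
{
    // Declare the network inputs (the name "data" is a placeholder).
    std::vector<String> inputNames;
    inputNames.push_back("data");
    net.setNetInputs(inputNames);

    // Feed the input: setBlob() takes a descriptor of a layer output blob.
    net.setBlob("data", inputBlob);

    // Run the forward pass for the whole network.
    net.forward();

    // Read a result blob produced by some layer (descriptor is a placeholder).
    Blob result = net.getBlob("prob");

    // Learned parameters can be read back and replaced; LayerId accepts either
    // a layer name or a numeric layer id.
    Blob weights = net.getParam("fc8", 0);
    net.setParam("fc8", 0, weights);
}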

@@ -47,66 +47,101 @@ namespace cv
{
namespace dnn
{
//Layer factory allows to create instances of registered layers.
class CV_EXPORTS LayerRegister
//! @addtogroup dnn
//! @{
//!
//! @defgroup LayerFactoryModule Utilities for new layer registration
//! @{
/** @brief %Layer factory allows creating instances of registered layers. */
class CV_EXPORTS LayerFactory
{
public:
//! Each Layer class must provide this function to the factory
typedef Ptr<Layer>(*Constuctor)(LayerParams &params);
//! Registers the layer class with typename @p type and specified @p constructor.
static void registerLayer(const String &type, Constuctor constructor);
//! Unregisters registered layer with specified type name.
static void unregisterLayer(const String &type);
/** @brief Creates instance of registered layer.
* @param type type name of the layer to create.
* @param params parameters which will be used for layer initialization.
*/
static Ptr<Layer> createLayerInstance(const String &type, LayerParams& params);
private:
LayerRegister();
LayerFactory();
struct Impl;
static Ptr<Impl> impl;
};
/** @brief Registers a layer constructor at runtime.
* @param type string containing the type name of the layer.
* @param constuctorFunc pointer to the function of type LayerFactory::Constuctor which creates the layer.
* @details This macro must be placed inside function code.
*/
#define REG_RUNTIME_LAYER_FUNC(type, constuctorFunc) \
LayerFactory::registerLayer(#type, constuctorFunc);
/** @brief Registers a layer class at runtime.
* @param type string containing the type name of the layer.
* @param class C++ class, derived from Layer.
* @details This macro must be placed inside function code.
*/
#define REG_RUNTIME_LAYER_CLASS(type, class) \
LayerFactory::registerLayer(#type, _layerDynamicRegisterer<class>);
/** @brief Registers a layer constructor at module load time.
* @param type string containing the type name of the layer.
* @param constuctorFunc pointer to the function of type LayerFactory::Constuctor which creates the layer.
* @details This macro must be placed outside of function code.
*/
#define REG_STATIC_LAYER_FUNC(type, constuctorFunc) \
static _LayerStaticRegisterer __LayerStaticRegisterer_##type(#type, constuctorFunc);
/** @brief Registers a layer class at module load time.
* @param type string containing the type name of the layer.
* @param class C++ class, derived from Layer.
* @details This macro must be placed outside of function code.
*/
#define REG_STATIC_LAYER_CLASS(type, class) \
Ptr<Layer> __LayerStaticRegisterer_func_##type(LayerParams &params) \
{ return Ptr<Layer>(new class(params)); } \
static _LayerStaticRegisterer __LayerStaticRegisterer_##type(#type, __LayerStaticRegisterer_func_##type);
//! @}
//! @}
template<typename LayerClass>
Ptr<Layer> _layerDynamicRegisterer(LayerParams &params)
{
return Ptr<Layer>(new LayerClass(params));
}
#define REG_RUNTIME_LAYER_FUNC(type, constuctorFunc) \
LayerRegister::registerLayer(#type, constuctorFunc);
#define REG_RUNTIME_LAYER_CLASS(type, class) \
LayerRegister::registerLayer(#type, _layerDynamicRegisterer<class>);
//allows automatic registration of the created layer at module load time
struct _LayerStaticRegisterer
{
String type;
_LayerStaticRegisterer(const String &type, LayerRegister::Constuctor constuctor)
_LayerStaticRegisterer(const String &type, LayerFactory::Constuctor constuctor)
{
this->type = type;
LayerRegister::registerLayer(type, constuctor);
LayerFactory::registerLayer(type, constuctor);
}
~_LayerStaticRegisterer()
{
LayerRegister::unregisterLayer(type);
LayerFactory::unregisterLayer(type);
}
};
//registers layer constructor on module load time
#define REG_STATIC_LAYER_FUNC(type, constuctorFunc) \
static _LayerStaticRegisterer __LayerStaticRegisterer_##type(#type, constuctorFunc);
//registers layer class on module load time
#define REG_STATIC_LAYER_CLASS(type, class) \
Ptr<Layer> __LayerStaticRegisterer_func_##type(LayerParams &params) \
{ return Ptr<Layer>(new class(params)); } \
static _LayerStaticRegisterer __LayerStaticRegisterer_##type(#type, __LayerStaticRegisterer_func_##type);
}
}
#endif
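As a hedged sketch of how the registration utilities above might be used: the class MyLayer and the type name MyType are hypothetical, and the layer body is deliberately trivial.

#include <opencv2/dnn/dnn.hpp>
#include <opencv2/dnn/layer.hpp>

using namespace cv;
using namespace cv::dnn;

// Minimal hypothetical layer, used only to demonstrate registration.
class MyLayer : public Layer
{
public:
    MyLayer(LayerParams &params) { (void)params; }
    virtual void allocate(const std::vector<Blob*> &input, std::vector<Blob> &output)
    { output.resize(input.size()); }
    virtual void forward(std::vector<Blob*> &input, std::vector<Blob> &output)
    { (void)input; (void)output; }
};

// Load-time registration: the REG_STATIC_* macros go outside of any function.
REG_STATIC_LAYER_CLASS(MyType, MyLayer)

void createRegisteredLayer()
{
    // Runtime registration of the same class would instead be written inside a
    // function body:
    //   REG_RUNTIME_LAYER_CLASS(MyType, MyLayer)

    // Either way, the factory can then create instances of the type by name.
    LayerParams params;
    Ptr<Layer> layer = LayerFactory::createLayerInstance("MyType", params);
    CV_Assert(!layer.empty());
}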

@@ -116,7 +116,7 @@ struct LayerData
if (layerInstance)
return layerInstance;
layerInstance = LayerRegister::createLayerInstance(type, params);
layerInstance = LayerFactory::createLayerInstance(type, params);
if (!layerInstance)
{
CV_Error(Error::StsError, "Can't create layer \"" + name + "\" of type \"" + type + "\"");
@@ -557,14 +557,14 @@ Layer::~Layer() {}
//////////////////////////////////////////////////////////////////////////
struct LayerRegister::Impl : public std::map<String, LayerRegister::Constuctor>
struct LayerFactory::Impl : public std::map<String, LayerFactory::Constuctor>
{
};
//allocates on load and cleans on exit
Ptr<LayerRegister::Impl> LayerRegister::impl(new LayerRegister::Impl());
Ptr<LayerFactory::Impl> LayerFactory::impl(new LayerFactory::Impl());
void LayerRegister::registerLayer(const String &_type, Constuctor constructor)
void LayerFactory::registerLayer(const String &_type, Constuctor constructor)
{
String type = _type.toLowerCase();
Impl::iterator it = impl->find(type);
@@ -577,16 +577,16 @@ void LayerRegister::registerLayer(const String &_type, Constuctor constructor)
impl->insert(std::make_pair(type, constructor));
}
void LayerRegister::unregisterLayer(const String &_type)
void LayerFactory::unregisterLayer(const String &_type)
{
String type = _type.toLowerCase();
impl->erase(type);
}
Ptr<Layer> LayerRegister::createLayerInstance(const String &_type, LayerParams& params)
Ptr<Layer> LayerFactory::createLayerInstance(const String &_type, LayerParams& params)
{
String type = _type.toLowerCase();
Impl::const_iterator it = LayerRegister::impl->find(type);
Impl::const_iterator it = LayerFactory::impl->find(type);
if (it != impl->end())
{
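Because registerLayer() and createLayerInstance() both normalize the type with toLowerCase(), factory lookups are effectively case-insensitive. A small sketch (the Reshape type is the one exercised by the test below; its real parameters are omitted here):

LayerParams params;
// "Reshape" and "RESHAPE" resolve to the same registered constructor,
// since the factory stores and looks up type names in lower case.
Ptr<Layer> a = LayerFactory::createLayerInstance("Reshape", params);
Ptr<Layer> b = LayerFactory::createLayerInstance("RESHAPE", params);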

@@ -147,7 +147,7 @@ TEST(Layer_Test_Reshape, squeeze)
std::vector<Blob*> inpVec(1, &inp);
std::vector<Blob> outVec;
Ptr<Layer> rl = LayerRegister::createLayerInstance("Reshape", params);
Ptr<Layer> rl = LayerFactory::createLayerInstance("Reshape", params);
rl->allocate(inpVec, outVec);
rl->forward(inpVec, outVec);
