parent
160d86440c
commit
06f949a590
26 changed files with 454 additions and 290 deletions
#ifndef __OPENCV_DNN_LAYER_HPP__
#define __OPENCV_DNN_LAYER_HPP__
#include <opencv2/dnn.hpp>

namespace cv
{
namespace dnn
{

//Layer factory allows to create instances of registered layers.
class CV_EXPORTS LayerRegister
{
public:

    //NOTE(review): "Constuctor" is a typo for "Constructor", but the name is part
    //of the public interface (used by the macros and registerer below, and by
    //external clients) — kept as-is for compatibility.
    typedef Ptr<Layer>(*Constuctor)(LayerParams &params);

    //Associates a constructor function with a layer type name.
    static void registerLayer(const String &type, Constuctor constructor);

    //Removes the constructor registered under the given layer type name.
    static void unregisterLayer(const String &type);

    //Creates an instance of the layer registered under @type,
    //forwarding @params to the registered constructor.
    static Ptr<Layer> createLayerInstance(const String &type, LayerParams& params);

private:
    //Static-only class: instantiation is forbidden.
    LayerRegister();

    //Hidden registry storage (pimpl); defined in the implementation file.
    struct Impl;
    static Ptr<Impl> impl;
};

//Generic constructor adapter: builds a LayerClass from LayerParams.
//Matches LayerRegister::Constuctor so any layer class with a
//LayerParams constructor can be registered directly.
template<typename LayerClass>
Ptr<Layer> _layerDynamicRegisterer(LayerParams &params)
{
    return Ptr<Layer>(new LayerClass(params));
}

//registers a layer constructor function at runtime (expands to a statement)
#define REG_RUNTIME_LAYER_FUNC(type, constuctorFunc) \
    LayerRegister::registerLayer(#type, constuctorFunc);

//registers a layer class at runtime via the generic adapter above
#define REG_RUNTIME_LAYER_CLASS(type, class) \
    LayerRegister::registerLayer(#type, _layerDynamicRegisterer<class>);

//allows automatically register created layer on module load time
struct _LayerStaticRegisterer
{
    String type; //remembered so the destructor can unregister the same name

    _LayerStaticRegisterer(const String &type, LayerRegister::Constuctor constuctor)
    {
        this->type = type;
        LayerRegister::registerLayer(type, constuctor);
    }

    ~_LayerStaticRegisterer()
    {
        LayerRegister::unregisterLayer(type);
    }
};

//registers layer constructor on module load time
#define REG_STATIC_LAYER_FUNC(type, constuctorFunc) \
    static _LayerStaticRegisterer __LayerStaticRegisterer_##type(#type, constuctorFunc);

//registers layer class on module load time
#define REG_STATIC_LAYER_CLASS(type, class) \
Ptr<Layer> __LayerStaticRegisterer_func_##type(LayerParams &params) \
    { return Ptr<Layer>(new class(params)); } \
static _LayerStaticRegisterer __LayerStaticRegisterer_##type(#type, __LayerStaticRegisterer_func_##type);

}
}
#endif
#include "precomp.hpp"

#include "layers/concat_layer.hpp"
#include "layers/convolution_layer.hpp"
#include "layers/blank_layer.hpp"
#include "layers/elementwise_layers.hpp"
#include "layers/fully_connected_layer.hpp"
#include "layers/lrn_layer.hpp"
#include "layers/mvn_layer.hpp"
#include "layers/pooling_layer.hpp"
#include "layers/reshape_layer.hpp"
#include "layers/slice_layer.hpp"
#include "layers/softmax_layer.hpp"
#include "layers/split_layer.hpp"

namespace cv
{
namespace dnn
{

//Runs initModule() from a static object's constructor so the built-in
//layers are registered when the module is loaded.
struct AutoInitializer
{
    bool status; //set to true once registration has completed

    AutoInitializer() : status(false)
    {
        cv::dnn::initModule();
    }
};

static AutoInitializer init;

//Registers all built-in layer types with LayerRegister.
//Idempotent: subsequent calls return immediately once init.status is set.
void initModule()
{
    if (init.status)
        return;

    REG_RUNTIME_LAYER_CLASS(Slice, SliceLayer)
    REG_RUNTIME_LAYER_CLASS(Softmax, SoftMaxLayer)
    REG_RUNTIME_LAYER_CLASS(Split, SplitLayer)
    REG_RUNTIME_LAYER_CLASS(Reshape, ReshapeLayer)
    REG_STATIC_LAYER_FUNC(Flatten, createFlattenLayer)
    REG_RUNTIME_LAYER_CLASS(Pooling, PoolingLayer)
    REG_RUNTIME_LAYER_CLASS(MVN, MVNLayer)
    REG_RUNTIME_LAYER_CLASS(LRN, LRNLayer)
    REG_RUNTIME_LAYER_CLASS(InnerProduct, FullyConnectedLayer)

    //element-wise activation layers share one template, parameterized by functor
    REG_RUNTIME_LAYER_CLASS(ReLU, ElementWiseLayer<ReLUFunctor>)
    REG_RUNTIME_LAYER_CLASS(TanH, ElementWiseLayer<TanHFunctor>)
    REG_RUNTIME_LAYER_CLASS(BNLL, ElementWiseLayer<BNLLFunctor>)
    REG_RUNTIME_LAYER_CLASS(Power, ElementWiseLayer<PowerFunctor>)
    REG_RUNTIME_LAYER_CLASS(AbsVal, ElementWiseLayer<AbsValFunctor>)
    REG_RUNTIME_LAYER_CLASS(Sigmoid, ElementWiseLayer<SigmoidFunctor>)
    REG_RUNTIME_LAYER_CLASS(Dropout, BlankLayer) //Dropout is identity at inference time

    REG_RUNTIME_LAYER_CLASS(Convolution, ConvolutionLayer)
    REG_RUNTIME_LAYER_CLASS(Deconvolution, DeConvolutionLayer)
    REG_RUNTIME_LAYER_CLASS(Concat, ConcatLayer)

    init.status = true;
}

}
}
#ifndef __OPENCV_DNN_LAYERS_CONCAT_LAYER_HPP__
#define __OPENCV_DNN_LAYERS_CONCAT_LAYER_HPP__
#include "../precomp.hpp"

namespace cv
{
namespace dnn
{
//Concatenates several input blobs into one output blob along @axis.
class ConcatLayer : public Layer
{
    int axis; //axis along which inputs are concatenated (read from params)

public:
    ConcatLayer(LayerParams& params);
    void allocate(const std::vector<Blob*> &inputs, std::vector<Blob> &outputs);
    void forward(std::vector<Blob*> &inputs, std::vector<Blob> &outputs);
};
}
}
#endif
#ifndef __OPENCV_DNN_LAYERS_CONVOLUTION_LAYER_HPP__
#define __OPENCV_DNN_LAYERS_CONVOLUTION_LAYER_HPP__
#include "../precomp.hpp"

namespace cv
{
namespace dnn
{
    //TODO: simultaneously convolution and bias addition for cache optimization

    //Standard convolution layer; also serves as the base for deconvolution,
    //which overrides the shape computation and the forward pass.
    class ConvolutionLayer : public Layer
    {
    protected:
        bool bias;               //whether a bias term is added
        int numOutput, group;    //output channels and group count (grouped conv)
        int padH, padW;
        int kerH, kerW;
        int strideH, strideW;

        int inpH, inpW, inpCn;   //input blob geometry
        int outH, outW, outCn;   //output blob geometry
        int topH, topW, topCn; //switched between inp/out on deconv/conv
        int inpGroupCn, outGroupCn; //channels per group on each side
        int ksize;

        Mat colMat, biasOnesMat; //im2col buffer and all-ones vector for bias GEMM

        //true when kernel is 1x1 with unit stride and no padding,
        //so im2col can be skipped
        inline bool is1x1() const;
        //computes inp*/out*/top* fields from the input blob;
        //virtual so DeConvolutionLayer can swap the roles of input and output
        virtual void computeInpOutShape(const Blob &inpBlob);
        void im2col(Blob &inpBlob, int imNum, int cnGroup);

    public:
        ConvolutionLayer() {}
        ConvolutionLayer(LayerParams &params);
        void allocate(const std::vector<Blob*> &inputs, std::vector<Blob> &outputs);
        void forward(std::vector<Blob*> &inputs, std::vector<Blob> &outputs);
    };

    //Transposed ("de-")convolution; reuses ConvolutionLayer's parameters and
    //buffers, replacing im2col-GEMM with GEMM-col2im.
    class DeConvolutionLayer : public ConvolutionLayer
    {
    protected:
        void computeInpOutShape(const Blob &inpBlob);
        void col2im(Mat &dstMat);

    public:
        DeConvolutionLayer(LayerParams &params) : ConvolutionLayer(params) {}
        void forward(std::vector<Blob*> &inputs, std::vector<Blob> &outputs);
    };
}
}
#endif
#ifndef __OPENCV_DNN_LAYERS_FULLY_CONNECTED_LAYER_HPP__
#define __OPENCV_DNN_LAYERS_FULLY_CONNECTED_LAYER_HPP__
#include "../precomp.hpp"

namespace cv
{
namespace dnn
{
    //Inner-product (fully connected) layer.
    class FullyConnectedLayer : public Layer
    {
        bool bias;
        int numOutputs;
        //axis_ presumably holds the raw axis from params and axis the
        //resolved (wrapped) index — TODO confirm against the .cpp
        int axis_, axis;

        int innerSize; //number of elements multiplied per output

        void reshape(const Blob &inp, Blob &out);

    public:
        FullyConnectedLayer(LayerParams &params);
        void allocate(const std::vector<Blob*> &inputs, std::vector<Blob> &outputs);
        void forward(std::vector<Blob*> &inputs, std::vector<Blob> &outputs);
    };
}
}
#endif
#ifndef __OPENCV_DNN_LAYERS_LRN_LAYER_HPP__
#define __OPENCV_DNN_LAYERS_LRN_LAYER_HPP__
#include "../precomp.hpp"

namespace cv
{
namespace dnn
{
    //Local Response Normalization layer (across-channel or within-channel).
    class LRNLayer : public Layer
    {
        enum
        {
            CHANNEL_NRM,
            SPATIAL_NRM,
            SPATIAL_CONTRAST_NRM //cuda-convnet feature
        } type;

        int size;           //size of the normalization window
        double alpha, beta; //scaling and exponent parameters

        Blob bufBlob; //scratch buffer reused between forward passes

        //NOTE(review): "Noramlization" is misspelled, but the definition lives
        //in the .cpp — renaming only here would break the build; kept as-is.
        void channelNoramlization(Blob &src, Blob &dst);
        void spatialNormalization(Blob &src, Blob &dst);

    public:

        LRNLayer(LayerParams &params);
        void allocate(const std::vector<Blob*> &inputs, std::vector<Blob> &outputs);
        void forward(std::vector<Blob*> &inputs, std::vector<Blob> &outputs);
    };
}
}
#endif
#ifndef __OPENCV_DNN_LAYERS_MVN_LAYER_HPP__
#define __OPENCV_DNN_LAYERS_MVN_LAYER_HPP__
#include "../precomp.hpp"

namespace cv
{
namespace dnn
{

//Mean-Variance Normalization layer.
class MVNLayer : public Layer
{
    double eps; //added to variance for numerical stability
    bool acrossChannels, normalizeVariance;

public:

    MVNLayer(LayerParams &params);
    void allocate(const std::vector<Blob*> &inputs, std::vector<Blob> &outputs);
    void forward(std::vector<Blob*> &inputs, std::vector<Blob> &outputs);
};

}
}
#endif
#ifndef __OPENCV_DNN_LAYERS_POOLING_LAYER_HPP__
#define __OPENCV_DNN_LAYERS_POOLING_LAYER_HPP__
#include "../precomp.hpp"

namespace cv
{
namespace dnn
{
    //Spatial pooling layer (max or average; STOCHASTIC is declared but
    //has no handler below — presumably unimplemented, confirm in the .cpp).
    class PoolingLayer : public Layer
    {
        enum
        {
            MAX,
            AVE,
            STOCHASTIC
        };

        int type; //one of the enum values above, read from params
        int padH, padW;
        int strideH, strideW;
        int kernelH, kernelW;

        int inpH, inpW;
        int outH, outW;

        void computeOutputShape(int inpH, int inpW);
        void maxPooling(Blob &input, Blob &output);
        void avePooling(Blob &input, Blob &output);

    public:
        PoolingLayer(LayerParams &params);
        void allocate(const std::vector<Blob*> &inputs, std::vector<Blob> &outputs);
        void forward(std::vector<Blob*> &inputs, std::vector<Blob> &outputs);
    };
}
}
#endif
#ifndef __OPENCV_DNN_LAYERS_RESHAPE_LAYER_HPP__
#define __OPENCV_DNN_LAYERS_RESHAPE_LAYER_HPP__
#include "../precomp.hpp"

namespace cv
{
namespace dnn
{

//Changes blob shape without copying data; all work happens in allocate(),
//hence the empty forward().
class ReshapeLayer : public Layer
{
public:
    ReshapeLayer(LayerParams &params);

    void allocate(const std::vector<Blob*> &inputs, std::vector<Blob> &outputs);

    //no-op: outputs share the inputs' data, only the shape differs
    void forward(std::vector<Blob*>&, std::vector<Blob>&) {}

protected:
    BlobShape shapeDesc;              //target shape specification from params
    int inAxis, inNumAxes, autoAxisIdx; //autoAxisIdx: axis whose size is inferred

    void computeOutputShape(int startAxis, int endAxis, BlobShape &inpShape, BlobShape &outShape);
};

//Factory for the Flatten layer, implemented on top of ReshapeLayer.
Ptr<Layer> createFlattenLayer(LayerParams&);

}
}
#endif
#ifndef __OPENCV_DNN_LAYERS_SLICE_LAYER_HPP__
#define __OPENCV_DNN_LAYERS_SLICE_LAYER_HPP__
#include "../precomp.hpp"

namespace cv
{
namespace dnn
{

//Splits one input blob into several output blobs along inAxis,
//cutting at the given slice points.
class SliceLayer : public Layer
{
public:
    SliceLayer(LayerParams &params);

    void allocate(const std::vector<Blob*> &inputs, std::vector<Blob> &outputs);

    void forward(std::vector<Blob*> &inputs, std::vector<Blob> &outputs);

private:
    int inAxis;                   //axis along which the input is sliced
    std::vector<int> slicePoints; //cut positions along inAxis, from params
};

}
}
#endif
#ifndef __OPENCV_DNN_LAYERS_SOFTMAX_LAYER_HPP__
#define __OPENCV_DNN_LAYERS_SOFTMAX_LAYER_HPP__
#include "../precomp.hpp"

namespace cv
{
namespace dnn
{
    //Softmax layer, applied along the configured axis.
    class SoftMaxLayer : public Layer
    {
        //axis_ presumably holds the raw axis from params and axis the
        //resolved (wrapped) index — TODO confirm against the .cpp
        int axis_, axis;
        Blob maxAggregator; //scratch buffer for the max-subtraction step

    public:
        SoftMaxLayer(LayerParams &params);
        void allocate(const std::vector<Blob*> &inputs, std::vector<Blob> &outputs);
        void forward(std::vector<Blob*> &inputs, std::vector<Blob> &outputs);
    };
}
}
#endif
#ifndef __OPENCV_DNN_LAYERS_SPLIT_LAYER_HPP__
#define __OPENCV_DNN_LAYERS_SPLIT_LAYER_HPP__
#include "../precomp.hpp"

namespace cv
{
namespace dnn
{

//Replicates the single input blob into outputsNum output blobs.
class SplitLayer : public Layer
{
public:
    SplitLayer(LayerParams &params);

    void allocate(const std::vector<Blob*> &inputs, std::vector<Blob> &outputs);

    void forward(std::vector<Blob*> &inputs, std::vector<Blob> &outputs);

private:
    int outputsNum; //number of output copies, read from params
};

}
}
#endif
Loading…
Reference in new issue