Dnn API changes (removed NetConfiguration, added LayerRegister, improved Blob).

pull/265/head
Vitaliy Lyudvichenko 10 years ago
parent 24a9effd59
commit 1fd9304965
Changed files:
  1. modules/dnn/include/opencv2/dnn.hpp (95 changed lines)
  2. modules/dnn/include/opencv2/dnn/dnn.inl.hpp (41 changed lines)
  3. modules/dnn/src/caffe_importer.cpp (7 changed lines)
  4. modules/dnn/src/dnn.cpp (23 changed lines)
  5. modules/dnn/test/test_caffe_importer.cpp (16 changed lines)

modules/dnn/include/opencv2/dnn.hpp

@@ -59,11 +59,11 @@ namespace dnn
     typedef Layer* (*Constuctor)();

-    static void registerLayer(const String &type, Constuctor constructor);
-    static void unregisterLayer(const String &type);
-    static Ptr<Layer> createLayerInstance(const String &type);
+    CV_EXPORTS static void registerLayer(const String &type, Constuctor constructor);
+    CV_EXPORTS static void unregisterLayer(const String &type);
+    CV_EXPORTS static Ptr<Layer> createLayerInstance(const String &type);

 private:
     LayerRegister();
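For orientation, a minimal sketch (not part of the commit) of how a custom layer might be plugged into the LayerRegister factory declared above. MyReluLayer and createMyReluLayer are hypothetical names; the sketch assumes the Layer interface shown in the next hunk and that the cv and cv::dnn namespaces are available.

    // hypothetical_layer.cpp -- illustration only, assumes <opencv2/dnn.hpp> from this commit
    #include <opencv2/dnn.hpp>
    using namespace cv;
    using namespace cv::dnn;

    class MyReluLayer : public Layer
    {
    public:
        void setUp(LayerParams &params) {}                                      // called once, like a constructor
        void forward(std::vector<Blob> &inputs, std::vector<Blob> &outputs) {}  // per-call computation would go here
    };

    // free function matching the Constuctor typedef: Layer* (*)()
    static Layer* createMyReluLayer() { return new MyReluLayer(); }

    void registerCustomLayers()
    {
        LayerRegister::registerLayer("MyRelu", createMyReluLayer);
        Ptr<Layer> layer = LayerRegister::createLayerInstance("MyRelu");
        (void)layer;
    }

Since LayerRegister's own constructor is private, the class is used purely through these static registration methods.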
@@ -88,80 +88,71 @@ namespace dnn
     //setUp calls once (think that it's constructor)
     virtual void setUp(LayerParams &params);

-    //after setUp the following two function must be able to return values
-    virtual int getNumInputs();
-    virtual int getNumOutputs();

     //maybe useless function
     //shape of output blobs must be adjusted with respect to shape of input blobs
     virtual void adjustShape(const std::vector<Blob> &inputs, std::vector<Blob> &outputs);

     virtual void forward(std::vector<Blob> &inputs, std::vector<Blob> &outputs);

+    virtual int getNumInputs();
+    virtual int getNumOutputs();
+
+    //each input/output can be labeled to easily identify their using "layer_name.output_name"
+    virtual String getInputName(int inputNum);
+    virtual String getOutputName(int outputNum);
 };

+//containers for String and int
+typedef DictValue LayerId;
+typedef DictValue BlobId;
+
-//TODO: divide NetConfiguration interface and implementation, hide internal data
-//TODO: maybe eliminate all int ids and replace them by string names
-//Proxy class for different formats
-//Each format importer must populate it
-CV_EXPORTS class NetConfiguration
+CV_EXPORTS class Net
 {
 public:
-    CV_EXPORTS static Ptr<NetConfiguration> create();
+    CV_EXPORTS Net();
+    CV_EXPORTS ~Net();

-    int addLayer(const String &name, const String &type);
-    void deleteLayer(int layerId);
-    void setLayerParams(int layerId, LayerParams &params);
+    CV_EXPORTS int addLayer(const String &name, const String &type, LayerParams &params = LayerParams());
+    CV_EXPORTS void deleteLayer(LayerId layer);

     //each output of each layer can be labeled by unique string label (as in Caffe)
-    //if label not specified then %layer_name%:c_%N% will be used
-    void setLayerOutputLabels(int layerId, const std::vector<String> &outputNames);
+    //if label not specified then %layer_name%.%layer_output_id% can be used
+    void setOutputNames(LayerId layer, const std::vector<String> &outputNames);

-    //version #1
-    void addConnection(int fromLayer, int fromLayerOutput, int toLayer, int toLayerInput);
-
-    //or maybe version #2
-    inline int getBlobId(int layerId, int inputOutputNumber)
-    {
-        return (layerId << 16) + inputOutputNumber;
-    }
-
-    void addConnection(int outputId, int inputId);
-    void addConnections(const std::vector<int> &outputIds, const std::vector<int> &inputIds);
-
-private:
-    int lastLayerId;
-    std::map< int, Ptr<Layer> > layers;
-    std::map< int, std::vector<String> > layerOutputLabels;
-};
-
-CV_EXPORTS class Net
-{
-public:
-    CV_EXPORTS static Ptr<Net> create(Ptr<NetConfiguration> config);
-    virtual ~Net() = 0;
-
-    virtual int getBlobId(int layerId, int outputId) = 0;
-    virtual int getBlobId(const String &blobName) = 0;
-
-    virtual void forward(std::vector< int, Ptr<Blob> > &inputBlobs, std::vector<int, Ptr<Blob> > &outputBlobs) = 0;
-    virtual void forward(int layer, std::vector<Ptr<Blob> > &layerOutputs) = 0;
+    CV_EXPORTS void connect(BlobId input, BlobId output);
+    CV_EXPORTS void connect(const std::vector<BlobId> &outputs, const std::vector<BlobId> &inputs);
+    CV_EXPORTS void connect(const std::vector<BlobId> &outputs, LayerId layer);
+
+    int getOutputId(LayerId layer, int outputNum);
+    int getInputId(LayerId layer, int inputNum);
+    int getLayerId(LayerId layer);
+
+    void forward();
+    void forward(LayerId toLayer);
+    void forward(LayerId startLayer, LayerId toLayer);
+    void forward(const std::vector<LayerId> &startLayers, const std::vector<LayerId> &toLayers);
+
+    //[Wished feature] Optimized smart forward(). Makes forward only for layers which wasn't changed after previous forward().
+    void forwardOpt(LayerId toLayer);
+    void forwardOpt(const std::vector<LayerId> &toLayers);
+
+    void setBlob(BlobId outputName, const Blob &blob);
+    Blob getBlob(BlobId outputName);
+
+    void setParam(LayerId layer, int numParam, const Blob &blob);
+    void getParam(LayerId layer, int numParam);
+
+private:
+    struct Impl;
+    Ptr<Impl> impl;
 };

 CV_EXPORTS class Importer
 {
 public:
-    virtual void populateNetConfiguration(Ptr<NetConfiguration> config) = 0;
+    virtual void populateNet(Net net) = 0;
     virtual ~Importer();
 };
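For context, a hedged sketch (not from the commit) of how the reworked Net and Importer interfaces might be used together. The blob labels "data" and "prob" are invented Caffe-style names, and the exact string forms accepted by BlobId (a DictValue) are an assumption based on the labeling comment above.

    // illustration only -- assumes the declarations from the hunk above
    #include <opencv2/dnn.hpp>
    using namespace cv;
    using namespace cv::dnn;

    void runImportedNet(const String &prototxt, const String &caffemodel, const Blob &inputImage)
    {
        Ptr<Importer> importer = createCaffeImporter(prototxt, caffemodel);

        Net net;
        importer->populateNet(net);       // the importer now fills a Net directly, not a NetConfiguration proxy

        net.setBlob("data", inputImage);  // assumed: "data" is the label of the network's input blob
        net.forward();                    // run every layer
        Blob prob = net.getBlob("prob");  // assumed: "prob" is the label of the output blob
        (void)prob;
    }

Note that populateNet() takes Net by value; since the only data member is Ptr<Impl> impl, copies presumably share one implementation, so the caller's net is still populated.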

modules/dnn/include/opencv2/dnn/dnn.inl.hpp

@@ -31,6 +31,47 @@ namespace dnn
     CV_Assert(false);
     return Mat();
 }
+
+inline
+int Blob::cols() const
+{
+    CV_DbgAssert(m.dims > 2);
+    return m.size[m.dims-1];
+}
+
+inline
+int Blob::rows() const
+{
+    CV_DbgAssert(m.dims > 2);
+    return m.size[m.dims-2];
+}
+
+inline
+Size Blob::size() const
+{
+    return Size(cols(), rows());
+}
+
+inline
+int Blob::channels() const
+{
+    CV_DbgAssert(m.dims >= 3);
+    return m.size[m.dims-3];
+}
+
+inline
+int Blob::num() const
+{
+    CV_DbgAssert(m.dims == 4);
+    return m.size[0];
+}
+
+inline
+Vec4i Blob::shape() const
+{
+    CV_DbgAssert(m.dims == 4);
+    return Vec4i(m.size.p);
+}
 }
 }
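The new inline accessors fix the dimension convention for blobs: for a 4-D blob the underlying Mat dimensions are ordered num, channels, rows, cols, with num() reading m.size[0] and cols() reading the innermost dimension. A small sketch of that convention (not part of the commit), assuming a default-constructible Blob, the Blob::create() overload from dnn.cpp, and the cv and cv::dnn namespaces in scope:

    // illustration only: accessor convention for a num x channels x rows x cols blob
    int sz[] = { 2, 3, 32, 32 };
    Blob blob;
    blob.create(4, sz, CV_32F);

    CV_Assert(blob.num() == 2);                      // m.size[0]
    CV_Assert(blob.channels() == 3);                 // m.size[dims-3]
    CV_Assert(blob.rows() == 32);                    // m.size[dims-2]
    CV_Assert(blob.cols() == 32);                    // m.size[dims-1]
    CV_Assert(blob.size() == Size(32, 32));          // (cols, rows)
    CV_Assert(blob.shape() == Vec4i(2, 3, 32, 32));  // only valid when dims == 4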

modules/dnn/src/caffe_importer.cpp

@@ -154,11 +154,8 @@ namespace
         CV_Error(cv::Error::StsAssert, "Unknown shape of input blob");
     }

-    size_t declaredBlobSize = 1;
-    for (int i = 0; i < shape.size(); i++)
-        declaredBlobSize *= shape[i];
-    CV_Assert(declaredBlobSize == protoBlob.data_size());
     dstBlob.create(shape.size(), shape, CV_32F);
+    CV_Assert(protoBlob.data_size() == dstBlob.getMatRef().total());

     CV_DbgAssert(protoBlob.GetDescriptor()->FindFieldByLowercaseName("data")->cpp_type() == FieldDescriptor::CPPTYPE_FLOAT);
     float *dstData = dstBlob.getMatRef().ptr<float>();
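The removed declaredBlobSize loop became redundant: once dstBlob.create() has run, cv::Mat::total() already equals the product of all entries in shape, so a single assert against protoBlob.data_size() checks the same condition. A tiny illustration of that identity (not part of the commit):

    // cv::Mat::total() is the product of the size array
    int dims[] = { 64, 3, 5, 5 };
    cv::Mat m(4, dims, CV_32F);
    CV_Assert(m.total() == (size_t)64 * 3 * 5 * 5);  // 4800 elements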
@@ -167,7 +164,7 @@ namespace
         dstData[i] = protoBlob.data(i);
     }

-    void populateNetConfiguration(Ptr<NetConfiguration> config)
+    void populateNet(Net dstNet)
     {
         int layersSize = net.layer_size();

modules/dnn/src/dnn.cpp

@@ -61,21 +61,38 @@ void Blob::create(int ndims, const int *sizes, int type /*= CV_32F*/)
     m.create(shape.channels, &shape[0], type);
 }

-Net::~Net()
-{
-}
+struct Net::Impl
+{
+};
+
+Net::Net() : impl(new Net::Impl)
+{
+}
+
+Net::~Net()
+{
+}

 Importer::~Importer()
 {
 }

+#include <sstream>
+
+template<typename T>
+String toString(const T &v)
+{
+    std::stringstream ss;
+    ss << v;
+    return ss.str();
+}
+
-Ptr<NetConfiguration> NetConfiguration::create()
+cv::String Layer::getInputName(int inputNum)
 {
-    return Ptr<NetConfiguration>(new NetConfiguration());
+    return "input" + toString(inputNum);
 }
 }
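Two implementation details land in dnn.cpp: Net now hides its state behind an opaque Net::Impl (the Pimpl idiom), and a file-local toString() helper backs the new default Layer::getInputName(). A short illustration of the naming it produces (not part of the commit, and only meaningful inside dnn.cpp where toString() is visible):

    // illustration only: stringstream-based formatting and the default input labels
    CV_Assert(toString(7) == "7");
    CV_Assert(toString(2.5) == "2.5");
    // with the definition above, Layer::getInputName(0) returns "input0",
    // getInputName(1) returns "input1", and so on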

modules/dnn/test/test_caffe_importer.cpp

@@ -22,15 +22,15 @@ static std::string getTestFile(const char *filename)
 TEST(ReadCaffePrototxt_gtsrb, Accuracy)
 {
     Ptr<Importer> importer = createCaffeImporter(getTestFile("gtsrb.prototxt"), getTestFile("gtsrb_iter_36000.caffemodel") );
-    Ptr<NetConfiguration> config = NetConfiguration::create();
-    importer->populateNetConfiguration(config);
+    Net net;
+    importer->populateNet(net);
 }

-//TEST(ReadCaffePrototxt_GoogleNet, Accuracy)
-//{
-//    Ptr<Importer> importer = createCaffeImporter(getOpenCVExtraDir() + "/dnn/googlenet_deploy.prototxt", "");
-//    Ptr<NetConfiguration> config = NetConfiguration::create();
-//    importer->populateNetConfiguration(config);
-//}
+TEST(ReadCaffePrototxt_GoogleNet, Accuracy)
+{
+    Ptr<Importer> importer = createCaffeImporter(getOpenCVExtraDir() + "/dnn/googlenet_deploy.prototxt", "");
+    Net net;
+    importer->populateNet(net);
+}
 }