Added libprotobuf to CMake. Added some Caffe files.

Branch: pull/265/head
Author: Vitaliy Lyudvichenko (10 years ago)
Parent: 2f6d743342
Commit: cfb9cfab9b
Changed files:
  1. modules/dnn/CMakeLists.txt (16 lines changed)
  2. modules/dnn/include/opencv2/dnn.hpp (26 lines changed)
  3. modules/dnn/src/caffe.pb.cpp (19563 lines changed)
  4. modules/dnn/src/caffe.pb.h (14544 lines changed)
  5. modules/dnn/src/dnn.cpp (80 lines changed)

modules/dnn/CMakeLists.txt:

@@ -1,5 +1,19 @@
-set(the_description "Deep neural netork module. Allow load models and make forward pass")
+set(the_description "Deep neural network module. Allow load models and make forward pass")
 set(OPENCV_MODULE_IS_PART_OF_WORLD OFF)
 ocv_define_module(dnn opencv_imgproc opencv_core opencv_highgui WRAP python)
+option(WITH_PROTOBUF "Build with libprotobuf" ON)
+if(NOT WITH_PROTOBUF)
+    message(ERROR "libprotobuf required for dnn module")
+else()
+    message(STATUS "Find protobuf")
+    find_package( Protobuf REQUIRED )
+    target_include_directories(opencv_dnn PUBLIC ${PROTOBUF_INCLUDE_DIR})
+    target_link_libraries(opencv_dnn ${PROTOBUF_LIBRARIES})
+    message(STATUS "Protobuf:" ${PROTOBUF_INCLUDE_DIR})
+    message(STATUS "Protobuf:" ${PROTOBUF_LIBRARIES})
+endif()
 target_link_libraries(opencv_dnn)

modules/dnn/include/opencv2/dnn.hpp:

@@ -23,7 +23,7 @@ namespace dnn
     Blob(const Mat &in);
     Blob(UMat &in);
     Blob(const UMat &in);
     int width() const;    //cols
     int height() const;   //rows
     int channels() const;
@@ -37,7 +37,7 @@
     struct Value
     {
         int type;
         union
         {
             int i;
@@ -76,7 +76,7 @@
     //TODO: this field must be declared as public if we want support possibility to change these params in runtime
     std::vector<Blob> learnedParams;
     virtual ~Layer();
     //type of Layer
@@ -96,7 +96,7 @@
     virtual void forward(std::vector<Blob> &inputs, std::vector<Blob> &outputs);
 private:
     static std::map<String, Constuctor> registeredLayers;
 };
@@ -113,7 +113,7 @@
     int addLayer(const String &name, const String &type);
     void deleteLayer(int layerId);
     void setLayerParams(int layerId, LayerParams &params);
     //each output of each layer can be labeled by unique string label (as in Caffe)
@@ -126,7 +126,7 @@
     //or maybe version #2
     inline int getBlobId(int layerId, int inputOutputNumber)
     {
-        return layerId << 16 + inputOutputNumber;
+        return (layerId << 16) + inputOutputNumber;
     }
     void addConnection(int outputId, int inputId);
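Note on the getBlobId hunk above: the old expression "layerId << 16 + inputOutputNumber" parses as a shift by (16 + inputOutputNumber), because + binds tighter than <<; the added parentheses give the intended packing. A minimal standalone sketch of that scheme (helper names are made up, not part of the module):

    #include <cassert>

    // Pack a (layer id, slot) pair into one int: the high bits hold the layer id,
    // the low 16 bits hold the input/output slot, as in getBlobId() above.
    inline int packBlobId(int layerId, int slot) { return (layerId << 16) + slot; }
    inline int blobLayerId(int blobId)           { return blobId >> 16; }
    inline int blobSlot(int blobId)              { return blobId & 0xFFFF; }

    int main()
    {
        int id = packBlobId(3, 2);
        assert(blobLayerId(id) == 3 && blobSlot(id) == 2);
        // The unparenthesized form would have computed 3 << (16 + 2) instead.
        return 0;
    }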
@@ -136,8 +136,8 @@
 private:
     int lastLayerId;
-    std::map<int, Ptr<Layer>> layers;
-    std::map<int, std::vector<String>> layerOutputLabels;
+    std::map< int, Ptr<Layer> > layers;
+    std::map< int, std::vector<String> > layerOutputLabels;
 };
@@ -147,13 +147,15 @@
     static Ptr<Net> create(Ptr<NetConfiguration> config);
+    virtual ~Net();
     virtual int getBlobId(int layerId, int outputId);
     virtual int getBlobId(const String &blobName);
-    virtual void forward(std::vector<int, Ptr<Blob>> &inputBlobs, std::vector<int, Ptr<Blob>> &outputBlobs);
-    virtual void forward(int layer, std::vector<Ptr<Blob>> &layerOutputs);
+    virtual void forward(std::vector< int, Ptr<Blob> > &inputBlobs, std::vector<int, Ptr<Blob> > &outputBlobs);
+    virtual void forward(int layer, std::vector<Ptr<Blob> > &layerOutputs);
 };
 class Importer
@@ -165,7 +167,9 @@
     virtual ~Importer();
 };
+Ptr<Importer> createCaffeImporter(const String &prototxt, const String &caffeModel);
 }
 }
 #endif
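Taken together, the header now sketches an import-and-run flow: a Caffe Importer fills a NetConfiguration, and a Net is created from it. A hypothetical caller written against only the declarations above (the NetConfiguration::create() factory and the file names are assumptions; this commit does not yet implement forward()):

    #include <opencv2/dnn.hpp>

    using namespace cv;
    using namespace cv::dnn;

    void importCaffeNet()
    {
        // File names are placeholders.
        Ptr<Importer> importer = createCaffeImporter("net.prototxt", "net.caffemodel");

        Ptr<NetConfiguration> config = NetConfiguration::create(); // assumed factory
        importer->populateNetConfiguration(config);

        Ptr<Net> net = Net::create(config);
        // Blob ids for forward() would come from NetConfiguration::getBlobId(),
        // which packs the layer id and the output slot into one int.
    }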

modules/dnn/src/caffe.pb.cpp: file diff suppressed because it is too large.

modules/dnn/src/caffe.pb.h: file diff suppressed because it is too large.

modules/dnn/src/dnn.cpp:

@@ -1,28 +1,78 @@
 #include "opencv2/dnn.hpp"
+#include <iostream>
+#include <fstream>
+#include <google/protobuf/message.h>
+#include <google/protobuf/text_format.h>
+#include <google/protobuf/io/zero_copy_stream_impl.h>
+#include "caffe.pb.h"
+
+namespace
+{
+}
+
 namespace cv
 {
 namespace dnn
 {
 Blob::Blob(Mat &in) : _InputOutputArray(in)
 {
 }
 Blob::Blob(const Mat &in) : _InputOutputArray(in)
 {
 }
 Blob::Blob(UMat &in) : _InputOutputArray(in)
 {
 }
 Blob::Blob(const UMat &in) : _InputOutputArray(in)
 {
 }
+class CaffeImporter : public Importer
+{
+public:
+    CaffeImporter(const char *pototxt, const char *caffeModel)
+    {
+        std::ifstream proto_ifs(pototxt, std::ifstream::in);
+        std::ifstream model_ifs(caffeModel, std::ifstream::in);
+        CV_Assert(proto_ifs.is_open() && model_ifs.is_open());
+
+        google::protobuf::io::IstreamInputStream proto_zcs(&proto_ifs);
+        google::protobuf::io::IstreamInputStream model_zcs(&model_ifs);
+
+        //google::protobuf::Message msg_arch;
+        //google::protobuf::Message msg_weights;
+        caffe::NetParameter msg_arch;
+
+        CV_Assert( google::protobuf::TextFormat::Parse(&proto_zcs, &msg_arch) );
+        //CV_Assert( msg_weights.ParseFromZeroCopyStream(model_zcs) );
+
+        const google::protobuf::Descriptor *desc_arch = msg_arch.GetDescriptor();
+        CV_Assert(desc_arch);
+    }
+
+    void populateNetConfiguration(Ptr<NetConfiguration> config)
+    {
+    }
+};
+
+Ptr<Importer> createCaffeImporter(const String &prototxt, const String &caffeModel)
+{
+    return Ptr<Importer>(new CaffeImporter(prototxt.c_str(), caffeModel.c_str()));
+}
 }
 }
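The binary weights file is opened above, but its parsing is still commented out in this commit. One way the generated caffe::NetParameter type could be used to finish that step (a guess at the intended direction, not the committed code; the helper name is made up):

    #include <fstream>
    #include "caffe.pb.h"

    // Parse a binary .caffemodel into the generated NetParameter message.
    // Unlike the prototxt, the weights file is protobuf wire format, so it is
    // opened in binary mode and parsed with ParseFromIstream rather than TextFormat.
    static bool readBinaryNetParameter(const char *path, caffe::NetParameter &param)
    {
        std::ifstream ifs(path, std::ifstream::in | std::ifstream::binary);
        if (!ifs.is_open())
            return false;
        return param.ParseFromIstream(&ifs);
    }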