diff --git a/modules/dnn/include/opencv2/dnn.hpp b/modules/dnn/include/opencv2/dnn.hpp
index 626433b18..eea4fc2bb 100644
--- a/modules/dnn/include/opencv2/dnn.hpp
+++ b/modules/dnn/include/opencv2/dnn.hpp
@@ -8,6 +8,4 @@
 
 #include <opencv2/dnn/dnn.hpp>
 
-#endif /* __OPENCV_DNN_HPP__ */
-
-
+#endif /* __OPENCV_DNN_HPP__ */
\ No newline at end of file
diff --git a/modules/dnn/include/opencv2/dnn/dnn.hpp b/modules/dnn/include/opencv2/dnn/dnn.hpp
index 681da980d..b7074f8eb 100644
--- a/modules/dnn/include/opencv2/dnn/dnn.hpp
+++ b/modules/dnn/include/opencv2/dnn/dnn.hpp
@@ -109,7 +109,7 @@ namespace dnn
         virtual String getInputName(int inputNum);
         virtual String getOutputName(int outputNum);
     };
-    
+
     //containers for String and int
     typedef DictValue LayerId;
     typedef DictValue BlobId;
@@ -124,7 +124,7 @@ namespace dnn
         int addLayer(const String &name, const String &type, LayerParams &params);
         int getLayerId(LayerId layer);
         void deleteLayer(LayerId layer);
-        
+
         //each output of each layer can be labeled by unique string label (as in Caffe)
         //if label not specified then %layer_name%.%layer_output_id% can be used
        void setOutputNames(LayerId layer, const std::vector<String> &outputNames);
@@ -145,7 +145,7 @@ namespace dnn
 
         void setBlob(BlobId outputName, const Blob &blob);
         Blob getBlob(BlobId outputName);
-        
+
         void setParam(LayerId layer, int numParam, const Blob &blob);
         void getParam(LayerId layer, int numParam);
 
diff --git a/modules/dnn/include/opencv2/dnn/dnn.inl.hpp b/modules/dnn/include/opencv2/dnn/dnn.inl.hpp
index a000ba40e..7d477fe17 100644
--- a/modules/dnn/include/opencv2/dnn/dnn.inl.hpp
+++ b/modules/dnn/include/opencv2/dnn/dnn.inl.hpp
@@ -44,13 +44,13 @@ namespace dnn
     {
         return Size(cols(), rows());
     }
-    
+
     inline int Blob::channels() const
     {
         CV_DbgAssert(m.dims >= 3);
         return m.size[m.dims-3];
     }
-    
+
     inline int Blob::num() const
     {
         CV_DbgAssert(m.dims == 4);
diff --git a/modules/dnn/samples/alexnet.cpp b/modules/dnn/samples/alexnet.cpp
index cc5f89f9c..831852c7c 100644
--- a/modules/dnn/samples/alexnet.cpp
+++ b/modules/dnn/samples/alexnet.cpp
@@ -61,7 +61,7 @@ int main(void)
 
     Blob probBlob = net.getBlob("prob");
     ClassProb bc = getMaxClass(probBlob);
-    
+
     initClassesNames();
     std::string className = (bc.first < (int)CLASES_NAMES.size()) ?
                             CLASES_NAMES[bc.first] : "unnamed";
@@ -71,6 +71,6 @@ int main(void)
     std::cout << " \"" + className << "\"";
     std::cout << std::endl;
     std::cout << "Prob: " << bc.second * 100 << "%" << std::endl;
-    
+
     return 0;
 }
\ No newline at end of file
diff --git a/modules/dnn/src/caffe/glog_emulator.hpp b/modules/dnn/src/caffe/glog_emulator.hpp
index 44fbf0585..718d297a2 100644
--- a/modules/dnn/src/caffe/glog_emulator.hpp
+++ b/modules/dnn/src/caffe/glog_emulator.hpp
@@ -27,12 +27,12 @@ class GLogWrapper
 public:
 
-    GLogWrapper(const char *_file, const char *_func, int _line, 
-                const char *_type, 
+    GLogWrapper(const char *_file, const char *_func, int _line,
+                const char *_type,
                 const char *_cond_str = NULL, bool _cond_status = true
                ) :
-        stream(selectStream(_type)), 
-        file(_file), func(_func), line(_line), 
+        stream(selectStream(_type)),
+        file(_file), func(_func), line(_line),
         type(_type), cond_str(_cond_str), cond_staus(_cond_status)
     {}
 
     template<typename T>
diff --git a/modules/dnn/src/caffe_importer.cpp b/modules/dnn/src/caffe_importer.cpp
index b56cc9dcb..bfebd8b94 100644
--- a/modules/dnn/src/caffe_importer.cpp
+++ b/modules/dnn/src/caffe_importer.cpp
@@ -26,7 +26,7 @@ namespace
     {
         caffe::NetParameter net;
         caffe::NetParameter netBinary;
-        
+
     public:
 
         CaffeImporter(const char *pototxt, const char *caffeModel)
@@ -69,7 +69,7 @@ namespace
             const std::string &name = field->name();
 
             std::cout << field->type_name() << " " << name << ":";
-            
+
 
             #define GET_FIRST(Type) (isRepeated ? msgRefl->GetRepeated##Type(msg, field, 0) : msgRefl->Get##Type(msg, field))
             switch (type)
@@ -97,7 +97,7 @@ namespace
                 break;
             }
 
-            std::cout << std::endl; 
+            std::cout << std::endl;
         }
 
         void extractLayerParams(const Message &msg, cv::dnn::LayerParams &params)
@@ -109,7 +109,7 @@ namespace
         {
             const FieldDescriptor *fd = msgDesc->field(fieldId);
 
-            bool hasData = fd->is_required() || 
+            bool hasData = fd->is_required() ||
                 (fd->is_optional() && (msgRefl->HasField(msg, fd) /*|| fd->has_default_value()*/)) ||
                 (fd->is_repeated() && msgRefl->FieldSize(msg, fd) > 0);
 
@@ -119,7 +119,7 @@ namespace
             if (fd->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE)
             {
                 if (fd->is_repeated()) //Extract only first item!
-                    extractLayerParams(msgRefl->GetRepeatedMessage(msg, fd, 0), params); 
+                    extractLayerParams(msgRefl->GetRepeatedMessage(msg, fd, 0), params);
                 else
                     extractLayerParams(msgRefl->GetMessage(msg, fd), params);
             }
@@ -218,7 +218,7 @@ namespace
 
             extractLayerParams(layer, layerParams);
             extractBinaryLayerParms(layer, layerParams);
-            
+
             int id = dstNet.addLayer(name, type, layerParams);
             dstNet.setOutputNames(id, tops);
 
diff --git a/modules/dnn/src/dnn.cpp b/modules/dnn/src/dnn.cpp
index a756372b5..3eacf5458 100644
--- a/modules/dnn/src/dnn.cpp
+++ b/modules/dnn/src/dnn.cpp
@@ -131,7 +131,7 @@ struct LayerOutId
     String name;
 
     LayerOutId() {}
-    LayerOutId(int layerId, int outputId, const String &outputName = String()) 
+    LayerOutId(int layerId, int outputId, const String &outputName = String())
         : lid(layerId), oid(outputId), name(outputName) {}
 };
 
@@ -323,7 +323,7 @@ struct Net::Impl
     void computeNetOutputs()
    {
         netOutputs.clear();
-        
+
         MapIdToLayerData::iterator it;
         for (it = layers.begin(); it != layers.end(); it++)
         {
@@ -346,7 +346,7 @@ struct Net::Impl
         //already allocated
         if (ld.flag)
             return;
-        
+
         //allocate parents
         for (set<int>::iterator i = ld.inputLayersId.begin(); i != ld.inputLayersId.end(); i++)
             allocateLayer(*i);
@@ -461,7 +461,7 @@ int Net::addLayer(const String &name, const String &type, LayerParams &params)
 
 void Net::connect(BlobId input, BlobId output)
 {
-    
+
 }
 
 void Net::setOutputNames(LayerId layer, const std::vector<String> &outputNames)
diff --git a/modules/dnn/src/layers/convolution_layer.cpp b/modules/dnn/src/layers/convolution_layer.cpp
index e62bd22cf..0aa7e2f43 100644
--- a/modules/dnn/src/layers/convolution_layer.cpp
+++ b/modules/dnn/src/layers/convolution_layer.cpp
@@ -86,7 +86,7 @@ namespace dnn
                 biasOnesMat = Mat::ones(1, outH * outW, CV_32F);
             }
         }
-        
+
         template <typename Dtype>
         void im2col_cpu(const Dtype* data_im, const int channels,
             const int height, const int width, const int kernel_h, const int kernel_w,
diff --git a/modules/dnn/src/layers/lrn_layer.cpp b/modules/dnn/src/layers/lrn_layer.cpp
index 692232672..83bc6311f 100644
--- a/modules/dnn/src/layers/lrn_layer.cpp
+++ b/modules/dnn/src/layers/lrn_layer.cpp
@@ -24,7 +24,7 @@ namespace dnn
         void spatialNormalization(Blob &src, Blob &dst);
 
     public:
-        
+
         LRNLayer(LayerParams &params);
         void allocate(const std::vector<Blob*> &inputs, std::vector<Blob> &outputs);
         void forward(std::vector<Blob*> &inputs, std::vector<Blob> &outputs);
@@ -101,7 +101,7 @@ namespace dnn
 
             accum.convertTo(accum, accum.type(), alpha/channels, 1);
             cv::pow(accum, beta, accum);
-        
+
             for (int cn = channels - 1; cn >= 0; cn--)
             {
                 cv::divide(srcBlob.getMat(n, cn), accum, dstBlob.getMat(n, cn));
diff --git a/modules/dnn/src/layers/pooling_layer.cpp b/modules/dnn/src/layers/pooling_layer.cpp
index c4bf4062b..61d5366c9 100644
--- a/modules/dnn/src/layers/pooling_layer.cpp
+++ b/modules/dnn/src/layers/pooling_layer.cpp
@@ -10,7 +10,7 @@ namespace dnn
 {
     class PoolingLayer : public Layer
     {
-        enum 
+        enum
         {
             MAX,
             AVE,
diff --git a/modules/dnn/src/layers/softmax_layer.cpp b/modules/dnn/src/layers/softmax_layer.cpp
index 97ab35881..ef2079c71 100644
--- a/modules/dnn/src/layers/softmax_layer.cpp
+++ b/modules/dnn/src/layers/softmax_layer.cpp
@@ -32,7 +32,7 @@ namespace dnn
     void SoftMaxLayer::allocate(const std::vector<Blob*> &inputs, std::vector<Blob> &outputs)
     {
         CV_Assert(inputs.size() == 1);
-        
+
         Vec4i shape = inputs[0]->shape();
         outputs.resize(1);
         outputs[0].create(shape);
@@ -91,7 +91,7 @@ namespace dnn
             {
                 size_t srcOffset = outerDim * outerStep;
                 size_t bufOffset = outerDim * cnStep;
-                
+
                 //sum exp along axis
                 for (size_t i = 0; i < innerSize; i++)
                     bufPtr[bufOffset + i] = 0.f;