Fixed more warnings. Updated glog_emulator.hpp:

pull/265/head
Vitaliy Lyudvichenko 10 years ago
parent d1179832f7
commit cfdf060ff5
13 changed files:

  1. modules/dnn/CMakeLists.txt (3 changes)
  2. modules/dnn/README.md (2 changes)
  3. modules/dnn/include/opencv2/dnn.hpp (9 changes)
  4. modules/dnn/samples/alexnet.cpp (1 change)
  5. modules/dnn/src/caffe/common.hpp (11 changes)
  6. modules/dnn/src/caffe/glog_emulator.hpp (35 changes)
  7. modules/dnn/src/caffe/util/io.cpp (9 changes)
  8. modules/dnn/src/caffe/util/io.hpp (2 changes)
  9. modules/dnn/src/caffe/util/upgrade_proto.cpp (4 changes)
  10. modules/dnn/src/caffe/util/upgrade_proto.hpp (2 changes)
  11. modules/dnn/src/dnn.cpp (10 changes)
  12. modules/dnn/src/layers/convolution_layer.cpp (4 changes)
  13. modules/dnn/src/layers/fully_connected_layer.cpp (6 changes)

modules/dnn/CMakeLists.txt

@@ -14,6 +14,7 @@ else()
 message(STATUS "PROTOBUF not found. Caffe import function will be disabled.")
 set(HAVE_PROTOBUF OFF)
 set(PROTOBUF_LIBRARIES "")
+add_definitions(-DHAVE_PROTOBUF=0)
 endif()
 endmacro(_dnn_find_protobuf)
@@ -24,7 +25,7 @@ set(OPENCV_MODULE_IS_PART_OF_WORLD OFF)
 _dnn_find_protobuf()
 ocv_add_module(dnn opencv_imgproc opencv_core opencv_highgui WRAP python matlab)
-ocv_warnings_disable(CMAKE_CXX_FLAGS -Wno-shadow -Wno-parentheses)
+ocv_warnings_disable(CMAKE_CXX_FLAGS -Wno-shadow -Wno-parentheses -Wmaybe-uninitialized -Wsign-promo -Wmissing-declarations -Wmissing-prototypes)
 ocv_glob_module_sources(${PROTO_SRCS} ${PROTO_HDRS})
 ocv_source_group("Src\\protobuf" FILES ${PROTO_SRCS} ${PROTO_HDRS})
 ocv_module_include_directories(include src/caffe ${PROTOBUF_INCLUDE_DIR})
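Note (not part of the commit): the new add_definitions(-DHAVE_PROTOBUF=0) line is what lets the source files below switch their guards from #ifdef HAVE_PROTOBUF to #if HAVE_PROTOBUF. A minimal sketch of the difference, assuming the build defines HAVE_PROTOBUF to a non-zero value elsewhere when protobuf is found:

    // Hypothetical illustration, not taken from the patch.
    // With -DHAVE_PROTOBUF=0 the macro is always defined, so an #ifdef guard
    // would still compile the protobuf-dependent code; #if tests the value instead.
    #if HAVE_PROTOBUF
    #include <google/protobuf/message.h>
    void importCaffeModel();   // hypothetical protobuf-dependent declaration
    #else
    // protobuf not found: the Caffe import path is compiled out entirely
    #endif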

modules/dnn/README.md

@@ -0,0 +1,2 @@
+Deep Neural Network module
+==========================

modules/dnn/include/opencv2/dnn.hpp

@@ -5,7 +5,14 @@
 // We are free to change headers layout in dnn subfolder, so please include
 // this header for future compartibility
+/** @defgroup dnn Deep Neural Network module
+@{
+This module contain tools to load artifical neural network models and to make forward test passes.
+@}
+*/
 #include <opencv2/dnn/dnn.hpp>
-#endif /* __OPENCV_DNN_HPP__ */
+#endif /* __OPENCV_DNN_HPP__ */

modules/dnn/samples/alexnet.cpp

@@ -53,6 +53,7 @@ int main(void)
 CV_Assert(!img.empty());
 cvtColor(img, img, COLOR_BGR2RGB);
 img.convertTo(img, CV_32F);
+resize(img, img, Size(227, 227));
 subtract(img, cv::mean(img), img);
 Blob imgBlob(img);

modules/dnn/src/caffe/common.hpp

@@ -19,17 +19,6 @@
 #include <utility> // pair
 #include <vector>
-//// Disable the copy and assignment operator for a class.
-//#define DISABLE_COPY_AND_ASSIGN(classname) \
-//private:\
-// classname(const classname&);\
-// classname& operator=(const classname&)
-//
-//// A simple macro to mark codes that are not implemented, so that when the code
-//// is executed we will see a fatal log.
-//#define NOT_IMPLEMENTED LOG(FATAL) << "Not Implemented Yet"
-//
 namespace caffe {
 // Common functions and classes from std that caffe often uses.

modules/dnn/src/caffe/glog_emulator.hpp

@@ -1,10 +1,11 @@
 #pragma once
 #include <stdlib.h>
 #include <iostream>
+#include <sstream>
 #include <opencv2/core.hpp>
 #define CHECK(cond) cv::GLogWrapper(__FILE__, CV_Func, __LINE__, "CHECK", #cond, cond)
-#define CHECK_EQ(a, b) cv::GLogWrapper(__FILE__, CV_Func, __LINE__, "CHECK", #a #b, ((a) == (b)))
+#define CHECK_EQ(a, b) cv::GLogWrapper(__FILE__, CV_Func, __LINE__, "CHECK", #a"="#b, ((a) == (b)))
 #define LOG(TYPE) cv::GLogWrapper(__FILE__, CV_Func, __LINE__, #TYPE)
 namespace cv
@@ -12,18 +13,10 @@ namespace cv
 class GLogWrapper
 {
-const char *type, *cond_str, *file, *func;
+std::stringstream stream;
+const char *file, *func, *type, *cond_str;
 int line;
 bool cond_staus;
-std::ostream &stream;
-static std::ostream &selectStream(const char *type)
-{
-if (!strcmp(type, "INFO"))
-return std::cout;
-else
-return std::cerr;
-}
 public:
@@ -31,9 +24,8 @@ public:
 const char *_type,
 const char *_cond_str = NULL, bool _cond_status = true
 ) :
-stream(selectStream(_type)),
-file(_file), func(_func), line(_line),
-type(_type), cond_str(_cond_str), cond_staus(_cond_status) {}
+file(_file), func(_func), type(_type), cond_str(_cond_str),
+line(_line), cond_staus(_cond_status) {}
 template<typename T>
 GLogWrapper &operator<<(const T &v)
@@ -47,13 +39,16 @@ public:
 {
 if (cond_str && !cond_staus)
 {
-cv::error(cv::Error::StsAssert, cond_str, func, file, line);
+cv::error(cv::Error::StsError, "FAILED: " + String(cond_str) + "." + stream.str(), func, file, line);
 }
-//else if (!cond_str && strcmp(type, "INFO"))
-//{
-// cv::error(cv::Error::StsAssert, type, func, file, line);
-//}
+else if (!cond_str && strcmp(type, "CHECK"))
+{
+if (!strcmp(type, "INFO"))
+std::cout << stream.str();
+else
+std::cerr << stream.str();
+}
 }
 };
 }
-}
+}
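For reference, a hedged usage sketch of the emulated glog macros above (the function and variable names are invented for illustration). CHECK, CHECK_EQ and LOG construct a temporary cv::GLogWrapper; everything streamed into it is collected in the std::stringstream member and only acted on in the destructor, at the end of the full expression:

    #include <fstream>
    #include "glog_emulator.hpp"   // adjust the relative path as in src/caffe/util

    void loadWeights(const char *filename, int expectedLayers, int actualLayers)
    {
        std::ifstream fs(filename, std::ifstream::in | std::ifstream::binary);

        // On failure the wrapper's destructor raises
        // cv::error(StsError, "FAILED: fs.is_open(). Can't open ...", ...)
        CHECK(fs.is_open()) << "Can't open \"" << filename << "\"";

        // The updated CHECK_EQ stringifies the condition as "expectedLayers=actualLayers"
        CHECK_EQ(expectedLayers, actualLayers) << " layer count mismatch";

        // LOG(INFO) prints the collected message to std::cout, other types go to std::cerr
        LOG(INFO) << "Loaded " << actualLayers << " layers from " << filename << "\n";
    }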

modules/dnn/src/caffe/util/io.cpp

@@ -1,4 +1,5 @@
-#ifdef HAVE_PROTOBUF
+#if HAVE_PROTOBUF
+#include "io.hpp"
 #include <google/protobuf/io/coded_stream.h>
 #include <google/protobuf/io/zero_copy_stream_impl.h>
 #include <google/protobuf/text_format.h>
@@ -29,7 +30,7 @@ using google::protobuf::Message;
 bool ReadProtoFromTextFile(const char* filename, Message* proto) {
 std::ifstream fs(filename, std::ifstream::in);
-CV_Assert(fs.is_open());
+CHECK(fs.is_open()) << "Can't open \"" << filename << "\"";
 IstreamInputStream input(&fs);
 bool success = google::protobuf::TextFormat::Parse(&input, proto);
 fs.close();
@@ -47,7 +48,7 @@ bool ReadProtoFromTextFile(const char* filename, Message* proto) {
 //
 bool ReadProtoFromBinaryFile(const char* filename, Message* proto) {
 std::ifstream fs(filename, std::ifstream::in | std::ifstream::binary);
-CV_Assert(fs.is_open());
+CHECK(fs.is_open()) << "Can't open \"" << filename << "\"";
 ZeroCopyInputStream* raw_input = new IstreamInputStream(&fs);
 CodedInputStream* coded_input = new CodedInputStream(raw_input);
 coded_input->SetTotalBytesLimit(kProtoReadBytesLimit, 536870912);
@@ -66,4 +67,4 @@ bool ReadProtoFromBinaryFile(const char* filename, Message* proto) {
 //}
 } // namespace caffe
-#endif
+#endif

modules/dnn/src/caffe/util/io.hpp

@@ -1,5 +1,6 @@
 #ifndef CAFFE_UTIL_IO_H_
 #define CAFFE_UTIL_IO_H_
+#if HAVE_PROTOBUF
 //instead of GLOG
 #include "../glog_emulator.hpp"
@@ -141,4 +142,5 @@ void CVMatToDatum(const cv::Mat& cv_img, Datum* datum);
 } // namespace caffe
+#endif
 #endif // CAFFE_UTIL_IO_H_

modules/dnn/src/caffe/util/upgrade_proto.cpp

@@ -1,4 +1,4 @@
-#ifdef HAVE_PROTOBUF
+#if HAVE_PROTOBUF
 #include <google/protobuf/io/coded_stream.h>
 #include <google/protobuf/io/zero_copy_stream_impl.h>
 #include <google/protobuf/text_format.h>
@@ -939,4 +939,4 @@ void ReadNetParamsFromBinaryFileOrDie(const string& param_file,
 }
 } // namespace caffe
-#endif
+#endif

modules/dnn/src/caffe/util/upgrade_proto.hpp

@@ -1,5 +1,6 @@
 #ifndef CAFFE_UTIL_UPGRADE_PROTO_H_
 #define CAFFE_UTIL_UPGRADE_PROTO_H_
+#if HAVE_PROTOBUF
 #include <string>
 #include "caffe/common.hpp"
@@ -61,4 +62,5 @@ void ReadNetParamsFromBinaryFileOrDie(const string& param_file,
 } // namespace caffe
+#endif
 #endif // CAFFE_UTIL_UPGRADE_PROTO_H_

modules/dnn/src/dnn.cpp

@@ -335,7 +335,7 @@ struct Net::Impl
 }
 std::cout << "\nNet Outputs(" << netOutputs.size() << "):\n";
-for (int i = 0; i < netOutputs.size(); i++)
+for (size_t i = 0; i < netOutputs.size(); i++)
 std::cout << layers[netOutputs[i]].name << std::endl;
 }
@@ -459,10 +459,10 @@ int Net::addLayer(const String &name, const String &type, LayerParams &params)
 return id;
 }
-void Net::connect(BlobId input, BlobId output)
-{
-}
+//void Net::connect(BlobId input, BlobId output)
+//{
+//}
 void Net::setOutputNames(LayerId layer, const std::vector<String> &outputNames)
 {
@@ -605,4 +605,4 @@ Ptr<Layer> LayerRegister::createLayerInstance(const String &_type, LayerParams&
 }
 }
-}
+}

modules/dnn/src/layers/convolution_layer.cpp

@@ -50,7 +50,7 @@ namespace dnn
 if (bias)
 {
 Blob &biasBlob = learnedParams[1];
-CV_Assert(biasBlob.total() == numOutput);
+CV_Assert(biasBlob.total() == (size_t)numOutput);
 }
 }
@@ -159,4 +159,4 @@ namespace dnn
 outW = (inW + 2 * padW - kernelW) / strideW + 1;
 }
 }
-}
+}

modules/dnn/src/layers/fully_connected_layer.cpp

@@ -30,7 +30,7 @@ namespace dnn
 bias = params.get<bool>("bias_term", true);
 CV_Assert(params.learnedBlobs.size() >= 1);
-CV_Assert(!bias || (params.learnedBlobs.size() >= 2 && params.learnedBlobs[1].total() == numOutputs));
+CV_Assert(!bias || (params.learnedBlobs.size() >= 2 && (int)params.learnedBlobs[1].total() == numOutputs));
 learnedParams.resize(bias ? 2 : 1);
 learnedParams[0] = params.learnedBlobs[0];
@@ -49,7 +49,7 @@ namespace dnn
 inW = inputs[0]->cols();
 inSize = inC * inH * inW;
-CV_Assert(inSize * numOutputs == learnedParams[0].total());
+CV_Assert((size_t)inSize * (size_t)numOutputs == learnedParams[0].total());
 outputs.resize(inputs.size());
 for (size_t i = 0; i < inputs.size(); i++)
@@ -84,4 +84,4 @@ namespace dnn
 }
 }
 }
-}
+}
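The hunks above in dnn.cpp, convolution_layer.cpp and fully_connected_layer.cpp silence signed/unsigned comparison warnings: Blob::total() and std::vector::size() return unsigned sizes, while the layer parameters (numOutput, numOutputs) and the old loop index are int. A minimal sketch of the pattern, with invented names:

    #include <cstddef>
    #include <vector>

    void checkBiasShape(const std::vector<float> &bias, int numOutput)
    {
        // Comparing size_t against int triggers a sign-compare warning:
        //     if (bias.size() == numOutput) ...
        // The fix used above casts the signed side (known to be non-negative):
        if (bias.size() == (size_t)numOutput)
        {
            // shapes agree
        }

        // Likewise, loop indices over .size() become size_t instead of int.
        for (size_t i = 0; i < bias.size(); i++)
        {
            (void)bias[i];
        }
    }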
