From 27177658414006df5de95c120ec795e2908aa3e2 Mon Sep 17 00:00:00 2001
From: Vitaliy Lyudvichenko
Date: Wed, 24 Jun 2015 20:02:31 +0300
Subject: [PATCH] Fixed some warnings

---
 modules/dnn/include/opencv2/dnn/dict.hpp           | 6 +++---
 modules/dnn/src/caffe/{ => util}/io.cpp            | 0
 modules/dnn/src/caffe/util/io.hpp                  | 2 +-
 modules/dnn/src/caffe/{ => util}/upgrade_proto.cpp | 0
 modules/dnn/src/caffe_importer.cpp                 | 8 ++++----
 modules/dnn/src/layers/blank_layer.cpp             | 2 +-
 modules/dnn/src/layers/elementwise_layers.cpp      | 2 +-
 modules/dnn/src/layers/fully_connected_layer.cpp   | 2 +-
 modules/dnn/src/layers/layers_common.cpp           | 2 +-
 modules/dnn/src/layers/lrn_layer.cpp               | 2 +-
 modules/dnn/src/layers/softmax_layer.cpp           | 1 -
 11 files changed, 13 insertions(+), 14 deletions(-)
 rename modules/dnn/src/caffe/{ => util}/io.cpp (100%)
 rename modules/dnn/src/caffe/{ => util}/upgrade_proto.cpp (100%)

diff --git a/modules/dnn/include/opencv2/dnn/dict.hpp b/modules/dnn/include/opencv2/dnn/dict.hpp
index 889ce404b..fcc53b3b2 100644
--- a/modules/dnn/include/opencv2/dnn/dict.hpp
+++ b/modules/dnn/include/opencv2/dnn/dict.hpp
@@ -53,7 +53,7 @@ public:
     bool has(const String &name)
     {
-        return dict.count(name);
+        return dict.count(name) != 0;
     }
 
     DictValue *ptr(const String &name)
@@ -125,7 +125,7 @@ inline double DictValue::get() const
     if (type == cv::Param::REAL)
         return d;
     else if (type == cv::Param::INT)
-        return i;
+        return (double)i;
     else
     {
         CV_Assert(type == cv::Param::REAL || type == cv::Param::INT);
@@ -156,7 +156,7 @@ inline bool DictValue::get() const
     }
     else if (type == cv::Param::INT)
     {
-        return i;
+        return i != 0;
     }
     else
     {
diff --git a/modules/dnn/src/caffe/io.cpp b/modules/dnn/src/caffe/util/io.cpp
similarity index 100%
rename from modules/dnn/src/caffe/io.cpp
rename to modules/dnn/src/caffe/util/io.cpp
diff --git a/modules/dnn/src/caffe/util/io.hpp b/modules/dnn/src/caffe/util/io.hpp
index 458cbeffb..0c56997cb 100644
--- a/modules/dnn/src/caffe/util/io.hpp
+++ b/modules/dnn/src/caffe/util/io.hpp
@@ -2,7 +2,7 @@
 #define CAFFE_UTIL_IO_H_
 
 //instead of GLOG
-#include "glog_emulator.hpp"
+#include "../glog_emulator.hpp"
 //#include 
 
 #include 
diff --git a/modules/dnn/src/caffe/upgrade_proto.cpp b/modules/dnn/src/caffe/util/upgrade_proto.cpp
similarity index 100%
rename from modules/dnn/src/caffe/upgrade_proto.cpp
rename to modules/dnn/src/caffe/util/upgrade_proto.cpp
diff --git a/modules/dnn/src/caffe_importer.cpp b/modules/dnn/src/caffe_importer.cpp
index 7d8b5b35b..b56cc9dcb 100644
--- a/modules/dnn/src/caffe_importer.cpp
+++ b/modules/dnn/src/caffe_importer.cpp
@@ -156,12 +156,12 @@ namespace
         }
 
         dstBlob.create(shape.size(), shape, CV_32F);
-        CV_Assert(protoBlob.data_size() == dstBlob.getMatRef().total());
+        CV_Assert(protoBlob.data_size() == (int)dstBlob.getMatRef().total());
 
         CV_DbgAssert(protoBlob.GetDescriptor()->FindFieldByLowercaseName("data")->cpp_type() == FieldDescriptor::CPPTYPE_FLOAT);
         float *dstData = dstBlob.getMatRef().ptr();
 
-        for (size_t i = 0; i < protoBlob.data_size(); i++)
+        for (int i = 0; i < protoBlob.data_size(); i++)
             dstData[i] = protoBlob.data(i);
     }
@@ -201,7 +201,7 @@ namespace
 
         std::vector layersName(layersSize);
         std::vector layersId(layersSize);
-        std::vector> bottomsVec(layersSize);
+        std::vector > bottomsVec(layersSize);
 
         for (int li = 0; li < layersSize; li++)
         {
@@ -247,7 +247,7 @@ Ptr cv::dnn::createCaffeImporter(const String &prototxt, const String
 
 #else //HAVE_PROTOBUF
 
-Ptr cv::dnn::createCaffeImporter(const String &prototxt, const String &caffeModel)
+Ptr cv::dnn::createCaffeImporter(const String&, const String&)
 {
     CV_Error(cv::Error::StsNotImplemented, "libprotobuf required to import data from Caffe models");
     return Ptr();
diff --git a/modules/dnn/src/layers/blank_layer.cpp b/modules/dnn/src/layers/blank_layer.cpp
index 4a5ce96dc..a790a95df 100644
--- a/modules/dnn/src/layers/blank_layer.cpp
+++ b/modules/dnn/src/layers/blank_layer.cpp
@@ -9,7 +9,7 @@ namespace dnn
     {
     public:
-        BlankLayer(LayerParams &params)
+        BlankLayer(LayerParams&)
         {
         }
diff --git a/modules/dnn/src/layers/elementwise_layers.cpp b/modules/dnn/src/layers/elementwise_layers.cpp
index 0023c67eb..735dc7212 100644
--- a/modules/dnn/src/layers/elementwise_layers.cpp
+++ b/modules/dnn/src/layers/elementwise_layers.cpp
@@ -59,7 +59,7 @@ namespace dnn
 
     struct TanHFunctor
     {
-        TanHFunctor(LayerParams &params) {}
+        TanHFunctor(LayerParams&) {}
 
         inline float operator()(float x)
         {
diff --git a/modules/dnn/src/layers/fully_connected_layer.cpp b/modules/dnn/src/layers/fully_connected_layer.cpp
index 3c605e4d9..c7e184175 100644
--- a/modules/dnn/src/layers/fully_connected_layer.cpp
+++ b/modules/dnn/src/layers/fully_connected_layer.cpp
@@ -12,7 +12,7 @@ namespace dnn
         int numOutputs;
         int inC, inH, inW;
-        size_t inSize;
+        int inSize;
 
     public:
         FullyConnectedLayer(LayerParams &params);
diff --git a/modules/dnn/src/layers/layers_common.cpp b/modules/dnn/src/layers/layers_common.cpp
index e0e141c96..b4bec3f66 100644
--- a/modules/dnn/src/layers/layers_common.cpp
+++ b/modules/dnn/src/layers/layers_common.cpp
@@ -41,7 +41,7 @@ void getKernelParams(LayerParams &params, int &kernelH, int &kernelW, int &padH,
         strideH = strideW = params.get("stride", 1);
     }
 
-    CV_Assert(kernelH > 0 && kernelW > 0 && padH >= 0 && padW >= 0 && strideH > 0 & strideW > 0);
+    CV_Assert(kernelH > 0 && kernelW > 0 && padH >= 0 && padW >= 0 && strideH > 0 && strideW > 0);
 }
 
 }
diff --git a/modules/dnn/src/layers/lrn_layer.cpp b/modules/dnn/src/layers/lrn_layer.cpp
index be3b45a16..692232672 100644
--- a/modules/dnn/src/layers/lrn_layer.cpp
+++ b/modules/dnn/src/layers/lrn_layer.cpp
@@ -129,7 +129,7 @@ namespace dnn
             cv::pow(dst, beta, dst);
             cv::divide(src, dst, dst);
 
-            CV_DbgAssert(dataDst0 == dst.data);
+            CV_Assert(dataDst0 == dst.data); //debug
         }
     }
 }
diff --git a/modules/dnn/src/layers/softmax_layer.cpp b/modules/dnn/src/layers/softmax_layer.cpp
index e24e84f33..97ab35881 100644
--- a/modules/dnn/src/layers/softmax_layer.cpp
+++ b/modules/dnn/src/layers/softmax_layer.cpp
@@ -50,7 +50,6 @@ namespace dnn
         float *dstPtr = dst.ptr();
         float *bufPtr = maxAggregator.ptr();
 
-        size_t totalSize = src.total();
         size_t outerSize = src.total(0, axis);
         size_t channels = src.size(axis);
         size_t innerSize = src.total(axis + 1, -1);
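
Note on the layers_common.cpp hunk: the old condition combined two comparisons with the bitwise '&'. Because relational '>' binds tighter than '&', the expression still evaluated as intended, but compilers commonly warn about applying a bitwise operator to comparison results, and the patch switches to the logical '&&'. Below is a minimal standalone sketch of that warning class; it is not part of the patch and uses a plain assert with made-up stride values instead of CV_Assert.

// warning_sketch.cpp -- illustrates the '&' vs '&&' change; not from the patch.
#include <cassert>

int main()
{
    int strideH = 1, strideW = 2; // hypothetical values for illustration

    // Old form: parses as (strideH > 0) & (strideW > 0) because '>' binds
    // tighter than '&', so the result is correct, but the bitwise '&' on
    // comparison results typically draws a compiler warning.
    assert(strideH > 0 & strideW > 0);

    // Fixed form, as in the patch: logical '&&' states the intent directly
    // and short-circuits.
    assert(strideH > 0 && strideW > 0);

    return 0;
}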