diff --git a/modules/dnn/src/caffe/caffe_importer.cpp b/modules/dnn/src/caffe/caffe_importer.cpp
index 2d615c448a..0b6c0a6e38 100644
--- a/modules/dnn/src/caffe/caffe_importer.cpp
+++ b/modules/dnn/src/caffe/caffe_importer.cpp
@@ -49,6 +49,7 @@
 #include
 #include
 #include
+#include
 #include "caffe_io.hpp"
 #endif
 
@@ -57,8 +58,7 @@ namespace dnn {
 CV__DNN_EXPERIMENTAL_NS_BEGIN
 
 #ifdef HAVE_PROTOBUF
-using ::google::protobuf::RepeatedField;
-using ::google::protobuf::RepeatedPtrField;
+using ::google::protobuf::RepeatedFieldRef;
 using ::google::protobuf::Message;
 using ::google::protobuf::Descriptor;
 using ::google::protobuf::FieldDescriptor;
@@ -136,7 +136,7 @@ public:
 
 #define SET_UP_FILED(getter, arrayConstr, gtype) \
     if (isRepeated) { \
-        const RepeatedField<gtype> &v = refl->GetRepeatedField<gtype>(msg, field); \
+        const RepeatedFieldRef<gtype> v = refl->GetRepeatedFieldRef<gtype>(msg, field); \
         params.set(name, DictValue::arrayConstr(v.begin(), (int)v.size())); \
     } \
     else { \
@@ -168,7 +168,7 @@ public:
             break;
         case FieldDescriptor::CPPTYPE_STRING:
             if (isRepeated) {
-                const RepeatedPtrField<std::string> &v = refl->GetRepeatedPtrField<std::string>(msg, field);
+                const RepeatedFieldRef<std::string> v = refl->GetRepeatedFieldRef<std::string>(msg, field);
                 params.set(name, DictValue::arrayString(v.begin(), (int)v.size()));
             }
             else {
diff --git a/modules/dnn/src/tensorflow/tf_importer.cpp b/modules/dnn/src/tensorflow/tf_importer.cpp
index 16c5c16308..1a01ac87d6 100644
--- a/modules/dnn/src/tensorflow/tf_importer.cpp
+++ b/modules/dnn/src/tensorflow/tf_importer.cpp
@@ -2739,14 +2739,21 @@ DataLayout TFImporter::predictOutputDataLayout(const tensorflow::NodeDef& layer)
 
 void TFImporter::populateNet()
 {
-    CV_Assert(netBin.ByteSize() || netTxt.ByteSize());
+#if GOOGLE_PROTOBUF_VERSION < 3005000
+    size_t netBinSize = saturate_cast<size_t>(netBin.ByteSize());
+    size_t netTxtSize = saturate_cast<size_t>(netTxt.ByteSize());
+#else
+    size_t netBinSize = netBin.ByteSizeLong();
+    size_t netTxtSize = netTxt.ByteSizeLong();
+#endif
+    CV_Assert(netBinSize || netTxtSize);
 
     CV_LOG_INFO(NULL, "DNN/TF: parsing model"
         << (netBin.has_versions() ? cv::format(" produced by TF v%d (min_consumer=%d)", (int)netBin.versions().producer(), (int)netBin.versions().min_consumer()) : cv::String(" (N/A version info)"))
         << ". Number of nodes = " << netBin.node_size()
     );
 
-    if (netTxt.ByteSize())
+    if (netTxtSize)
     {
         CV_LOG_INFO(NULL, "DNN/TF: parsing config"
             << (netTxt.has_versions() ? cv::format(" produced by TF v%d (min_consumer=%d)", (int)netTxt.versions().producer(), (int)netTxt.versions().min_consumer()) : cv::String(" (N/A version info)"))
@@ -2775,7 +2782,7 @@ void TFImporter::populateNet()
         CV_LOG_DEBUG(NULL, "DNN/TF: sortByExecutionOrder(model) => " << netBin.node_size() << " nodes");
     }
 
-    tensorflow::GraphDef& net = netTxt.ByteSize() != 0 ? netTxt : netBin;
+    tensorflow::GraphDef& net = netTxtSize != 0 ? netTxt : netBin;
 
     int layersSize = net.node_size();
 
@@ -2873,7 +2880,12 @@ void TFImporter::addPermuteLayer(const int* order, const std::string& permName,
 
 void TFImporter::parseNode(const tensorflow::NodeDef& layer)
 {
-    tensorflow::GraphDef& net = netTxt.ByteSize() != 0 ? netTxt : netBin;
+#if GOOGLE_PROTOBUF_VERSION < 3005000
+    size_t netTxtSize = saturate_cast<size_t>(netTxt.ByteSize());
+#else
+    size_t netTxtSize = netTxt.ByteSizeLong();
+#endif
+    tensorflow::GraphDef& net = netTxtSize != 0 ? netTxt : netBin;
 
     const std::string& name = layer.name();
     const std::string& type = layer.op();