@@ -62,6 +62,8 @@ namespace cv {
 namespace dnn {
 CV__DNN_EXPERIMENTAL_NS_BEGIN
 
+static size_t DNN_NETWORK_DUMP = utils::getConfigurationParameterSizeT("OPENCV_DNN_NETWORK_DUMP", 0);
+
 // this option is useful to run valgrind memory errors detection
 static bool DNN_DISABLE_MEMORY_OPTIMIZATIONS = utils::getConfigurationParameterBool("OPENCV_DNN_DISABLE_MEMORY_OPTIMIZATIONS", false);
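
Reviewer note: OPENCV_DNN_NETWORK_DUMP is read through the same utils::getConfigurationParameter* machinery as the memory-optimization switch above, so the dump can be enabled from the environment alone. A minimal usage sketch (model path and input shape are placeholders, not part of this patch):

    // Run as: OPENCV_DNN_NETWORK_DUMP=1 ./app
    // Each network then writes ocv_dnn_net_XXXXX_XX.dot files into the
    // current working directory (see dumpNetworkToFile() below).
    #include <opencv2/dnn.hpp>
    int main()
    {
        cv::dnn::Net net = cv::dnn::readNetFromONNX("model.onnx");  // placeholder model
        cv::Mat blob(std::vector<int>{1, 3, 224, 224}, CV_32F, cv::Scalar(0));  // illustrative shape
        net.setInput(blob);
        net.forward();  // setUpNet() runs here and triggers the dumps
        return 0;
    }
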
@@ -86,6 +88,7 @@ using std::vector;
 using std::map;
 using std::make_pair;
 using std::set;
+using std::string;
 
 //==================================================================================================
@@ -1054,12 +1057,19 @@ static Ptr<BackendWrapper> wrapMat(int backendId, int targetId, cv::Mat& m)
     return Ptr<BackendWrapper>(); // TODO Error?
 }
 
+static int g_networkId = 0;
+
 struct Net::Impl
 {
     typedef std::map<int, LayerShapes> LayersShapesMap;
     typedef std::map<int, LayerData> MapIdToLayerData;
 
+    const int networkId; // network global identifier
+    int networkDumpCounter; // dump counter
+
     Impl()
+        : networkId(CV_XADD(&g_networkId, 1))
+        , networkDumpCounter(0)
     {
         //allocate fake net input layer
         netInputLayer = Ptr<DataLayer>(new DataLayer());
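
Note: CV_XADD is OpenCV's atomic fetch-and-add, so networks constructed concurrently still get distinct identifiers without locking. A sketch of the semantics (names are illustrative):

    // CV_XADD(&counter, 1) atomically increments the counter and returns the
    // value it held *before* the increment: the first Net built in a process
    // gets networkId == 0, the next gets 1, and so on, even when several
    // threads construct networks at the same time.
    static int g_counter = 0;
    int allocateId() { return CV_XADD(&g_counter, 1); }
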
@@ -1223,6 +1233,11 @@ struct Net::Impl
     {
         CV_TRACE_FUNCTION();
 
+        if (DNN_NETWORK_DUMP > 0 && networkDumpCounter == 0)
+        {
+            dumpNetworkToFile();
+        }
+
         if (preferableBackend == DNN_BACKEND_DEFAULT)
             preferableBackend = (Backend)PARAM_DNN_BACKEND_DEFAULT;
 #ifdef HAVE_INF_ENGINE
@@ -1299,6 +1314,11 @@ struct Net::Impl
 
             netWasAllocated = true;
             this->blobsToKeep = blobsToKeep_;
+
+            if (DNN_NETWORK_DUMP > 0)
+            {
+                dumpNetworkToFile();
+            }
         }
     }
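
Together with the earlier hunk, setUpNet() now dumps twice per allocation cycle when the variable is set: once on first entry (the graph as loaded) and once after allocation (the graph after fusion and backend assignment). For the first network in a process, assuming OPENCV_DNN_NETWORK_DUMP=1:

    // ocv_dnn_net_00000_00.dot  <- dumped on entry to setUpNet(), before optimizations
    // ocv_dnn_net_00000_01.dot  <- dumped once netWasAllocated is set
    // Re-allocations (e.g. a new input shape) append further *_NN.dot files,
    // since networkDumpCounter only ever increments.
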
@@ -2979,6 +2999,31 @@ struct Net::Impl
     static
     Net createNetworkFromModelOptimizer(InferenceEngine::CNNNetwork& ieNet);
 #endif
+
+    string dump();
+
+    void dumpNetworkToFile()
+    {
+#ifndef OPENCV_DNN_DISABLE_NETWORK_AUTO_DUMP
+        String dumpFileName = cv::format("ocv_dnn_net_%05d_%02d.dot", networkId, networkDumpCounter++);
+        try
+        {
+            string dumpStr = dump();
+            std::ofstream out(dumpFileName.c_str(), std::ios::out | std::ios::binary);
+            out << dumpStr;
+        }
+        catch (const std::exception& e)
+        {
+            std::ofstream out((dumpFileName + ".error").c_str(), std::ios::out);
+            out << "Exception: " << e.what() << std::endl;
+        }
+        catch (...)
+        {
+            std::ofstream out((dumpFileName + ".error").c_str(), std::ios::out);
+            out << "Can't dump: unknown exception" << std::endl;
+        }
+#endif
+    }
 };
 
 Net::Net() : impl(new Net::Impl)
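
The writer intentionally swallows all exceptions so that a failed dump cannot break inference; the error text lands in a side file instead. The emitted files are plain Graphviz text and, assuming the Graphviz `dot` tool is installed, can be rendered outside OpenCV:

    // Render:  dot -Tpng ocv_dnn_net_00000_00.dot -o net.png
    // Opt-out: building OpenCV with -DOPENCV_DNN_DISABLE_NETWORK_AUTO_DUMP
    // compiles dumpNetworkToFile() down to a no-op, as the guard above shows.
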
@@ -3496,20 +3541,26 @@ int Net::getLayerId(const String &layer)
     return impl->getLayerId(layer);
 }
 
-String parseLayerParams(const String& name, const LayerParams& lp) {
+static
+string dumpLayerParameterSize(const string& name, const LayerParams& lp)
+{
+    std::ostringstream out(name, std::ios::ate);
     DictValue param = lp.get(name);
-    std::ostringstream out;
-    out << name << " ";
-    switch (param.size()) {
-        case 1: out << ": "; break;
-        case 2: out << "(HxW): "; break;
-        case 3: out << "(DxHxW): "; break;
-        default: CV_Error(Error::StsNotImplemented, format("Unsupported %s size = %d", name.c_str(), param.size()));
+    switch (param.size())
+    {
+        case 1: out << " : "; break;
+        case 2: out << " (HxW): "; break;
+        case 3: out << " (DxHxW): "; break;
+        default:
+            CV_LOG_INFO(NULL, format("DNN/dumpLayerParameterSize(): Unsupported '%s' size = %d", name.c_str(), param.size()));
+            out << ": ";
     }
-    for (size_t i = 0; i < param.size() - 1; i++) {
-        out << param.get<int>(i) << " x ";
+    for (size_t i = 0; i < param.size(); i++)
+    {
+        if (i > 0)
+            out << " x ";
+        out << param.get<int>(i);
     }
-    out << param.get<int>(param.size() - 1) << "\\l";
     return out.str();
 }
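
Worked example of the renamed helper (values illustrative): for kernel_size = [3, 3] it now returns "kernel_size (HxW): 3 x 3", with the trailing Graphviz "\l" (left-align) escape moved to the call sites; an unexpected size is only logged via CV_LOG_INFO instead of aborting the whole dump with CV_Error:

    // Illustrative check, usable inside this translation unit:
    LayerParams lp;
    int ksize[] = {3, 3};
    lp.set("kernel_size", DictValue::arrayInt(ksize, 2));
    CV_Assert(dumpLayerParameterSize("kernel_size", lp) == "kernel_size (HxW): 3 x 3");
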
@@ -3517,23 +3568,33 @@ String Net::dump()
 {
     CV_Assert(!empty());
 
-    if (impl->netInputLayer->inputsData.empty())
-        CV_Error(Error::StsError, "Requested set input");
+    bool hasInput = !impl->netInputLayer->inputsData.empty();
 
+    if (hasInput)
+    {
         if (!impl->netWasAllocated)
             impl->setUpNet();
+    }
+
+    return impl->dump();
+}
+
+string Net::Impl::dump()
+{
+    bool hasInput = !netInputLayer->inputsData.empty();
 
     std::ostringstream out;
-    std::map<int, LayerData>& map = impl->layers;
-    int prefBackend = impl->preferableBackend;
+    const std::map<int, LayerData>& map = layers;
+
+    Backend prefBackend = (Backend)preferableBackend;
     std::vector<std::vector<int> > skippedLayers;
     std::vector<int> skipId;
     std::vector<int> allLayers(map.size(), -1);
     int idPrev = -1;
     Ptr<BackendNode> prevNode;
-    for (std::map<int, LayerData>::reverse_iterator rit = map.rbegin(); rit != map.rend(); ++rit)
+    for (std::map<int, LayerData>::const_reverse_iterator rit = map.rbegin(); rit != map.rend(); ++rit)
     {
-        std::map<int, Ptr<BackendNode> >::iterator itBackend = rit->second.backendNodes.find(prefBackend);
+        std::map<int, Ptr<BackendNode> >::const_iterator itBackend = rit->second.backendNodes.find(prefBackend);
         if (prefBackend == DNN_BACKEND_OPENCV || itBackend == rit->second.backendNodes.end() ||
             itBackend->second.empty())
         {
@@ -3572,154 +3633,203 @@ String Net::dump()
             prevNode = itBackend->second;
         }
     }
-    String colors[] = {"#ffffb3", "#fccde5", "#8dd3c7", "#bebada", "#80b1d3", "#fdb462"};
-    String backend;
-    switch (prefBackend) {
+    string colors[] = {"#ffffb3", "#fccde5", "#8dd3c7", "#bebada", "#80b1d3", "#fdb462"};
+    string backend;
+    switch (prefBackend)
+    {
         case DNN_BACKEND_DEFAULT: backend = "DEFAULT/"; break;
         case DNN_BACKEND_HALIDE: backend = "HALIDE/"; break;
+        case DNN_BACKEND_INFERENCE_ENGINE: // fallthru
         case DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019: backend = "DLIE/"; break;
         case DNN_BACKEND_INFERENCE_ENGINE_NGRAPH: backend = "NGRAPH/"; break;
         case DNN_BACKEND_OPENCV: backend = "OCV/"; break;
+        // don't use default:
     }
-    out << "digraph G {" << '\n';
+    out << "digraph G {\n";
     // Add nodes
-    for (std::map<int, LayerData>::iterator it = map.begin(); it != map.end(); ++it)
+    for (std::map<int, LayerData>::const_iterator it = map.begin(); it != map.end(); ++it)
     {
-        String name = it->second.params.name;
-        if (allLayers[it->first] == -1 && !name.empty()) {
-            out << " " << "\"" << name << "\"" << " [label=\"";
-            skipId.clear();
-            skipId.push_back(it->first);
+        const LayerData& ld = it->second;
+        string name = ld.params.name;
+        std::vector<int> clusterIds(1, it->first);
+        if (allLayers[it->first] == -1 && !name.empty())
+        {
+            out << "\t\"" << name << "\" [label=\"";
         }
         else if (name.empty() || it->first != skippedLayers[allLayers[it->first]][0])
+        {
             continue;
-        else { // first node in cluster : it->first == skippedLayers[allLayers[it->first]][0]
+        }
+        else // first node in cluster : it->first == skippedLayers[allLayers[it->first]][0]
+        {
             int cluster = allLayers[it->first];
-            out << " " << "\"" << "cluster_" << cluster << "\"" << " [label=\"{";
-            skipId = skippedLayers[allLayers[it->first]]; // vertices in current cluster
+            out << "\t\"" << "cluster_" << cluster << "\" [label=\"{";
+            clusterIds = skippedLayers[allLayers[it->first]]; // vertices in current cluster
         }
-        for (int i = 0; i < skipId.size(); i++)
+        for (int i = 0; i < clusterIds.size(); i++)
         {
-            LayerParams& lp = map[skipId[i]].params;
+            CV_DbgAssert(map.find(clusterIds[i]) != map.end());
+            const LayerParams& lp = map.find(clusterIds[i])->second.params;
             if (!lp.name.empty()) {
                 if (i > 0) {
                     out << " | ";
                 }
-                out << lp.name << "\\n" << lp.type << "\\n";
-                if (lp.has("kernel_size")) {
-                    String kernel = parseLayerParams("kernel_size", lp);
+                out << lp.name << "\\n" << lp.type << "\\n"; // align center
+                if (lp.has("kernel_size"))
+                {
+                    string kernel = dumpLayerParameterSize("kernel_size", lp);
                     out << kernel;
+                    out << "\\l"; // align left
                 } else if (lp.has("kernel_h") && lp.has("kernel_w")) {
                     DictValue h = lp.get("kernel_h");
                     DictValue w = lp.get("kernel_w");
-                    out << "kernel (HxW): " << h << " x " << w << "\\l";
+                    out << "kernel (HxW): " << h << " x " << w;
+                    out << "\\l"; // align left
                 }
                 if (lp.has("stride")) {
-                    String stride = parseLayerParams("stride", lp);
+                    string stride = dumpLayerParameterSize("stride", lp);
                     out << stride;
+                    out << "\\l"; // align left
                 } else if (lp.has("stride_h") && lp.has("stride_w")) {
                     DictValue h = lp.get("stride_h");
                     DictValue w = lp.get("stride_w");
-                    out << "stride (HxW): " << h << " x " << w << "\\l";
+                    out << "stride (HxW): " << h << " x " << w;
+                    out << "\\l"; // align left
                 }
                 if (lp.has("dilation")) {
-                    String dilation = parseLayerParams("dilation", lp);
+                    string dilation = dumpLayerParameterSize("dilation", lp);
                     out << dilation;
+                    out << "\\l"; // align left
                 } else if (lp.has("dilation_h") && lp.has("dilation_w")) {
                     DictValue h = lp.get("dilation_h");
                     DictValue w = lp.get("dilation_w");
-                    out << "dilation (HxW): " << h << " x " << w << "\\l";
+                    out << "dilation (HxW): " << h << " x " << w;
+                    out << "\\l"; // align left
                 }
                 if (lp.has("pad")) {
                     DictValue pad = lp.get("pad");
                     out << "pad ";
-                    switch (pad.size()) {
-                        case 1: out << ": " << pad << "\\l"; break;
-                        case 2: out << "(HxW): (" << pad.get<int>(0) << " x " << pad.get<int>(1) << ")" << "\\l"; break;
-                        case 4: out << "(HxW): (" << pad.get<int>(0) << ", " << pad.get<int>(2) << ") x (" << pad.get<int>(1) << ", " << pad.get<int>(3) << ")" << "\\l"; break;
-                        case 6: out << "(DxHxW): (" << pad.get<int>(0) << ", " << pad.get<int>(3) << ") x (" << pad.get<int>(1) << ", " << pad.get<int>(4)
-                                << ") x (" << pad.get<int>(2) << ", " << pad.get<int>(5) << ")" << "\\l"; break;
+                    switch (pad.size())
+                    {
+                        case 1: out << ": " << pad; break;
+                        case 2:
+                            out << "(HxW): (" << pad.get<int>(0) << " x " << pad.get<int>(1) << ")";
+                            break;
+                        case 4:
+                            out << "(HxW): (" << pad.get<int>(0) << ", " << pad.get<int>(2)
+                                << ") x (" << pad.get<int>(1) << ", " << pad.get<int>(3) << ")";
+                            break;
+                        case 6:
+                            out << "(DxHxW): (" << pad.get<int>(0) << ", " << pad.get<int>(3)
+                                << ") x (" << pad.get<int>(1) << ", " << pad.get<int>(4)
+                                << ") x (" << pad.get<int>(2) << ", " << pad.get<int>(5) << ")";
+                            break;
                         default: CV_Error(Error::StsNotImplemented, format("Unsupported pad size = %d", pad.size()));
                     }
+                    out << "\\l"; // align left
                 } else if (lp.has("pad_l") && lp.has("pad_t") && lp.has("pad_r") && lp.has("pad_b")) {
                     DictValue l = lp.get("pad_l");
                     DictValue t = lp.get("pad_t");
                     DictValue r = lp.get("pad_r");
                     DictValue b = lp.get("pad_b");
-                    out << "pad (HxW): (" << t << ", " << b << ") x (" << l << ", " << r << ")" << "\\l";
+                    out << "pad (HxW): (" << t << ", " << b << ") x (" << l << ", " << r << ")";
+                    out << "\\l"; // align left
                 }
                 else if (lp.has("pooled_w") || lp.has("pooled_h")) {
                     DictValue h = lp.get("pooled_h");
                     DictValue w = lp.get("pooled_w");
-                    out << "pad (HxW): " << h << " x " << w << "\\l";
+                    out << "pad pooled (HxW): " << h << " x " << w;
+                    out << "\\l"; // align left
                 }
                 if (lp.has("pool")) {
-                    out << "pool: " << lp.get("pool") << "\\l";
+                    out << "pool: " << lp.get("pool");
+                    out << "\\l"; // align left
                 }
                 if (lp.has("global_pooling")) {
-                    out << "global_pooling: " << lp.get("global_pooling") << "\\l";
+                    out << "global_pooling: " << lp.get("global_pooling");
+                    out << "\\l"; // align left
                 }
                 if (lp.has("group")) {
-                    out << "group: " << lp.get("group") << "\\l";
+                    out << "group: " << lp.get("group");
+                    out << "\\l"; // align left
                 }
             }
         }
-        if (!it->second.outputBlobs.empty())
-            out << "output: " << it->second.outputBlobs[0].size << "\\l";
+        if (!ld.outputBlobs.empty())
+        {
+            out << "output: " << ld.outputBlobs[0].size;
+            out << "\\l"; // align left
+        }
 
-        Ptr<BackendNode> layerBackend = it->second.backendNodes[prefBackend];
+        Ptr<BackendNode> layerBackend;
+        std::map<int, Ptr<BackendNode> >::const_iterator ibn = ld.backendNodes.find(prefBackend);
+        if (ibn != ld.backendNodes.end())
+            layerBackend = ibn->second;
         out << (!layerBackend.empty() ? backend : "OCV/");
         int colorId = 0;
-        switch (it->second.layerInstance->preferableTarget) {
-            case DNN_TARGET_CPU: out << "CPU\\n"; colorId = layerBackend.empty() ? 0 : 5; break;
-            case DNN_TARGET_OPENCL: out << "OCL\\n"; colorId = 1; break;
-            case DNN_TARGET_OPENCL_FP16: out << "OCL_FP16\\n"; colorId = 2; break;
-            case DNN_TARGET_MYRIAD: out << "MYRIAD\\n"; colorId = 3; break;
-            case DNN_TARGET_FPGA: out << "FPGA\\n"; colorId = 4; break;
-        }
-        out << ((skipId.size() == 1)? "\" " : " }\" ");
+        const Target target = ld.layerInstance.empty()
+                ? DNN_TARGET_CPU
+                : (Target)(ld.layerInstance->preferableTarget); // TODO fix preferableTarget type
+        switch (target)
+        {
+            case DNN_TARGET_CPU: out << "CPU"; colorId = layerBackend.empty() ? 0 : 5; break;
+            case DNN_TARGET_OPENCL: out << "OCL"; colorId = 1; break;
+            case DNN_TARGET_OPENCL_FP16: out << "OCL_FP16"; colorId = 2; break;
+            case DNN_TARGET_MYRIAD: out << "MYRIAD"; colorId = 3; break;
+            case DNN_TARGET_FPGA: out << "FPGA"; colorId = 4; break;
+            // don't use default:
+        }
+        out << "\\n"; // align center
+        out << ((clusterIds.size() == 1)? "\" " : " }\" ");
         out << "fillcolor=\"" << colors[colorId] << "\" ";
         out << "style=filled ";
-        out << "shape=" << ((skipId.size() == 1)? "box" : "record") << "]" << '\n';
+        out << "shape=" << ((clusterIds.size() == 1)? "box" : "record") << "]\n";
     }
     out << '\n';
     // Add edges
-    int inputsSize = impl->netInputLayer->outNames.size();
-    for (std::map<int, LayerData>::iterator it = map.begin(); it != map.end(); ++it)
+    int inputsSize = hasInput ? netInputLayer->outNames.size() : 0;
+    for (std::map<int, LayerData>::const_iterator it = map.begin(); it != map.end(); ++it)
     {
+        const LayerData& ld = it->second;
         if (allLayers[it->first] == -1) // node
        {
-            for (int i = 0; i < it->second.consumers.size(); i++)
+            for (int i = 0; i < ld.consumers.size(); i++)
             {
-                int outId = it->second.consumers[i].lid;
+                int outId = ld.consumers[i].lid;
                 if (it == map.begin() && inputsSize > 1)
-                    out << " " << "\"" << it->second.name << "_" << i << "\"" << " -> ";
+                    out << "\t\"" << ld.name << "_" << i << "\"" << " -> ";
                 else
-                    out << " " << "\"" << it->second.name << "\"" << " -> ";
+                    out << "\t\"" << ld.name << "\"" << " -> ";
                 if (allLayers[outId] == -1) // node
-                    out << "\"" << map[outId].name << "\"" << '\n';
+                {
+                    CV_DbgAssert(map.find(outId) != map.end());
+                    out << "\"" << map.find(outId)->second.name << "\"\n";
+                }
                 else // cluster
-                    out << "\"" << "cluster_" << allLayers[outId] << "\"" << '\n';
+                {
+                    out << "\"" << "cluster_" << allLayers[outId] << "\"\n";
+                }
             }
         }
         else if (it->first == skippedLayers[allLayers[it->first]].back()) // edges from last layer in cluster
         {
-            for (int i = 0; i < it->second.consumers.size(); i++)
+            for (int i = 0; i < ld.consumers.size(); i++)
             {
-                int outId = it->second.consumers[i].lid;
-                if (allLayers[outId] == -1) { // node
-                    out << " " << "\"" << "cluster_" << allLayers[it->first] << "\"" << " -> ";
-                    out << "\"" << map[outId].name << "\"" << '\n';
+                int outId = ld.consumers[i].lid;
+                if (allLayers[outId] == -1) // node
+                {
+                    CV_DbgAssert(map.find(outId) != map.end());
+                    out << "\t\"" << "cluster_" << allLayers[it->first] << "\"" << " -> ";
+                    out << "\"" << map.find(outId)->second.name << "\"\n";
                 }
                 else if (allLayers[outId] != allLayers[it->first]) { // another cluster
-                    out << " " << "\"" << "cluster_" << allLayers[it->first] << "\"" << " -> ";
-                    out << "\"" << "cluster_" << allLayers[outId] << "\"" << '\n';
+                    out << "\t\"" << "cluster_" << allLayers[it->first] << "\"" << " -> ";
+                    out << "\"" << "cluster_" << allLayers[outId] << "\"\n";
                 }
             }
         }
     }
-    out << "}";
+    out << "}\n";
     return out.str();
 }
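
End-to-end sketch of the public entry point after this change (file name is a placeholder): Net::dump() no longer demands that an input be set, since a missing input now merely skips setUpNet() instead of raising CV_Error:

    #include <fstream>
    #include <opencv2/dnn.hpp>

    int main()
    {
        cv::dnn::Net net = cv::dnn::readNetFromONNX("model.onnx");  // placeholder path
        std::ofstream("net.dot") << net.dump();  // Graphviz text; works even before setInput()/forward()
        return 0;
    }
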