@@ -324,7 +324,6 @@ struct LayerData
         //add logging info
         params.name = name;
         params.type = type;
-        skip = false;
     }

     int id;
@@ -347,7 +346,6 @@ struct LayerData
     std::map<int, bool> skipFlags;

     int flag;
-    bool skip;

     Ptr<Layer> getLayerInstance()
     {
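
Note: LayerData keeps only the per-backend skipFlags map and drops the old bool skip, so a layer can be disabled for one execution path without affecting the others. A minimal sketch of the intended usage (preferableBackend stands for whatever backend the net was configured with):

    // fusion disables a folded-away layer for the default path only
    ld.skipFlags[DNN_BACKEND_DEFAULT] = true;
    // a backend-specific executor consults its own entry instead
    bool runHere = !ld.skipFlags[preferableBackend];
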
@@ -666,9 +664,7 @@ struct Net::Impl
         }
     }

-    void setUpNet(const std::vector<LayerPin>& blobsToKeep_ = std::vector<LayerPin>())
-    {
-        if (!netWasAllocated || this->blobsToKeep != blobsToKeep_)
     void clear()
     {
         MapIdToLayerData::iterator it;
         for (it = layers.begin(); it != layers.end(); it++)
@@ -677,7 +673,30 @@ struct Net::Impl
                 it->second.outputBlobs.clear();
                 it->second.internals.clear();
             }
             it->second.skipFlags.clear();
+            it->second.consumers.clear();
+            Ptr<ConvolutionLayer> convLayer = it->second.layerInstance.dynamicCast<ConvolutionLayer>();
+
+            if( !convLayer.empty() )
+            {
+                convLayer->setActivation(Ptr<ActivationLayer>());
+                convLayer->setBatchNorm(Ptr<BatchNormLayer>());
+            }
+
+            Ptr<PoolingLayer> poolingLayer = it->second.layerInstance.dynamicCast<PoolingLayer>();
+            if( !poolingLayer.empty() )
+            {
+                poolingLayer->computeMaxIdx = true;
+            }
         }
     }

+    void setUpNet(const std::vector<LayerPin>& blobsToKeep_ = std::vector<LayerPin>())
+    {
+        if (!netWasAllocated || this->blobsToKeep != blobsToKeep_)
+        {
+            clear();
+
             allocateLayers(blobsToKeep_);
             computeNetOutputLayers();
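
Note: clear() is now responsible for undoing fusion before a rebuild. Passing an empty Ptr<ActivationLayer>() / Ptr<BatchNormLayer>() detaches whatever was embedded into a convolution, and computeMaxIdx = true restores the pooling layer's default behavior. That is what makes the relocated setUpNet() safe to call repeatedly: every rebuild starts from an unfused graph, then allocateLayers() re-applies fusion for the current configuration.
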
@@ -1005,69 +1024,41 @@ struct Net::Impl
         ld.flag = 1;
     }

-    void allocateLayers(const std::vector<LayerPin>& blobsToKeep_)
-    {
-        MapIdToLayerData::iterator it;
-        for (it = layers.begin(); it != layers.end(); it++)
-            it->second.flag = 0;
-
-        CV_Assert(!layers[0].outputBlobs.empty());
-        ShapesVec inputShapes;
-        for(int i = 0; i < layers[0].outputBlobs.size(); i++)
-        {
-            CV_Assert(layers[0].outputBlobs[i].total());
-            inputShapes.push_back(shape(layers[0].outputBlobs[i]));
-        }
-        LayersShapesMap layersShapes;
-        getLayersShapes(inputShapes, layersShapes);
-
-        blobManager.reset();
-        for (it = layers.begin(); it != layers.end(); ++it)
-        {
-            const LayerData& ld = it->second;
-            blobManager.addReferences(ld.inputBlobsId);
-        }
-
-        for (int i = 0; i < blobsToKeep_.size(); i++)
-        {
-            blobManager.addReference(blobsToKeep_[i]);
-        }
-
-        for (it = layers.begin(); it != layers.end(); it++)
-        {
-            int lid = it->first;
-            allocateLayer(lid, layersShapes);
-        }
-
+    void fuseLayers(const std::vector<LayerPin>& blobsToKeep_)
+    {
         // scan through all the layers. If there is convolution layer followed by the activation layer,
         // we try to embed this activation into the convolution and disable separate execution of the activation
-        std::vector<String> outnames;
         std::set<LayerPin> pinsToKeep(blobsToKeep_.begin(),
                                       blobsToKeep_.end());
+        MapIdToLayerData::iterator it;
         for (it = layers.begin(); it != layers.end(); it++)
         {
             int lid = it->first;
             LayerData& ld = layers[lid];
-            if( ld.skip )
+            if( ld.skipFlags[DNN_BACKEND_DEFAULT] )
             {
-                //printf("skipping %s\n", ld.layerInstance->name.c_str());
                 continue;
             }
-            //printf("analyzing %s\n", ld.layerInstance->name.c_str());
-            if( ld.consumers.size() == 0 )
-                outnames.push_back(ld.layerInstance->name);
             Ptr<ConvolutionLayer> convLayer = ld.layerInstance.dynamicCast<ConvolutionLayer>();
-            if( !convLayer.empty() && ld.consumers.size() == 1 )
+            LayerPin lp(lid, 0);
+            if( !convLayer.empty() && ld.consumers.size() == 1 &&
+                pinsToKeep.count(lp) == 0 )
             {
                 LayerData* nextData = &layers[ld.consumers[0].lid];
                 Ptr<BatchNormLayer> nextBNormLayer =
                     nextData->layerInstance.dynamicCast<BatchNormLayer>();
-                if( !nextBNormLayer.empty() )
+                LayerPin lpNext(ld.consumers[0].lid, 0);
+                if( !nextBNormLayer.empty() && pinsToKeep.count(lpNext) == 0 )
                 {
                     LayerData* bnormData = nextData;
                     nextData = 0;
                     if( convLayer->setBatchNorm(nextBNormLayer) )
                     {
-                        //printf("fused convolution (%s) and batch norm (%s)\n", convLayer->name.c_str(), nextBNormLayer->name.c_str());
-                        bnormData->skip = true;
+                        bnormData->skipFlags[DNN_BACKEND_DEFAULT] = true;
+                        ld.outputBlobs = layers[lpNext.lid].outputBlobs;
                         if( bnormData->consumers.size() == 1 )
                             nextData = &layers[bnormData->consumers[0].lid];
                     }
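
Note on what setBatchNorm() buys: at inference time batch normalization is a per-channel affine map, y = gamma * (x - mean) / sqrt(var + eps) + beta, so convolution followed by batch norm collapses into a single convolution, whether realized by folding the weights ahead of time or by applying the affine map inside the convolution's output loop. A standalone sketch of the weight-folding view (names are illustrative, not the actual ConvolutionLayer internals):

    #include <cmath>
    #include <vector>

    // W holds outChannels blocks of weightsPerChannel values; b has one bias
    // per output channel. Folds y = gamma*(conv(x)-mean)/sqrt(var+eps) + beta
    // into W and b so the convolution alone reproduces conv + batch norm.
    void foldBatchNorm(std::vector<float>& W, std::vector<float>& b,
                       const std::vector<float>& gamma, const std::vector<float>& beta,
                       const std::vector<float>& mean, const std::vector<float>& var,
                       float eps, std::size_t weightsPerChannel)
    {
        for (std::size_t c = 0; c < b.size(); c++)
        {
            float s = gamma[c] / std::sqrt(var[c] + eps);  // per-channel scale
            for (std::size_t i = 0; i < weightsPerChannel; i++)
                W[c * weightsPerChannel + i] *= s;         // W' = s * W
            b[c] = s * (b[c] - mean[c]) + beta[c];         // b' = s*(b - mean) + beta
        }
    }

After folding, the convolution alone reproduces the conv + batch-norm result, which is why the batch-norm layer can be marked skipped and the convolution's output aliased to the blob its consumers were wired to (ld.outputBlobs = layers[lpNext.lid].outputBlobs).
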
@@ -1079,8 +1070,8 @@ struct Net::Impl

                 if( !nextActivLayer.empty() && convLayer->setActivation(nextActivLayer) )
                 {
-                    //printf("fused convolution (%s) and activation (%s)\n", convLayer->name.c_str(), nextActivLayer->name.c_str());
-                    nextData->skip = true;
+                    nextData->skipFlags[DNN_BACKEND_DEFAULT] = true;
+                    ld.outputBlobs = layers[lpNext.lid].outputBlobs;
                 }
             }
             Ptr<PoolingLayer> poolingLayer = ld.layerInstance.dynamicCast<PoolingLayer>();
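
The activation branch uses the same aliasing trick as the batch-norm branch: once setActivation() succeeds, the activation layer is skipped for the default path and the convolution writes into the blob the skipped consumer owned, so downstream layers keep reading from the location they expect. The run-time effect, sketched as comments:

    // before fusion: two passes and an intermediate blob
    //   conv->forward(input, tmp);   relu->forward(tmp, output);
    // after fusion: one pass, activation applied in the convolution's loop
    //   output[i] = activation(conv_result[i]);
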
@@ -1096,10 +1087,43 @@ struct Net::Impl
                 poolingLayer->computeMaxIdx = false;
             }
         }
-        /*printf("outputs: ");
-        for( size_t j = 0; j < outnames.size(); j++ )
-            printf("%s ", outnames[j].c_str());
-        printf("\n");*/
     }

+    void allocateLayers(const std::vector<LayerPin>& blobsToKeep_)
+    {
+        MapIdToLayerData::iterator it;
+        for (it = layers.begin(); it != layers.end(); it++)
+            it->second.flag = 0;
+
+        CV_Assert(!layers[0].outputBlobs.empty());
+        ShapesVec inputShapes;
+        for(int i = 0; i < layers[0].outputBlobs.size(); i++)
+        {
+            CV_Assert(layers[0].outputBlobs[i].total());
+            inputShapes.push_back(shape(layers[0].outputBlobs[i]));
+        }
+        LayersShapesMap layersShapes;
+        getLayersShapes(inputShapes, layersShapes);
+
+        blobManager.reset();
+        for (it = layers.begin(); it != layers.end(); ++it)
+        {
+            const LayerData& ld = it->second;
+            blobManager.addReferences(ld.inputBlobsId);
+        }
+
+        for (int i = 0; i < blobsToKeep_.size(); i++)
+        {
+            blobManager.addReference(blobsToKeep_[i]);
+        }
+
+        for (it = layers.begin(); it != layers.end(); it++)
+        {
+            int lid = it->first;
+            allocateLayer(lid, layersShapes);
+        }
+
+        fuseLayers(blobsToKeep_);
+    }
+
     void forwardLayer(LayerData &ld)
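
The rebuilt allocateLayers() is now a plain pipeline: reset traversal flags, derive every layer's shapes from the input blobs, pre-count blob references, allocate layer by layer, then hand over to fuseLayers(). The up-front reference counting is what lets blobManager reuse memory during allocation: every consumer pin (plus every pin in blobsToKeep_) is registered before any buffer is handed out. A toy model of the idea (illustrative only; of the real BlobManager, just reset/addReference/addReferences appear in this diff):

    #include <map>

    struct ToyBlobManager
    {
        std::map<LayerPin, int> refcount;              // pin -> readers not yet run
        void addReference(const LayerPin& p) { refcount[p]++; }
        // a producer whose output pin has no pending readers left may hand
        // its buffer to the next layer instead of allocating a fresh one
    };
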
@@ -1109,7 +1133,7 @@ struct Net::Impl
         if (preferableBackend == DNN_BACKEND_DEFAULT ||
             !layer->supportBackend(preferableBackend))
         {
-            if( !ld.skip )
+            if( !ld.skipFlags[DNN_BACKEND_DEFAULT] )
                 layer->forward(ld.inputBlobs, ld.outputBlobs, ld.internals);
         }
         else if (!ld.skipFlags[preferableBackend])
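
Dispatch in forwardLayer() now reads as: if the preferred backend is the default one, or this layer cannot run on the preferred backend, fall back to the reference implementation, honoring the default-path skip flag that fuseLayers() may have set; otherwise run the backend node unless that backend's own flag says the layer was folded away there too. In sketch form:

    // fallback = (preferableBackend == DNN_BACKEND_DEFAULT) ||
    //            !layer->supportBackend(preferableBackend);
    // fallback  -> run reference impl iff !ld.skipFlags[DNN_BACKEND_DEFAULT]
    // !fallback -> run backend node   iff !ld.skipFlags[preferableBackend]
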
@@ -1300,20 +1324,6 @@ void Net::connect(String _outPin, String _inPin)
     impl->connect(outPin.lid, outPin.oid, inpPin.lid, inpPin.oid);
 }

-//void Net::forward(LayerId toLayer)
-//{
-//    if (!impl->netWasAllocated)
-//    {
-//        impl->setUpNet();
-//    }
-//
-//    if (toLayer.isString() && toLayer.get<String>().empty())
-//        impl->forwardAll();
-//    else
-//        impl->forwardLayer(impl->getLayerData(toLayer));
-//}
-
 Mat Net::forward(const String& outputName)
 {
     String layerName = outputName;
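
With the commented-out LayerId overload deleted, the string-based overload is the single forward entry point; it still triggers setUpNet() on first use. Typical caller-side usage, with illustrative model and blob names:

    using namespace cv;
    using namespace cv::dnn;

    Net net = readNetFromCaffe("model.prototxt", "model.caffemodel");
    Mat blob = blobFromImage(image);   // image: a previously loaded cv::Mat
    net.setInput(blob);
    Mat prob = net.forward("prob");    // run the net up to layer "prob"
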