@@ -722,6 +722,18 @@ struct DataLayer : public Layer
     void setNames(const std::vector<String> &names)
     {
         outNames.assign(names.begin(), names.end());
+        shapes.clear(); shapes.resize(outNames.size());
+    }
+
+    void setInputShape(const String& tgtName, const MatShape& shape)
+    {
+        std::vector<String>::const_iterator it = std::find(outNames.begin(), outNames.end(), tgtName);
+        CV_Check(tgtName, it != outNames.end(), "Unknown input");
+        int idx = (int)(it - outNames.begin());
+
+        CV_Assert(idx < (int)shapes.size());
+        CV_Check(tgtName, shapes[idx].empty(), "Input shape redefinition is not allowed");
+        shapes[idx] = shape;
     }
 
     bool getMemoryShapes(const std::vector<MatShape> &inputs,
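
The new DataLayer::setInputShape keeps shapes index-aligned with outNames: setNames() resets one empty MatShape per input, and a shape may be attached to a given input name only once. To make that contract concrete, here is a minimal standalone sketch of the same registry pattern; ShapeRegistry and the plain asserts are illustrative stand-ins for the CV_Check/CV_Assert calls above, not part of the patch.

#include <algorithm>
#include <cassert>
#include <string>
#include <vector>

typedef std::vector<int> MatShape;  // cv::dnn::MatShape is the same typedef

struct ShapeRegistry
{
    std::vector<std::string> names;  // one entry per network input
    std::vector<MatShape> shapes;    // index-aligned with 'names'

    void setNames(const std::vector<std::string>& n)
    {
        names = n;
        shapes.assign(names.size(), MatShape());  // every input starts unspecified
    }

    void setInputShape(const std::string& target, const MatShape& shape)
    {
        std::vector<std::string>::const_iterator it =
            std::find(names.begin(), names.end(), target);
        assert(it != names.end() && "unknown input");            // CV_Check(tgtName, ...) in the patch
        size_t idx = static_cast<size_t>(it - names.begin());
        assert(shapes[idx].empty() && "shape already defined");  // redefinition is rejected
        shapes[idx] = shape;
    }
};
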
@@ -784,6 +796,7 @@ struct DataLayer : public Layer
 #endif // HAVE_INF_ENGINE
 
     std::vector<String> outNames;
+    std::vector<MatShape> shapes;
     // Preprocessing parameters for each network's input.
     std::vector<double> scaleFactors;
     std::vector<Scalar> means;
@@ -2841,10 +2854,27 @@ struct Net::Impl
                 inOutShapes[0].in = shapes;
             }
             else
             {
-                inOutShapes[0].out.clear();
-                return;
+                const std::vector<MatShape>& inputShapes = netInputLayer->shapes;
+                bool none = true;
+                for (size_t i = 0; i < inputShapes.size(); i++)
+                {
+                    if (!inputShapes[i].empty())
+                    {
+                        none = false;
+                        break;
+                    }
+                }
+                if (none)
+                {
+                    inOutShapes[0].out.clear();
+                    return;
+                }
+                else
+                {
+                    inOutShapes[0].in = inputShapes;
+                }
             }
         }
 
         if (inOutShapes[id].in.empty())
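
When no input data has been bound yet, shape inference now falls back to the shapes registered through setInputShape; only if every registered shape is still empty does it give up and clear the output shapes. The "none" loop above is equivalent to the following hedged helper (C++11, assuming MatShape is the usual std::vector<int> typedef); the patch keeps the explicit loop, but the behaviour is the same: an all-empty registry means there is nothing to propagate.

#include <algorithm>
#include <vector>

typedef std::vector<int> MatShape;

// True when no network input has a registered shape, i.e. shape inference
// has nothing to start from and must bail out with empty output shapes.
static bool noShapeRegistered(const std::vector<MatShape>& inputShapes)
{
    return std::all_of(inputShapes.begin(), inputShapes.end(),
                       [](const MatShape& s) { return s.empty(); });
}
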
@@ -3069,7 +3099,7 @@ Net Net::Impl::createNetworkFromModelOptimizer(InferenceEngine::CNNNetwork& ieNe
     // set empty input to determine input shapes
     for (int inp_id = 0; inp_id < inputsNames.size(); ++inp_id)
     {
-        cvNet.setInput(Mat(inp_shapes[inp_id], CV_32F), inputsNames[inp_id]);
+        cvNet.setInputShape(inputsNames[inp_id], inp_shapes[inp_id]);
     }
 
     Ptr<BackendNode> backendNode;
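
For the Model Optimizer import path the practical effect is that no placeholder CV_32F blob has to be allocated just to communicate an input size; only the shape metadata is recorded. At the user level the same trade-off looks roughly like the sketch below; net, the input name "data" and the shape values are placeholders, not taken from the patch.

#include <opencv2/dnn.hpp>

using namespace cv;
using namespace cv::dnn;

// Hypothetical helper: declare the size of the "data" input of an already loaded net.
void declareInputShape(Net& net)
{
    MatShape inpShape = {1, 3, 224, 224};  // placeholder NCHW shape

    // Old workaround: bind a dummy blob only to fix the input size.
    // net.setInput(Mat(inpShape, CV_32F), "data");

    // With this patch: register the shape alone; no buffer is allocated.
    net.setInputShape("data", inpShape);
}
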
@@ -3494,6 +3524,13 @@ void Net::setInputsNames(const std::vector<String> &inputBlobNames)
     impl->netInputLayer->setNames(inputBlobNames);
 }
 
+void Net::setInputShape(const String &inputName, const MatShape& shape)
+{
+    CV_TRACE_FUNCTION();
+
+    impl->netInputLayer->setInputShape(inputName, shape);
+}
+
 void Net::setInput(InputArray blob, const String& name, double scalefactor, const Scalar& mean)
 {
     CV_TRACE_FUNCTION();
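
Taken together, the public wrapper lets a caller declare an input's geometry before any data exists and bind real data later; backends that need static shapes at initialization time can be set up from the declaration alone. A hedged end-to-end sketch (the model file and the input name "data" are placeholders):

#include <opencv2/dnn.hpp>
#include <iostream>

using namespace cv;
using namespace cv::dnn;

int main()
{
    Net net = readNet("model.onnx");  // placeholder model path

    // Declare the input geometry up front (MatShape is std::vector<int>, NCHW here).
    net.setInputShape("data", MatShape{1, 3, 224, 224});

    // Bind actual data later; setInput() validates it against the declaration.
    Mat blob(std::vector<int>{1, 3, 224, 224}, CV_32F, Scalar(0));
    net.setInput(blob, "data");

    Mat out = net.forward();
    std::cout << "output elements: " << out.total() << std::endl;
    return 0;
}
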
@@ -3506,6 +3543,33 @@ void Net::setInput(InputArray blob, const String& name, double scalefactor, const Scalar& mean)
     if (!pin.valid())
         CV_Error(Error::StsObjectNotFound, "Requested blob \"" + name + "\" not found");
 
+    Mat blob_ = blob.getMat(); // can't use InputArray directly due MatExpr stuff
+    MatShape blobShape = shape(blob_);
+
+    if (pin.lid == 0)
+    {
+        CV_Assert(!impl->netInputLayer.empty());
+        const DataLayer& netInputLayer = *impl->netInputLayer.get();
+        if (!netInputLayer.shapes.empty())
+        {
+            CV_CheckLT(pin.oid, (int)netInputLayer.shapes.size(), "");
+            const MatShape& inputShapeLimitation = netInputLayer.shapes[pin.oid];
+            if (!inputShapeLimitation.empty())
+            {
+                CV_CheckEQ(inputShapeLimitation.size(), blobShape.size(), "");
+#if 0 // TODO: DNNTestNetwork.MobileNet_SSD_Caffe_Different_Width_Height/0
+                const size_t dims = inputShapeLimitation.size();
+                for (size_t dim = 0; dim < dims; dim++)
+                {
+                    if (dims >= 3 && dim == 0 && inputShapeLimitation[0] == 1)
+                        continue; // don't limit batch
+                    CV_CheckEQ(inputShapeLimitation[dim], blobShape[dim], "");
+                }
+#endif
+            }
+        }
+    }
+
     LayerData &ld = impl->layers[pin.lid];
     const int numInputs = std::max(pin.oid+1, (int)ld.requiredOutputs.size());
     ld.outputBlobs.resize(numInputs);
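
The check that actually runs in setInput() is on the rank only: the bound blob must have the same number of dimensions as the registered shape, while the per-dimension comparison stays disabled behind #if 0 because of the MobileNet-SSD width/height test referenced in the TODO. A hedged illustration, assuming net has an input named "data" whose shape has not been registered yet:

#include <opencv2/dnn.hpp>

using namespace cv;
using namespace cv::dnn;

void demonstrateShapeCheck(Net& net)
{
    net.setInputShape("data", MatShape{1, 3, 224, 224});

    // Accepted: same rank (4). A different spatial size still passes because the
    // per-dimension CV_CheckEQ above is compiled out.
    Mat ok(std::vector<int>{1, 3, 300, 300}, CV_32F, Scalar(0));
    net.setInput(ok, "data");

    // Throws cv::Exception: the rank check fails (3-D blob vs 4-D declared shape).
    Mat bad(std::vector<int>{3, 224, 224}, CV_32F, Scalar(0));
    net.setInput(bad, "data");
}
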
@@ -3515,17 +3579,11 @@ void Net::setInput(InputArray blob, const String& name, double scalefactor, const Scalar& mean)
     impl->netInputLayer->means.resize(numInputs);
 
     MatShape prevShape = shape(impl->netInputLayer->inputsData[pin.oid]);
-    Mat blob_ = blob.getMat();
-    bool oldShape = prevShape == shape(blob_);
-    if (oldShape)
-    {
-        blob_.copyTo(impl->netInputLayer->inputsData[pin.oid]);
-    }
-    else
-    {
-        ld.outputBlobs[pin.oid] = blob_.clone();
-        impl->netInputLayer->inputsData[pin.oid] = ld.outputBlobs[pin.oid];
-    }
+    bool oldShape = prevShape == blobShape;
+
+    blob_.copyTo(impl->netInputLayer->inputsData[pin.oid]);
+    if (!oldShape)
+        ld.outputBlobs[pin.oid] = impl->netInputLayer->inputsData[pin.oid];
 
     if (!ld.outputBlobsWrappers[pin.oid].empty())
     {