@@ -722,6 +722,18 @@ struct DataLayer : public Layer
     void setNames(const std::vector<String> &names)
     {
         outNames.assign(names.begin(), names.end());
+        shapes.clear(); shapes.resize(outNames.size());
     }
 
+    void setInputShape(const String& tgtName, const MatShape& shape)
+    {
+        std::vector<String>::const_iterator it = std::find(outNames.begin(), outNames.end(), tgtName);
+        CV_Check(tgtName, it != outNames.end(), "Unknown input");
+        int idx = (int)(it - outNames.begin());
+
+        CV_Assert(idx < (int)shapes.size());
+        CV_Check(tgtName, shapes[idx].empty(), "Input shape redefinition is not allowed");
+        shapes[idx] = shape;
+    }
+
     bool getMemoryShapes(const std::vector<MatShape> &inputs,
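The hunk above gives the internal DataLayer one MatShape slot per input name: setNames() resizes the new `shapes` vector and setInputShape() fills a slot exactly once. Through the public Net API this behaves roughly as in the hypothetical sketch below (input names and shapes are illustrative assumptions, `net` is any cv::dnn::Net, namespaces omitted):

    net.setInputsNames({"data", "rois"});                    // allocates two empty shape slots
    net.setInputShape("data", MatShape({1, 3, 224, 224}));   // fills slot 0
    net.setInputShape("data", MatShape({1, 3, 300, 300}));   // throws: "Input shape redefinition is not allowed"
    net.setInputShape("fc7", MatShape({1, 10}));             // throws: "Unknown input"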
@@ -784,6 +796,7 @@ struct DataLayer : public Layer
 #endif // HAVE_INF_ENGINE
 
     std::vector<String> outNames;
+    std::vector<MatShape> shapes;
     // Preprocessing parameters for each network's input.
     std::vector<double> scaleFactors;
     std::vector<Scalar> means;
@@ -2842,8 +2855,25 @@ struct Net::Impl
             }
             else
             {
-                inOutShapes[0].out.clear();
-                return;
+                const std::vector<MatShape>& inputShapes = netInputLayer->shapes;
+                bool none = true;
+                for (size_t i = 0; i < inputShapes.size(); i++)
+                {
+                    if (!inputShapes[i].empty())
+                    {
+                        none = false;
+                        break;
+                    }
+                }
+                if (none)
+                {
+                    inOutShapes[0].out.clear();
+                    return;
+                }
+                else
+                {
+                    inOutShapes[0].in = inputShapes;
+                }
             }
         }
 
@@ -3069,7 +3099,7 @@ Net Net::Impl::createNetworkFromModelOptimizer(InferenceEngine::CNNNetwork& ieNe
     // set empty input to determine input shapes
     for (int inp_id = 0; inp_id < inputsNames.size(); ++inp_id)
     {
-        cvNet.setInput(Mat(inp_shapes[inp_id], CV_32F), inputsNames[inp_id]);
+        cvNet.setInputShape(inputsNames[inp_id], inp_shapes[inp_id]);
     }
 
     Ptr<BackendNode> backendNode;
@@ -3494,6 +3524,13 @@ void Net::setInputsNames(const std::vector<String> &inputBlobNames)
     impl->netInputLayer->setNames(inputBlobNames);
 }
 
+void Net::setInputShape(const String &inputName, const MatShape& shape)
+{
+    CV_TRACE_FUNCTION();
+
+    impl->netInputLayer->setInputShape(inputName, shape);
+}
+
 void Net::setInput(InputArray blob, const String& name, double scalefactor, const Scalar& mean)
 {
     CV_TRACE_FUNCTION();
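With the Net::setInputShape() wrapper added above, an input's shape can be declared before any data is bound. A minimal end-to-end sketch, assuming a hypothetical ONNX model whose input is named "data" and takes a 1x3x224x224 blob:

    #include <opencv2/dnn.hpp>

    int main()
    {
        using namespace cv;
        using namespace cv::dnn;

        Net net = readNet("model.onnx");                        // hypothetical model file
        net.setInputShape("data", MatShape({1, 3, 224, 224}));  // declare the expected shape up front
        net.setInput(Mat(MatShape({1, 3, 224, 224}), CV_32F, Scalar(0)), "data");  // blob must be consistent with it
        Mat out = net.forward();
        return 0;
    }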
@@ -3506,6 +3543,33 @@ void Net::setInput(InputArray blob, const String& name, double scalefactor, cons
     if (!pin.valid())
         CV_Error(Error::StsObjectNotFound, "Requested blob \"" + name + "\" not found");
 
+    Mat blob_ = blob.getMat(); // can't use InputArray directly due MatExpr stuff
+    MatShape blobShape = shape(blob_);
+
+    if (pin.lid == 0)
+    {
+        CV_Assert(!impl->netInputLayer.empty());
+        const DataLayer& netInputLayer = *impl->netInputLayer.get();
+        if (!netInputLayer.shapes.empty())
+        {
+            CV_CheckLT(pin.oid, (int)netInputLayer.shapes.size(), "");
+            const MatShape& inputShapeLimitation = netInputLayer.shapes[pin.oid];
+            if (!inputShapeLimitation.empty())
+            {
+                CV_CheckEQ(inputShapeLimitation.size(), blobShape.size(), "");
+#if 0 // TODO: DNNTestNetwork.MobileNet_SSD_Caffe_Different_Width_Height/0
+                const size_t dims = inputShapeLimitation.size();
+                for (size_t dim = 0; dim < dims; dim++)
+                {
+                    if (dims >= 3 && dim == 0 && inputShapeLimitation[0] == 1)
+                        continue; // don't limit batch
+                    CV_CheckEQ(inputShapeLimitation[dim], blobShape[dim], "");
+                }
+#endif
+            }
+        }
+    }
+
     LayerData &ld = impl->layers[pin.lid];
     const int numInputs = std::max(pin.oid+1, (int)ld.requiredOutputs.size());
     ld.outputBlobs.resize(numInputs);
@@ -3515,17 +3579,11 @@ void Net::setInput(InputArray blob, const String& name, double scalefactor, cons
     impl->netInputLayer->means.resize(numInputs);
 
     MatShape prevShape = shape(impl->netInputLayer->inputsData[pin.oid]);
-    Mat blob_ = blob.getMat();
-    bool oldShape = prevShape == shape(blob_);
-    if (oldShape)
-    {
-        blob_.copyTo(impl->netInputLayer->inputsData[pin.oid]);
-    }
-    else
-    {
-        ld.outputBlobs[pin.oid] = blob_.clone();
-        impl->netInputLayer->inputsData[pin.oid] = ld.outputBlobs[pin.oid];
-    }
+    bool oldShape = prevShape == blobShape;
+
+    blob_.copyTo(impl->netInputLayer->inputsData[pin.oid]);
+    if (!oldShape)
+        ld.outputBlobs[pin.oid] = impl->netInputLayer->inputsData[pin.oid];
 
     if (!ld.outputBlobsWrappers[pin.oid].empty())
     {
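The checks added to Net::setInput() in the last two hunks compare the incoming blob against any shape previously registered for that input; currently only the dimension count is enforced (the per-dimension comparison stays under `#if 0`). Under the same illustrative assumptions as above, a blob of the wrong rank is rejected:

    net.setInputShape("data", MatShape({1, 3, 224, 224}));      // 4-d constraint
    net.setInput(Mat(MatShape({3, 224, 224}), CV_32F), "data"); // 3-d blob: CV_CheckEQ fails, cv::Exception is thrown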