Implement a naming scheme for ambiguous blobs: "layerName[.OutputName]".

The old Caffe-like blob naming scheme was removed.
pull/265/head
Vitaliy Lyudvichenko 10 years ago
parent 85ad43d325
commit 75e09fdc63
1. modules/dnn/include/opencv2/dnn/dnn.hpp (20 changed lines)
2. modules/dnn/src/caffe_importer.cpp (89 changed lines)
3. modules/dnn/src/dnn.cpp (413 changed lines)
4. modules/dnn/test/test_alexnet.cpp (2 changed lines)
5. modules/dnn/test/test_googlenet.cpp (2 changed lines)
6. modules/dnn/test/test_layers.cpp (7 changed lines)
7. modules/dnn/testdata/dnn/bvlc_alexnet.prototxt (32 changed lines)
8. modules/dnn/testdata/dnn/layers/lrn_channels.prototxt (2 changed lines)
9. modules/dnn/testdata/dnn/layers/lrn_spatial.prototxt (3 changed lines)
10. modules/dnn/testdata/dnn/layers/softmax.prototxt (2 changed lines)
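
Taken together, the API changes below replace name-list plumbing with pin aliases of the form "layerName[.outputName]". A minimal usage sketch follows (illustrative only, not part of the commit; the layer names and params are made up, only the Net calls come from the new API):

    // Hypothetical two-layer net wired with the new pin aliases.
    Net net;
    LayerParams convParams, reluParams;
    int conv = net.addLayer("conv1", "Convolution", convParams);
    int relu = net.addLayer("relu1", "ReLU", reluParams);

    net.setNetInputs(std::vector<String>(1, "data"));

    // "conv1" is shorthand for "conv1.0" (output index defaults to 0);
    // a leading dot (".data") addresses an output of the implicit net input layer (id 0).
    net.connect(".data", "conv1");
    net.connect("conv1", "relu1");      // same as net.connect(conv, 0, relu, 0)

    net.setBlob(".data", inputBlob);    // inputBlob: some previously prepared Blob
    net.forward();
    Blob out = net.getBlob("relu1");    // output #0 of relu1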

modules/dnn/include/opencv2/dnn/dnn.hpp

@@ -24,7 +24,7 @@ namespace dnn
     class CV_EXPORTS Layer
     {
     public:
-        //TODO: this field must be declared as public if we want support possibility to change these params in runtime
+        //learned params of layer must be stored here to allow externally read them
         std::vector<Blob> learnedParams;

         virtual ~Layer();
@@ -34,11 +34,9 @@ namespace dnn
         virtual void forward(std::vector<Blob*> &inputs, std::vector<Blob> &outputs) = 0;

-        virtual int getNumInputs();
-        virtual int getNumOutputs();
-
         //each input/output can be labeled to easily identify their using "layer_name.output_name"
-        virtual String getInputName(int inputNum);
-        virtual String getOutputName(int outputNum);
+        virtual int inputNameToIndex(String inputName);
+        virtual int outputNameToIndex(String outputName);
     };

     //containers for String and int
@@ -56,14 +54,10 @@ namespace dnn
         int getLayerId(LayerId layer);

         void deleteLayer(LayerId layer);

-        //each output of each layer can be labeled by unique string label (as in Caffe)
-        //if label not specified then %layer_name%.%layer_output_id% can be used
-        void setOutputNames(LayerId layer, const std::vector<String> &outputNames);
-        void setLayerInputs(const std::vector<String> &outputs, LayerId layer);
         void setNetInputs(const std::vector<String> &inputBlobNames);
-        void connect(BlobId input, BlobId output);
-        void connect(const std::vector<BlobId> &outputs, const std::vector<BlobId> &inputs);
+        void connect(String outPin, String inpPin);
+        void connect(int outLayerId, int outNum, int inLayerId, int inNum);

         void forward();
         void forward(LayerId toLayer);
@@ -74,8 +68,8 @@ namespace dnn
         void forwardOpt(LayerId toLayer);
         void forwardOpt(const std::vector<LayerId> &toLayers);

-        void setBlob(BlobId outputName, const Blob &blob);
-        Blob getBlob(BlobId outputName);
+        void setBlob(String outputName, const Blob &blob);
+        Blob getBlob(String outputName);

         void setParam(LayerId layer, int numParam, const Blob &blob);
         Blob getParam(LayerId layer, int numParam = 0);

modules/dnn/src/caffe_importer.cpp

@@ -91,11 +91,9 @@ namespace
                 params.set(name, GET_FIRST(Bool));
                 break;
             default:
-                CV_Error(cv::Error::StsError, "Unknown type \"" + String(field->type_name()) + "\" in prototxt");
+                CV_Error(Error::StsError, "Unknown type \"" + String(field->type_name()) + "\" in prototxt");
                 break;
             }
-
-            std::cout << std::endl;
         }

         void extractLayerParams(const Message &msg, cv::dnn::LayerParams &params)
@@ -146,7 +144,7 @@ namespace
             }
             else
             {
-                CV_Error(cv::Error::StsAssert, "Unknown shape of input blob");
+                CV_Error(Error::StsError, "Unknown shape of input blob");
                 return BlobShape(-1);
             }
         }
@@ -187,53 +185,100 @@ namespace
             }
         }

+        struct BlobNote
+        {
+            BlobNote(const std::string &_name, int _layerId, int _outNum) :
+                name(_name), layerId(_layerId), outNum(_outNum) {}
+
+            const std::string &name;
+            int layerId, outNum;
+        };
+
         void populateNet(Net dstNet)
         {
+            int layersSize = net.layer_size();
+
+            std::vector<BlobNote> addedBlobs;
+            addedBlobs.reserve(layersSize + 1);
+
             //setup input layer names
             {
                 std::vector<String> netInputs(net.input_size());
-                for (int ii = 0; ii < net.input_size(); ii++)
-                    netInputs[ii] = net.input(ii);
+                for (int inNum = 0; inNum < net.input_size(); inNum++)
+                {
+                    addedBlobs.push_back(BlobNote(net.input(inNum), 0, inNum));
+                    netInputs[inNum] = net.input(inNum);
+                }
                 dstNet.setNetInputs(netInputs);
             }

-            int layersSize = net.layer_size();
-            std::vector<String> layersName(layersSize);
-            std::vector<int> layersId(layersSize);
-            std::vector<std::vector<String> > bottomsVec(layersSize);
-
             for (int li = 0; li < layersSize; li++)
             {
-                const caffe::LayerParameter layer = net.layer(li);
+                const caffe::LayerParameter &layer = net.layer(li);
                 String name = layer.name();
                 String type = layer.type();
                 LayerParams layerParams;

-                std::vector<String> tops;
-                tops.assign(layer.top().begin(), layer.top().end());
-                bottomsVec[li].assign(layer.bottom().begin(), layer.bottom().end());
-
                 extractLayerParams(layer, layerParams);
                 extractBinaryLayerParms(layer, layerParams);

                 int id = dstNet.addLayer(name, type, layerParams);
-                dstNet.setOutputNames(id, tops);

-                layersName[li] = name;
-                layersId[li] = id;
-            }
-
-            for (int li = 0; li < layersSize; li++)
-            {
-                dstNet.setLayerInputs(bottomsVec[li], layersId[li]);
+                for (int inNum = 0; inNum < layer.bottom_size(); inNum++)
+                    addInput(layer.bottom(inNum), id, inNum, dstNet, addedBlobs);
+
+                for (int outNum = 0; outNum < layer.top_size(); outNum++)
+                    addOutput(layer, id, outNum, dstNet, addedBlobs);
             }
         }
+
+        void addOutput(const caffe::LayerParameter &layer, int layerId, int outNum, Net &dstNet, std::vector<BlobNote> &addedBlobs)
+        {
+            const std::string &name = layer.top(outNum);
+
+            bool haveDups = false;
+            for (int idx = (int)addedBlobs.size() - 1; idx >= 0; idx--)
+            {
+                if (addedBlobs[idx].name == name)
+                {
+                    haveDups = true;
+                    break;
+                }
+            }
+
+            if (haveDups)
+            {
+                bool isInplace = layer.bottom_size() > outNum && layer.bottom(outNum) == name;
+                if (!isInplace)
+                    CV_Error(Error::StsBadArg, "Duplicate blobs produced by multiple sources");
+            }
+
+            addedBlobs.push_back(BlobNote(name, layerId, outNum));
+        }
+
+        void addInput(const std::string &name, int layerId, int inNum, Net &dstNet, std::vector<BlobNote> &addedBlobs)
+        {
+            int idx;
+            for (idx = (int)addedBlobs.size() - 1; idx >= 0; idx--)
+            {
+                if (addedBlobs[idx].name == name)
+                    break;
+            }
+
+            if (idx < 0)
+            {
+                CV_Error(Error::StsError, "Can't found output blob \"" + name + "\"");
+                return;
+            }
+
+            dstNet.connect(addedBlobs[idx].layerId, addedBlobs[idx].outNum, layerId, inNum);
+        }

         ~CaffeImporter()
         {
         }
     };
 }
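
Two details of the importer above are worth spelling out: duplicate blob names are tolerated only for Caffe's in-place layers (top == bottom, as in the updated prototxt files further down), and addInput scans addedBlobs backwards, so a bottom name binds to its most recent producer. A self-contained sketch of that resolution rule (simplified bookkeeping, hypothetical layer ids, not code from the commit):

    #include <iostream>
    #include <string>
    #include <utility>
    #include <vector>

    // Simplified stand-in for the importer's addedBlobs bookkeeping:
    // (blob name, id of the layer that produced it), in creation order.
    typedef std::vector<std::pair<std::string, int> > BlobLog;

    // Mirrors addInput's reverse scan: the latest writer of a name wins.
    int findProducer(const BlobLog &log, const std::string &name)
    {
        for (int i = (int)log.size() - 1; i >= 0; i--)
            if (log[i].first == name)
                return log[i].second;
        return -1;
    }

    int main()
    {
        BlobLog log;
        log.push_back(std::make_pair("conv1", 1)); // conv1 produces "conv1"
        log.push_back(std::make_pair("conv1", 2)); // relu1 rewrites "conv1" in place

        // norm1's bottom "conv1" must connect to relu1 (id 2), not conv1 (id 1).
        std::cout << findProducer(log, "conv1") << std::endl; // prints 2
        return 0;
    }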

modules/dnn/src/dnn.cpp

@@ -2,6 +2,7 @@
 #include <set>
 #include <algorithm>
 #include <iostream>
+#include <sstream>

 using namespace cv;
 using namespace cv::dnn;
@@ -16,38 +17,45 @@ namespace cv
 namespace dnn
 {

-struct LayerOutId
+template<typename T>
+String toString(const T &v)
+{
+    std::ostringstream ss;
+    ss << v;
+    return ss.str();
+}
+
+struct LayerPin
 {
     int lid;
     int oid;

-    LayerOutId() {}
-    LayerOutId(int layerId, int outputId)
+    LayerPin(int layerId = -1, int outputId = -1)
         : lid(layerId), oid(outputId) {}
+
+    bool valid() const
+    {
+        return (lid >= 0 && oid >= 0);
+    }
+
+    bool equal(const LayerPin &r) const
+    {
+        return (lid == r.lid && oid == r.oid);
+    }
 };

 struct LayerData
 {
     LayerData() {}
-    LayerData(const String &_name, const String &_type, LayerParams &_params)
-        : name(_name), type(_type), params(_params) {}
+    LayerData(int _id, const String &_name, const String &_type, LayerParams &_params)
+        : id(_id), name(_name), type(_type), params(_params) {}

+    int id;
     String name;
     String type;
     LayerParams params;

-    std::vector<String> outputNames;
-    std::vector<String> inputNames;
-
-    bool hasNamedOutput(const String &name)
-    {
-        return std::find(outputNames.begin(), outputNames.end(), name) != outputNames.end();
-    }
-
-    bool hasNemedInput(const String &name)
-    {
-        return std::find(inputNames.begin(), inputNames.end(), name) != inputNames.end();
-    }
-
-    std::vector<LayerOutId> inputBlobsId;
+    std::vector<LayerPin> inputBlobsId;
     std::set<int> inputLayersId;
     std::set<int> requiredOutputs;
@@ -56,23 +64,64 @@ struct LayerData
     std::vector<Blob*> inputBlobs;

     int flag;
+
+    Ptr<Layer> getLayerInstance()
+    {
+        if (layerInstance)
+            return layerInstance;
+
+        layerInstance = LayerRegister::createLayerInstance(type, params);
+        if (!layerInstance)
+        {
+            CV_Error(Error::StsError, "Can't create layer \"" + name + "\" of type \"" + type + "\"");
+        }
+
+        return layerInstance;
+    }
+};
+
+//fake layer containing network input blobs
+struct NetInputLayer : public Layer
+{
+    void allocate(const std::vector<Blob*>&, std::vector<Blob>&) {}
+    void forward(std::vector<Blob*>&, std::vector<Blob>&) {}
+
+    int outputNameToIndex(String tgtName)
+    {
+        int idx = (int)(std::find(outNames.begin(), outNames.end(), tgtName) - outNames.begin());
+        return (idx < (int)outNames.size()) ? idx : -1;
+    }
+
+    void setNames(const std::vector<String> &names)
+    {
+        outNames.assign(names.begin(), names.end());
+    }
+
+private:
+    std::vector<String> outNames;
 };

 struct Net::Impl
 {
     Impl()
     {
-        LayerParams paramsEmpty;
-        layers.insert(make_pair(0, LayerData("_input", "_noType", paramsEmpty)));
+        //allocate fake net input layer
+        netInputLayer = Ptr<NetInputLayer>(new NetInputLayer());
+        LayerData &inpl = layers.insert( make_pair(0, LayerData()) ).first->second;
+        inpl.id = 0;
+        inpl.name = "_input";
+        inpl.type = "__NetInputLayer__";
+        inpl.layerInstance = netInputLayer;
+
         lastLayerId = 1;
         netWasAllocated = false;
     }

+    Ptr<NetInputLayer> netInputLayer;
     std::vector<int> netOutputs;

     typedef std::map<int, LayerData> MapIdToLayerData;
     std::map<int, LayerData> layers;
     std::map<String, int> layerNameToId;

     int lastLayerId;
@@ -83,9 +132,8 @@ struct Net::Impl
     {
         if (!netWasAllocated)
         {
-            connectInputs();
             allocateLayers();
-            computeNetOutputs();
+            computeNetOutputLayers();

             netWasAllocated = true;
         }
@@ -97,124 +145,130 @@ struct Net::Impl
         return (it != layerNameToId.end()) ? it->second : -1;
     }

-    int getLayerId(const DictValue &v)
-    {
-        if (v.isString())
-            return getLayerId(v.get<String>());
-        else if (v.isInt())
-            return v.get<int>();
-        else
-        {
-            CV_Assert(v.isString() || v.isInt());
-            return -1;
-        }
-    }
+    int getLayerId(int id)
+    {
+        MapIdToLayerData::iterator it = layers.find(id);
+        return (it != layers.end()) ? id : -1;
+    }
+
+    int getLayerId(DictValue &layerDesc)
+    {
+        if (layerDesc.isInt())
+            return getLayerId(layerDesc.get<int>());
+        else if (layerDesc.isString())
+            return getLayerId(layerDesc.get<String>());
+
+        CV_Assert(layerDesc.isInt() || layerDesc.isString());
+        return -1;
+    }

-    LayerData& getLayerData(const DictValue &v)
-    {
-        int id = getLayerId(v);
-        std::map<int, LayerData>::iterator it = layers.find(id);
-        CV_Assert(id >= 0 && it != layers.end());
-        return it->second;
-    }
+    String getLayerName(int id)
+    {
+        MapIdToLayerData::iterator it = layers.find(id);
+        return (it != layers.end()) ? it->second.name : "(unknown layer)";
+    }
+
+    LayerData& getLayerData(int id)
+    {
+        MapIdToLayerData::iterator it = layers.find(id);
+
+        if (it == layers.end())
+            CV_Error(Error::StsError, "Layer with requested id=" + toString(id) + " not found");
+
+        return it->second;
+    }
+
+    LayerData& getLayerData(const String &layerName)
+    {
+        int id = getLayerId(layerName);
+
+        if (id < 0)
+            CV_Error(Error::StsError, "Requsted layer \"" + layerName + "\" not found");
+
+        return getLayerData(id);
+    }
+
+    LayerData& getLayerData(const DictValue &layerDesc)
+    {
+        if (layerDesc.isInt())
+            return getLayerData(layerDesc.get<int>());
+        else if (layerDesc.isString())
+            return getLayerData(layerDesc.get<String>());
+
+        CV_Assert(layerDesc.isInt() || layerDesc.isString());
+        return *((LayerData*)NULL);
+    }

-    int findOutputsByName(const String &name, LayerOutId *found, int maxCount = 1)
-    {
-        int count = 0;
-
-        MapIdToLayerData::iterator it;
-        for (it = layers.begin(); it != layers.end() && count < maxCount; it++)
-        {
-            int lid = it->first;
-            LayerData &ld = it->second;
-
-            for (size_t oi = 0; oi < ld.outputNames.size() && count < maxCount; oi++)
-            {
-                if (ld.outputNames[oi] == name)
-                    found[count++] = LayerOutId(lid, (int)oi);
-            }
-        }
-
-        return count;
-    }
-
-    void connectInputs()
-    {
-        LayerOutId foundOutputs[3], out;
-
-        MapIdToLayerData::iterator it;
-        for (it = layers.begin(); it != layers.end(); it++)
-        {
-            LayerData &ld = it->second;
-
-            ld.inputBlobs.resize(ld.inputNames.size());
-            ld.inputBlobsId.resize(ld.inputNames.size());
-            ld.inputLayersId.clear();
-
-            for (size_t ii = 0; ii < ld.inputNames.size(); ii++)
-            {
-                const String &tgtName = ld.inputNames[ii];
-
-                int foundCount = findOutputsByName(tgtName, foundOutputs, 3);
-
-                if (foundCount > 2)
-                {
-                    CV_Error(cv::Error::StsNotImplemented, "Two or more non-inplace blobs have the same name \"" + tgtName + "\"");
-                }
-                else if (foundCount == 2)
-                {
-                    bool inPlace[2];
-                    inPlace[0] = layers[ foundOutputs[0].lid ].hasNemedInput(tgtName);
-                    inPlace[1] = layers[ foundOutputs[1].lid ].hasNemedInput(tgtName);
-
-                    if (!inPlace[0] && !inPlace[1])
-                    {
-                        CV_Error(cv::Error::StsNotImplemented, "Two or more non-inplace blobs have the same name \"" + tgtName + "\"");
-                    }
-                    else if (inPlace[0] && inPlace[1])
-                    {
-                        CV_Error(cv::Error::StsNotImplemented, "Two or more blobs has same in-place blob \"" + tgtName + "\"");
-                    }
-                    else
-                    {
-                        if (ld.hasNamedOutput(tgtName))
-                            out = (inPlace[0]) ? foundOutputs[1] : foundOutputs[0];
-                        else
-                            out = (inPlace[0]) ? foundOutputs[0] : foundOutputs[1];
-                    }
-                }
-                else if (foundCount == 0)
-                {
-                    CV_Error(cv::Error::StsBadArg, "Can't find specified input blob \"" + tgtName + "\" for layer \"" + ld.name + "\"");
-                    continue;
-                }
-                else
-                {
-                    out = foundOutputs[0];
-                }
-
-                ld.inputBlobsId[ii] = out;
-                ld.inputLayersId.insert(out.lid);
-                layers[out.lid].requiredOutputs.insert(out.oid);
-            }
-        }
-
-        for (it = layers.begin(); it != layers.end(); it++)
-        {
-            LayerData& ld = it->second;
-
-            std::cout << "Layer \"" << ld.name << "\"" << std::endl;
-            if (ld.inputBlobsId.size() > 0)
-            {
-                std::cout << "Connected to:" << std::endl;
-                for (std::set<int>::iterator j = ld.inputLayersId.begin(); j != ld.inputLayersId.end(); j++)
-                    std::cout << layers[*j].name << std::endl;
-            }
-            std::cout << std::endl;
-        }
-    }
+    static void addLayerInput(LayerData &ld, int inNum, LayerPin from)
+    {
+        if ((int)ld.inputBlobsId.size() <= inNum)
+        {
+            ld.inputBlobsId.resize(inNum + 1);
+        }
+        else
+        {
+            LayerPin storedFrom = ld.inputBlobsId[inNum];
+            if (storedFrom.valid() && !storedFrom.equal(from))
+                CV_Error(Error::StsError, "Input #" + toString(inNum) + "of layer \"" + ld.name + "\" already was connected");
+        }
+
+        ld.inputBlobsId[inNum] = from;
+    }
+
+    static void splitPin(const String &pinAlias, String &layerName, String &outName)
+    {
+        size_t delimPos = pinAlias.find('.');
+        layerName = pinAlias.substr(0, delimPos);
+        outName = (delimPos == String::npos) ? String() : pinAlias.substr(delimPos + 1);
+    }
+
+    int resolvePinOutputName(LayerData &ld, const String &outName, bool isOutPin)
+    {
+        if (outName.empty())
+            return 0;
+
+        if (std::isdigit(outName[0]))
+        {
+            char *lastChar;
+            long inum = std::strtol(outName.c_str(), &lastChar, 10);
+
+            if (*lastChar == 0)
+            {
+                CV_Assert(inum == (int)inum);
+                return (int)inum;
+            }
+        }
+
+        if (isOutPin)
+            return ld.getLayerInstance()->outputNameToIndex(outName);
+        else
+            return ld.getLayerInstance()->inputNameToIndex(outName);
+    }
+
+    LayerPin getPinByAlias(const String &pinAlias, bool isOutPin = true)
+    {
+        LayerPin pin;
+        String layerName, outName;
+        splitPin(pinAlias, layerName, outName);
+
+        pin.lid = (layerName.empty()) ? 0 : getLayerId(layerName);
+
+        if (pin.lid >= 0)
+            pin.oid = resolvePinOutputName(getLayerData(pin.lid), outName, isOutPin);
+
+        return pin;
+    }
+
+    void connect(int outLayerId, int outNum, int inLayerId, int inNum)
+    {
+        LayerData &ldOut = getLayerData(outLayerId);
+        LayerData &ldInp = getLayerData(inLayerId);
+
+        addLayerInput(ldInp, inNum, LayerPin(outLayerId, outNum));
+        ldOut.requiredOutputs.insert(outNum);
+    }

-    void computeNetOutputs()
+    void computeNetOutputLayers()
     {
         netOutputs.clear();
@@ -245,29 +299,18 @@ struct Net::Impl
         for (set<int>::iterator i = ld.inputLayersId.begin(); i != ld.inputLayersId.end(); i++)
             allocateLayer(*i);

-        //create instance
-        if (ld.layerInstance == NULL && lid != 0)
-        {
-            ld.layerInstance = LayerRegister::createLayerInstance(ld.type, ld.params);
-            if (ld.layerInstance == NULL)
-            {
-                std::cerr << "Can't create layer \"" << ld.name << "\" of type \"" << ld.type << "\"" << std::endl;
-            }
-        }
-
         //bind inputs
         ld.inputBlobs.resize(ld.inputBlobsId.size());
         for (size_t i = 0; i < ld.inputBlobsId.size(); i++)
         {
-            int srcLId = ld.inputBlobsId[i].lid;
-            int srcOId = ld.inputBlobsId[i].oid;
-            ld.inputBlobs[i] = &layers[srcLId].outputBlobs[srcOId];
+            LayerPin from = ld.inputBlobsId[i];
+            CV_Assert(from.valid());
+            ld.inputBlobs[i] = &layers[from.lid].outputBlobs[from.oid];
         }

         //allocate layer
-        ld.outputBlobs.resize(ld.outputNames.size());
-        if (ld.layerInstance)
-            ld.layerInstance->allocate(ld.inputBlobs, ld.outputBlobs);
+        ld.outputBlobs.resize(ld.requiredOutputs.size());
+        ld.getLayerInstance()->allocate(ld.inputBlobs, ld.outputBlobs);

         ld.flag = 1;
     }
@@ -285,7 +328,7 @@ struct Net::Impl
         }
     }

-    void forwardLayer(int layerId, bool clearFlags = true)
+    void forwardLayer(LayerData &ld, bool clearFlags = true)
     {
         if (clearFlags)
         {
@@ -294,8 +337,6 @@ struct Net::Impl
                 it->second.flag = 0;
         }

-        LayerData &ld = layers[layerId];
-
         //already was forwarded
         if (ld.flag)
             return;
@@ -303,15 +344,11 @@ struct Net::Impl
         //forward parents
         for (set<int>::iterator i = ld.inputLayersId.begin(); i != ld.inputLayersId.end(); i++)
         {
-            forwardLayer(*i, false);
+            forwardLayer(layers[*i], false);
         }

         //forward itself
-        if (ld.layerInstance && layerId != 0)
-        {
-            //std::cout << ld.name << " shape:" << ld.outputBlobs[0].shape4() << std::endl;
-            ld.layerInstance->forward(ld.inputBlobs, ld.outputBlobs);
-        }
+        ld.layerInstance->forward(ld.inputBlobs, ld.outputBlobs);

         ld.flag = 1;
     }
@@ -323,7 +360,7 @@ struct Net::Impl
             it->second.flag = 0;

         for (it = layers.begin(); it != layers.end(); it++)
-            forwardLayer(it->first, false);
+            forwardLayer(it->second, false);
     }
 };
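
For reference, the alias grammar implemented by splitPin and resolvePinOutputName above can be restated in isolation. This sketch mirrors the committed parsing logic (the demo strings are invented):

    #include <iostream>
    #include <string>

    // Mirrors Net::Impl::splitPin. An empty layer name selects the fake
    // net input layer (id 0); an empty output name resolves to index 0;
    // a purely numeric output name is used as the index directly.
    static void splitPinDemo(const std::string &pinAlias)
    {
        size_t delimPos = pinAlias.find('.');
        std::string layerName = pinAlias.substr(0, delimPos);
        std::string outName = (delimPos == std::string::npos)
                              ? std::string() : pinAlias.substr(delimPos + 1);
        std::cout << "\"" << pinAlias << "\" -> layer=\"" << layerName
                  << "\" out=\"" << outName << "\"" << std::endl;
    }

    int main()
    {
        splitPinDemo("conv1");   // layer="conv1", out=""     -> output #0 of conv1
        splitPinDemo("conv1.1"); // layer="conv1", out="1"    -> output #1 of conv1
        splitPinDemo(".data");   // layer="",      out="data" -> input layer's "data"
        return 0;
    }
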
@@ -339,35 +376,38 @@ Net::~Net()

 int Net::addLayer(const String &name, const String &type, LayerParams &params)
 {
+    if (name.find('.') != String::npos)
+    {
+        CV_Error(Error::StsBadArg, "Added layer name \"" + name + "\" should not contain dot symbol");
+        return -1;
+    }
+
     if (impl->getLayerId(name) >= 0)
     {
-        CV_Error(cv::Error::StsBadArg, "Layer \"" + name + "\" already into net");
+        CV_Error(Error::StsBadArg, "Layer \"" + name + "\" already into net");
         return -1;
     }

     int id = ++impl->lastLayerId;
     impl->layerNameToId.insert(std::make_pair(name, id));
-    impl->layers.insert(std::make_pair(id, LayerData(name, type, params)));
+    impl->layers.insert(std::make_pair(id, LayerData(id, name, type, params)));

     return id;
 }

-//void Net::connect(BlobId input, BlobId output)
-//{
-//}
-
-void Net::setOutputNames(LayerId layer, const std::vector<String> &outputNames)
+void Net::connect(int outLayerId, int outNum, int inLayerId, int inNum)
 {
-    LayerData &ld = impl->getLayerData(layer);
-    CV_Assert(ld.outputNames.size() == 0);
-    ld.outputNames.assign(outputNames.begin(), outputNames.end());
+    impl->connect(outLayerId, outNum, inLayerId, inNum);
 }

-void Net::setLayerInputs(const std::vector<String> &outputs, LayerId layer)
+void Net::connect(String _outPin, String _inPin)
 {
-    LayerData &ld = impl->getLayerData(layer);
-    ld.inputNames.assign(outputs.begin(), outputs.end());
+    LayerPin outPin = impl->getPinByAlias(_outPin);
+    LayerPin inpPin = impl->getPinByAlias(_inPin);
+
+    CV_Assert(outPin.valid() && inpPin.valid());
+
+    impl->connect(outPin.lid, outPin.oid, inpPin.lid, inpPin.oid);
 }

 void Net::forward()
@@ -379,37 +419,34 @@ void Net::forward()
 void Net::forward(LayerId toLayer)
 {
     impl->setUpNet();
-    impl->forwardLayer(impl->getLayerId(toLayer));
+    impl->forwardLayer(impl->getLayerData(toLayer));
 }

 void Net::setNetInputs(const std::vector<String> &inputBlobNames)
 {
-    setOutputNames(0, inputBlobNames);
+    impl->netInputLayer->setNames(inputBlobNames);
 }

-void Net::setBlob(BlobId outputName, const Blob &blob)
+void Net::setBlob(String outputName, const Blob &blob)
 {
-    String name = outputName.get<String>();
-    LayerOutId found;
-
-    if (!impl->findOutputsByName(name, &found, 1))
-        CV_Error(cv::Error::StsObjectNotFound, "Request blob \"" + name + "\" not found");
+    LayerPin pin = impl->getPinByAlias(outputName);
+    if (!pin.valid())
+        CV_Error(Error::StsObjectNotFound, "Request blob \"" + outputName + "\" not found");

-    LayerData &ld = impl->layers[found.lid];
-    ld.outputBlobs.resize(ld.outputNames.size());
-    ld.outputBlobs[found.oid] = blob;
+    LayerData &ld = impl->layers[pin.lid];
+    ld.outputBlobs.resize( std::max(pin.oid+1, (int)ld.requiredOutputs.size()) );
+    ld.outputBlobs[pin.oid] = blob;
 }

-Blob Net::getBlob(BlobId outputName)
+Blob Net::getBlob(String outputName)
 {
-    String name = outputName.get<String>();
-    LayerOutId found;
-
-    if (!impl->findOutputsByName(name, &found, 1))
-        CV_Error(cv::Error::StsObjectNotFound, "Request blob \"" + name + "\" not found");
-
-    LayerData &ld = impl->layers[found.lid];
-    return ld.outputBlobs[found.oid];
+    LayerPin pin = impl->getPinByAlias(outputName);
+    if (!pin.valid())
+        CV_Error(Error::StsObjectNotFound, "Request blob \"" + outputName + "\" not found");
+
+    LayerData &ld = impl->layers[pin.lid];
+    CV_Assert(pin.oid < (int)ld.outputBlobs.size());
+    return ld.outputBlobs[pin.oid];
 }

 Blob Net::getParam(LayerId layer, int numParam)
@@ -426,36 +463,14 @@ Importer::~Importer()
 }

 //////////////////////////////////////////////////////////////////////////

-#include <sstream>
-template<typename T>
-String toString(const T &v)
-{
-    std::stringstream ss;
-    ss << v;
-    return ss.str();
-}
-
-int Layer::getNumInputs()
-{
-    return 1;
-}
-
-int Layer::getNumOutputs()
-{
-    return 1;
-}
-
-cv::String Layer::getInputName(int inputNum)
+int Layer::inputNameToIndex(String inputName)
 {
-    return "input" + toString(inputNum);
+    return -1;
 }

-cv::String Layer::getOutputName(int outputNum)
+int Layer::outputNameToIndex(String outputName)
 {
-    return "output" + toString(outputNum);
+    return -1;
 }

 Layer::~Layer()

modules/dnn/test/test_alexnet.cpp

@@ -29,7 +29,7 @@ TEST(Reproducibility_AlexNet, Accuracy)
     inpMats.push_back( imread(getTestFile("alexnet_1.png")) );
     ASSERT_TRUE(!inpMats[0].empty() && !inpMats[1].empty());

-    net.setBlob("data", Blob(inpMats));
+    net.setBlob(".data", Blob(inpMats));
     net.forward();

     Blob out = net.getBlob("prob");

modules/dnn/test/test_googlenet.cpp

@@ -30,7 +30,7 @@ TEST(Reproducibility_GoogLeNet, Accuracy)
     ASSERT_TRUE(!inpMats[0].empty() && !inpMats[1].empty());

     Blob inp(inpMats);
-    net.setBlob("data", inp);
+    net.setBlob(".data", inp);
     net.forward();

     Blob out = net.getBlob("prob");

modules/dnn/test/test_layers.cpp

@@ -44,17 +44,16 @@ static void testLayer(String proto, String caffemodel = String())
         importer->populateNet(net);
     }

-    net.setBlob("input", inp);
+    net.setBlob(".input", inp);
     net.forward();
     Blob out = net.getBlob("output");

-    EXPECT_TRUE(isEqual(ref.shape4(), out.shape4()));
+    EXPECT_EQ(ref.shape(), out.shape());

     Mat &mRef = ref.getMatRef();
     Mat &mOut = out.getMatRef();

-    size_t N = ref.total();
-    double normL1 = cvtest::norm(mRef, mOut, NORM_L1)/N;
+    double normL1 = cvtest::norm(mRef, mOut, NORM_L1) / ref.total();
     EXPECT_LE(normL1, 0.0001);

     double normInf = cvtest::norm(mRef, mOut, NORM_INF);

modules/dnn/testdata/dnn/bvlc_alexnet.prototxt

@@ -27,12 +27,12 @@ layer {
   name: "relu1"
   type: "ReLU"
   bottom: "conv1"
-  top: "relu1"
+  top: "conv1"
 }
 layer {
   name: "norm1"
   type: "LRN"
-  bottom: "relu1"
+  bottom: "conv1"
   top: "norm1"
   lrn_param {
     local_size: 5
@@ -75,12 +75,12 @@ layer {
   name: "relu2"
   type: "ReLU"
   bottom: "conv2"
-  top: "relu2"
+  top: "conv2"
 }
 layer {
   name: "norm2"
   type: "LRN"
-  bottom: "relu2"
+  bottom: "conv2"
   top: "norm2"
   lrn_param {
     local_size: 5
@@ -191,7 +191,7 @@ layer {
   name: "fc6"
   type: "InnerProduct"
   bottom: "pool5"
-  top: "fc6_0"
+  top: "fc6"
   param {
     lr_mult: 1
     decay_mult: 1
@@ -207,14 +207,14 @@ layer {
 layer {
   name: "relu6"
   type: "ReLU"
-  bottom: "fc6_0"
-  top: "fc6_1"
+  bottom: "fc6"
+  top: "fc6"
 }
 layer {
   name: "drop6"
   type: "Dropout"
-  bottom: "fc6_1"
-  top: "fc6_2"
+  bottom: "fc6"
+  top: "fc6"
   dropout_param {
     dropout_ratio: 0.5
   }
@@ -222,8 +222,8 @@ layer {
 layer {
   name: "fc7"
   type: "InnerProduct"
-  bottom: "fc6_2"
-  top: "fc7_0"
+  bottom: "fc6"
+  top: "fc7"
   param {
     lr_mult: 1
     decay_mult: 1
@@ -239,14 +239,14 @@ layer {
 layer {
   name: "relu7"
   type: "ReLU"
-  bottom: "fc7_0"
-  top: "fc7_1"
+  bottom: "fc7"
+  top: "fc7"
 }
 layer {
   name: "drop7"
   type: "Dropout"
-  bottom: "fc7_1"
-  top: "fc7_2"
+  bottom: "fc7"
+  top: "fc7"
   dropout_param {
     dropout_ratio: 0.5
   }
@@ -254,7 +254,7 @@ layer {
 layer {
   name: "fc8"
   type: "InnerProduct"
-  bottom: "fc7_2"
+  bottom: "fc7"
   top: "fc8"
   param {
     lr_mult: 1

modules/dnn/testdata/dnn/layers/lrn_channels.prototxt

@@ -7,7 +7,6 @@ input_dim: 75
 input_dim: 113

 layer {
-  name: "norm1"
   type: "LRN"
   lrn_param {
     norm_region: ACROSS_CHANNELS;
@@ -16,6 +15,7 @@ layer {
     beta: 0.75
   }

+  name: "output"
   bottom: "input"
   top: "output"
 }

modules/dnn/testdata/dnn/layers/lrn_spatial.prototxt

@@ -7,8 +7,8 @@ input_dim: 75
 input_dim: 113

 layer {
-  name: "norm1"
   type: "LRN"
   lrn_param {
     norm_region: WITHIN_CHANNEL;
     local_size: 5
@@ -16,6 +16,7 @@ layer {
     beta: 0.75
   }

+  name: "output"
   bottom: "input"
   top: "output"
 }

modules/dnn/testdata/dnn/layers/softmax.prototxt

@@ -7,9 +7,9 @@ input_dim: 75
 input_dim: 113

 layer {
-  name: "Softmax"
   type: "Softmax"
+  name: "output"
   bottom: "input"
   top: "output"
 }